hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
aba1bb54577caa2267c63476b46b9eefcad1d9d2
2,058
// Copyright 2020 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use common_datavalues::DataSchema; use common_datavalues::DataSchemaRef; use common_metatypes::MetaId; use common_metatypes::MetaVersion; use crate::Extras; use crate::Partitions; use crate::ScanPlan; use crate::Statistics; // TODO: Delete the scan plan field, but it depends on plan_parser:L394 #[derive(serde::Serialize, serde::Deserialize, Clone, Debug, PartialEq)] pub struct ReadDataSourcePlan { pub db: String, pub table: String, pub table_id: MetaId, pub table_version: Option<MetaVersion>, pub schema: DataSchemaRef, pub parts: Partitions, pub statistics: Statistics, pub description: String, pub scan_plan: Arc<ScanPlan>, pub remote: bool, } impl ReadDataSourcePlan { pub fn empty(table_id: u64, table_version: Option<u64>) -> ReadDataSourcePlan { ReadDataSourcePlan { db: "".to_string(), table: "".to_string(), table_id, table_version, schema: Arc::from(DataSchema::empty()), parts: vec![], statistics: Statistics::default(), description: "".to_string(), scan_plan: Arc::new(ScanPlan::with_table_id(table_id, table_version)), remote: false, } } pub fn schema(&self) -> DataSchemaRef { self.schema.clone() } /// Get the push downs. pub fn get_push_downs(&self) -> Extras { self.scan_plan.push_downs.clone() } }
30.716418
83
0.671526
e95c606d73e72a26a5a47f754339642659c8fe30
6,384
use crate::core::algebra::{Vector2, Vector3}; use crate::{ core::pool::Handle, resource::{ fbx::scene, fbx::{ document::{FbxNode, FbxNodeContainer}, error::FbxError, scene::{FbxComponent, FbxContainer, FbxScene}, }, }, scene::mesh::surface::{VertexWeight, VertexWeightSet}, }; pub struct FbxGeometry { // Only vertices and indices are required. pub vertices: Vec<Vector3<f32>>, pub indices: Vec<i32>, // Normals, UVs, etc. are optional. pub normals: Option<FbxContainer<Vector3<f32>>>, pub uvs: Option<FbxContainer<Vector2<f32>>>, pub materials: Option<FbxContainer<i32>>, pub tangents: Option<FbxContainer<Vector3<f32>>>, pub binormals: Option<FbxContainer<Vector3<f32>>>, pub deformers: Vec<Handle<FbxComponent>>, } fn read_vertices( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Vec<Vector3<f32>>, FbxError> { let vertices_node_handle = nodes.find(geom_node_handle, "Vertices")?; let vertices_array_node = nodes.get_by_name(vertices_node_handle, "a")?; let mut vertices = Vec::with_capacity(vertices_array_node.attrib_count() / 3); for vertex in vertices_array_node.attributes().chunks_exact(3) { vertices.push(Vector3::new( vertex[0].as_f32()?, vertex[1].as_f32()?, vertex[2].as_f32()?, )); } Ok(vertices) } fn read_indices( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Vec<i32>, FbxError> { let indices_node_handle = nodes.find(geom_node_handle, "PolygonVertexIndex")?; let indices_array_node = nodes.get_by_name(indices_node_handle, "a")?; let mut indices = Vec::with_capacity(indices_array_node.attrib_count()); for index in indices_array_node.attributes() { indices.push(index.as_i32()?); } Ok(indices) } fn read_normals( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> { if let Ok(layer_element_normal) = nodes.find(geom_node_handle, "LayerElementNormal") { Ok(Some(scene::make_vec3_container( nodes, layer_element_normal, "Normals", )?)) } else { Ok(None) } } fn 
read_tangents( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> { if let Ok(layer_element_tangent) = nodes.find(geom_node_handle, "LayerElementTangent") { Ok(Some(scene::make_vec3_container( nodes, layer_element_tangent, "Tangents", )?)) } else { Ok(None) } } fn read_binormals( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> { if let Ok(layer_element_tangent) = nodes.find(geom_node_handle, "LayerElementBinormal") { Ok(Some(scene::make_vec3_container( nodes, layer_element_tangent, "Binormals", )?)) } else { Ok(None) } } fn read_uvs( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Option<FbxContainer<Vector2<f32>>>, FbxError> { if let Ok(layer_element_uv) = nodes.find(geom_node_handle, "LayerElementUV") { Ok(Some(FbxContainer::new( nodes, layer_element_uv, "UV", |attributes| { let mut uvs = Vec::with_capacity(attributes.len() / 2); for uv in attributes.chunks_exact(2) { uvs.push(Vector2::new(uv[0].as_f32()?, uv[1].as_f32()?)); } Ok(uvs) }, )?)) } else { Ok(None) } } fn read_materials( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<Option<FbxContainer<i32>>, FbxError> { if let Ok(layer_element_material_node_handle) = nodes.find(geom_node_handle, "LayerElementMaterial") { Ok(Some(FbxContainer::new( nodes, layer_element_material_node_handle, "Materials", |attributes| { let mut materials = Vec::with_capacity(attributes.len()); for attribute in attributes { materials.push(attribute.as_i32()?); } Ok(materials) }, )?)) } else { Ok(None) } } impl FbxGeometry { pub(in crate::resource::fbx) fn read( geom_node_handle: Handle<FbxNode>, nodes: &FbxNodeContainer, ) -> Result<FbxGeometry, FbxError> { Ok(FbxGeometry { vertices: read_vertices(geom_node_handle, nodes)?, indices: read_indices(geom_node_handle, nodes)?, normals: read_normals(geom_node_handle, nodes)?, uvs: 
read_uvs(geom_node_handle, nodes)?, materials: read_materials(geom_node_handle, nodes)?, tangents: read_tangents(geom_node_handle, nodes)?, binormals: read_binormals(geom_node_handle, nodes)?, deformers: Vec::new(), }) } pub(in crate::resource::fbx) fn get_skin_data( &self, scene: &FbxScene, ) -> Result<Vec<VertexWeightSet>, FbxError> { let mut out = vec![VertexWeightSet::default(); self.vertices.len()]; for &deformer_handle in self.deformers.iter() { for &sub_deformer_handle in scene .get(deformer_handle) .as_deformer()? .sub_deformers .iter() { let sub_deformer = scene.get(sub_deformer_handle).as_sub_deformer()?; for (index, weight) in sub_deformer.weights.iter() { let bone_set = out .get_mut(*index as usize) .ok_or(FbxError::IndexOutOfBounds)?; if !bone_set.push(VertexWeight { value: *weight, effector: sub_deformer.model.into(), }) { // Re-normalize weights if there are more than 4 bones per vertex. bone_set.normalize(); } } } } Ok(out) } }
32.080402
93
0.580827
2fc0a9df822795da8798b3c97f60817ec725b3e7
368
use thiserror::Error; #[derive(Error, Debug)] pub enum Error { #[error("Invalid query")] QueryParseError, #[error("Search Error: {0}")] TantivyError(tantivy::TantivyError), #[error(transparent)] Other(#[from] anyhow::Error), } impl From<tantivy::TantivyError> for Error { fn from(t: tantivy::TantivyError) -> Error { Error::TantivyError(t) } }
18.4
46
0.665761
87c81392530686ac0e356746fbccd6c7fd2fd2a2
3,177
#![doc = "generated by AutoRust 0.1.0"] #[cfg(feature = "package-2020-08")] mod package_2020_08; #[cfg(feature = "package-2020-08")] pub use package_2020_08::{models, operations, API_VERSION}; #[cfg(feature = "package-2020-03")] mod package_2020_03; #[cfg(feature = "package-2020-03")] pub use package_2020_03::{models, operations, API_VERSION}; #[cfg(feature = "package-2015-08")] mod package_2015_08; #[cfg(feature = "package-2015-08")] pub use package_2015_08::{models, operations, API_VERSION}; #[cfg(feature = "package-2015-02")] mod package_2015_02; #[cfg(feature = "package-2015-02")] pub use package_2015_02::{models, operations, API_VERSION}; #[cfg(feature = "package-2020-08-preview")] mod package_2020_08_preview; #[cfg(feature = "package-2020-08-preview")] pub use package_2020_08_preview::{models, operations, API_VERSION}; #[cfg(feature = "package-2019-10-preview")] mod package_2019_10_preview; use azure_core::setters; #[cfg(feature = "package-2019-10-preview")] pub use package_2019_10_preview::{models, operations, API_VERSION}; pub fn config( http_client: std::sync::Arc<std::boxed::Box<dyn azure_core::HttpClient>>, token_credential: Box<dyn azure_core::TokenCredential>, ) -> OperationConfigBuilder { OperationConfigBuilder { api_version: None, http_client, base_path: None, token_credential, token_credential_resource: None, } } pub struct OperationConfigBuilder { api_version: Option<String>, http_client: std::sync::Arc<std::boxed::Box<dyn azure_core::HttpClient>>, base_path: Option<String>, token_credential: Box<dyn azure_core::TokenCredential>, token_credential_resource: Option<String>, } impl OperationConfigBuilder { setters! 
{ api_version : String => Some (api_version) , base_path : String => Some (base_path) , token_credential_resource : String => Some (token_credential_resource) , } pub fn build(self) -> OperationConfig { OperationConfig { api_version: self.api_version.unwrap_or(API_VERSION.to_owned()), http_client: self.http_client, base_path: self.base_path.unwrap_or("https://management.azure.com".to_owned()), token_credential: Some(self.token_credential), token_credential_resource: self.token_credential_resource.unwrap_or("https://management.azure.com/".to_owned()), } } } pub struct OperationConfig { api_version: String, http_client: std::sync::Arc<std::boxed::Box<dyn azure_core::HttpClient>>, base_path: String, token_credential: Option<Box<dyn azure_core::TokenCredential>>, token_credential_resource: String, } impl OperationConfig { pub fn api_version(&self) -> &str { self.api_version.as_str() } pub fn http_client(&self) -> &dyn azure_core::HttpClient { self.http_client.as_ref().as_ref() } pub fn base_path(&self) -> &str { self.base_path.as_str() } pub fn token_credential(&self) -> Option<&dyn azure_core::TokenCredential> { self.token_credential.as_deref() } pub fn token_credential_resource(&self) -> &str { self.token_credential_resource.as_str() } }
38.743902
175
0.70255
ddaf49dcbac561bcae0dc5d0456bc5ae7b47c525
34,733
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use super::big; use super::big::Big; use super::fp::FP; use super::rom; use std::fmt; use std::str::SplitWhitespace; pub use super::rom::{AESKEY, CURVETYPE, CURVE_PAIRING_TYPE, HASH_TYPE, SEXTIC_TWIST, SIGN_OF_X}; pub use crate::types::CurveType; #[derive(Clone)] pub struct ECP { x: FP, y: FP, z: FP, } impl PartialEq for ECP { fn eq(&self, other: &ECP) -> bool { self.equals(other) } } impl Eq for ECP {} impl fmt::Display for ECP { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ECP: [ {}, {}, {} ]", self.x, self.y, self.z) } } impl fmt::Debug for ECP { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ECP: [ {}, {}, {} ]", self.x, self.y, self.z) } } #[allow(non_snake_case)] impl ECP { /// Projective New /// /// Creates a new projective elliptic curve point at infinity (0, 1, 0). #[inline(always)] pub fn pnew() -> ECP { ECP { x: FP::new(), y: FP::new_int(1), z: FP::new(), } } /// New /// /// Creates a new ECP at infinity #[inline(always)] pub fn new() -> ECP { let mut E = ECP::pnew(); if CURVETYPE == CurveType::Edwards { E.z.one(); } return E; } /// New Bigs /// /// Set (x,y) from two Bigs /// Set to infinity if not on curve. 
#[inline(always)] pub fn new_bigs(ix: &Big, iy: &Big) -> ECP { let mut E = ECP::new(); E.x.bcopy(ix); E.y.bcopy(iy); E.z.one(); E.x.norm(); let rhs = ECP::rhs(&E.x); if CURVETYPE == CurveType::Montgomery { if rhs.jacobi() != 1 { E.inf(); } } else { let mut y2 = E.y.clone(); y2.sqr(); if !y2.equals(&rhs) { E.inf(); } } return E; } /// New BigInt /// /// Set (x, y) from x and sign of y. /// Set to infinity if not on curve. #[inline(always)] pub fn new_bigint(ix: &Big, s: isize) -> ECP { let mut E = ECP::new(); E.x.bcopy(ix); E.x.norm(); E.z.one(); let mut rhs = ECP::rhs(&E.x); if rhs.jacobi() == 1 { let mut ny = rhs.sqrt(); if ny.redc().parity() != s { ny.neg() } E.y = ny; } else { E.inf() } E } /// New Big /// /// Create point from x, calculates y from curve equation /// Set to infinity if not on curve. #[inline(always)] #[allow(non_snake_case)] pub fn new_big(ix: &Big) -> ECP { let mut E = ECP::new(); E.x.bcopy(ix); E.x.norm(); E.z.one(); let mut rhs = ECP::rhs(&E.x); if rhs.jacobi() == 1 { if CURVETYPE != CurveType::Montgomery { E.y = rhs.sqrt() } } else { E.inf(); } return E; } /// New Fp's /// /// Constructs from (x,y). /// Set to infinity if not on curve. #[inline(always)] pub fn new_fps(x: FP, y: FP) -> ECP { let mut point = ECP { x, y, z: FP::new_int(1), }; let rhs = ECP::rhs(&point.x); let mut y2 = point.y.clone(); y2.sqr(); if !y2.equals(&rhs) { point.inf(); } point } /// New Projective /// /// Create new point from (X, Y, Z). /// Assumes coordinates are valid. #[inline(always)] pub fn new_projective(x: FP, y: FP, z: FP) -> ECP { ECP { x, y, z } } /// Infinity /// /// Set self to infinity. pub fn inf(&mut self) { self.x.zero(); if CURVETYPE != CurveType::Montgomery { self.y.one(); } if CURVETYPE != CurveType::Edwards { self.z.zero(); } else { self.z.one() } } /// Right Hand Side /// /// Calculate RHS of curve equation. 
fn rhs(x: &FP) -> FP { let mut r = x.clone(); r.sqr(); if CURVETYPE == CurveType::Weierstrass { // x^3+Ax+B let b = FP::new_big(Big::new_ints(&rom::CURVE_B)); r.mul(x); if rom::CURVE_A == -3 { let mut cx = x.clone(); cx.imul(3); cx.neg(); cx.norm(); r.add(&cx); } r.add(&b); } if CURVETYPE == CurveType::Edwards { // (Ax^2-1)/(Bx^2-1) let mut b = FP::new_big(Big::new_ints(&rom::CURVE_B)); let one = FP::new_int(1); b.mul(&r); b.sub(&one); b.norm(); if rom::CURVE_A == -1 { r.neg() } r.sub(&one); r.norm(); b.inverse(); r.mul(&b); } if CURVETYPE == CurveType::Montgomery { // x^3+Ax^2+x let mut x3 = r.clone(); x3.mul(x); r.imul(rom::CURVE_A); r.add(&x3); r.add(&x); } r.reduce(); return r; } /// Is Infinity /// /// self == infinity pub fn is_infinity(&self) -> bool { match CURVETYPE { CurveType::Edwards => self.x.is_zilch() && self.y.equals(&self.z), CurveType::Weierstrass => self.x.is_zilch() && self.z.is_zilch(), CurveType::Montgomery => self.z.is_zilch(), } } /// Conditional Swap /// /// Conditional swap of self and Q dependant on d pub fn cswap(&mut self, Q: &mut ECP, d: isize) { self.x.cswap(&mut Q.x, d); if CURVETYPE != CurveType::Montgomery { self.y.cswap(&mut Q.y, d) } self.z.cswap(&mut Q.z, d); } /// Conditional Move /// /// Conditional move of Q to self dependant on d pub fn cmove(&mut self, Q: &ECP, d: isize) { self.x.cmove(&Q.x, d); if CURVETYPE != CurveType::Montgomery { self.y.cmove(&Q.y, d) } self.z.cmove(&Q.z, d); } /// ConstantTime Equals /// /// Return 1 if b == c, no branching fn teq(b: i32, c: i32) -> isize { let mut x = b ^ c; x -= 1; // if x=0, x now -1 return ((x >> 31) & 1) as isize; } /// Negation /// /// self = -self pub fn neg(&mut self) { if CURVETYPE == CurveType::Weierstrass { self.y.neg(); self.y.norm(); } if CURVETYPE == CurveType::Edwards { self.x.neg(); self.x.norm(); } return; } /// Multiply X /// /// Multiplies the X coordinate pub fn mulx(&mut self, c: &mut FP) { self.x.mul(c); } /// Selector /// /// Constant time select from 
pre-computed table. fn selector(&mut self, W: &[ECP], b: i32) { let m = b >> 31; let mut babs = (b ^ m) - m; babs = (babs - 1) / 2; self.cmove(&W[0], ECP::teq(babs, 0)); // conditional move self.cmove(&W[1], ECP::teq(babs, 1)); self.cmove(&W[2], ECP::teq(babs, 2)); self.cmove(&W[3], ECP::teq(babs, 3)); self.cmove(&W[4], ECP::teq(babs, 4)); self.cmove(&W[5], ECP::teq(babs, 5)); self.cmove(&W[6], ECP::teq(babs, 6)); self.cmove(&W[7], ECP::teq(babs, 7)); let mut MP = self.clone(); MP.neg(); self.cmove(&MP, (m & 1) as isize); } /// Equals /// /// self == Q pub fn equals(&self, Q: &ECP) -> bool { let mut a = self.getpx(); a.mul(&Q.z); let mut b = Q.getpx(); b.mul(&self.z); if !a.equals(&b) { return false; } if CURVETYPE != CurveType::Montgomery { a = self.getpy(); a.mul(&Q.z); b = Q.getpy(); b.mul(&self.z); if !a.equals(&b) { return false; } } return true; } /// Affine /// /// Set to affine, from (X, Y, Z) to (x, y). pub fn affine(&mut self) { if self.is_infinity() { return; } let one = FP::new_int(1); if self.z.equals(&one) { return; } self.z.inverse(); self.x.mul(&self.z); self.x.reduce(); if CURVETYPE != CurveType::Montgomery { self.y.mul(&self.z); self.y.reduce(); } self.z = one; } /// Get X /// /// Extract affine x as a Big. pub fn getx(&self) -> Big { let mut W = self.clone(); W.affine(); return W.x.redc(); } /// Get Y /// /// Extract affine y as a Big. pub fn gety(&self) -> Big { let mut W = self.clone(); W.affine(); return W.y.redc(); } /// Get Sign Y /// /// Returns the sign of Y. pub fn gets(&self) -> isize { let y = self.gety(); return y.parity(); } /// Get Proejctive X /// /// Extract X as an FP. pub fn getpx(&self) -> FP { self.x.clone() } /// Get Projective Y /// /// Extract Y as an FP. pub fn getpy(&self) -> FP { self.y.clone() } /// Get Porjective Z /// /// Extract Z as an FP. pub fn getpz(&self) -> FP { self.z.clone() } /// To Bytes /// /// Convert to byte array /// Panics if byte array is insufficient length. 
pub fn to_bytes(&self, b: &mut [u8], compress: bool) { let mb = big::MODBYTES as usize; let mut t: [u8; big::MODBYTES as usize] = [0; big::MODBYTES as usize]; let mut W = self.clone(); W.affine(); W.x.redc().to_bytes(&mut t); for i in 0..mb { b[i + 1] = t[i] } if CURVETYPE == CurveType::Montgomery { b[0] = 0x06; return; } if compress { b[0] = 0x02; if W.y.redc().parity() == 1 { b[0] = 0x03 } return; } b[0] = 0x04; W.y.redc().to_bytes(&mut t); for i in 0..mb { b[i + mb + 1] = t[i] } } /// From Bytes /// /// Convert from byte array to point /// Panics if input bytes are less than required bytes. #[inline(always)] pub fn from_bytes(b: &[u8]) -> ECP { let mut t: [u8; big::MODBYTES as usize] = [0; big::MODBYTES as usize]; let mb = big::MODBYTES as usize; let p = Big::new_ints(&rom::MODULUS); for i in 0..mb { t[i] = b[i + 1] } let px = Big::from_bytes(&t); if Big::comp(&px, &p) >= 0 { return ECP::new(); } if CURVETYPE == CurveType::Montgomery { return ECP::new_big(&px); } if b[0] == 0x04 { for i in 0..mb { t[i] = b[i + mb + 1] } let py = Big::from_bytes(&t); if Big::comp(&py, &p) >= 0 { return ECP::new(); } return ECP::new_bigs(&px, &py); } if b[0] == 0x02 || b[0] == 0x03 { return ECP::new_bigint(&px, (b[0] & 1) as isize); } return ECP::new(); } /// To String /// /// Converts `ECP` to a hex string. pub fn to_string(&self) -> String { let mut W = self.clone(); W.affine(); if W.is_infinity() { return String::from("infinity"); } if CURVETYPE == CurveType::Montgomery { return format!("({})", W.x.redc().to_string()); } else { return format!("({},{})", W.x.redc().to_string(), W.y.redc().to_string()); }; } /// To Hex /// /// Converts the projectives to a hex string separated by a space. 
pub fn to_hex(&self) -> String { format!( "{} {} {}", self.x.to_hex(), self.y.to_hex(), self.z.to_hex() ) } /// From Hex Iterator #[inline(always)] pub fn from_hex_iter(iter: &mut SplitWhitespace) -> ECP { ECP { x: FP::from_hex_iter(iter), y: FP::from_hex_iter(iter), z: FP::from_hex_iter(iter), } } /// From Hex #[inline(always)] pub fn from_hex(val: String) -> ECP { let mut iter = val.split_whitespace(); return ECP::from_hex_iter(&mut iter); } /// Double /// /// self *= 2 pub fn dbl(&mut self) { if CURVETYPE == CurveType::Weierstrass { if rom::CURVE_A == 0 { let mut t0 = self.y.clone(); t0.sqr(); let mut t1 = self.y.clone(); t1.mul(&self.z); let mut t2 = self.z.clone(); t2.sqr(); self.z = t0.clone(); self.z.add(&t0); self.z.norm(); self.z.dbl(); self.z.dbl(); self.z.norm(); t2.imul(3 * rom::CURVE_B_I); let mut x3 = t2.clone(); x3.mul(&self.z); let mut y3 = t0.clone(); y3.add(&t2); y3.norm(); self.z.mul(&t1); t1 = t2.clone(); t1.add(&t2); t2.add(&t1); t0.sub(&t2); t0.norm(); y3.mul(&t0); y3.add(&x3); t1 = self.getpx(); t1.mul(&self.y); self.x = t0.clone(); self.x.norm(); self.x.mul(&t1); self.x.dbl(); self.x.norm(); self.y = y3.clone(); self.y.norm(); } else { let mut t0 = self.x.clone(); let mut t1 = self.y.clone(); let mut t2 = self.z.clone(); let mut t3 = self.x.clone(); let mut z3 = self.z.clone(); let mut b = FP::new(); if rom::CURVE_B_I == 0 { b = FP::new_big(Big::new_ints(&rom::CURVE_B)); } t0.sqr(); //1 x^2 t1.sqr(); //2 y^2 t2.sqr(); //3 t3.mul(&self.y); //4 t3.dbl(); t3.norm(); //5 z3.mul(&self.x); //6 z3.dbl(); z3.norm(); //7 let mut y3 = t2.clone(); if rom::CURVE_B_I == 0 { y3.mul(&b); //8 } else { y3.imul(rom::CURVE_B_I); } y3.sub(&z3); //9 *** let mut x3 = y3.clone(); x3.add(&y3); x3.norm(); //10 y3.add(&x3); //11 x3 = t1.clone(); x3.sub(&y3); x3.norm(); //12 y3.add(&t1); y3.norm(); //13 y3.mul(&x3); //14 x3.mul(&t3); //15 t3 = t2.clone(); t3.add(&t2); //16 t2.add(&t3); //17 if rom::CURVE_B_I == 0 { z3.mul(&b); //18 } else { z3.imul(rom::CURVE_B_I); } 
z3.sub(&t2); //19 z3.sub(&t0); z3.norm(); //20 *** t3 = z3.clone(); t3.add(&z3); //21 z3.add(&t3); z3.norm(); //22 t3 = t0.clone(); t3.add(&t0); //23 t0.add(&t3); //24 t0.sub(&t2); t0.norm(); //25 t0.mul(&z3); //26 y3.add(&t0); //27 t0 = self.getpy(); t0.mul(&self.z); //28 t0.dbl(); t0.norm(); //29 z3.mul(&t0); //30 x3.sub(&z3); //31 t0.dbl(); t0.norm(); //32 t1.dbl(); t1.norm(); //33 z3 = t0.clone(); z3.mul(&t1); //34 self.x = x3.clone(); self.x.norm(); self.y = y3.clone(); self.y.norm(); self.z = z3.clone(); self.z.norm(); } } if CURVETYPE == CurveType::Edwards { let mut c = self.x.clone(); let mut d = self.y.clone(); let mut h = self.z.clone(); self.x.mul(&self.y); self.x.dbl(); self.x.norm(); c.sqr(); d.sqr(); if rom::CURVE_A == -1 { c.neg() } self.y = c.clone(); self.y.add(&d); self.y.norm(); h.sqr(); h.dbl(); self.z = self.getpy(); let mut j = self.getpy(); j.sub(&h); j.norm(); self.x.mul(&j); c.sub(&d); c.norm(); self.y.mul(&c); self.z.mul(&j); } if CURVETYPE == CurveType::Montgomery { let mut a = self.x.clone(); let mut b = self.x.clone(); a.add(&self.z); a.norm(); let mut aa = a.clone(); aa.sqr(); b.sub(&self.z); b.norm(); let mut bb = b.clone(); bb.sqr(); let mut c = aa.clone(); c.sub(&bb); c.norm(); self.x = aa.clone(); self.x.mul(&bb); a = c.clone(); a.imul((rom::CURVE_A + 2) / 4); bb.add(&a); bb.norm(); self.z = bb; self.z.mul(&c); } } /// Addition /// /// self += Q pub fn add(&mut self, Q: &ECP) { if CURVETYPE == CurveType::Weierstrass { if rom::CURVE_A == 0 { let b = 3 * rom::CURVE_B_I; let mut t0 = self.x.clone(); t0.mul(&Q.x); let mut t1 = self.y.clone(); t1.mul(&Q.y); let mut t2 = self.z.clone(); t2.mul(&Q.z); let mut t3 = self.x.clone(); t3.add(&self.y); t3.norm(); let mut t4 = Q.x.clone(); t4.add(&Q.y); t4.norm(); t3.mul(&t4); t4 = t0.clone(); t4.add(&t1); t3.sub(&t4); t3.norm(); t4 = self.getpy(); t4.add(&self.z); t4.norm(); let mut x3 = Q.y.clone(); x3.add(&Q.z); x3.norm(); t4.mul(&x3); x3 = t1.clone(); x3.add(&t2); t4.sub(&x3); t4.norm(); x3 
= self.getpx(); x3.add(&self.z); x3.norm(); let mut y3 = Q.x.clone(); y3.add(&Q.z); y3.norm(); x3.mul(&y3); y3 = t0.clone(); y3.add(&t2); y3.rsub(&x3); y3.norm(); x3 = t0.clone(); x3.add(&t0); t0.add(&x3); t0.norm(); t2.imul(b); let mut z3 = t1.clone(); z3.add(&t2); z3.norm(); t1.sub(&t2); t1.norm(); y3.imul(b); x3 = y3.clone(); x3.mul(&t4); t2 = t3.clone(); t2.mul(&t1); x3.rsub(&t2); y3.mul(&t0); t1.mul(&z3); y3.add(&t1); t0.mul(&t3); z3.mul(&t4); z3.add(&t0); self.x = x3.clone(); self.x.norm(); self.y = y3.clone(); self.y.norm(); self.z = z3.clone(); self.z.norm(); } else { let mut t0 = self.x.clone(); let mut t1 = self.y.clone(); let mut t2 = self.z.clone(); let mut t3 = self.x.clone(); let mut t4 = Q.x.clone(); let mut y3 = Q.x.clone(); let mut x3 = Q.y.clone(); let mut b = FP::new(); if rom::CURVE_B_I == 0 { b = FP::new_big(Big::new_ints(&rom::CURVE_B)); } t0.mul(&Q.x); //1 t1.mul(&Q.y); //2 t2.mul(&Q.z); //3 t3.add(&self.y); t3.norm(); //4 t4.add(&Q.y); t4.norm(); //5 t3.mul(&t4); //6 t4 = t0.clone(); t4.add(&t1); //7 t3.sub(&t4); t3.norm(); //8 t4 = self.getpy(); t4.add(&self.z); t4.norm(); //9 x3.add(&Q.z); x3.norm(); //10 t4.mul(&x3); //11 x3 = t1.clone(); x3.add(&t2); //12 t4.sub(&x3); t4.norm(); //13 x3 = self.getpx(); x3.add(&self.z); x3.norm(); //14 y3.add(&Q.z); y3.norm(); //15 x3.mul(&y3); //16 y3 = t0.clone(); y3.add(&t2); //17 y3.rsub(&x3); y3.norm(); //18 let mut z3 = t2.clone(); if rom::CURVE_B_I == 0 { z3.mul(&b); //18 } else { z3.imul(rom::CURVE_B_I); } x3 = y3.clone(); x3.sub(&z3); x3.norm(); //20 z3 = x3.clone(); z3.add(&x3); //21 x3.add(&z3); //22 z3 = t1.clone(); z3.sub(&x3); z3.norm(); //23 x3.add(&t1); x3.norm(); //24 if rom::CURVE_B_I == 0 { y3.mul(&b); //18 } else { y3.imul(rom::CURVE_B_I); } t1 = t2.clone(); t1.add(&t2); //t1.norm();//26 t2.add(&t1); //27 y3.sub(&t2); //28 y3.sub(&t0); y3.norm(); //29 t1 = y3.clone(); t1.add(&y3); //30 y3.add(&t1); y3.norm(); //31 t1 = t0.clone(); t1.add(&t0); //32 t0.add(&t1); //33 t0.sub(&t2); 
t0.norm(); //34 t1 = t4.clone(); t1.mul(&y3); //35 t2 = t0.clone(); t2.mul(&y3); //36 y3 = x3.clone(); y3.mul(&z3); //37 y3.add(&t2); //y3.norm();//38 x3.mul(&t3); //39 x3.sub(&t1); //40 z3.mul(&t4); //41 t1 = t3.clone(); t1.mul(&t0); //42 z3.add(&t1); self.x = x3.clone(); self.x.norm(); self.y = y3.clone(); self.y.norm(); self.z = z3.clone(); self.z.norm(); } } if CURVETYPE == CurveType::Edwards { let bb = FP::new_big(Big::new_ints(&rom::CURVE_B)); let mut a = self.z.clone(); let mut c = self.x.clone(); let mut d = self.y.clone(); a.mul(&Q.z); let mut b = a.clone(); b.sqr(); c.mul(&Q.x); d.mul(&Q.y); let mut e = c.clone(); e.mul(&d); e.mul(&bb); let mut f = b.clone(); f.sub(&e); let mut g = b.clone(); g.add(&e); if rom::CURVE_A == 1 { e = d.clone(); e.sub(&c); } c.add(&d); b = self.getpx(); b.add(&self.y); d = Q.getpx(); d.add(&Q.y); b.norm(); d.norm(); b.mul(&d); b.sub(&c); b.norm(); f.norm(); b.mul(&f); self.x = a.clone(); self.x.mul(&b); g.norm(); if rom::CURVE_A == 1 { e.norm(); c = e.clone(); c.mul(&g); } if rom::CURVE_A == -1 { c.norm(); c.mul(&g); } self.y = a.clone(); self.y.mul(&c); self.z = f.clone(); self.z.mul(&g); } return; } /// Differential Add for Montgomery curves. 
/// /// self += Q /// where W is (self - Q) and is affine pub fn dadd(&mut self, Q: &ECP, W: &ECP) { let mut a = self.x.clone(); let mut b = self.x.clone(); let mut c = Q.x.clone(); let mut d = Q.x.clone(); a.add(&self.z); b.sub(&self.z); c.add(&Q.z); d.sub(&Q.z); a.norm(); d.norm(); let mut da = d.clone(); da.mul(&a); c.norm(); b.norm(); let mut cb = c.clone(); cb.mul(&b); a = da.clone(); a.add(&cb); a.norm(); a.sqr(); b = da.clone(); b.sub(&cb); b.norm(); b.sqr(); self.x = a.clone(); self.z = W.getpx(); self.z.mul(&b); } /// Subtraction /// /// self -= Q pub fn sub(&mut self, Q: &ECP) { let mut NQ = Q.clone(); NQ.neg(); self.add(&NQ); } /// Pin Multiplication /// /// Constant time multiply by small integer of length bts - use ladder #[inline(always)] pub fn pinmul(&self, e: i32, bts: i32) -> ECP { if CURVETYPE == CurveType::Montgomery { return self.mul(&mut Big::new_int(e as isize)); } else { let mut R0 = ECP::new(); let mut R1 = self.clone(); for i in (0..bts).rev() { let b = ((e >> i) & 1) as isize; let mut P = R1.clone(); P.add(&R0); R0.cswap(&mut R1, b); R1 = P.clone(); R0.dbl(); R0.cswap(&mut R1, b); } let mut P = R0.clone(); P.affine(); P } } /// Multiplication /// /// Return e * self #[inline(always)] pub fn mul(&self, e: &Big) -> ECP { if e.is_zilch() || self.is_infinity() { return ECP::new(); } let mut T = if CURVETYPE == CurveType::Montgomery { /* use Ladder */ let mut R0 = self.clone(); let mut R1 = self.clone(); R1.dbl(); let mut D = self.clone(); D.affine(); let nb = e.nbits(); for i in (0..nb - 1).rev() { let b = e.bit(i); let mut P = R1.clone(); P.dadd(&mut R0, &D); R0.cswap(&mut R1, b); R1 = P.clone(); R0.dbl(); R0.cswap(&mut R1, b); } R0.clone() } else { let mut W: [ECP; 8] = [ ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ]; const CT: usize = 1 + (big::NLEN * (big::BASEBITS as usize) + 3) / 4; let mut w: [i8; CT] = [0; CT]; let mut Q = self.clone(); Q.dbl(); W[0] = self.clone(); for i in 1..8 { 
W[i] = W[i - 1].clone(); W[i].add(&Q); } // make exponent odd - add 2P if even, P if odd let mut t = e.clone(); let s = t.parity(); t.inc(1); t.norm(); let ns = t.parity(); let mut mt = t.clone(); mt.inc(1); mt.norm(); t.cmove(&mt, s); Q.cmove(&self, ns); let C = Q.clone(); let nb = 1 + (t.nbits() + 3) / 4; // convert exponent to signed 4-bit window for i in 0..nb { w[i] = (t.lastbits(5) - 16) as i8; t.dec(w[i] as isize); t.norm(); t.fshr(4); } w[nb] = t.lastbits(5) as i8; let mut P = W[((w[nb] as usize) - 1) / 2].clone(); for i in (0..nb).rev() { Q.selector(&W, w[i] as i32); P.dbl(); P.dbl(); P.dbl(); P.dbl(); P.add(&Q); } P.sub(&C); /* apply correction */ P }; T.affine(); T } /// Multiply two points by scalars /// /// Return e * self + f * Q #[inline(always)] pub fn mul2(&self, e: &Big, Q: &ECP, f: &Big) -> ECP { let mut W: [ECP; 8] = [ ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ECP::new(), ]; const CT: usize = 1 + (big::NLEN * (big::BASEBITS as usize) + 1) / 2; let mut w: [i8; CT] = [0; CT]; let mut te = e.clone(); let mut tf = f.clone(); // precompute table W[1] = self.clone(); W[1].sub(Q); W[2] = self.clone(); W[2].add(Q); let mut S = Q.clone(); S.dbl(); let mut C = W[1].clone(); W[0] = C.clone(); W[0].sub(&S); // copy to C is stupid Rust thing.. 
C = W[2].clone(); W[3] = C.clone(); W[3].add(&S); let mut T = self.clone(); T.dbl(); C = W[1].clone(); W[5] = C.clone(); W[5].add(&T); C = W[2].clone(); W[6] = C.clone(); W[6].add(&T); C = W[5].clone(); W[4] = C.clone(); W[4].sub(&S); C = W[6].clone(); W[7] = C.clone(); W[7].add(&S); // if multiplier is odd, add 2, else add 1 to multiplier, and add 2P or P to correction let mut s = te.parity(); te.inc(1); te.norm(); let mut ns = te.parity(); let mut mt = te.clone(); mt.inc(1); mt.norm(); te.cmove(&mt, s); T.cmove(&self, ns); C = T.clone(); s = tf.parity(); tf.inc(1); tf.norm(); ns = tf.parity(); mt = tf.clone(); mt.inc(1); mt.norm(); tf.cmove(&mt, s); S.cmove(&Q, ns); C.add(&S); mt = te.clone(); mt.add(&tf); mt.norm(); let nb = 1 + (mt.nbits() + 1) / 2; // convert exponent to signed 2-bit window for i in 0..nb { let a = te.lastbits(3) - 4; te.dec(a); te.norm(); te.fshr(2); let b = tf.lastbits(3) - 4; tf.dec(b); tf.norm(); tf.fshr(2); w[i] = (4 * a + b) as i8; } w[nb] = (4 * te.lastbits(3) + tf.lastbits(3)) as i8; S = W[((w[nb] as usize) - 1) / 2].clone(); for i in (0..nb).rev() { T.selector(&W, w[i] as i32); S.dbl(); S.dbl(); S.add(&T); } S.sub(&C); /* apply correction */ S.affine(); return S; } // Multiply itself by cofactor of the curve pub fn cfp(&mut self) { let cf = rom::CURVE_COF_I; if cf == 1 { return; } if cf == 4 { self.dbl(); self.dbl(); return; } if cf == 8 { self.dbl(); self.dbl(); self.dbl(); return; } let c = Big::new_ints(&rom::CURVE_COF); let P = self.mul(&c); *self = P.clone(); } /// Map It /// /// Maps bytes to a curve point using hash and test. /// Not conformant to hash-to-curve standards. 
#[allow(non_snake_case)] #[inline(always)] pub fn mapit(h: &[u8]) -> ECP { let q = Big::new_ints(&rom::MODULUS); let mut x = Big::from_bytes(h); x.rmod(&q); let mut P: ECP; loop { loop { if CURVETYPE != CurveType::Montgomery { P = ECP::new_bigint(&x, 0); } else { P = ECP::new_big(&x); } x.inc(1); x.norm(); if !P.is_infinity() { break; } } P.cfp(); if !P.is_infinity() { break; } } return P; } /// Generator /// /// Returns the generator of the group. #[inline(always)] pub fn generator() -> ECP { let G: ECP; let gx = Big::new_ints(&rom::CURVE_GX); if CURVETYPE != CurveType::Montgomery { let gy = Big::new_ints(&rom::CURVE_GY); G = ECP::new_bigs(&gx, &gy); } else { G = ECP::new_big(&gx); } return G; } }
25.862249
96
0.37437
f92f050e7cd6eb5601529387067553a6bc3d644e
6,632
// Copyright (c) Facebook, Inc. and its affiliates. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. mod emit_memoize_function; use ast_scope::{self as ast_scope, Scope, ScopeItem}; use emit_body::{self as emit_body}; use env::emitter::Emitter; use ffi::{Slice, Str}; use hhas_attribute::{self as hhas_attribute, HhasAttribute}; use hhas_coeffects::HhasCoeffects; use hhas_function::{self as hhas_function, HhasFunction}; use hhas_pos::HhasSpan; use hhbc_id::{class::ClassType, function::FunctionType, Id}; use instruction_sequence::{instr, Result}; use naming_special_names_rust::user_attributes as ua; use ocamlrep::rc::RcOc; use oxidized::{ast, ast_defs}; use itertools::Either; pub fn emit_function<'a, 'arena, 'decl>( e: &mut Emitter<'arena, 'decl>, fd: &'a ast::FunDef, ) -> Result<Vec<HhasFunction<'arena>>> { use ast_defs::FunKind; use hhas_function::HhasFunctionFlags; let alloc = e.alloc; let f = &fd.fun; let original_id = FunctionType::from_ast_name(alloc, &f.name.1); let mut flags = HhasFunctionFlags::empty(); flags.set( HhasFunctionFlags::ASYNC, matches!(f.fun_kind, FunKind::FAsync | FunKind::FAsyncGenerator), ); let mut attrs: Vec<HhasAttribute<'arena>> = emit_attribute::from_asts(e, &f.user_attributes)?; attrs.extend(emit_attribute::add_reified_attribute(alloc, &f.tparams)); let memoized = attrs .iter() .any(|a| ua::is_memoized(a.name.unsafe_as_str())); flags.set(HhasFunctionFlags::MEMOIZE_IMPL, memoized); flags.set( HhasFunctionFlags::NO_INJECTION, hhas_attribute::is_no_injection(&attrs), ); let renamed_id = { if memoized { FunctionType::add_suffix(alloc, &original_id, emit_memoize_helpers::MEMOIZE_SUFFIX) } else { original_id } }; flags.set( HhasFunctionFlags::INTERCEPTABLE, emit_memoize_function::is_interceptable(e.options()), ); let is_meth_caller = f.name.1.starts_with("\\MethCaller$"); let call_context = if is_meth_caller { match &f.user_attributes[..] 
{ [ast::UserAttribute { name: ast_defs::Id(_, ref s), params, }] if s == "__MethCaller" => match &params[..] { [ast::Expr(_, _, ast::Expr_::String(ref ctx))] if !ctx.is_empty() => Some( ClassType::from_ast_name( alloc, // FIXME: This is not safe--string literals are binary strings. // There's no guarantee that they're valid UTF-8. unsafe { std::str::from_utf8_unchecked(ctx.as_slice()) }, ) .to_raw_string() .into(), ), _ => None, }, _ => None, } } else { None }; let is_debug_main = match f.user_attributes.as_slice() { [ast::UserAttribute { name, params }] if name.1 == "__DebuggerMain" && params.is_empty() => { true } _ => false, }; let mut scope = Scope::toplevel(); if !is_debug_main { scope.push_item(ScopeItem::Function(ast_scope::Fun::new_ref(&fd))); } let mut coeffects = HhasCoeffects::from_ast(alloc, &f.ctxs, &f.params, &f.tparams, vec![]); if is_meth_caller { coeffects = coeffects.with_caller(alloc) } if e.systemlib() && (f.name.1 == "\\HH\\Coeffects\\backdoor" || f.name.1 == "\\HH\\Coeffects\\backdoor_async") { coeffects = coeffects.with_backdoor(alloc) } let ast_body = &f.body.fb_ast; let deprecation_info = hhas_attribute::deprecation_info(attrs.iter()); let (body, is_gen, is_pair_gen) = { let deprecation_info = if memoized { None } else { deprecation_info }; let native = attrs.iter().any(|a| ua::is_native(a.name.unsafe_as_str())); use emit_body::{Args as EmitBodyArgs, Flags as EmitBodyFlags}; let mut body_flags = EmitBodyFlags::empty(); body_flags.set( EmitBodyFlags::ASYNC, flags.contains(HhasFunctionFlags::ASYNC), ); body_flags.set(EmitBodyFlags::NATIVE, native); body_flags.set(EmitBodyFlags::MEMOIZE, memoized); body_flags.set( EmitBodyFlags::SKIP_AWAITABLE, f.fun_kind == ast_defs::FunKind::FAsync, ); body_flags.set( EmitBodyFlags::HAS_COEFFECTS_LOCAL, coeffects.has_coeffects_local(), ); emit_body::emit_body( alloc, e, RcOc::clone(&fd.namespace), Either::Right(ast_body), instr::null(alloc), scope, EmitBodyArgs { flags: body_flags, deprecation_info: 
&deprecation_info, default_dropthrough: None, doc_comment: f.doc_comment.clone(), pos: &f.span, ret: f.ret.1.as_ref(), ast_params: &f.params, call_context, immediate_tparams: &f.tparams, class_tparam_names: &[], }, )? }; let is_readonly_return = f.readonly_ret.is_some(); flags.set(HhasFunctionFlags::GENERATOR, is_gen); flags.set(HhasFunctionFlags::PAIR_GENERATOR, is_pair_gen); flags.set(HhasFunctionFlags::READONLY_RETURN, is_readonly_return); let memoize_wrapper = if memoized { Some(emit_memoize_function::emit_wrapper_function( e, original_id, &renamed_id, &deprecation_info, &fd, )?) } else { None }; let normal_function = HhasFunction { attributes: Slice::fill_iter(alloc, attrs.into_iter()), name: FunctionType(Str::new_str(alloc, renamed_id.to_raw_string())), span: HhasSpan::from_pos(&f.span), coeffects, body, flags, }; Ok(if let Some(memoize_wrapper) = memoize_wrapper { vec![normal_function, memoize_wrapper] } else { vec![normal_function] }) } pub fn emit_functions_from_program<'a, 'arena, 'decl>( e: &mut Emitter<'arena, 'decl>, ast: &'a [ast::Def], ) -> Result<Vec<HhasFunction<'arena>>> { Ok(ast .iter() .filter_map(|d| d.as_fun().map(|f| emit_function(e, f))) .collect::<Result<Vec<Vec<_>>>>()? .into_iter() .flatten() .collect::<Vec<_>>()) }
34.010256
98
0.58278
0aacd686546347100afe8954e1caa9a6734bbddc
19,220
use clap::{App as ClapApp, AppSettings, Arg, ArgGroup, SubCommand}; use std::path::Path; pub fn build_app(interactive_output: bool) -> ClapApp<'static, 'static> { let clap_color_setting = if interactive_output { AppSettings::ColoredHelp } else { AppSettings::ColorNever }; let app = ClapApp::new(crate_name!()) .version(crate_version!()) .global_setting(clap_color_setting) .global_setting(AppSettings::DeriveDisplayOrder) .global_setting(AppSettings::UnifiedHelpMessage) .global_setting(AppSettings::HidePossibleValuesInHelp) .setting(AppSettings::ArgsNegateSubcommands) .setting(AppSettings::AllowExternalSubcommands) .setting(AppSettings::DisableHelpSubcommand) .setting(AppSettings::VersionlessSubcommands) .max_term_width(100) .about( "A cat(1) clone with wings.\n\n\ Use '--help' instead of '-h' to see a more detailed version of the help text.", ) .long_about("A cat(1) clone with syntax highlighting and Git integration.") .arg( Arg::with_name("FILE") .help("File(s) to print / concatenate. Use '-' for standard input.") .long_help( "File(s) to print / concatenate. Use a dash ('-') or no argument at all \ to read from standard input.", ) .multiple(true) .empty_values(false), ) .arg( Arg::with_name("language") .short("l") .long("language") .overrides_with("language") .help("Set the language for syntax highlighting.") .long_help( "Explicitly set the language for syntax highlighting. The language can be \ specified as a name (like 'C++' or 'LaTeX') or possible file extension \ (like 'cpp', 'hpp' or 'md'). 
Use '--list-languages' to show all supported \ language names and file extensions.", ) .takes_value(true), ) .arg( Arg::with_name("list-languages") .long("list-languages") .short("L") .conflicts_with("list-themes") .help("Display all supported languages.") .long_help("Display a list of supported languages for syntax highlighting."), ) .arg( Arg::with_name("map-syntax") .short("m") .long("map-syntax") .multiple(true) .takes_value(true) .number_of_values(1) .value_name("from:to") .help("Map a file extension or name to an existing syntax.") .long_help( "Map a file extension or file name to an existing syntax. For example, \ to highlight *.conf files with the INI syntax, use '-m conf:ini'. \ To highlight files named '.myignore' with the Git Ignore syntax, use \ '-m .myignore:gitignore'.", ) .takes_value(true), ) .arg( Arg::with_name("theme") .long("theme") .overrides_with("theme") .takes_value(true) .help("Set the color theme for syntax highlighting.") .long_help( "Set the theme for syntax highlighting. Use '--list-themes' to \ see all available themes. To set a default theme, add the \ '--theme=\"...\"' option to the configuration file or export the \ BAT_THEME environment variable (e.g.: export \ BAT_THEME=\"...\").", ), ) .arg( Arg::with_name("list-themes") .long("list-themes") .help("Display all supported highlighting themes.") .long_help("Display a list of supported themes for syntax highlighting."), ) .arg( Arg::with_name("style") .long("style") .value_name("style-components") // Need to turn this off for overrides_with to work as we want. 
See the bottom most // example at https://docs.rs/clap/2.32.0/clap/struct.Arg.html#method.overrides_with .use_delimiter(false) .takes_value(true) .overrides_with("style") .overrides_with("plain") .overrides_with("number") // Cannot use clap's built in validation because we have to turn off clap's delimiters .validator(|val| { let mut invalid_vals = val.split(',').filter(|style| { !&[ "auto", "full", "plain", "changes", "header", "grid", "numbers", "snip" ] .contains(style) }); if let Some(invalid) = invalid_vals.next() { Err(format!("Unknown style, '{}'", invalid)) } else { Ok(()) } }) .help( "Comma-separated list of style elements to display \ (*auto*, full, plain, changes, header, grid, numbers, snip).", ) .long_help( "Configure which elements (line numbers, file headers, grid \ borders, Git modifications, ..) to display in addition to the \ file contents. The argument is a comma-separated list of \ components to display (e.g. 'numbers,changes,grid') or a \ pre-defined style ('full'). To set a default style, add the \ '--style=\"..\"' option to the configuration file or export the \ BAT_STYLE environment variable (e.g.: export BAT_STYLE=\"..\"). \ Possible values: *auto*, full, plain, changes, header, grid, numbers, snip.", ), ) .arg( Arg::with_name("plain") .overrides_with("plain") .overrides_with("number") .short("p") .long("plain") .multiple(true) .help("Show plain style (alias for '--style=plain').") .long_help( "Only show plain style, no decorations. This is an alias for \ '--style=plain'. When '-p' is used twice ('-pp'), it also disables \ automatic paging (alias for '--style=plain --pager=never').", ), ) .arg( Arg::with_name("number") .long("number") .overrides_with("number") .short("n") .help("Show line numbers (alias for '--style=numbers').") .long_help( "Only show line numbers, no other decorations. 
This is an alias for \ '--style=numbers'", ), ) .arg( Arg::with_name("show-all") .long("show-all") .alias("show-nonprintable") .short("A") .conflicts_with("language") .help("Show non-printable characters (space, tab, newline, ..).") .long_help( "Show non-printable characters like space, tab or newline. \ This option can also be used to print binary files. \ Use '--tabs' to control the width of the tab-placeholders." ), ) .arg( Arg::with_name("line-range") .long("line-range") .short("r") .multiple(true) .takes_value(true) .number_of_values(1) .value_name("N:M") .help("Only print the lines from N to M.") .long_help( "Only print the specified range of lines for each file. \ For example:\n \ '--line-range 30:40' prints lines 30 to 40\n \ '--line-range :40' prints lines 1 to 40\n \ '--line-range -5:' prints lines from the 5th last to the end of the file\n \ '--line-range :-2' prints lines 1 to 2nd last line of the file\n \ '--line-range 40:' prints lines 40 to the end of the file", ), ) .arg( Arg::with_name("highlight-line") .long("highlight-line") .short("H") .takes_value(true) .number_of_values(1) .multiple(true) .value_name("N") .help("Highlight the given line.") .long_help( "Highlight the N-th line with a different background color", ), ) .arg( Arg::with_name("color") .long("color") .overrides_with("color") .takes_value(true) .value_name("when") .possible_values(&["auto", "never", "always"]) .hide_default_value(true) .default_value("auto") .help("When to use colors (*auto*, never, always).") .long_help( "Specify when to use colored output. The automatic mode \ only enables colors if an interactive terminal is detected. 
\ Possible values: *auto*, never, always.", ), ) .arg( Arg::with_name("italic-text") .long("italic-text") .takes_value(true) .value_name("when") .possible_values(&["always", "never"]) .default_value("never") .hide_default_value(true) .help("Use italics in output (always, *never*)") .long_help("Specify when to use ANSI sequences for italic text in the output. Possible values: always, *never*."), ) .arg( Arg::with_name("decorations") .long("decorations") .overrides_with("decorations") .takes_value(true) .value_name("when") .possible_values(&["auto", "never", "always"]) .default_value("auto") .hide_default_value(true) .help("When to show the decorations (*auto*, never, always).") .long_help( "Specify when to use the decorations that have been specified \ via '--style'. The automatic mode only enables decorations if \ an interactive terminal is detected. Possible values: *auto*, never, always.", ), ) .arg( Arg::with_name("paging") .long("paging") .overrides_with("paging") .takes_value(true) .value_name("when") .possible_values(&["auto", "never", "always"]) .default_value("auto") .hide_default_value(true) .help("Specify when to use the pager (*auto*, never, always).") .long_help( "Specify when to use the pager. To control which pager \ is used, set the PAGER or BAT_PAGER environment \ variables (the latter takes precedence) or use the '--pager' option. \ To disable the pager permanently, set BAT_PAGER to an empty string \ or set '--paging=never' in the configuration file. \ Possible values: *auto*, never, always.", ), ) .arg( Arg::with_name("pager") .long("pager") .overrides_with("pager") .takes_value(true) .value_name("command") .hidden_short_help(true) .help("Determine which pager to use.") .long_help( "Determine which pager is used. This option will overwrite \ the PAGER and BAT_PAGER environment variables. The default \ pager is 'less'. To disable the pager completely, use the \ '--paging' option. 
\ Example: '--pager \"less -RF\"'.", ), ) .arg( Arg::with_name("wrap") .long("wrap") .overrides_with("wrap") .takes_value(true) .value_name("mode") .possible_values(&["auto", "never", "character"]) .default_value("auto") .hide_default_value(true) .help("Specify the text-wrapping mode (*auto*, never, character).") .long_help("Specify the text-wrapping mode (*auto*, never, character). \ The '--terminal-width' option can be used in addition to \ control the output width."), ) .arg( Arg::with_name("tabs") .long("tabs") .overrides_with("tabs") .takes_value(true) .value_name("T") .validator( |t| { t.parse::<u32>() .map_err(|_t| "must be a number") .map(|_t| ()) // Convert to Result<(), &str> .map_err(|e| e.to_string()) }, // Convert to Result<(), String> ) .help("Set the tab width to T spaces.") .long_help( "Set the tab width to T spaces. Use a width of 0 to pass tabs through \ directly", ), ) .arg( Arg::with_name("unbuffered") .short("u") .long("unbuffered") .hidden_short_help(true) .long_help( "This option exists for POSIX-compliance reasons ('u' is for \ 'unbuffered'). The output is always unbuffered - this option \ is simply ignored.", ), ) .arg( Arg::with_name("terminal-width") .long("terminal-width") .takes_value(true) .value_name("width") .hidden_short_help(true) .allow_hyphen_values(true) .validator( |t| { let is_offset = t.starts_with('+') || t.starts_with('-'); t.parse::<i32>() .map_err(|_e| "must be an offset or number") .and_then(|v| if v == 0 && !is_offset { Err("terminal width cannot be zero".into()) } else { Ok(()) }) .map_err(|e| e.to_string()) }) .help( "Explicitly set the width of the terminal instead of determining it \ automatically. If prefixed with '+' or '-', the value will be treated \ as an offset to the actual terminal width. 
See also: '--wrap'.", ), ) .arg( Arg::with_name("no-config") .long("no-config") .hidden(true) .help("Do not use the configuration file"), ) .arg( Arg::with_name("config-file") .long("config-file") .conflicts_with("list-languages") .conflicts_with("list-themes") .hidden(true) .help("Show path to the configuration file."), ) .arg( Arg::with_name("config-dir") .long("config-dir") .hidden(true) .help("Show bat's configuration directory."), ) .arg( Arg::with_name("cache-dir") .long("cache-dir") .hidden(true) .help("Show bat's cache directory."), ) .help_message("Print this help message.") .version_message("Show version information."); // Check if the current directory contains a file name cache. Otherwise, // enable the 'bat cache' subcommand. if Path::new("cache").exists() { app } else { app.subcommand( SubCommand::with_name("cache") .about("Modify the syntax-definition and theme cache") .arg( Arg::with_name("build") .long("build") .short("b") .help("Initialize (or update) the syntax/theme cache.") .long_help( "Initialize (or update) the syntax/theme cache by loading from \ the source directory (default: the configuration directory).", ), ) .arg( Arg::with_name("clear") .long("clear") .short("c") .help("Remove the cached syntax definitions and themes."), ) .group( ArgGroup::with_name("cache-actions") .args(&["build", "clear"]) .required(true), ) .arg( Arg::with_name("source") .long("source") .requires("build") .takes_value(true) .value_name("dir") .help("Use a different directory to load syntaxes and themes from."), ) .arg( Arg::with_name("target") .long("target") .requires("build") .takes_value(true) .value_name("dir") .help( "Use a different directory to store the cached syntax and theme set.", ), ) .arg( Arg::with_name("blank") .long("blank") .requires("build") .help( "Create completely new syntax and theme sets \ (instead of appending to the default sets).", ), ), ) } }
43.386005
130
0.450832
5da77b9f9466fcd24814acbebd80bb4cdc12c0a2
44,242
//! Implementation of lint checking. //! //! The lint checking is mostly consolidated into one pass which runs //! after all other analyses. Throughout compilation, lint warnings //! can be added via the `add_lint` method on the Session structure. This //! requires a span and an ID of the node that the lint is being added to. The //! lint isn't actually emitted at that time because it is unknown what the //! actual lint level at that location is. //! //! To actually emit lint warnings/errors, a separate pass is used. //! A context keeps track of the current state of all lint levels. //! Upon entering a node of the ast which can modify the lint settings, the //! previous lint state is pushed onto a stack and the ast is then recursed //! upon. As the ast is traversed, this keeps track of the current lint level //! for all lint attributes. use self::TargetLint::*; use crate::levels::LintLevelsBuilder; use crate::passes::{EarlyLintPassObject, LateLintPassObject}; use rustc_ast::util::unicode::TEXT_FLOW_CONTROL_CHARS; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::sync; use rustc_errors::{struct_span_err, Applicability, SuggestionStyle}; use rustc_hir as hir; use rustc_hir::def::Res; use rustc_hir::def_id::{CrateNum, DefId}; use rustc_hir::definitions::{DefPathData, DisambiguatedDefPathData}; use rustc_middle::lint::LintDiagnosticBuilder; use rustc_middle::middle::privacy::AccessLevels; use rustc_middle::middle::stability; use rustc_middle::ty::layout::{LayoutError, LayoutOfHelpers, TyAndLayout}; use rustc_middle::ty::print::with_no_trimmed_paths; use rustc_middle::ty::{self, print::Printer, subst::GenericArg, RegisteredTools, Ty, TyCtxt}; use rustc_serialize::json::Json; use rustc_session::lint::{BuiltinLintDiagnostics, ExternDepSpec}; use rustc_session::lint::{FutureIncompatibleInfo, Level, Lint, LintBuffer, LintId}; use rustc_session::Session; use rustc_span::lev_distance::find_best_match_for_name; use rustc_span::symbol::{sym, Ident, Symbol}; 
use rustc_span::{BytePos, MultiSpan, Span, DUMMY_SP}; use rustc_target::abi; use tracing::debug; use std::cell::Cell; use std::iter; use std::slice; /// Information about the registered lints. /// /// This is basically the subset of `Context` that we can /// build early in the compile pipeline. pub struct LintStore { /// Registered lints. lints: Vec<&'static Lint>, /// Constructor functions for each variety of lint pass. /// /// These should only be called once, but since we want to avoid locks or /// interior mutability, we don't enforce this (and lints should, in theory, /// be compatible with being constructed more than once, though not /// necessarily in a sane manner. This is safe though.) pub pre_expansion_passes: Vec<Box<dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync>>, pub early_passes: Vec<Box<dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync>>, pub late_passes: Vec<Box<dyn Fn() -> LateLintPassObject + sync::Send + sync::Sync>>, /// This is unique in that we construct them per-module, so not once. pub late_module_passes: Vec<Box<dyn Fn() -> LateLintPassObject + sync::Send + sync::Sync>>, /// Lints indexed by name. by_name: FxHashMap<String, TargetLint>, /// Map of registered lint groups to what lints they expand to. lint_groups: FxHashMap<&'static str, LintGroup>, } /// The target of the `by_name` map, which accounts for renaming/deprecation. #[derive(Debug)] enum TargetLint { /// A direct lint target Id(LintId), /// Temporary renaming, used for easing migration pain; see #16545 Renamed(String, LintId), /// Lint with this name existed previously, but has been removed/deprecated. /// The string argument is the reason for removal. Removed(String), /// A lint name that should give no warnings and have no effect. /// /// This is used by rustc to avoid warning about old rustdoc lints before rustdoc registers them as tool lints. 
Ignored, } pub enum FindLintError { NotFound, Removed, } struct LintAlias { name: &'static str, /// Whether deprecation warnings should be suppressed for this alias. silent: bool, } struct LintGroup { lint_ids: Vec<LintId>, from_plugin: bool, depr: Option<LintAlias>, } pub enum CheckLintNameResult<'a> { Ok(&'a [LintId]), /// Lint doesn't exist. Potentially contains a suggestion for a correct lint name. NoLint(Option<Symbol>), /// The lint refers to a tool that has not been registered. NoTool, /// The lint is either renamed or removed. This is the warning /// message, and an optional new name (`None` if removed). Warning(String, Option<String>), /// The lint is from a tool. If the Option is None, then either /// the lint does not exist in the tool or the code was not /// compiled with the tool and therefore the lint was never /// added to the `LintStore`. Otherwise the `LintId` will be /// returned as if it where a rustc lint. Tool(Result<&'a [LintId], (Option<&'a [LintId]>, String)>), } impl LintStore { pub fn new() -> LintStore { LintStore { lints: vec![], pre_expansion_passes: vec![], early_passes: vec![], late_passes: vec![], late_module_passes: vec![], by_name: Default::default(), lint_groups: Default::default(), } } pub fn get_lints<'t>(&'t self) -> &'t [&'static Lint] { &self.lints } pub fn get_lint_groups<'t>( &'t self, ) -> impl Iterator<Item = (&'static str, Vec<LintId>, bool)> + 't { // This function is not used in a way which observes the order of lints. #[cfg_attr(not(bootstrap), allow(rustc::potential_query_instability))] self.lint_groups .iter() .filter(|(_, LintGroup { depr, .. })| { // Don't display deprecated lint groups. depr.is_none() }) .map(|(k, LintGroup { lint_ids, from_plugin, .. })| { (*k, lint_ids.clone(), *from_plugin) }) } pub fn register_early_pass( &mut self, pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync, ) { self.early_passes.push(Box::new(pass)); } /// Used by clippy. 
pub fn register_pre_expansion_pass( &mut self, pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync, ) { self.pre_expansion_passes.push(Box::new(pass)); } pub fn register_late_pass( &mut self, pass: impl Fn() -> LateLintPassObject + 'static + sync::Send + sync::Sync, ) { self.late_passes.push(Box::new(pass)); } pub fn register_late_mod_pass( &mut self, pass: impl Fn() -> LateLintPassObject + 'static + sync::Send + sync::Sync, ) { self.late_module_passes.push(Box::new(pass)); } // Helper method for register_early/late_pass pub fn register_lints(&mut self, lints: &[&'static Lint]) { for lint in lints { self.lints.push(lint); let id = LintId::of(lint); if self.by_name.insert(lint.name_lower(), Id(id)).is_some() { bug!("duplicate specification of lint {}", lint.name_lower()) } if let Some(FutureIncompatibleInfo { reason, .. }) = lint.future_incompatible { if let Some(edition) = reason.edition() { self.lint_groups .entry(edition.lint_name()) .or_insert(LintGroup { lint_ids: vec![], from_plugin: lint.is_plugin, depr: None, }) .lint_ids .push(id); } else { // Lints belonging to the `future_incompatible` lint group are lints where a // future version of rustc will cause existing code to stop compiling. // Lints tied to an edition don't count because they are opt-in. 
self.lint_groups .entry("future_incompatible") .or_insert(LintGroup { lint_ids: vec![], from_plugin: lint.is_plugin, depr: None, }) .lint_ids .push(id); } } } } pub fn register_group_alias(&mut self, lint_name: &'static str, alias: &'static str) { self.lint_groups.insert( alias, LintGroup { lint_ids: vec![], from_plugin: false, depr: Some(LintAlias { name: lint_name, silent: true }), }, ); } pub fn register_group( &mut self, from_plugin: bool, name: &'static str, deprecated_name: Option<&'static str>, to: Vec<LintId>, ) { let new = self .lint_groups .insert(name, LintGroup { lint_ids: to, from_plugin, depr: None }) .is_none(); if let Some(deprecated) = deprecated_name { self.lint_groups.insert( deprecated, LintGroup { lint_ids: vec![], from_plugin, depr: Some(LintAlias { name, silent: false }), }, ); } if !new { bug!("duplicate specification of lint group {}", name); } } /// This lint should give no warning and have no effect. /// /// This is used by rustc to avoid warning about old rustdoc lints before rustdoc registers them as tool lints. #[track_caller] pub fn register_ignored(&mut self, name: &str) { if self.by_name.insert(name.to_string(), Ignored).is_some() { bug!("duplicate specification of lint {}", name); } } /// This lint has been renamed; warn about using the new name and apply the lint. 
#[track_caller] pub fn register_renamed(&mut self, old_name: &str, new_name: &str) { let target = match self.by_name.get(new_name) { Some(&Id(lint_id)) => lint_id, _ => bug!("invalid lint renaming of {} to {}", old_name, new_name), }; self.by_name.insert(old_name.to_string(), Renamed(new_name.to_string(), target)); } pub fn register_removed(&mut self, name: &str, reason: &str) { self.by_name.insert(name.into(), Removed(reason.into())); } pub fn find_lints(&self, mut lint_name: &str) -> Result<Vec<LintId>, FindLintError> { match self.by_name.get(lint_name) { Some(&Id(lint_id)) => Ok(vec![lint_id]), Some(&Renamed(_, lint_id)) => Ok(vec![lint_id]), Some(&Removed(_)) => Err(FindLintError::Removed), Some(&Ignored) => Ok(vec![]), None => loop { return match self.lint_groups.get(lint_name) { Some(LintGroup { lint_ids, depr, .. }) => { if let Some(LintAlias { name, .. }) = depr { lint_name = name; continue; } Ok(lint_ids.clone()) } None => Err(FindLintError::Removed), }; }, } } /// Checks the validity of lint names derived from the command line. 
pub fn check_lint_name_cmdline( &self, sess: &Session, lint_name: &str, level: Level, registered_tools: &RegisteredTools, ) { let (tool_name, lint_name_only) = parse_lint_and_tool_name(lint_name); if lint_name_only == crate::WARNINGS.name_lower() && level == Level::ForceWarn { return struct_span_err!( sess, DUMMY_SP, E0602, "`{}` lint group is not supported with ´--force-warn´", crate::WARNINGS.name_lower() ) .emit(); } let db = match self.check_lint_name(lint_name_only, tool_name, registered_tools) { CheckLintNameResult::Ok(_) => None, CheckLintNameResult::Warning(ref msg, _) => Some(sess.struct_warn(msg)), CheckLintNameResult::NoLint(suggestion) => { let mut err = struct_span_err!(sess, DUMMY_SP, E0602, "unknown lint: `{}`", lint_name); if let Some(suggestion) = suggestion { err.help(&format!("did you mean: `{}`", suggestion)); } Some(err) } CheckLintNameResult::Tool(result) => match result { Err((Some(_), new_name)) => Some(sess.struct_warn(&format!( "lint name `{}` is deprecated \ and does not have an effect anymore. \ Use: {}", lint_name, new_name ))), _ => None, }, CheckLintNameResult::NoTool => Some(struct_span_err!( sess, DUMMY_SP, E0602, "unknown lint tool: `{}`", tool_name.unwrap() )), }; if let Some(mut db) = db { let msg = format!( "requested on the command line with `{} {}`", match level { Level::Allow => "-A", Level::Warn => "-W", Level::ForceWarn => "--force-warn", Level::Deny => "-D", Level::Forbid => "-F", }, lint_name ); db.note(&msg); db.emit(); } } /// True if this symbol represents a lint group name. pub fn is_lint_group(&self, lint_name: Symbol) -> bool { debug!( "is_lint_group(lint_name={:?}, lint_groups={:?})", lint_name, self.lint_groups.keys().collect::<Vec<_>>() ); let lint_name_str = lint_name.as_str(); self.lint_groups.contains_key(lint_name_str) || { let warnings_name_str = crate::WARNINGS.name_lower(); lint_name_str == warnings_name_str } } /// Checks the name of a lint for its existence, and whether it was /// renamed or removed. 
Generates a DiagnosticBuilder containing a /// warning for renamed and removed lints. This is over both lint /// names from attributes and those passed on the command line. Since /// it emits non-fatal warnings and there are *two* lint passes that /// inspect attributes, this is only run from the late pass to avoid /// printing duplicate warnings. pub fn check_lint_name( &self, lint_name: &str, tool_name: Option<Symbol>, registered_tools: &RegisteredTools, ) -> CheckLintNameResult<'_> { if let Some(tool_name) = tool_name { // FIXME: rustc and rustdoc are considered tools for lints, but not for attributes. if tool_name != sym::rustc && tool_name != sym::rustdoc && !registered_tools.contains(&Ident::with_dummy_span(tool_name)) { return CheckLintNameResult::NoTool; } } let complete_name = if let Some(tool_name) = tool_name { format!("{}::{}", tool_name, lint_name) } else { lint_name.to_string() }; // If the lint was scoped with `tool::` check if the tool lint exists if let Some(tool_name) = tool_name { match self.by_name.get(&complete_name) { None => match self.lint_groups.get(&*complete_name) { // If the lint isn't registered, there are two possibilities: None => { // 1. The tool is currently running, so this lint really doesn't exist. // FIXME: should this handle tools that never register a lint, like rustfmt? tracing::debug!("lints={:?}", self.by_name.keys().collect::<Vec<_>>()); let tool_prefix = format!("{}::", tool_name); return if self.by_name.keys().any(|lint| lint.starts_with(&tool_prefix)) { self.no_lint_suggestion(&complete_name) } else { // 2. The tool isn't currently running, so no lints will be registered. // To avoid giving a false positive, ignore all unknown lints. CheckLintNameResult::Tool(Err((None, String::new()))) }; } Some(LintGroup { lint_ids, .. 
}) => { return CheckLintNameResult::Tool(Ok(&lint_ids)); } }, Some(&Id(ref id)) => return CheckLintNameResult::Tool(Ok(slice::from_ref(id))), // If the lint was registered as removed or renamed by the lint tool, we don't need // to treat tool_lints and rustc lints different and can use the code below. _ => {} } } match self.by_name.get(&complete_name) { Some(&Renamed(ref new_name, _)) => CheckLintNameResult::Warning( format!("lint `{}` has been renamed to `{}`", complete_name, new_name), Some(new_name.to_owned()), ), Some(&Removed(ref reason)) => CheckLintNameResult::Warning( format!("lint `{}` has been removed: {}", complete_name, reason), None, ), None => match self.lint_groups.get(&*complete_name) { // If neither the lint, nor the lint group exists check if there is a `clippy::` // variant of this lint None => self.check_tool_name_for_backwards_compat(&complete_name, "clippy"), Some(LintGroup { lint_ids, depr, .. }) => { // Check if the lint group name is deprecated if let Some(LintAlias { name, silent }) = depr { let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap(); return if *silent { CheckLintNameResult::Ok(&lint_ids) } else { CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string()))) }; } CheckLintNameResult::Ok(&lint_ids) } }, Some(&Id(ref id)) => CheckLintNameResult::Ok(slice::from_ref(id)), Some(&Ignored) => CheckLintNameResult::Ok(&[]), } } fn no_lint_suggestion(&self, lint_name: &str) -> CheckLintNameResult<'_> { let name_lower = lint_name.to_lowercase(); if lint_name.chars().any(char::is_uppercase) && self.find_lints(&name_lower).is_ok() { // First check if the lint name is (partly) in upper case instead of lower case... 
return CheckLintNameResult::NoLint(Some(Symbol::intern(&name_lower))); } // ...if not, search for lints with a similar name let groups = self.lint_groups.keys().copied().map(Symbol::intern); let lints = self.lints.iter().map(|l| Symbol::intern(&l.name_lower())); let names: Vec<Symbol> = groups.chain(lints).collect(); let suggestion = find_best_match_for_name(&names, Symbol::intern(&name_lower), None); CheckLintNameResult::NoLint(suggestion) } fn check_tool_name_for_backwards_compat( &self, lint_name: &str, tool_name: &str, ) -> CheckLintNameResult<'_> { let complete_name = format!("{}::{}", tool_name, lint_name); match self.by_name.get(&complete_name) { None => match self.lint_groups.get(&*complete_name) { // Now we are sure, that this lint exists nowhere None => self.no_lint_suggestion(lint_name), Some(LintGroup { lint_ids, depr, .. }) => { // Reaching this would be weird, but let's cover this case anyway if let Some(LintAlias { name, silent }) = depr { let LintGroup { lint_ids, .. } = self.lint_groups.get(name).unwrap(); return if *silent { CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name))) } else { CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string()))) }; } CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name))) } }, Some(&Id(ref id)) => { CheckLintNameResult::Tool(Err((Some(slice::from_ref(id)), complete_name))) } Some(other) => { tracing::debug!("got renamed lint {:?}", other); CheckLintNameResult::NoLint(None) } } } } /// Context for lint checking outside of type inference. pub struct LateContext<'tcx> { /// Type context we're checking in. pub tcx: TyCtxt<'tcx>, /// Current body, or `None` if outside a body. pub enclosing_body: Option<hir::BodyId>, /// Type-checking results for the current body. Access using the `typeck_results` /// and `maybe_typeck_results` methods, which handle querying the typeck results on demand. 
// FIXME(eddyb) move all the code accessing internal fields like this, // to this module, to avoid exposing it to lint logic. pub(super) cached_typeck_results: Cell<Option<&'tcx ty::TypeckResults<'tcx>>>, /// Parameter environment for the item we are in. pub param_env: ty::ParamEnv<'tcx>, /// Items accessible from the crate being checked. pub access_levels: &'tcx AccessLevels, /// The store of registered lints and the lint levels. pub lint_store: &'tcx LintStore, pub last_node_with_lint_attrs: hir::HirId, /// Generic type parameters in scope for the item we are in. pub generics: Option<&'tcx hir::Generics<'tcx>>, /// We are only looking at one module pub only_module: bool, } /// Context for lint checking of the AST, after expansion, before lowering to HIR. pub struct EarlyContext<'a> { pub builder: LintLevelsBuilder<'a>, pub buffered: LintBuffer, } pub trait LintPassObject: Sized {} impl LintPassObject for EarlyLintPassObject {} impl LintPassObject for LateLintPassObject {} pub trait LintContext: Sized { type PassObject: LintPassObject; fn sess(&self) -> &Session; fn lints(&self) -> &LintStore; fn lookup_with_diagnostics( &self, lint: &'static Lint, span: Option<impl Into<MultiSpan>>, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), diagnostic: BuiltinLintDiagnostics, ) { self.lookup(lint, span, |lint| { // We first generate a blank diagnostic. let mut db = lint.build(""); // Now, set up surrounding context. 
let sess = self.sess(); match diagnostic { BuiltinLintDiagnostics::UnicodeTextFlow(span, content) => { let spans: Vec<_> = content .char_indices() .filter_map(|(i, c)| { TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| { let lo = span.lo() + BytePos(2 + i as u32); (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32))) }) }) .collect(); let (an, s) = match spans.len() { 1 => ("an ", ""), _ => ("", "s"), }; db.span_label(span, &format!( "this comment contains {}invisible unicode text flow control codepoint{}", an, s, )); for (c, span) in &spans { db.span_label(*span, format!("{:?}", c)); } db.note( "these kind of unicode codepoints change the way text flows on \ applications that support them, but can cause confusion because they \ change the order of characters on the screen", ); if !spans.is_empty() { db.multipart_suggestion_with_style( "if their presence wasn't intentional, you can remove them", spans.into_iter().map(|(_, span)| (span, "".to_string())).collect(), Applicability::MachineApplicable, SuggestionStyle::HideCodeAlways, ); } }, BuiltinLintDiagnostics::Normal => (), BuiltinLintDiagnostics::AbsPathWithModule(span) => { let (sugg, app) = match sess.source_map().span_to_snippet(span) { Ok(ref s) => { // FIXME(Manishearth) ideally the emitting code // can tell us whether or not this is global let opt_colon = if s.trim_start().starts_with("::") { "" } else { "::" }; (format!("crate{}{}", opt_colon, s), Applicability::MachineApplicable) } Err(_) => ("crate::<path>".to_string(), Applicability::HasPlaceholders), }; db.span_suggestion(span, "use `crate`", sugg, app); } BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(span) => { db.span_label( span, "names from parent modules are not accessible without an explicit import", ); } BuiltinLintDiagnostics::MacroExpandedMacroExportsAccessedByAbsolutePaths( span_def, ) => { db.span_note(span_def, "the macro is defined here"); } BuiltinLintDiagnostics::UnknownCrateTypes(span, note, sugg) => { 
db.span_suggestion(span, &note, sugg, Applicability::MaybeIncorrect); } BuiltinLintDiagnostics::UnusedImports(message, replaces, in_test_module) => { if !replaces.is_empty() { db.tool_only_multipart_suggestion( &message, replaces, Applicability::MachineApplicable, ); } if let Some(span) = in_test_module { let def_span = self.sess().source_map().guess_head_span(span); db.span_help( span.shrink_to_lo().to(def_span), "consider adding a `#[cfg(test)]` to the containing module", ); } } BuiltinLintDiagnostics::RedundantImport(spans, ident) => { for (span, is_imported) in spans { let introduced = if is_imported { "imported" } else { "defined" }; db.span_label( span, format!("the item `{}` is already {} here", ident, introduced), ); } } BuiltinLintDiagnostics::DeprecatedMacro(suggestion, span) => { stability::deprecation_suggestion(&mut db, "macro", suggestion, span) } BuiltinLintDiagnostics::UnusedDocComment(span) => { db.span_label(span, "rustdoc does not generate documentation for macro invocations"); db.help("to document an item produced by a macro, \ the macro must produce the documentation as part of its expansion"); } BuiltinLintDiagnostics::PatternsInFnsWithoutBody(span, ident) => { db.span_suggestion(span, "remove `mut` from the parameter", ident.to_string(), Applicability::MachineApplicable); } BuiltinLintDiagnostics::MissingAbi(span, default_abi) => { db.span_label(span, "ABI should be specified here"); db.help(&format!("the default ABI is {}", default_abi.name())); } BuiltinLintDiagnostics::LegacyDeriveHelpers(span) => { db.span_label(span, "the attribute is introduced here"); } BuiltinLintDiagnostics::ExternDepSpec(krate, loc) => { let json = match loc { ExternDepSpec::Json(json) => { db.help(&format!("remove unnecessary dependency `{}`", krate)); json } ExternDepSpec::Raw(raw) => { db.help(&format!("remove unnecessary dependency `{}` at `{}`", krate, raw)); db.span_suggestion_with_style( DUMMY_SP, "raw extern location", raw.clone(), 
Applicability::Unspecified, SuggestionStyle::CompletelyHidden, ); Json::String(raw) } }; db.tool_only_suggestion_with_metadata( "json extern location", Applicability::Unspecified, json ); } BuiltinLintDiagnostics::ProcMacroBackCompat(note) => { db.note(&note); } BuiltinLintDiagnostics::OrPatternsBackCompat(span,suggestion) => { db.span_suggestion(span, "use pat_param to preserve semantics", suggestion, Applicability::MachineApplicable); } BuiltinLintDiagnostics::ReservedPrefix(span) => { db.span_label(span, "unknown prefix"); db.span_suggestion_verbose( span.shrink_to_hi(), "insert whitespace here to avoid this being parsed as a prefix in Rust 2021", " ".into(), Applicability::MachineApplicable, ); } BuiltinLintDiagnostics::UnusedBuiltinAttribute { attr_name, macro_name, invoc_span } => { db.span_note( invoc_span, &format!("the built-in attribute `{attr_name}` will be ignored, since it's applied to the macro invocation `{macro_name}`") ); } BuiltinLintDiagnostics::TrailingMacro(is_trailing, name) => { if is_trailing { db.note("macro invocations at the end of a block are treated as expressions"); db.note(&format!("to ignore the value produced by the macro, add a semicolon after the invocation of `{name}`")); } } BuiltinLintDiagnostics::BreakWithLabelAndLoop(span) => { db.multipart_suggestion( "wrap this expression in parentheses", vec![(span.shrink_to_lo(), "(".to_string()), (span.shrink_to_hi(), ")".to_string())], Applicability::MachineApplicable ); } BuiltinLintDiagnostics::NamedAsmLabel(help) => { db.help(&help); db.note("see the asm section of Rust By Example <https://doc.rust-lang.org/nightly/rust-by-example/unsafe/asm.html#labels> for more information"); } } // Rewrap `db`, and pass control to the user. decorate(LintDiagnosticBuilder::new(db)); }); } // FIXME: These methods should not take an Into<MultiSpan> -- instead, callers should need to // set the span in their `decorate` function (preferably using set_span). 
fn lookup<S: Into<MultiSpan>>( &self, lint: &'static Lint, span: Option<S>, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ); fn struct_span_lint<S: Into<MultiSpan>>( &self, lint: &'static Lint, span: S, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ) { self.lookup(lint, Some(span), decorate); } /// Emit a lint at the appropriate level, with no associated span. fn lint(&self, lint: &'static Lint, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>)) { self.lookup(lint, None as Option<Span>, decorate); } } impl<'a> EarlyContext<'a> { pub(crate) fn new( sess: &'a Session, warn_about_weird_lints: bool, lint_store: &'a LintStore, registered_tools: &'a RegisteredTools, buffered: LintBuffer, ) -> EarlyContext<'a> { EarlyContext { builder: LintLevelsBuilder::new( sess, warn_about_weird_lints, lint_store, registered_tools, ), buffered, } } } impl LintContext for LateContext<'_> { type PassObject = LateLintPassObject; /// Gets the overall compiler `Session` object. fn sess(&self) -> &Session { &self.tcx.sess } fn lints(&self) -> &LintStore { &*self.lint_store } fn lookup<S: Into<MultiSpan>>( &self, lint: &'static Lint, span: Option<S>, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ) { let hir_id = self.last_node_with_lint_attrs; match span { Some(s) => self.tcx.struct_span_lint_hir(lint, hir_id, s, decorate), None => self.tcx.struct_lint_node(lint, hir_id, decorate), } } } impl LintContext for EarlyContext<'_> { type PassObject = EarlyLintPassObject; /// Gets the overall compiler `Session` object. fn sess(&self) -> &Session { &self.builder.sess() } fn lints(&self) -> &LintStore { self.builder.lint_store() } fn lookup<S: Into<MultiSpan>>( &self, lint: &'static Lint, span: Option<S>, decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), ) { self.builder.struct_lint(lint, span.map(|s| s.into()), decorate) } } impl<'tcx> LateContext<'tcx> { /// Gets the type-checking results for the current body, /// or `None` if outside a body. 
pub fn maybe_typeck_results(&self) -> Option<&'tcx ty::TypeckResults<'tcx>> { self.cached_typeck_results.get().or_else(|| { self.enclosing_body.map(|body| { let typeck_results = self.tcx.typeck_body(body); self.cached_typeck_results.set(Some(typeck_results)); typeck_results }) }) } /// Gets the type-checking results for the current body. /// As this will ICE if called outside bodies, only call when working with /// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies). #[track_caller] pub fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> { self.maybe_typeck_results().expect("`LateContext::typeck_results` called outside of body") } /// Returns the final resolution of a `QPath`, or `Res::Err` if unavailable. /// Unlike `.typeck_results().qpath_res(qpath, id)`, this can be used even outside /// bodies (e.g. for paths in `hir::Ty`), without any risk of ICE-ing. pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res { match *qpath { hir::QPath::Resolved(_, ref path) => path.res, hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self .maybe_typeck_results() .filter(|typeck_results| typeck_results.hir_owner == id.owner) .or_else(|| { if self.tcx.has_typeck_results(id.owner.to_def_id()) { Some(self.tcx.typeck(id.owner)) } else { None } }) .and_then(|typeck_results| typeck_results.type_dependent_def(id)) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), } } /// Check if a `DefId`'s path matches the given absolute type path usage. /// /// Anonymous scopes such as `extern` imports are matched with `kw::Empty`; /// inherent `impl` blocks are matched with the name of the type. /// /// Instead of using this method, it is often preferable to instead use /// `rustc_diagnostic_item` or a `lang_item`. This is less prone to errors /// as paths get invalidated if the target definition moves. 
/// /// # Examples /// /// ```rust,ignore (no context or def id available) /// if cx.match_def_path(def_id, &[sym::core, sym::option, sym::Option]) { /// // The given `def_id` is that of an `Option` type /// } /// ``` /// /// Used by clippy, but should be replaced by diagnostic items eventually. pub fn match_def_path(&self, def_id: DefId, path: &[Symbol]) -> bool { let names = self.get_def_path(def_id); names.len() == path.len() && iter::zip(names, path).all(|(a, &b)| a == b) } /// Gets the absolute path of `def_id` as a vector of `Symbol`. /// /// # Examples /// /// ```rust,ignore (no context or def id available) /// let def_path = cx.get_def_path(def_id); /// if let &[sym::core, sym::option, sym::Option] = &def_path[..] { /// // The given `def_id` is that of an `Option` type /// } /// ``` pub fn get_def_path(&self, def_id: DefId) -> Vec<Symbol> { pub struct AbsolutePathPrinter<'tcx> { pub tcx: TyCtxt<'tcx>, } impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> { type Error = !; type Path = Vec<Symbol>; type Region = (); type Type = (); type DynExistential = (); type Const = (); fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } fn print_region(self, _region: ty::Region<'_>) -> Result<Self::Region, Self::Error> { Ok(()) } fn print_type(self, _ty: Ty<'tcx>) -> Result<Self::Type, Self::Error> { Ok(()) } fn print_dyn_existential( self, _predicates: &'tcx ty::List<ty::Binder<'tcx, ty::ExistentialPredicate<'tcx>>>, ) -> Result<Self::DynExistential, Self::Error> { Ok(()) } fn print_const(self, _ct: &'tcx ty::Const<'tcx>) -> Result<Self::Const, Self::Error> { Ok(()) } fn path_crate(self, cnum: CrateNum) -> Result<Self::Path, Self::Error> { Ok(vec![self.tcx.crate_name(cnum)]) } fn path_qualified( self, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<Self::Path, Self::Error> { if trait_ref.is_none() { if let ty::Adt(def, substs) = self_ty.kind() { return self.print_def_path(def.did, substs); } } // This shouldn't ever be needed, but just in case: 
with_no_trimmed_paths(|| { Ok(vec![match trait_ref { Some(trait_ref) => Symbol::intern(&format!("{:?}", trait_ref)), None => Symbol::intern(&format!("<{}>", self_ty)), }]) }) } fn path_append_impl( self, print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, _disambiguated_data: &DisambiguatedDefPathData, self_ty: Ty<'tcx>, trait_ref: Option<ty::TraitRef<'tcx>>, ) -> Result<Self::Path, Self::Error> { let mut path = print_prefix(self)?; // This shouldn't ever be needed, but just in case: path.push(match trait_ref { Some(trait_ref) => with_no_trimmed_paths(|| { Symbol::intern(&format!( "<impl {} for {}>", trait_ref.print_only_trait_path(), self_ty )) }), None => { with_no_trimmed_paths(|| Symbol::intern(&format!("<impl {}>", self_ty))) } }); Ok(path) } fn path_append( self, print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, disambiguated_data: &DisambiguatedDefPathData, ) -> Result<Self::Path, Self::Error> { let mut path = print_prefix(self)?; // Skip `::{{extern}}` blocks and `::{{constructor}}` on tuple/unit structs. 
if let DefPathData::ForeignMod | DefPathData::Ctor = disambiguated_data.data { return Ok(path); } path.push(Symbol::intern(&disambiguated_data.data.to_string())); Ok(path) } fn path_generic_args( self, print_prefix: impl FnOnce(Self) -> Result<Self::Path, Self::Error>, _args: &[GenericArg<'tcx>], ) -> Result<Self::Path, Self::Error> { print_prefix(self) } } AbsolutePathPrinter { tcx: self.tcx }.print_def_path(def_id, &[]).unwrap() } } impl<'tcx> abi::HasDataLayout for LateContext<'tcx> { #[inline] fn data_layout(&self) -> &abi::TargetDataLayout { &self.tcx.data_layout } } impl<'tcx> ty::layout::HasTyCtxt<'tcx> for LateContext<'tcx> { #[inline] fn tcx(&self) -> TyCtxt<'tcx> { self.tcx } } impl<'tcx> ty::layout::HasParamEnv<'tcx> for LateContext<'tcx> { #[inline] fn param_env(&self) -> ty::ParamEnv<'tcx> { self.param_env } } impl<'tcx> LayoutOfHelpers<'tcx> for LateContext<'tcx> { type LayoutOfResult = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>; #[inline] fn handle_layout_err(&self, err: LayoutError<'tcx>, _: Span, _: Ty<'tcx>) -> LayoutError<'tcx> { err } } pub fn parse_lint_and_tool_name(lint_name: &str) -> (Option<Symbol>, &str) { match lint_name.split_once("::") { Some((tool_name, lint_name)) => { let tool_name = Symbol::intern(tool_name); (Some(tool_name), lint_name) } None => (None, lint_name), } }
40.22
166
0.528231
75baaa060860b9476e20d3165fab4a3f1c70025d
12,395
//! The matrix module. //! //! Currently contains all code //! relating to the matrix linear algebra struct. //! //! Most of the logic for manipulating matrices is generically implemented //! via `BaseMatrix` and `BaseMatrixMut` trait. use std::prelude::v1::*; use std; use std::marker::PhantomData; use libnum::Float; use error::{Error, ErrorKind}; use vector::Vector; use utils; pub mod decomposition; mod base; mod deref; mod impl_mat; mod impl_ops; mod iter; mod mat_mul; mod slice; mod permutation_matrix; mod impl_permutation_mul; pub use self::base::{BaseMatrix, BaseMatrixMut}; pub use self::permutation_matrix::{PermutationMatrix, Parity}; /// Matrix dimensions #[derive(Debug, Clone, Copy, Deserialize)] pub enum Axes { /// The row axis. Row, /// The column axis. Col, } /// The `Matrix` struct. /// /// Can be instantiated with any type. #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct Matrix<T> { rows: usize, cols: usize, data: Vec<T>, } /// A `MatrixSlice` /// /// This struct provides a slice into a matrix. /// /// The struct contains the upper left point of the slice /// and the width and height of the slice. #[derive(Debug, Clone, Copy)] pub struct MatrixSlice<'a, T: 'a> { ptr: *const T, rows: usize, cols: usize, row_stride: usize, marker: PhantomData<&'a T>, } /// A mutable `MatrixSliceMut` /// /// This struct provides a mutable slice into a matrix. /// /// The struct contains the upper left point of the slice /// and the width and height of the slice. #[derive(Debug)] pub struct MatrixSliceMut<'a, T: 'a> { ptr: *mut T, rows: usize, cols: usize, row_stride: usize, marker: PhantomData<&'a mut T>, } /// Row of a matrix. /// /// This struct points to a slice making up /// a row in a matrix. You can deref this /// struct to retrieve a `MatrixSlice` of /// the row. 
/// /// # Example /// /// ``` /// # #[macro_use] extern crate rulinalg; fn main() { /// use rulinalg::matrix::BaseMatrix; /// /// let mat = matrix![1.0, 2.0; /// 3.0, 4.0]; /// /// let row = mat.row(1); /// assert_eq!((*row + 2.0).sum(), 11.0); /// # } /// ``` #[derive(Debug, Clone, Copy)] pub struct Row<'a, T: 'a> { row: MatrixSlice<'a, T>, } /// Mutable row of a matrix. /// /// This struct points to a mutable slice /// making up a row in a matrix. You can deref /// this struct to retrieve a `MatrixSlice` /// of the row. /// /// # Example /// /// ``` /// # #[macro_use] extern crate rulinalg; fn main() { /// use rulinalg::matrix::BaseMatrixMut; /// /// let mut mat = matrix![1.0, 2.0; /// 3.0, 4.0]; /// /// { /// let mut row = mat.row_mut(1); /// *row += 2.0; /// } /// let expected = matrix![1.0, 2.0; /// 5.0, 6.0]; /// assert_matrix_eq!(mat, expected); /// # } /// ``` #[derive(Debug)] pub struct RowMut<'a, T: 'a> { row: MatrixSliceMut<'a, T>, } /// Row iterator. #[derive(Debug)] pub struct Rows<'a, T: 'a> { slice_start: *const T, row_pos: usize, slice_rows: usize, slice_cols: usize, row_stride: isize, _marker: PhantomData<&'a T>, } /// Mutable row iterator. #[derive(Debug)] pub struct RowsMut<'a, T: 'a> { slice_start: *mut T, row_pos: usize, slice_rows: usize, slice_cols: usize, row_stride: isize, _marker: PhantomData<&'a mut T>, } // MAYBE WE SHOULD MOVE SOME OF THIS STUFF OUT impl<'a, T: 'a> Row<'a, T> { /// Returns the row as a slice. pub fn raw_slice(&self) -> &'a [T] { unsafe { std::slice::from_raw_parts(self.row.as_ptr(), self.row.cols()) } } } impl<'a, T: 'a> RowMut<'a, T> { /// Returns the row as a slice. pub fn raw_slice(&self) -> &'a [T] { unsafe { std::slice::from_raw_parts(self.row.as_ptr(), self.row.cols()) } } /// Returns the row as a slice. pub fn raw_slice_mut(&mut self) -> &'a mut [T] { unsafe { std::slice::from_raw_parts_mut(self.row.as_mut_ptr(), self.row.cols()) } } } /// Column of a matrix. 
/// /// This struct points to a `MatrixSlice` /// making up a column in a matrix. /// You can deref this struct to retrieve /// the raw column `MatrixSlice`. /// /// # Example /// /// ``` /// # #[macro_use] extern crate rulinalg; fn main() { /// use rulinalg::matrix::BaseMatrix; /// /// let mat = matrix![1.0, 2.0; /// 3.0, 4.0]; /// /// let col = mat.col(1); /// assert_eq!((*col + 2.0).sum(), 10.0); /// # } /// ``` #[derive(Debug, Clone, Copy)] pub struct Column<'a, T: 'a> { col: MatrixSlice<'a, T>, } /// Mutable column of a matrix. /// /// This struct points to a `MatrixSliceMut` /// making up a column in a matrix. /// You can deref this struct to retrieve /// the raw column `MatrixSliceMut`. /// /// # Example /// /// ``` /// # #[macro_use] extern crate rulinalg; fn main() { /// use rulinalg::matrix::BaseMatrixMut; /// /// let mut mat = matrix![1.0, 2.0; /// 3.0, 4.0]; /// { /// let mut column = mat.col_mut(1); /// *column += 2.0; /// } /// let expected = matrix![1.0, 4.0; /// 3.0, 6.0]; /// assert_matrix_eq!(mat, expected); /// # } /// ``` #[derive(Debug)] pub struct ColumnMut<'a, T: 'a> { col: MatrixSliceMut<'a, T>, } /// Column iterator. #[derive(Debug)] pub struct Cols<'a, T: 'a> { _marker: PhantomData<&'a T>, col_pos: usize, row_stride: isize, slice_cols: usize, slice_rows: usize, slice_start: *const T, } /// Mutable column iterator. #[derive(Debug)] pub struct ColsMut<'a, T: 'a> { _marker: PhantomData<&'a mut T>, col_pos: usize, row_stride: isize, slice_cols: usize, slice_rows: usize, slice_start: *mut T, } /// Diagonal offset (used by Diagonal iterator). #[derive(Debug, PartialEq)] pub enum DiagOffset { /// The main diagonal of the matrix. Main, /// An offset above the main diagonal. Above(usize), /// An offset below the main diagonal. Below(usize), } /// An iterator over the diagonal elements of a matrix. 
#[derive(Debug)] pub struct Diagonal<'a, T: 'a, M: 'a + BaseMatrix<T>> { matrix: &'a M, diag_pos: usize, diag_end: usize, _marker: PhantomData<&'a T>, } /// An iterator over the mutable diagonal elements of a matrix. #[derive(Debug)] pub struct DiagonalMut<'a, T: 'a, M: 'a + BaseMatrixMut<T>> { matrix: &'a mut M, diag_pos: usize, diag_end: usize, _marker: PhantomData<&'a mut T>, } /// Iterator for matrix. /// /// Iterates over the underlying slice data /// in row-major order. #[derive(Debug)] pub struct SliceIter<'a, T: 'a> { slice_start: *const T, row_pos: usize, col_pos: usize, slice_rows: usize, slice_cols: usize, row_stride: usize, _marker: PhantomData<&'a T>, } /// Iterator for mutable matrix. /// /// Iterates over the underlying slice data /// in row-major order. #[derive(Debug)] pub struct SliceIterMut<'a, T: 'a> { slice_start: *mut T, row_pos: usize, col_pos: usize, slice_rows: usize, slice_cols: usize, row_stride: usize, _marker: PhantomData<&'a mut T>, } /// Solves the system Ux = y by back substitution. /// /// Here U is an upper triangular matrix and y a vector /// which is dimensionally compatible with U. fn back_substitution<T, M>(u: &M, y: Vector<T>) -> Result<Vector<T>, Error> where T: Float, M: BaseMatrix<T> { assert!(u.rows() == u.cols(), "Matrix U must be square."); assert!(y.size() == u.rows(), "Matrix and RHS vector must be dimensionally compatible."); let mut x = y; let n = u.rows(); for i in (0 .. n).rev() { let row = u.row(i); // TODO: Remove unsafe once `get` is available in `BaseMatrix` let divisor = unsafe { u.get_unchecked([i, i]).clone() }; if divisor.abs() < T::epsilon() { return Err(Error::new(ErrorKind::DivByZero, "Lower triangular matrix is singular to working precision.")); } // We have // u[i, i] x[i] = b[i] - sum_j { u[i, j] * x[j] } // where j = i + 1, ..., (n - 1) // // Note that the right-hand side sum term can be rewritten as // u[i, (i + 1) .. n] * x[(i + 1) .. n] // where * denotes the dot product. 
// This is handy, because we have a very efficient // dot(., .) implementation! let dot = { let row_part = &row.raw_slice()[(i + 1) .. n]; let x_part = &x.data()[(i + 1) .. n]; utils::dot(row_part, x_part) }; x[i] = (x[i] - dot) / divisor; } Ok(x) } /// Solves the system Lx = y by forward substitution. /// /// Here, L is a square, lower triangular matrix and y /// is a vector which is dimensionally compatible with L. fn forward_substitution<T, M>(l: &M, y: Vector<T>) -> Result<Vector<T>, Error> where T: Float, M: BaseMatrix<T> { assert!(l.rows() == l.cols(), "Matrix L must be square."); assert!(y.size() == l.rows(), "Matrix and RHS vector must be dimensionally compatible."); let mut x = y; for (i, row) in l.row_iter().enumerate() { // TODO: Remove unsafe once `get` is available in `BaseMatrix` let divisor = unsafe { l.get_unchecked([i, i]).clone() }; if divisor.abs() < T::epsilon() { return Err(Error::new(ErrorKind::DivByZero, "Lower triangular matrix is singular to working precision.")); } // We have // l[i, i] x[i] = b[i] - sum_j { l[i, j] * x[j] } // where j = 0, ..., i - 1 // // Note that the right-hand side sum term can be rewritten as // l[i, 0 .. i] * x[0 .. i] // where * denotes the dot product. // This is handy, because we have a very efficient // dot(., .) implementation! let dot = { let row_part = &row.raw_slice()[0 .. i]; let x_part = &x.data()[0 .. i]; utils::dot(row_part, x_part) }; x[i] = (x[i] - dot) / divisor; } Ok(x) } impl<'a, T> ColumnMut<'a, T> where T: Clone { /// Clones the elements of the given slice of compatible size /// into this column. /// /// # Panics /// - The slice does not have the same length as /// the number of rows in the column. pub fn clone_from_slice(&mut self, slice: &[T]) { assert!(slice.len() == self.rows()); let slice_iter = slice.iter().cloned(); for (c, s) in self.iter_mut().zip(slice_iter) { *c = s; } } /// Clones the elements of this column into a /// slice of compatible size. 
/// /// # Panics /// - The slice does not have the same length as /// the number of rows in the column. pub fn clone_into_slice(&self, slice: &mut [T]) { assert!(slice.len() == self.rows()); let col_iter = self.iter().cloned(); for (s, c) in slice.iter_mut().zip(col_iter) { *s = c; } } } impl<'a, T> Column<'a, T> where T: Clone { /// Clones the elements of this column into a /// slice of compatible size. /// /// # Panics /// - The slice does not have the same length as /// the number of rows in the column. pub fn clone_into_slice(&self, slice: &mut [T]) { assert!(slice.len() == self.rows()); let col_iter = self.iter().cloned(); for (s, c) in slice.iter_mut().zip(col_iter) { *s = c; } } } #[cfg(test)] mod tests { use matrix::{BaseMatrix, BaseMatrixMut}; #[test] fn column_clone_into_slice() { let mat = matrix![1, 2; 3, 4]; let mut v = vec![0, 0]; mat.col(0).clone_into_slice(&mut v); assert_eq!(v, vec![1, 3]); } #[test] fn column_mut_clone_into_slice() { let mut mat = matrix![1, 2; 3, 4]; let mut v = vec![0, 0]; mat.col_mut(0).clone_into_slice(&mut v); assert_eq!(v, vec![1, 3]); } #[test] fn column_mut_clone_from_slice() { let mut mat = matrix![1, 2; 3, 4]; let v = vec![5, 6]; { let mut col = mat.col_mut(0); col.clone_from_slice(&v); } assert_matrix_eq!(mat, matrix![5, 2; 6, 4]); } }
25.715768
89
0.561275
646a9d5b2d1ac701fec666a3f6af17f1bf330f15
3,802
use super::{DeserializeError, Length, Serialize, Tag}; use tinyvec::TinyVec; pub(crate) trait VecExt { fn write(&mut self, slice: &[u8]); fn write_byte(&mut self, byte: u8); } impl VecExt for Vec<u8> { fn write(&mut self, slice: &[u8]) { self.extend_from_slice(slice); } fn write_byte(&mut self, byte: u8) { self.push(byte); } } impl VecExt for TinyVec<[u8; 32]> { fn write(&mut self, slice: &[u8]) { self.extend_from_slice(slice); } fn write_byte(&mut self, byte: u8) { self.push(byte); } } pub(crate) trait ReadExt { fn peek(&mut self) -> Result<u8, DeserializeError>; fn byte(&mut self) -> Result<u8, DeserializeError>; fn uint(&mut self, size: Length) -> Result<u64, DeserializeError>; fn int(&mut self, size: Length) -> Result<i64, DeserializeError>; fn slice(&mut self, size: Length) -> Result<&[u8], DeserializeError>; fn tag(&mut self, expected: Tag) -> Result<Tag, DeserializeError>; fn peek_tag(&mut self, expected: Tag) -> Result<Tag, DeserializeError>; } impl ReadExt for &'_ [u8] { fn peek(&mut self) -> Result<u8, DeserializeError> { Ok(*self.get(0).ok_or(DeserializeError::BufferTooShort)?) 
} fn byte(&mut self) -> Result<u8, DeserializeError> { let byte = *self.get(0).ok_or(DeserializeError::BufferTooShort)?; *self = &self[1..]; Ok(byte) } fn uint(&mut self, size: Length) -> Result<u64, DeserializeError> { let size = size.0 as usize; if size > 8 { return Err(DeserializeError::IntegerTooLarge); } else if size == 0 { return Ok(0); } let mut bytes = [0; 8]; bytes[8 - size..][..size].copy_from_slice(&self[..size]); *self = &self[size..]; Ok(u64::from_be_bytes(bytes)) } fn int(&mut self, size: Length) -> Result<i64, DeserializeError> { let size = size.0 as usize; if size > 8 { return Err(DeserializeError::IntegerTooLarge); } else if size == 0 { return Ok(0); } let mut bytes = if self[0] & 0x80 == 0x80 { [0xFF; 8] } else { [0x00; 8] }; bytes[8 - size..][..size].copy_from_slice(&self[..size]); *self = &self[size..]; Ok(i64::from_be_bytes(bytes)) } fn slice(&mut self, size: Length) -> Result<&[u8], DeserializeError> { let size = size.0 as usize; if size == 0 { return Ok(&[]); } let slice = &self[..size]; *self = &self[size..]; Ok(slice) } fn tag(&mut self, expected: Tag) -> Result<Tag, DeserializeError> { let tag = Tag::new(self.byte()?); if tag != expected { Err(DeserializeError::BadTag { expected, got: tag }) } else { Ok(tag) } } fn peek_tag(&mut self, expected: Tag) -> Result<Tag, DeserializeError> { let tag = Tag::new(self.peek()?); if tag != expected { Err(DeserializeError::BadTag { expected, got: tag }) } else { Ok(tag) } } } /// This helper function creates a premade size and fills it in after the /// function is complete pub(crate) fn serialize_sequence<F: Fn(&mut dyn VecExt)>(buffer: &mut dyn VecExt, f: F) { let mut temp_vec: TinyVec<[u8; 32]> = TinyVec::new(); f(&mut temp_vec); Length::new(temp_vec.len() as u64).serialize(buffer); buffer.write(&temp_vec); } #[cfg(test)] mod tests { use super::*; use crate::ber::Serialize; #[test] fn serialize_seq() { let mut buffer = Vec::new(); serialize_sequence(&mut buffer, |buffer| { 10i32.serialize(buffer); }); 
assert_eq!(buffer, &[0x03, 0x02, 0x01, 0x0a]); } }
26.964539
89
0.558916
759eea4e39638bb6ba61a44ead626788eab91ac0
5,094
// File: day_6.rs // Author: Jacob Guenther // Date: December 2020 /* Copyright 2020 Jacob Guenther Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use super::common::*; use grid::Grid; pub struct Challenge { _input: &'static str, instructions: Vec<LineInstruction>, } impl ChallengeT for Challenge { type Output1 = usize; type Output2 = u32; fn day() -> u8 { 6 } fn new() -> Self { let input = include_str!("../inputs/day_6.txt"); Challenge { _input: input, instructions: input .lines() .map(|line| LineInstruction::from_line(line)) .collect(), } } fn part_1(&self) -> Self::Output1 { let mut grid = Grid::<bool>::new(1000, 1000, &false); self.instructions.iter().for_each(|instruction| { instruction.apply_instruction(&mut grid); }); grid.data.iter().filter(|light_on| **light_on).count() } fn part_2(&self) -> Self::Output2 { let mut grid = Grid::<u32>::new(1000, 1000, &0); self.instructions.iter().for_each(|instruction| { instruction.apply_instruction_2(&mut grid); }); grid.data.iter().sum() } } #[derive(Copy, Clone, Debug, PartialEq)] enum Instruction { On, Off, Toggle, } #[derive(Copy, Clone, Debug, PartialEq)] struct LineInstruction { instruction: Instruction, coords: [usize; 4], } impl LineInstruction { fn from_line(line: &str) -> Self { let (instruction, slice) = if line.starts_with("toggle") { (Instruction::Toggle, &line[7..]) } else if line.starts_with("turn on") { (Instruction::On, &line[8..]) } else { (Instruction::Off, &line[9..]) }; let mut coords = [0, 0, 0, 0]; slice .split(" through ") .flat_map(|part| part.split(',')) .map(|n| n.parse::<usize>().unwrap()) .zip(coords.iter_mut()) .for_each(|(n, coord)| { *coord = n; }); Self { instruction: instruction, coords: coords, } } fn row_range(&self) -> std::ops::Range<usize> { self.coords[1]..(self.coords[3] + 1) } fn col_range(&self) -> std::ops::Range<usize> { self.coords[0]..(self.coords[2] + 1) } fn apply_instruction(&self, grid: &mut Grid<bool>) { for y in self.row_range() { for x in self.col_range() { let val = match self.instruction { Instruction::On => true, Instruction::Off => false, Instruction::Toggle => !grid.get(x, y), }; grid.set(x, y, &val); } 
} } fn apply_instruction_2(&self, grid: &mut Grid<u32>) { for y in self.row_range() { for x in self.col_range() { let mut val = grid.get(x, y); match self.instruction { Instruction::On => val += 1, Instruction::Toggle => val += 2, Instruction::Off => match val { 0 => continue, 1 => val = 0, _ => val -= 1, }, } grid.set(x, y, &val); } } } } #[cfg(test)] mod tests { use super::Challenge; use super::*; use crate::common::ChallengeT; #[test] fn part_1_test_instruction() { let res = LineInstruction::from_line("turn on 0,0 through 10,10"); assert_eq!( LineInstruction { instruction: Instruction::On, coords: [0, 0, 10, 10] }, res ); let res = LineInstruction::from_line("turn off 23,12 through 24,199"); assert_eq!( LineInstruction { instruction: Instruction::Off, coords: [23, 12, 24, 199] }, res ); let res = LineInstruction::from_line("toggle 23,12 through 24,199"); assert_eq!( LineInstruction { instruction: Instruction::Toggle, coords: [23, 12, 24, 199] }, res ); } #[test] fn part_1_test() { let res = Challenge::new().part_1(); assert_eq!(res, 377891); } #[test] fn part_2_test() { let res = Challenge::new().part_2(); assert_eq!(res, 14110788); } use test::Bencher; #[bench] fn part_line_instruction_bench(b: &mut Bencher) { b.iter(|| LineInstruction::from_line("toggle 23,12 through 24,199")) } #[bench] fn part_1_bench(b: &mut Bencher) { b.iter(|| Challenge::new().part_1()) } #[bench] fn part_2_bench(b: &mut Bencher) { b.iter(|| Challenge::new().part_2()) } #[bench] fn both_bench(b: &mut Bencher) { b.iter(|| { let challenge = Challenge::new(); challenge.part_1(); challenge.part_2(); }) } }
24.728155
80
0.64998
6437c5124535a6ddfdd6d28d7c9286afc443c538
9,999
#![allow(clippy::unreadable_literal)] #![cfg(feature = "Default")] #![no_std] #[macro_use] extern crate educe; #[macro_use] extern crate assert_approx_eq; #[test] #[allow(dead_code)] fn basic() { #[derive(Educe)] #[educe(Default)] union Union { f1: u8, } assert_eq!(0, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union2 { #[educe(Default)] f1: u8, } assert_eq!(0, unsafe { Union2::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union3 { f1: u8, #[educe(Default)] f2: f64, } assert_approx_eq!(0.0, unsafe { Union3::default().f2 }); } #[test] #[allow(dead_code)] fn type_default_1() { #[derive(Educe)] #[educe(Default(expression = "Union { f1: 1 }"))] union Union { f1: u8, f2: f64, } assert_eq!(1, unsafe { Union::default().f1 }); } #[test] #[allow(dead_code)] fn type_default_2() { #[derive(Educe)] #[educe(Default(expression("Union { f1: 1 }")))] union Union { f1: u8, f2: f64, } assert_eq!(1, unsafe { Union::default().f1 }); } #[test] #[allow(dead_code)] fn field_default_1() { #[derive(Educe)] #[educe(Default)] union Union { #[educe(Default = 1)] f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(1, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union2 { f1: u8, #[educe(Default = 11111111111111111111111111111)] f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(11111111111111111111111111111, unsafe { Union2::default().f2 }); #[derive(Educe)] #[educe(Default)] union Union3 { f1: u8, f2: i128, #[educe(Default = 1.1)] f3: f64, f4: bool, f5: &'static str, f6: char, } assert_approx_eq!(1.1, unsafe { Union3::default().f3 }); #[derive(Educe)] #[educe(Default)] union Union4 { f1: u8, f2: i128, f3: f64, #[educe(Default = true)] f4: bool, f5: &'static str, f6: char, } assert_eq!(true, unsafe { Union4::default().f4 }); #[derive(Educe)] #[educe(Default)] union Union5 { f1: u8, f2: i128, f3: f64, f4: bool, #[educe(Default = "Hi")] f5: &'static str, f6: char, } assert_eq!("Hi", 
unsafe { Union5::default().f5 }); #[derive(Educe)] #[educe(Default)] union Union6 { f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, #[educe(Default = 'M')] f6: char, } assert_eq!('M', unsafe { Union6::default().f6 }); } #[test] #[allow(dead_code)] fn field_default_2() { #[derive(Educe)] #[educe(Default)] union Union { #[educe(Default(1))] f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(1, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union2 { f1: u8, #[educe(Default(11111111111111111111111111111))] f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(11111111111111111111111111111, unsafe { Union2::default().f2 }); #[derive(Educe)] #[educe(Default)] union Union3 { f1: u8, f2: i128, #[educe(Default(1.1))] f3: f64, f4: bool, f5: &'static str, f6: char, } assert_approx_eq!(1.1, unsafe { Union3::default().f3 }); #[derive(Educe)] #[educe(Default)] union Union4 { f1: u8, f2: i128, f3: f64, #[educe(Default(true))] f4: bool, f5: &'static str, f6: char, } assert_eq!(true, unsafe { Union4::default().f4 }); #[derive(Educe)] #[educe(Default)] union Union5 { f1: u8, f2: i128, f3: f64, f4: bool, #[educe(Default("Hi"))] f5: &'static str, f6: char, } assert_eq!("Hi", unsafe { Union5::default().f5 }); #[derive(Educe)] #[educe(Default)] union Union6 { f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, #[educe(Default('M'))] f6: char, } assert_eq!('M', unsafe { Union6::default().f6 }); } #[test] #[allow(dead_code)] fn field_default_3() { #[derive(Educe)] #[educe(Default)] union Union { #[educe(Default(expression = "0 + 1"))] f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(1, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union2 { f1: u8, #[educe(Default(expression = "-11111111111111111111111111111 * -1"))] f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(11111111111111111111111111111, unsafe { Union2::default().f2 }); 
#[derive(Educe)] #[educe(Default)] union Union3 { f1: u8, f2: i128, #[educe(Default(expression = "1.0 + 0.1"))] f3: f64, f4: bool, f5: &'static str, f6: char, } assert_approx_eq!(1.1, unsafe { Union3::default().f3 }); #[derive(Educe)] #[educe(Default)] union Union4 { f1: u8, f2: i128, f3: f64, #[educe(Default(expression = "!false"))] f4: bool, f5: &'static str, f6: char, } assert_eq!(true, unsafe { Union4::default().f4 }); #[derive(Educe)] #[educe(Default)] union Union5 { f1: u8, f2: i128, f3: f64, f4: bool, #[educe(Default(expression = "\"Hi\""))] f5: &'static str, f6: char, } assert_eq!("Hi", unsafe { Union5::default().f5 }); #[derive(Educe)] #[educe(Default)] union Union6 { f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, #[educe(Default(expression = "'M'"))] f6: char, } assert_eq!('M', unsafe { Union6::default().f6 }); } #[test] #[allow(dead_code)] fn field_default_4() { #[derive(Educe)] #[educe(Default)] union Union { #[educe(Default(expression("0 + 1")))] f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(1, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default)] union Union2 { f1: u8, #[educe(Default(expression("-11111111111111111111111111111 * -1")))] f2: i128, f3: f64, f4: bool, f5: &'static str, f6: char, } assert_eq!(11111111111111111111111111111, unsafe { Union2::default().f2 }); #[derive(Educe)] #[educe(Default)] union Union3 { f1: u8, f2: i128, #[educe(Default(expression("1.0 + 0.1")))] f3: f64, f4: bool, f5: &'static str, f6: char, } assert_approx_eq!(1.1, unsafe { Union3::default().f3 }); #[derive(Educe)] #[educe(Default)] union Union4 { f1: u8, f2: i128, f3: f64, #[educe(Default(expression("!false")))] f4: bool, f5: &'static str, f6: char, } assert_eq!(true, unsafe { Union4::default().f4 }); #[derive(Educe)] #[educe(Default)] union Union5 { f1: u8, f2: i128, f3: f64, f4: bool, #[educe(Default(expression("\"Hi\"")))] f5: &'static str, f6: char, } assert_eq!("Hi", unsafe { Union5::default().f5 }); 
#[derive(Educe)] #[educe(Default)] union Union6 { f1: u8, f2: i128, f3: f64, f4: bool, f5: &'static str, #[educe(Default(expression("'M'")))] f6: char, } assert_eq!('M', unsafe { Union6::default().f6 }); } #[test] #[allow(dead_code)] fn bound_1() { #[derive(Educe)] #[educe(Default(bound))] union Union<T: Copy> { f1: T, } assert_eq!(0, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default(bound))] union Union2<T: Copy, K: Copy> { f1: T, #[educe(Default)] f2: K, } assert_approx_eq!(0.0, unsafe { Union2::<u8, f64>::default().f2 }); } #[test] #[allow(dead_code)] fn bound_2() { #[derive(Educe)] #[educe(Default(bound = "T: core::default::Default"))] union Union<T: Copy> { f1: T, } assert_eq!(0, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default(bound = "K: core::default::Default"))] union Union2<T: Copy, K: Copy> { f1: T, #[educe(Default)] f2: K, } assert_approx_eq!(0.0, unsafe { Union2::<u8, f64>::default().f2 }); } #[test] #[allow(dead_code)] fn bound_3() { #[derive(Educe)] #[educe(Default(bound("T: core::default::Default")))] union Union<T: Copy> { f1: T, } assert_eq!(0, unsafe { Union::default().f1 }); #[derive(Educe)] #[educe(Default(bound("K: core::default::Default")))] union Union2<T: Copy, K: Copy> { f1: T, #[educe(Default)] f2: K, } assert_approx_eq!(0.0, unsafe { Union2::<u8, f64>::default().f2 }); } #[test] #[allow(dead_code)] fn new() { #[derive(Educe)] #[educe(Default(new))] union Union { f1: u8, } assert_eq!(0, unsafe { Union::new().f1 }); #[derive(Educe)] #[educe(Default(new))] union Union2 { f1: u8, #[educe(Default)] f2: f64, } assert_approx_eq!(0.0, unsafe { Union2::new().f2 }); }
19.721893
79
0.486649
3adc7a3a9b64f903255ff8c7584c61abff997e46
1,603
pub fn compute() { let mut stretch = stretch2::Stretch::new(); let node0 = stretch .new_node( stretch2::style::Style { flex_grow: 1f32, flex_basis: stretch2::style::Dimension::Percent(0.1f32), max_size: stretch2::geometry::Size { width: stretch2::style::Dimension::Percent(0.6f32), ..Default::default() }, ..Default::default() }, &[], ) .unwrap(); let node1 = stretch .new_node( stretch2::style::Style { flex_grow: 4f32, flex_basis: stretch2::style::Dimension::Percent(0.15f32), max_size: stretch2::geometry::Size { width: stretch2::style::Dimension::Percent(0.2f32), ..Default::default() }, ..Default::default() }, &[], ) .unwrap(); let node = stretch .new_node( stretch2::style::Style { flex_direction: stretch2::style::FlexDirection::Column, size: stretch2::geometry::Size { width: stretch2::style::Dimension::Points(200f32), height: stretch2::style::Dimension::Points(400f32), ..Default::default() }, ..Default::default() }, &[node0, node1], ) .unwrap(); stretch.compute_layout(node, stretch2::geometry::Size::undefined()).unwrap(); }
34.106383
81
0.458515
f46621ae45c7c89aba783e4b6a5e7973083f1608
1,534
use std::fs; use serde::Deserialize; use serenity::{ prelude::*, model::{ gateway::Activity, channel::Message, gateway::Ready, }, framework::{ StandardFramework, standard::macros::group, }, async_trait }; struct Handler; #[async_trait] impl EventHandler for Handler { async fn message(&self, ctx: Context, msg: Message) { if msg.content == "gn ping" { if let Err(why) = msg.channel_id.say(&ctx.http, "shut up").await { println!("error sending message: {:?}", why); } } } async fn ready(&self, ctx: Context, ready: Ready) { println!("Logged in on {}#{} ({}).", &ready.user.name, &ready.user.discriminator, &ready.user.id); ctx.set_activity(Activity::playing("with v0.1.0")).await; } } #[derive(Deserialize)] struct Config<'a, 'b> { token: &'a str, version: &'b str } #[group] struct General; #[tokio::main] async fn main() { let content = fs::read_to_string("config.toml").unwrap(); let config: Config = toml::from_str(&content).unwrap(); let framework = StandardFramework::new() .configure(|c| c.prefix("gn ")).group(&GENERAL_GROUP); let mut client = Client::new(&config.token) .framework(framework) .event_handler(Handler) .await .expect("failed creating the client"); println!("Starting with v{}...", config.version); if let Err(why) = client.start().await { println!("error: {:?}", &why); } }
25.566667
106
0.578879
485da12610694e8945cc7c5457740254c915e2f6
449
pub mod add_guild_member; pub mod get_guild_members; pub mod update_guild_member; mod add_role_to_member; mod get_member; mod remove_member; mod remove_role_from_member; pub use self::{ add_guild_member::AddGuildMember, add_role_to_member::AddRoleToMember, get_guild_members::GetGuildMembers, get_member::GetMember, remove_member::RemoveMember, remove_role_from_member::RemoveRoleFromMember, update_guild_member::UpdateGuildMember, };
29.933333
91
0.832962
79d02c35d5e744ae42482f23d4b64777022cfa47
3,482
use core::hash::{Hash, Hasher}; use std::sync::atomic::{AtomicPtr, HasAtomic, HasAtomicLeast}; #[repr(transparent)] pub struct UserPtr<T: ?Sized>(*mut T); #[repr(transparent)] pub struct HandlePtr<T: ?Sized>(*mut T); impl<T: ?Sized> HandlePtr<T> { pub fn into_kernel_addr(self) -> *mut T { self.0 } pub fn from_kernel_addr(kaddr: *mut T) -> Self { Self(kaddr) } } #[repr(transparent)] pub struct IOPtr<T: ?Sized>(*mut T); macro_rules! impl_traits_for_addr_space { ($($addr_ptr:ident),* $(,)?) => { $( impl<T: ?Sized> Copy for $addr_ptr<T> {} impl<T: ?Sized> Clone for $addr_ptr<T> { fn clone(&self) -> Self { Self(self.0) } } impl<T: ?Sized> PartialEq for $addr_ptr<T> { fn eq(&self, rhs: &Self) -> bool { self.0 == rhs.0 } } impl<T: ?Sized> Eq for $addr_ptr<T> {} impl<T: ?Sized> PartialOrd for $addr_ptr<T> { fn partial_cmp(&self, rhs: &Self) -> Option<core::cmp::Ordering> { self.0.partial_cmp(&rhs.0) } } impl<T: ?Sized> Ord for $addr_ptr<T> { fn cmp(&self, rhs: &Self) -> core::cmp::Ordering { self.0.cmp(&rhs.0) } } impl<T: ?Sized> Hash for $addr_ptr<T>{ fn hash<H: Hasher>(&self, state: &mut H){ self.0.hash(state) } } impl<T: ?Sized> core::fmt::Pointer for $addr_ptr<T>{ fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result{ self.0.fmt(f) } } impl<T: ?Sized> core::fmt::Debug for $addr_ptr<T>{ fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result{ f.debug_tuple(::core::stringify!($addr_ptr)).field(&self.0).finish() } } unsafe impl<T> HasAtomic for $addr_ptr<T> { type Atomic = AtomicPtr<T>; type Storage = [u8; core::mem::size_of::<*mut ()>()]; fn into_base(self) -> *mut T { self.0 } unsafe fn from_base(base: *mut T) -> Self { Self(base) } } unsafe impl<T> HasAtomicLeast for $addr_ptr<T> { type AtomicLeast = AtomicPtr<T>; type Storage = [u8; core::mem::size_of::<*mut ()>()]; fn into_least(self) -> *mut T { self.0 } unsafe fn from_least(base: *mut T) -> Self { Self(base) } } )* }; } impl_traits_for_addr_space!(UserPtr, HandlePtr, IOPtr); #[derive(Copy, 
Clone, Debug, Hash, PartialEq, Eq)] #[repr(transparent)] pub struct PhysAddr(*mut ()); impl core::fmt::Pointer for PhysAddr { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { self.0.fmt(f) } } unsafe impl HasAtomic for PhysAddr { type Atomic = AtomicPtr<()>; type Storage = [u8; core::mem::size_of::<*mut ()>()]; fn into_base(self) -> *mut () { self.0 } unsafe fn from_base(base: *mut ()) -> Self { Self(base) } } unsafe impl HasAtomicLeast for PhysAddr { type AtomicLeast = AtomicPtr<()>; type Storage = [u8; core::mem::size_of::<*mut ()>()]; fn into_least(self) -> *mut () { self.0 } unsafe fn from_least(base: *mut ()) -> Self { Self(base) } }
25.602941
84
0.495405
f960d83c2199b0a586602a87799504e7a7226ceb
7,537
use crate::{ control::{ ControlEvent, ControllerEvent, GamepadConfig, GamepadId, }, dimensions::UpDownLeftRight, file::SimpleConfigManager, }; use std::collections::{ HashMap, HashSet, }; use sdl2::{ controller::GameController, event::Event, self }; pub struct GamepadControls { config_manager: SimpleConfigManager<GamepadConfig>, controller_events: Vec<ControllerEvent>, gamepads: HashMap<GamepadId, GameController>, axes: HashMap<GamepadAxis, f32>, currently_pressed: HashSet<GamepadButton>, just_pressed: HashSet<GamepadButton>, just_released: HashSet<GamepadButton>, } impl GamepadControls { pub fn new(config_manager: SimpleConfigManager<GamepadConfig>) -> GamepadControls { GamepadControls { config_manager, controller_events: Vec::new(), gamepads: HashMap::new(), axes: HashMap::new(), currently_pressed: HashSet::new(), just_pressed: HashSet::new(), just_released: HashSet::new(), } } pub fn is_pressed(&self, gamepad_id: GamepadId, event: ControlEvent) -> bool { match self.control_event_to_gamepad_control(event) { GamepadControl::ButtonPress(button) => { let gamepad_button = GamepadButton { gamepad_id, button }; self.currently_pressed.contains(&gamepad_button) }, GamepadControl::AxisAboveThreshold(axis, thresh) => { let gamepad_axis = GamepadAxis { gamepad_id, axis }; if let Some(value) = self.axes.get(&gamepad_axis) { *value > thresh } else { false } }, GamepadControl::AxisBelowThreshold(axis, thresh) => { let gamepad_axis = GamepadAxis { gamepad_id, axis }; if let Some(value) = self.axes.get(&gamepad_axis) { *value < thresh } else { false } } } } pub fn just_pressed(&self, gamepad_id: GamepadId, event: ControlEvent) -> bool { match self.control_event_to_gamepad_control(event) { GamepadControl::ButtonPress(button) => { let gamepad_button = GamepadButton { gamepad_id, button }; self.just_pressed.contains(&gamepad_button) }, _ => false } } pub fn just_released(&self, gamepad_id: GamepadId, event: ControlEvent) -> bool { match self.control_event_to_gamepad_control(event) 
{ GamepadControl::ButtonPress(button) => { let gamepad_button = GamepadButton { gamepad_id, button }; self.just_released.contains(&gamepad_button) }, _ => false } } pub fn controller_events(&self) -> &Vec<ControllerEvent> { &self.controller_events } pub fn ingest_gamepad_events(&mut self, controller_subsystem: &sdl2::GameControllerSubsystem, gamepad_events: Vec<Event>) { self.config_manager.update(); self.controller_events.clear(); self.just_released.clear(); self.just_pressed.clear(); for event in gamepad_events.into_iter() { match event { Event::ControllerDeviceAdded { which, .. } => { if let Ok(game_controller) = controller_subsystem.open(which) { let gamepad_id = GamepadId::from_u32(game_controller.instance_id() as u32); self.gamepads.insert(gamepad_id, game_controller); self.controller_events.push(ControllerEvent::GamepadConnected(gamepad_id)); } else { println!("Couldn't open gamepad {}", which); } }, Event::ControllerDeviceRemoved { which, .. } => { let gamepad_id = GamepadId::from_u32(which); self.gamepads.remove(&gamepad_id); self.controller_events.push(ControllerEvent::GamepadDisconnected(gamepad_id)); }, Event::ControllerButtonDown { which, button, .. } => { let gamepad_id = GamepadId::from_u32(which); let gamepad_button = GamepadButton { gamepad_id, button }; self.currently_pressed.insert(gamepad_button); self.just_pressed.insert(gamepad_button); }, Event::ControllerButtonUp { which, button, .. } => { let gamepad_id = GamepadId::from_u32(which); let gamepad_button = GamepadButton { gamepad_id, button }; self.currently_pressed.remove(&gamepad_button); self.just_released.insert(gamepad_button); }, Event::ControllerAxisMotion { which, axis, value, .. 
} => { let gamepad_id = GamepadId::from_u32(which); let gamepad_axis = GamepadAxis { gamepad_id, axis }; let normalized_value = f32::from(value) / f32::from(i16::max_value()); self.axes.insert(gamepad_axis, normalized_value); } _ => {} } } } fn control_event_to_gamepad_control(&self, event: ControlEvent) -> GamepadControl { let config = self.config_manager.get(); match event { ControlEvent::PlayerMove(UpDownLeftRight::Up) => GamepadControl::AxisBelowThreshold(sdl2::controller::Axis::LeftY, -config.axis_threshold), ControlEvent::PlayerMove(UpDownLeftRight::Down) => GamepadControl::AxisAboveThreshold(sdl2::controller::Axis::LeftY, config.axis_threshold), ControlEvent::PlayerMove(UpDownLeftRight::Left) => GamepadControl::AxisBelowThreshold(sdl2::controller::Axis::LeftX, -config.axis_threshold), ControlEvent::PlayerMove(UpDownLeftRight::Right) => GamepadControl::AxisAboveThreshold(sdl2::controller::Axis::LeftX, config.axis_threshold), ControlEvent::PlayerFireWeapon => GamepadControl::ButtonPress(sdl2::controller::Button::A), ControlEvent::PlayerFireSpecial => GamepadControl::ButtonPress(sdl2::controller::Button::B), ControlEvent::PlayerSwitchHero => GamepadControl::ButtonPress(sdl2::controller::Button::Y), ControlEvent::RedeployEntities => GamepadControl::ButtonPress(sdl2::controller::Button::Back), } } } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] struct GamepadButton { gamepad_id: GamepadId, button: sdl2::controller::Button, } #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] struct GamepadAxis { gamepad_id: GamepadId, axis: sdl2::controller::Axis, } enum GamepadControl { ButtonPress(sdl2::controller::Button), AxisAboveThreshold(sdl2::controller::Axis, f32), AxisBelowThreshold(sdl2::controller::Axis, f32), }
38.651282
153
0.557516
dd223bfba52187a971797869b88dc1f7519e76c2
5,171
// Auto-generated register API for the EVENTS_FIELDLOST ("remote NFC field lost")
// event register: reader/writer proxy types (R/W), an enumerated single-bit
// field at bit 0 (NOTGENERATED = 0 / GENERATED = 1), and the register spec
// (u32 backing type, reset value 0). Produced by svd2rust (see the doc links
// below) — do not hand-edit; regenerate from the device SVD instead.
// NOTE(review): the original line breaks were lost in this excerpt; the code
// below is kept byte-identical, comments only added.
#[doc = "Register `EVENTS_FIELDLOST` reader"] pub struct R(crate::R<EVENTS_FIELDLOST_SPEC>); impl core::ops::Deref for R { type Target = crate::R<EVENTS_FIELDLOST_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<EVENTS_FIELDLOST_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<EVENTS_FIELDLOST_SPEC>) -> Self { R(reader) } } #[doc = "Register `EVENTS_FIELDLOST` writer"] pub struct W(crate::W<EVENTS_FIELDLOST_SPEC>); impl core::ops::Deref for W { type Target = crate::W<EVENTS_FIELDLOST_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<EVENTS_FIELDLOST_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<EVENTS_FIELDLOST_SPEC>) -> Self { W(writer) } } #[doc = "Remote NFC field lost\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum EVENTS_FIELDLOST_A { #[doc = "0: Event not generated"] NOTGENERATED = 0, #[doc = "1: Event generated"] GENERATED = 1, } impl From<EVENTS_FIELDLOST_A> for bool { #[inline(always)] fn from(variant: EVENTS_FIELDLOST_A) -> Self { variant as u8 != 0 } } #[doc = "Field `EVENTS_FIELDLOST` reader - Remote NFC field lost"] pub struct EVENTS_FIELDLOST_R(crate::FieldReader<bool, EVENTS_FIELDLOST_A>); impl EVENTS_FIELDLOST_R { pub(crate) fn new(bits: bool) -> Self { EVENTS_FIELDLOST_R(crate::FieldReader::new(bits)) } #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> EVENTS_FIELDLOST_A { match self.bits { false => EVENTS_FIELDLOST_A::NOTGENERATED, true => EVENTS_FIELDLOST_A::GENERATED, } } #[doc = "Checks if the value of the field is `NOTGENERATED`"] #[inline(always)] pub fn is_not_generated(&self) -> bool { **self == EVENTS_FIELDLOST_A::NOTGENERATED } #[doc = "Checks if the value of the field is `GENERATED`"] #[inline(always)] pub fn is_generated(&self) -> bool { **self == 
EVENTS_FIELDLOST_A::GENERATED } } impl core::ops::Deref for EVENTS_FIELDLOST_R { type Target = crate::FieldReader<bool, EVENTS_FIELDLOST_A>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `EVENTS_FIELDLOST` writer - Remote NFC field lost"] pub struct EVENTS_FIELDLOST_W<'a> { w: &'a mut W, } impl<'a> EVENTS_FIELDLOST_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: EVENTS_FIELDLOST_A) -> &'a mut W { self.bit(variant.into()) } #[doc = "Event not generated"] #[inline(always)] pub fn not_generated(self) -> &'a mut W { self.variant(EVENTS_FIELDLOST_A::NOTGENERATED) } #[doc = "Event generated"] #[inline(always)] pub fn generated(self) -> &'a mut W { self.variant(EVENTS_FIELDLOST_A::GENERATED) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01); self.w } } impl R { #[doc = "Bit 0 - Remote NFC field lost"] #[inline(always)] pub fn events_fieldlost(&self) -> EVENTS_FIELDLOST_R { EVENTS_FIELDLOST_R::new((self.bits & 0x01) != 0) } } impl W { #[doc = "Bit 0 - Remote NFC field lost"] #[inline(always)] pub fn events_fieldlost(&mut self) -> EVENTS_FIELDLOST_W { EVENTS_FIELDLOST_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Remote NFC field lost\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [events_fieldlost](index.html) module"] pub struct EVENTS_FIELDLOST_SPEC; impl crate::RegisterSpec for EVENTS_FIELDLOST_SPEC { type Ux = u32; } #[doc = "`read()` method returns [events_fieldlost::R](R) reader structure"] impl crate::Readable for EVENTS_FIELDLOST_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [events_fieldlost::W](W) writer structure"] impl crate::Writable for EVENTS_FIELDLOST_SPEC { type Writer = W; } #[doc = "`reset()` method sets EVENTS_FIELDLOST to value 0"] impl crate::Resettable for EVENTS_FIELDLOST_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
32.31875
418
0.62367
5db1c4375a589c79fe4a9a4ed96abcd776b0cabf
702
#![no_std]
#![feature(alloc_error_handler)]

extern crate alloc;

use js_ffi::*;

/// Holds the JS function handle for `console.log`, registered once on
/// first access via `Default`.
struct Test {
    fn_log: f64,
}

impl Default for Test {
    fn default() -> Self {
        Test {
            fn_log: register("console.log"),
        }
    }
}

impl Test {
    /// Invoke the cached `console.log` handle with a single string argument.
    fn log(&self, message: &str) {
        call_1(UNDEFINED, self.fn_log, TYPE_STRING, to_js_string(message));
    }
}

/// Entry point invoked from the JS host: log a greeting to the console.
#[no_mangle]
pub fn main() -> () {
    let logger = globals::get::<Test>().lock();
    logger.log("hello world")
}

#[global_allocator]
static ALLOCATOR: malloc::Allocator = malloc::Allocator;

/// Required in `no_std`: spin forever on panic.
#[panic_handler]
fn panic(_info: &core::panic::PanicInfo) -> ! {
    loop {}
}

/// Required by `alloc_error_handler`: spin forever on allocation failure.
#[alloc_error_handler]
fn oom(_: core::alloc::Layout) -> ! {
    loop {}
}
16.714286
69
0.58547
ff56764cebd19c6ce1443c2b0d1011734c4dad50
2,365
//! This module acts as a switchboard to access different repositories managed by this //! page server. use crate::branches; use crate::layered_repository::LayeredRepository; use crate::repository::Repository; use crate::walredo::PostgresRedoManager; use crate::PageServerConf; use anyhow::{anyhow, bail, Result}; use lazy_static::lazy_static; use log::info; use std::collections::HashMap; use std::fs; use std::str::FromStr; use std::sync::{Arc, Mutex}; use zenith_utils::zid::ZTenantId; lazy_static! { pub static ref REPOSITORY: Mutex<HashMap<ZTenantId, Arc<dyn Repository>>> = Mutex::new(HashMap::new()); } pub fn init(conf: &'static PageServerConf) { let mut m = REPOSITORY.lock().unwrap(); for dir_entry in fs::read_dir(conf.tenants_path()).unwrap() { let tenantid = ZTenantId::from_str(dir_entry.unwrap().file_name().to_str().unwrap()).unwrap(); // Set up a WAL redo manager, for applying WAL records. let walredo_mgr = PostgresRedoManager::new(conf, tenantid); // Set up an object repository, for actual data storage. 
let repo = Arc::new(LayeredRepository::new( conf, Arc::new(walredo_mgr), tenantid, )); LayeredRepository::launch_checkpointer_thread(conf, repo.clone()); info!("initialized storage for tenant: {}", &tenantid); m.insert(tenantid, repo); } } pub fn create_repository_for_tenant( conf: &'static PageServerConf, tenantid: ZTenantId, ) -> Result<()> { let mut m = REPOSITORY.lock().unwrap(); // First check that the tenant doesn't exist already if m.get(&tenantid).is_some() { bail!("tenant {} already exists", tenantid); } let wal_redo_manager = Arc::new(PostgresRedoManager::new(conf, tenantid)); let repo = branches::create_repo(conf, tenantid, wal_redo_manager)?; m.insert(tenantid, repo); Ok(()) } pub fn insert_repository_for_tenant(tenantid: ZTenantId, repo: Arc<dyn Repository>) { let o = &mut REPOSITORY.lock().unwrap(); o.insert(tenantid, repo); } pub fn get_repository_for_tenant(tenantid: &ZTenantId) -> Result<Arc<dyn Repository>> { let o = &REPOSITORY.lock().unwrap(); o.get(tenantid) .map(|repo| Arc::clone(repo)) .ok_or_else(|| anyhow!("repository not found for tenant name {}", tenantid)) }
31.533333
91
0.666808
7ad01e10e8b6130622cee0218d610706178e3fbf
11,163
// GF(2^8) field arithmetic over the primitive polynomial 0x11d, using
// precomputed logarithm (LOG_TABLE) and antilogarithm (EXP_TABLE) tables with
// generator alpha = ROOT = 0x02, ported from c0dearm/sharks (see header below).
// Add/Sub are plain XOR; Mul/Div/pow translate through the log tables modulo
// ORDER - 1 = 255.
// NOTE(review): division by GF256(0) is not guarded — LOG_TABLE[0] is a
// placeholder 0, so `x / GF256(0)` silently yields a wrong value (it returns
// `x`) instead of panicking; confirm callers never divide by zero.
// NOTE(review): the original line breaks were lost in this excerpt; the code
// below is kept byte-identical, comments only added.
// Imported from c0dearm/sharks https://github.com/c0dearm/sharks/blob/master/src/field.rs // // Basic operations overrided for the Galois Field 256 (2**8) // Uses pre-calculated tables for 0x11d primitive polynomial (x**8 + x**4 + x**3 + x**2 + 1) use core::{ iter::{Product, Sum}, ops::{Add, Div, Mul, Sub}, }; pub const ORDER: usize = 256; pub const ROOT: u8 = 0x02; // LOG_TABLE[i] = log_{alpha} i, the degree of power over alpha // LOG_TABLE[0] is virtually 0 for simplicity const LOG_TABLE: [u8; 256] = [ 0x00, 0x00, 0x01, 0x19, 0x02, 0x32, 0x1a, 0xc6, 0x03, 0xdf, 0x33, 0xee, 0x1b, 0x68, 0xc7, 0x4b, 0x04, 0x64, 0xe0, 0x0e, 0x34, 0x8d, 0xef, 0x81, 0x1c, 0xc1, 0x69, 0xf8, 0xc8, 0x08, 0x4c, 0x71, 0x05, 0x8a, 0x65, 0x2f, 0xe1, 0x24, 0x0f, 0x21, 0x35, 0x93, 0x8e, 0xda, 0xf0, 0x12, 0x82, 0x45, 0x1d, 0xb5, 0xc2, 0x7d, 0x6a, 0x27, 0xf9, 0xb9, 0xc9, 0x9a, 0x09, 0x78, 0x4d, 0xe4, 0x72, 0xa6, 0x06, 0xbf, 0x8b, 0x62, 0x66, 0xdd, 0x30, 0xfd, 0xe2, 0x98, 0x25, 0xb3, 0x10, 0x91, 0x22, 0x88, 0x36, 0xd0, 0x94, 0xce, 0x8f, 0x96, 0xdb, 0xbd, 0xf1, 0xd2, 0x13, 0x5c, 0x83, 0x38, 0x46, 0x40, 0x1e, 0x42, 0xb6, 0xa3, 0xc3, 0x48, 0x7e, 0x6e, 0x6b, 0x3a, 0x28, 0x54, 0xfa, 0x85, 0xba, 0x3d, 0xca, 0x5e, 0x9b, 0x9f, 0x0a, 0x15, 0x79, 0x2b, 0x4e, 0xd4, 0xe5, 0xac, 0x73, 0xf3, 0xa7, 0x57, 0x07, 0x70, 0xc0, 0xf7, 0x8c, 0x80, 0x63, 0x0d, 0x67, 0x4a, 0xde, 0xed, 0x31, 0xc5, 0xfe, 0x18, 0xe3, 0xa5, 0x99, 0x77, 0x26, 0xb8, 0xb4, 0x7c, 0x11, 0x44, 0x92, 0xd9, 0x23, 0x20, 0x89, 0x2e, 0x37, 0x3f, 0xd1, 0x5b, 0x95, 0xbc, 0xcf, 0xcd, 0x90, 0x87, 0x97, 0xb2, 0xdc, 0xfc, 0xbe, 0x61, 0xf2, 0x56, 0xd3, 0xab, 0x14, 0x2a, 0x5d, 0x9e, 0x84, 0x3c, 0x39, 0x53, 0x47, 0x6d, 0x41, 0xa2, 0x1f, 0x2d, 0x43, 0xd8, 0xb7, 0x7b, 0xa4, 0x76, 0xc4, 0x17, 0x49, 0xec, 0x7f, 0x0c, 0x6f, 0xf6, 0x6c, 0xa1, 0x3b, 0x52, 0x29, 0x9d, 0x55, 0xaa, 0xfb, 0x60, 0x86, 0xb1, 0xbb, 0xcc, 0x3e, 0x5a, 0xcb, 0x59, 0x5f, 0xb0, 0x9c, 0xa9, 0xa0, 0x51, 0x0b, 0xf5, 0x16, 0xeb, 0x7a, 0x75, 0x2c, 0xd7, 0x4f, 0xae, 0xd5, 0xe9, 0xe6, 0xe7, 0xad, 0xe8, 
0x74, 0xd6, 0xf4, 0xea, 0xa8, 0x50, 0x58, 0xaf, ]; // EXP_TABLE[i] = alpha^i, the i-th power of alpha const EXP_TABLE: [u8; 255] = [ 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1d, 0x3a, 0x74, 0xe8, 0xcd, 0x87, 0x13, 0x26, 0x4c, 0x98, 0x2d, 0x5a, 0xb4, 0x75, 0xea, 0xc9, 0x8f, 0x03, 0x06, 0x0c, 0x18, 0x30, 0x60, 0xc0, 0x9d, 0x27, 0x4e, 0x9c, 0x25, 0x4a, 0x94, 0x35, 0x6a, 0xd4, 0xb5, 0x77, 0xee, 0xc1, 0x9f, 0x23, 0x46, 0x8c, 0x05, 0x0a, 0x14, 0x28, 0x50, 0xa0, 0x5d, 0xba, 0x69, 0xd2, 0xb9, 0x6f, 0xde, 0xa1, 0x5f, 0xbe, 0x61, 0xc2, 0x99, 0x2f, 0x5e, 0xbc, 0x65, 0xca, 0x89, 0x0f, 0x1e, 0x3c, 0x78, 0xf0, 0xfd, 0xe7, 0xd3, 0xbb, 0x6b, 0xd6, 0xb1, 0x7f, 0xfe, 0xe1, 0xdf, 0xa3, 0x5b, 0xb6, 0x71, 0xe2, 0xd9, 0xaf, 0x43, 0x86, 0x11, 0x22, 0x44, 0x88, 0x0d, 0x1a, 0x34, 0x68, 0xd0, 0xbd, 0x67, 0xce, 0x81, 0x1f, 0x3e, 0x7c, 0xf8, 0xed, 0xc7, 0x93, 0x3b, 0x76, 0xec, 0xc5, 0x97, 0x33, 0x66, 0xcc, 0x85, 0x17, 0x2e, 0x5c, 0xb8, 0x6d, 0xda, 0xa9, 0x4f, 0x9e, 0x21, 0x42, 0x84, 0x15, 0x2a, 0x54, 0xa8, 0x4d, 0x9a, 0x29, 0x52, 0xa4, 0x55, 0xaa, 0x49, 0x92, 0x39, 0x72, 0xe4, 0xd5, 0xb7, 0x73, 0xe6, 0xd1, 0xbf, 0x63, 0xc6, 0x91, 0x3f, 0x7e, 0xfc, 0xe5, 0xd7, 0xb3, 0x7b, 0xf6, 0xf1, 0xff, 0xe3, 0xdb, 0xab, 0x4b, 0x96, 0x31, 0x62, 0xc4, 0x95, 0x37, 0x6e, 0xdc, 0xa5, 0x57, 0xae, 0x41, 0x82, 0x19, 0x32, 0x64, 0xc8, 0x8d, 0x07, 0x0e, 0x1c, 0x38, 0x70, 0xe0, 0xdd, 0xa7, 0x53, 0xa6, 0x51, 0xa2, 0x59, 0xb2, 0x79, 0xf2, 0xf9, 0xef, 0xc3, 0x9b, 0x2b, 0x56, 0xac, 0x45, 0x8a, 0x09, 0x12, 0x24, 0x48, 0x90, 0x3d, 0x7a, 0xf4, 0xf5, 0xf7, 0xf3, 0xfb, 0xeb, 0xcb, 0x8b, 0x0b, 0x16, 0x2c, 0x58, 0xb0, 0x7d, 0xfa, 0xe9, 0xcf, 0x83, 0x1b, 0x36, 0x6c, 0xd8, 0xad, 0x47, 0x8e, ]; #[derive(Debug, PartialEq, Clone, Copy)] pub struct GF256(pub u8); #[allow(clippy::suspicious_arithmetic_impl)] impl Add for GF256 { type Output = GF256; fn add(self, other: Self) -> Self::Output { Self(self.0 ^ other.0) } } #[allow(clippy::suspicious_arithmetic_impl)] impl Sub for GF256 { type Output = Self; fn sub(self, other: 
Self) -> Self::Output { Self(self.0 ^ other.0) } } impl Mul for GF256 { type Output = Self; fn mul(self, other: Self) -> Self::Output { if self.0 == 0 || other.0 == 0 { Self(0) } else { let log_x = LOG_TABLE[self.0 as usize] as usize; let log_y = LOG_TABLE[other.0 as usize] as usize; Self(EXP_TABLE[(log_x + log_y) % (ORDER - 1)]) } } } impl Div for GF256 { type Output = Self; fn div(self, other: Self) -> Self::Output { if self.0 == 0 { Self(0) } else { let log_x = LOG_TABLE[self.0 as usize] as usize; let log_y = LOG_TABLE[other.0 as usize] as usize; Self(EXP_TABLE[((ORDER - 1) + log_x - log_y) % (ORDER - 1)]) } } } impl Sum for GF256 { fn sum<I: Iterator<Item = Self>>(iter: I) -> Self { iter.fold(Self(0), |acc, x| acc + x) } } impl Product for GF256 { fn product<I: Iterator<Item = Self>>(iter: I) -> Self { iter.fold(Self(1), |acc, x| acc * x) } } impl GF256 { pub fn pow(self, exp: isize) -> Self { if self.0 == 0 { Self(0) } else { let log_x = LOG_TABLE[self.0 as usize] as usize; if exp > 0 { Self(EXP_TABLE[(log_x * (exp as usize % (ORDER - 1))) % (ORDER - 1)]) } else { let exp_abs = exp.abs() as usize; let p = (ORDER - 1) - exp_abs % (ORDER - 1); Self(EXP_TABLE[(p * log_x) % (ORDER - 1)]) } } } } #[cfg(test)] mod tests { use super::{EXP_TABLE, GF256, LOG_TABLE, ORDER}; // use alloc::vec; #[test] fn add_works() { let answers: [u8; 256] = [ 1, 2, 5, 17, 18, 18, 90, 70, 30, 229, 71, 6, 214, 239, 212, 109, 72, 252, 205, 84, 128, 248, 5, 72, 147, 194, 111, 244, 208, 56, 44, 177, 152, 173, 43, 179, 196, 110, 155, 20, 95, 71, 59, 173, 30, 211, 29, 102, 91, 57, 199, 119, 126, 15, 169, 25, 148, 32, 96, 170, 244, 139, 172, 7, 89, 1, 234, 160, 255, 242, 110, 65, 135, 82, 172, 188, 14, 173, 90, 120, 203, 55, 71, 117, 228, 64, 106, 194, 15, 51, 204, 255, 216, 142, 55, 162, 199, 237, 245, 37, 210, 106, 58, 230, 102, 32, 28, 60, 42, 56, 221, 243, 75, 65, 165, 227, 242, 248, 190, 184, 117, 162, 9, 105, 228, 192, 193, 155, 130, 103, 238, 171, 52, 237, 185, 164, 40, 212, 255, 175, 
181, 208, 212, 76, 75, 232, 3, 94, 116, 28, 225, 214, 88, 214, 171, 171, 199, 245, 62, 93, 209, 238, 110, 56, 83, 45, 240, 179, 108, 98, 64, 1, 167, 10, 79, 158, 17, 141, 120, 224, 130, 27, 63, 90, 17, 11, 87, 143, 226, 58, 239, 227, 157, 52, 113, 188, 127, 246, 163, 120, 216, 47, 57, 12, 162, 171, 60, 80, 61, 3, 98, 224, 80, 111, 172, 69, 56, 251, 173, 231, 23, 137, 180, 83, 217, 125, 23, 32, 161, 211, 84, 164, 252, 6, 237, 0, 177, 254, 39, 193, 99, 246, 101, 148, 28, 14, 98, 107, 111, 224, 152, 50, 5, 23, 214, 174, ]; // log i + alpha^i for i = 1..255, where alpha^255 = 1 // alpha_i for i=0 for (i, a) in answers.iter().enumerate() { assert_eq!( (GF256(LOG_TABLE[i]) + GF256(EXP_TABLE[i % (ORDER - 1)])).0, *a ); } } #[test] fn sub_works() { add_works(); } #[test] fn mul_works() { let answers: [u8; 256] = [ 0, 0, 4, 200, 32, 14, 206, 179, 39, 134, 169, 160, 32, 59, 184, 50, 45, 121, 69, 43, 102, 43, 139, 169, 18, 94, 107, 84, 18, 157, 159, 51, 211, 1, 52, 13, 51, 128, 31, 219, 240, 230, 212, 219, 197, 19, 11, 135, 93, 163, 237, 53, 91, 177, 135, 124, 240, 224, 6, 158, 167, 155, 155, 38, 223, 144, 70, 54, 50, 45, 134, 170, 126, 223, 103, 207, 253, 176, 75, 98, 137, 87, 59, 50, 208, 116, 29, 200, 128, 82, 13, 138, 107, 53, 42, 34, 123, 203, 65, 174, 111, 101, 19, 78, 165, 62, 115, 108, 175, 139, 126, 107, 55, 196, 30, 209, 126, 8, 15, 211, 57, 191, 37, 254, 24, 136, 30, 111, 188, 30, 209, 208, 49, 132, 181, 22, 207, 241, 28, 2, 97, 58, 244, 179, 190, 120, 249, 174, 99, 6, 215, 232, 173, 1, 20, 216, 224, 191, 247, 78, 223, 101, 153, 1, 182, 203, 213, 75, 132, 98, 53, 204, 13, 177, 22, 88, 218, 21, 32, 68, 247, 153, 11, 190, 47, 128, 214, 33, 110, 194, 102, 77, 5, 178, 74, 65, 134, 62, 91, 190, 133, 15, 134, 94, 37, 247, 205, 51, 224, 152, 15, 13, 13, 233, 189, 206, 100, 131, 222, 5, 70, 182, 231, 176, 167, 150, 156, 249, 29, 189, 96, 149, 239, 162, 43, 239, 89, 8, 9, 57, 118, 227, 168, 243, 164, 188, 125, 8, 8, 240, 36, 45, 21, 20, 44, 175, ]; // log i * alpha^i for i 
= 1..255, where alpha^255 = 1 // 0 * alpha^{-1} for i=0 for (i, a) in answers.iter().enumerate() { assert_eq!( (GF256(LOG_TABLE[i]) * GF256(EXP_TABLE[i % (ORDER - 1)])).0, *a ); } } #[test] fn div_works() { let answers: [u8; 256] = [ 0, 0, 71, 174, 173, 87, 134, 213, 152, 231, 124, 39, 203, 113, 13, 198, 88, 171, 55, 150, 177, 227, 25, 225, 227, 180, 157, 225, 252, 122, 88, 161, 45, 87, 148, 78, 40, 165, 74, 134, 142, 120, 121, 163, 156, 75, 154, 241, 239, 27, 152, 130, 125, 235, 230, 32, 138, 225, 145, 90, 214, 226, 182, 168, 155, 175, 179, 124, 105, 169, 249, 58, 201, 14, 155, 217, 196, 254, 201, 143, 229, 12, 178, 24, 100, 226, 163, 234, 177, 36, 75, 106, 114, 208, 162, 63, 235, 181, 108, 131, 248, 51, 190, 187, 235, 115, 112, 37, 79, 90, 112, 237, 195, 121, 136, 110, 174, 143, 113, 134, 229, 255, 35, 175, 156, 208, 240, 222, 94, 202, 228, 34, 123, 23, 48, 18, 122, 114, 75, 243, 212, 139, 56, 132, 157, 119, 219, 170, 236, 11, 51, 86, 224, 221, 142, 200, 154, 136, 179, 72, 3, 32, 142, 149, 180, 209, 253, 17, 210, 134, 162, 106, 38, 108, 154, 154, 74, 181, 115, 142, 204, 195, 23, 162, 178, 41, 9, 90, 190, 14, 2, 45, 227, 253, 115, 93, 155, 244, 83, 219, 11, 196, 167, 241, 33, 60, 103, 69, 181, 189, 145, 130, 174, 137, 65, 65, 45, 153, 79, 236, 199, 209, 41, 10, 205, 44, 182, 38, 222, 209, 253, 247, 64, 71, 32, 1, 27, 53, 4, 110, 170, 221, 215, 4, 179, 163, 64, 90, 152, 163, 235, 6, 41, 93, 176, 175, ]; // log i / alpha^i for i = 1..255, where alpha^255 = 1 // 0 * alpha^{-1} for i=0 for (i, a) in answers.iter().enumerate() { assert_eq!( (GF256(LOG_TABLE[i]) / GF256(EXP_TABLE[i % (ORDER - 1)])).0, *a ); } } #[test] fn sum_works() { let values = vec![GF256(0x53), GF256(0xCA), GF256(0)]; assert_eq!(values.into_iter().sum::<GF256>().0, 0x99); } #[test] fn product_works() { let values = vec![GF256(1), GF256(1), GF256(4)]; assert_eq!(values.into_iter().product::<GF256>().0, 4); } #[test] fn power_works() { let answers = vec![ vec![GF256(0), GF256(1), GF256(1), GF256(1), 
GF256(1)], vec![GF256(0), GF256(1), GF256(2), GF256(3), GF256(4)], vec![GF256(0), GF256(1), GF256(4), GF256(5), GF256(16)], ]; answers.iter().enumerate().for_each(|(i, v)| { v.iter() .enumerate() .for_each(|(j, u)| assert_eq!(GF256(j as u8).pow(i as isize), *u)); }); assert_eq!(GF256(4).pow(-1), GF256(71)); assert_eq!(GF256(4) * GF256(71), GF256(1)); } }
43.605469
99
0.570635
ddb0f1c111a4fdb58fe8d2b11b8fac704b37f6eb
34,883
// Sapling Spend/Output zk-SNARK circuit definitions built on bellman gadgets
// over a Jubjub-embedded curve: value-commitment exposure, spend-authority
// re-randomization (rk = ak + [ar] G), ivk derivation via BLAKE2s, note
// commitment via Pedersen hash, Merkle authentication-path ascent, and
// nullifier computation. The `Output` circuit's `synthesize()` continues past
// the end of this excerpt.
// NOTE(review): constraint ordering and the `cs.namespace(...)` labels are
// load-bearing for proving/verifying-key compatibility — code kept
// byte-identical below, comments only added.
// NOTE(review): the original line breaks were lost in this excerpt.
//! The Sapling circuits. use ff::{Field, PrimeField, PrimeFieldRepr}; use bellman::{Circuit, ConstraintSystem, SynthesisError}; use zcash_primitives::jubjub::{FixedGenerators, JubjubEngine}; use zcash_primitives::constants; use zcash_primitives::primitives::{PaymentAddress, ProofGenerationKey, ValueCommitment}; use super::ecc; use super::pedersen_hash; use bellman::gadgets::blake2s; use bellman::gadgets::boolean; use bellman::gadgets::multipack; use bellman::gadgets::num; use bellman::gadgets::Assignment; pub const TREE_DEPTH: usize = zcash_primitives::sapling::SAPLING_COMMITMENT_TREE_DEPTH; /// This is an instance of the `Spend` circuit. pub struct Spend<'a, E: JubjubEngine> { pub params: &'a E::Params, /// Pedersen commitment to the value being spent pub value_commitment: Option<ValueCommitment<E>>, /// Key required to construct proofs for spending notes /// for a particular spending key pub proof_generation_key: Option<ProofGenerationKey<E>>, /// The payment address associated with the note pub payment_address: Option<PaymentAddress<E>>, /// The randomness of the note commitment pub commitment_randomness: Option<E::Fs>, /// Re-randomization of the public key pub ar: Option<E::Fs>, /// The authentication path of the commitment in the tree pub auth_path: Vec<Option<(E::Fr, bool)>>, /// The anchor; the root of the tree. If the note being /// spent is zero-value, this can be anything. pub anchor: Option<E::Fr>, } /// This is an output circuit instance. 
pub struct Output<'a, E: JubjubEngine> { pub params: &'a E::Params, /// Pedersen commitment to the value being spent pub value_commitment: Option<ValueCommitment<E>>, /// The payment address of the recipient pub payment_address: Option<PaymentAddress<E>>, /// The randomness used to hide the note commitment data pub commitment_randomness: Option<E::Fs>, /// The ephemeral secret key for DH with recipient pub esk: Option<E::Fs>, } /// Exposes a Pedersen commitment to the value as an /// input to the circuit fn expose_value_commitment<E, CS>( mut cs: CS, value_commitment: Option<ValueCommitment<E>>, params: &E::Params, ) -> Result<Vec<boolean::Boolean>, SynthesisError> where E: JubjubEngine, CS: ConstraintSystem<E>, { // Booleanize the value into little-endian bit order let value_bits = boolean::u64_into_boolean_vec_le( cs.namespace(|| "value"), value_commitment.as_ref().map(|c| c.value), )?; // Compute the note value in the exponent let value = ecc::fixed_base_multiplication( cs.namespace(|| "compute the value in the exponent"), FixedGenerators::ValueCommitmentValue, &value_bits, params, )?; // Booleanize the randomness. This does not ensure // the bit representation is "in the field" because // it doesn't matter for security. 
let rcv = boolean::field_into_boolean_vec_le( cs.namespace(|| "rcv"), value_commitment.as_ref().map(|c| c.randomness), )?; // Compute the randomness in the exponent let rcv = ecc::fixed_base_multiplication( cs.namespace(|| "computation of rcv"), FixedGenerators::ValueCommitmentRandomness, &rcv, params, )?; // Compute the Pedersen commitment to the value let cv = value.add(cs.namespace(|| "computation of cv"), &rcv, params)?; // Expose the commitment as an input to the circuit cv.inputize(cs.namespace(|| "commitment point"))?; Ok(value_bits) } impl<'a, E: JubjubEngine> Circuit<E> for Spend<'a, E> { fn synthesize<CS: ConstraintSystem<E>>(self, cs: &mut CS) -> Result<(), SynthesisError> { // Prover witnesses ak (ensures that it's on the curve) let ak = ecc::EdwardsPoint::witness( cs.namespace(|| "ak"), self.proof_generation_key.as_ref().map(|k| k.ak.clone()), self.params, )?; // There are no sensible attacks on small order points // of ak (that we're aware of!) but it's a cheap check, // so we do it. ak.assert_not_small_order(cs.namespace(|| "ak not small order"), self.params)?; // Rerandomize ak and expose it as an input to the circuit { let ar = boolean::field_into_boolean_vec_le(cs.namespace(|| "ar"), self.ar)?; // Compute the randomness in the exponent let ar = ecc::fixed_base_multiplication( cs.namespace(|| "computation of randomization for the signing key"), FixedGenerators::SpendingKeyGenerator, &ar, self.params, )?; let rk = ak.add(cs.namespace(|| "computation of rk"), &ar, self.params)?; rk.inputize(cs.namespace(|| "rk"))?; } // Compute nk = [nsk] ProofGenerationKey let nk; { // Witness nsk as bits let nsk = boolean::field_into_boolean_vec_le( cs.namespace(|| "nsk"), self.proof_generation_key.as_ref().map(|k| k.nsk), )?; // NB: We don't ensure that the bit representation of nsk // is "in the field" (Fs) because it's not used except to // demonstrate the prover knows it. If they know a // congruency then that's equivalent. 
// Compute nk = [nsk] ProvingPublicKey nk = ecc::fixed_base_multiplication( cs.namespace(|| "computation of nk"), FixedGenerators::ProofGenerationKey, &nsk, self.params, )?; } // This is the "viewing key" preimage for CRH^ivk let mut ivk_preimage = vec![]; // Place ak in the preimage for CRH^ivk ivk_preimage.extend(ak.repr(cs.namespace(|| "representation of ak"))?); // This is the nullifier preimage for PRF^nf let mut nf_preimage = vec![]; // Extend ivk and nf preimages with the representation of // nk. { let repr_nk = nk.repr(cs.namespace(|| "representation of nk"))?; ivk_preimage.extend(repr_nk.iter().cloned()); nf_preimage.extend(repr_nk); } assert_eq!(ivk_preimage.len(), 512); assert_eq!(nf_preimage.len(), 256); // Compute the incoming viewing key ivk let mut ivk = blake2s::blake2s( cs.namespace(|| "computation of ivk"), &ivk_preimage, constants::CRH_IVK_PERSONALIZATION, )?; // drop_5 to ensure it's in the field ivk.truncate(E::Fs::CAPACITY as usize); // Witness g_d, checking that it's on the curve. let g_d = { // This binding is to avoid a weird edge case in Rust's // ownership/borrowing rules. self is partially moved // above, but the closure for and_then will have to // move self (or a reference to self) to reference // self.params, so we have to copy self.params here. let params = self.params; ecc::EdwardsPoint::witness( cs.namespace(|| "witness g_d"), self.payment_address.as_ref().and_then(|a| a.g_d(params)), self.params, )? }; // Check that g_d is not small order. Technically, this check // is already done in the Output circuit, and this proof ensures // g_d is bound to a product of that check, but for defense in // depth let's check it anyway. It's cheap. 
g_d.assert_not_small_order(cs.namespace(|| "g_d not small order"), self.params)?; // Compute pk_d = g_d^ivk let pk_d = g_d.mul(cs.namespace(|| "compute pk_d"), &ivk, self.params)?; // Compute note contents: // value (in big endian) followed by g_d and pk_d let mut note_contents = vec![]; // Handle the value; we'll need it later for the // dummy input check. let mut value_num = num::Num::zero(); { // Get the value in little-endian bit order let value_bits = expose_value_commitment( cs.namespace(|| "value commitment"), self.value_commitment, self.params, )?; // Compute the note's value as a linear combination // of the bits. let mut coeff = E::Fr::one(); for bit in &value_bits { value_num = value_num.add_bool_with_coeff(CS::one(), bit, coeff); coeff.double(); } // Place the value in the note note_contents.extend(value_bits); } // Place g_d in the note note_contents.extend(g_d.repr(cs.namespace(|| "representation of g_d"))?); // Place pk_d in the note note_contents.extend(pk_d.repr(cs.namespace(|| "representation of pk_d"))?); assert_eq!( note_contents.len(), 64 + // value 256 + // g_d 256 // p_d ); // Compute the hash of the note contents let mut cm = pedersen_hash::pedersen_hash( cs.namespace(|| "note content hash"), pedersen_hash::Personalization::NoteCommitment, &note_contents, self.params, )?; { // Booleanize the randomness for the note commitment let rcm = boolean::field_into_boolean_vec_le( cs.namespace(|| "rcm"), self.commitment_randomness, )?; // Compute the note commitment randomness in the exponent let rcm = ecc::fixed_base_multiplication( cs.namespace(|| "computation of commitment randomness"), FixedGenerators::NoteCommitmentRandomness, &rcm, self.params, )?; // Randomize the note commitment. Pedersen hashes are not // themselves hiding commitments. 
cm = cm.add( cs.namespace(|| "randomization of note commitment"), &rcm, self.params, )?; } // This will store (least significant bit first) // the position of the note in the tree, for use // in nullifier computation. let mut position_bits = vec![]; // This is an injective encoding, as cur is a // point in the prime order subgroup. let mut cur = cm.get_x().clone(); // Ascend the merkle tree authentication path for (i, e) in self.auth_path.into_iter().enumerate() { let cs = &mut cs.namespace(|| format!("merkle tree hash {}", i)); // Determines if the current subtree is the "right" leaf at this // depth of the tree. let cur_is_right = boolean::Boolean::from(boolean::AllocatedBit::alloc( cs.namespace(|| "position bit"), e.map(|e| e.1), )?); // Push this boolean for nullifier computation later position_bits.push(cur_is_right.clone()); // Witness the authentication path element adjacent // at this depth. let path_element = num::AllocatedNum::alloc(cs.namespace(|| "path element"), || Ok(e.get()?.0))?; // Swap the two if the current subtree is on the right let (xl, xr) = num::AllocatedNum::conditionally_reverse( cs.namespace(|| "conditional reversal of preimage"), &cur, &path_element, &cur_is_right, )?; // We don't need to be strict, because the function is // collision-resistant. If the prover witnesses a congruency, // they will be unable to find an authentication path in the // tree with high probability. let mut preimage = vec![]; preimage.extend(xl.to_bits_le(cs.namespace(|| "xl into bits"))?); preimage.extend(xr.to_bits_le(cs.namespace(|| "xr into bits"))?); // Compute the new subtree value cur = pedersen_hash::pedersen_hash( cs.namespace(|| "computation of pedersen hash"), pedersen_hash::Personalization::MerkleTree(i), &preimage, self.params, )? .get_x() .clone(); // Injective encoding } { let real_anchor_value = self.anchor; // Allocate the "real" anchor that will be exposed. 
let rt = num::AllocatedNum::alloc(cs.namespace(|| "conditional anchor"), || { Ok(*real_anchor_value.get()?) })?; // (cur - rt) * value = 0 // if value is zero, cur and rt can be different // if value is nonzero, they must be equal cs.enforce( || "conditionally enforce correct root", |lc| lc + cur.get_variable() - rt.get_variable(), |lc| lc + &value_num.lc(E::Fr::one()), |lc| lc, ); // Expose the anchor rt.inputize(cs.namespace(|| "anchor"))?; } // Compute the cm + g^position for preventing // faerie gold attacks let mut rho = cm; { // Compute the position in the exponent let position = ecc::fixed_base_multiplication( cs.namespace(|| "g^position"), FixedGenerators::NullifierPosition, &position_bits, self.params, )?; // Add the position to the commitment rho = rho.add( cs.namespace(|| "faerie gold prevention"), &position, self.params, )?; } // Let's compute nf = BLAKE2s(nk || rho) nf_preimage.extend(rho.repr(cs.namespace(|| "representation of rho"))?); assert_eq!(nf_preimage.len(), 512); // Compute nf let nf = blake2s::blake2s( cs.namespace(|| "nf computation"), &nf_preimage, constants::PRF_NF_PERSONALIZATION, )?; multipack::pack_into_inputs(cs.namespace(|| "pack nullifier"), &nf) } } impl<'a, E: JubjubEngine> Circuit<E> for Output<'a, E> { fn synthesize<CS: ConstraintSystem<E>>(self, cs: &mut CS) -> Result<(), SynthesisError> { // Let's start to construct our note, which contains // value (big endian) let mut note_contents = vec![]; // Expose the value commitment and place the value // in the note. note_contents.extend(expose_value_commitment( cs.namespace(|| "value commitment"), self.value_commitment, self.params, )?); // Let's deal with g_d { let params = self.params; // Prover witnesses g_d, ensuring it's on the // curve. let g_d = ecc::EdwardsPoint::witness( cs.namespace(|| "witness g_d"), self.payment_address.as_ref().and_then(|a| a.g_d(params)), self.params, )?; // g_d is ensured to be large order. 
The relationship // between g_d and pk_d ultimately binds ivk to the // note. If this were a small order point, it would // not do this correctly, and the prover could // double-spend by finding random ivk's that satisfy // the relationship. // // Further, if it were small order, epk would be // small order too! g_d.assert_not_small_order(cs.namespace(|| "g_d not small order"), self.params)?; // Extend our note contents with the representation of // g_d. note_contents.extend(g_d.repr(cs.namespace(|| "representation of g_d"))?); // Booleanize our ephemeral secret key let esk = boolean::field_into_boolean_vec_le(cs.namespace(|| "esk"), self.esk)?; // Create the ephemeral public key from g_d. let epk = g_d.mul(cs.namespace(|| "epk computation"), &esk, self.params)?; // Expose epk publicly. epk.inputize(cs.namespace(|| "epk"))?; } // Now let's deal with pk_d. We don't do any checks and // essentially allow the prover to witness any 256 bits // they would like. { // Just grab pk_d from the witness let pk_d = self.payment_address.as_ref().map(|e| e.pk_d().to_xy()); // Witness the y-coordinate, encoded as little // endian bits (to match the representation) let y_contents = boolean::field_into_boolean_vec_le( cs.namespace(|| "pk_d bits of y"), pk_d.map(|e| e.1), )?; // Witness the sign bit let sign_bit = boolean::Boolean::from(boolean::AllocatedBit::alloc( cs.namespace(|| "pk_d bit of x"), pk_d.map(|e| e.0.into_repr().is_odd()), )?); // Extend the note with pk_d representation note_contents.extend(y_contents); note_contents.push(sign_bit); } assert_eq!( note_contents.len(), 64 + // value 256 + // g_d 256 // pk_d ); // Compute the hash of the note contents let mut cm = pedersen_hash::pedersen_hash( cs.namespace(|| "note content hash"), pedersen_hash::Personalization::NoteCommitment, &note_contents, self.params, )?; { // Booleanize the randomness let rcm = boolean::field_into_boolean_vec_le( cs.namespace(|| "rcm"), self.commitment_randomness, )?; // Compute the note 
commitment randomness in the exponent let rcm = ecc::fixed_base_multiplication( cs.namespace(|| "computation of commitment randomness"), FixedGenerators::NoteCommitmentRandomness, &rcm, self.params, )?; // Randomize our note commitment cm = cm.add( cs.namespace(|| "randomization of note commitment"), &rcm, self.params, )?; } // Only the x-coordinate of the output is revealed, // since we know it is prime order, and we know that // the x-coordinate is an injective encoding for // prime-order elements. cm.get_x().inputize(cs.namespace(|| "commitment"))?; Ok(()) } } #[test] fn test_input_circuit_with_bls12_381() { use bellman::gadgets::test::*; use ff::{BitIterator, Field}; use pairing::bls12_381::*; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::{ jubjub::{edwards, fs, JubjubBls12}, pedersen_hash, primitives::{Diversifier, Note, ProofGenerationKey}, }; let params = &JubjubBls12::new(); let rng = &mut XorShiftRng::from_seed([ 0x58, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let tree_depth = 32; for _ in 0..10 { let value_commitment = ValueCommitment { value: rng.next_u64(), randomness: fs::Fs::random(rng), }; let nsk = fs::Fs::random(rng); let ak = edwards::Point::rand(rng, params).mul_by_cofactor(params); let proof_generation_key = ProofGenerationKey { ak: ak.clone(), nsk: nsk.clone(), }; let viewing_key = proof_generation_key.to_viewing_key(params); let payment_address; loop { let diversifier = { let mut d = [0; 11]; rng.fill_bytes(&mut d); Diversifier(d) }; if let Some(p) = viewing_key.to_payment_address(diversifier, params) { payment_address = p; break; } } let g_d = payment_address.diversifier().g_d(params).unwrap(); let commitment_randomness = fs::Fs::random(rng); let auth_path = vec![Some((Fr::random(rng), rng.next_u32() % 2 != 0)); tree_depth]; let ar = fs::Fs::random(rng); { let rk = viewing_key.rk(ar, params).to_xy(); let expected_value_cm = 
value_commitment.cm(params).to_xy(); let note = Note { value: value_commitment.value, g_d: g_d.clone(), pk_d: payment_address.pk_d().clone(), r: commitment_randomness.clone(), }; let mut position = 0u64; let cm: Fr = note.cm(params); let mut cur = cm.clone(); for (i, val) in auth_path.clone().into_iter().enumerate() { let (uncle, b) = val.unwrap(); let mut lhs = cur; let mut rhs = uncle; if b { ::std::mem::swap(&mut lhs, &mut rhs); } let mut lhs: Vec<bool> = BitIterator::new(lhs.into_repr()).collect(); let mut rhs: Vec<bool> = BitIterator::new(rhs.into_repr()).collect(); lhs.reverse(); rhs.reverse(); cur = pedersen_hash::pedersen_hash::<Bls12, _>( pedersen_hash::Personalization::MerkleTree(i), lhs.into_iter() .take(Fr::NUM_BITS as usize) .chain(rhs.into_iter().take(Fr::NUM_BITS as usize)), params, ) .to_xy() .0; if b { position |= 1 << i; } } let expected_nf = note.nf(&viewing_key, position, params); let expected_nf = multipack::bytes_to_bits_le(&expected_nf); let expected_nf = multipack::compute_multipacking::<Bls12>(&expected_nf); assert_eq!(expected_nf.len(), 2); let mut cs = TestConstraintSystem::<Bls12>::new(); let instance = Spend { params, value_commitment: Some(value_commitment.clone()), proof_generation_key: Some(proof_generation_key.clone()), payment_address: Some(payment_address.clone()), commitment_randomness: Some(commitment_randomness), ar: Some(ar), auth_path: auth_path.clone(), anchor: Some(cur), }; instance.synthesize(&mut cs).unwrap(); assert!(cs.is_satisfied()); assert_eq!(cs.num_constraints(), 98777); assert_eq!( cs.hash(), "d37c738e83df5d9b0bb6495ac96abf21bcb2697477e2c15c2c7916ff7a3b6a89" ); assert_eq!(cs.get("randomization of note commitment/x3/num"), cm); assert_eq!(cs.num_inputs(), 8); assert_eq!(cs.get_input(0, "ONE"), Fr::one()); assert_eq!(cs.get_input(1, "rk/x/input variable"), rk.0); assert_eq!(cs.get_input(2, "rk/y/input variable"), rk.1); assert_eq!( cs.get_input(3, "value commitment/commitment point/x/input variable"), 
expected_value_cm.0 ); assert_eq!( cs.get_input(4, "value commitment/commitment point/y/input variable"), expected_value_cm.1 ); assert_eq!(cs.get_input(5, "anchor/input variable"), cur); assert_eq!(cs.get_input(6, "pack nullifier/input 0"), expected_nf[0]); assert_eq!(cs.get_input(7, "pack nullifier/input 1"), expected_nf[1]); } } } #[test] fn test_input_circuit_with_bls12_381_external_test_vectors() { use bellman::gadgets::test::*; use ff::{BitIterator, Field}; use pairing::bls12_381::*; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::{ jubjub::{edwards, fs, JubjubBls12}, pedersen_hash, primitives::{Diversifier, Note, ProofGenerationKey}, }; let params = &JubjubBls12::new(); let rng = &mut XorShiftRng::from_seed([ 0x59, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); let tree_depth = 32; let expected_cm_xs = vec![ "43821661663052659750276289184181083197337192946256245809816728673021647664276", "7220807656052227578299730541645543434083158611414003423211850718229633594616", "13239753550660714843257636471668037031928211668773449453628093339627668081697", "10900524635678389360790699587556574797582192824300145558807405770494079767974", "1411013767457690636461779630023011774660680126764323588543800715293173598850", "32334206652383066267661379202183359608706535021387905923603014648832344657662", "20206750741605167608500278423400565295188703622528437817438897624149653579380", "46716485782200334735478719487356079850582051575003452698983255860512578229998", "31221372899739042781372142393132358519434268512685538373976981051223051220367", "18269767207277008186871145355531741929166733260352590789136389380124992250945", ]; let expected_cm_ys = vec![ "27630722367128086497290371604583225252915685718989450292520883698391703910", "23310648738313092772044712773481584369462075017189681529702825235349449805260", "25709635353183537915646348052945798827495141780341329896098121888376871589480", 
"10516315852014492141081718791576479298042117442649432716255936672048164184691", "23970713991179488695004801139667700217127937225554773561645815034212389459772", "3256052161046564597126736968199320852691566092694819239485673781545479548450", "18887250722195819674378865377623103071236046274361890247643850134985809137409", "36501156873031641173054592888886902104303750771545647842488588827138867116570", "21927526310070011864833939629345235038589128172309792087590183778192091594775", "32959334601512756708397683646222389414681003290313255304927423560477040775488", ]; for i in 0..10 { let value_commitment = ValueCommitment { value: i, randomness: fs::Fs::from_str(&(1000 * (i + 1)).to_string()).unwrap(), }; let nsk = fs::Fs::random(rng); let ak = edwards::Point::rand(rng, params).mul_by_cofactor(params); let proof_generation_key = ProofGenerationKey { ak: ak.clone(), nsk: nsk.clone(), }; let viewing_key = proof_generation_key.to_viewing_key(params); let payment_address; loop { let diversifier = { let mut d = [0; 11]; rng.fill_bytes(&mut d); Diversifier(d) }; if let Some(p) = viewing_key.to_payment_address(diversifier, params) { payment_address = p; break; } } let g_d = payment_address.diversifier().g_d(params).unwrap(); let commitment_randomness = fs::Fs::random(rng); let auth_path = vec![Some((Fr::random(rng), rng.next_u32() % 2 != 0)); tree_depth]; let ar = fs::Fs::random(rng); { let rk = viewing_key.rk(ar, params).to_xy(); let expected_value_cm = value_commitment.cm(params).to_xy(); assert_eq!( expected_value_cm.0, Fr::from_str(&expected_cm_xs[i as usize]).unwrap() ); assert_eq!( expected_value_cm.1, Fr::from_str(&expected_cm_ys[i as usize]).unwrap() ); let note = Note { value: value_commitment.value, g_d: g_d.clone(), pk_d: payment_address.pk_d().clone(), r: commitment_randomness.clone(), }; let mut position = 0u64; let cm: Fr = note.cm(params); let mut cur = cm.clone(); for (i, val) in auth_path.clone().into_iter().enumerate() { let (uncle, b) = val.unwrap(); let mut 
lhs = cur; let mut rhs = uncle; if b { ::std::mem::swap(&mut lhs, &mut rhs); } let mut lhs: Vec<bool> = BitIterator::new(lhs.into_repr()).collect(); let mut rhs: Vec<bool> = BitIterator::new(rhs.into_repr()).collect(); lhs.reverse(); rhs.reverse(); cur = pedersen_hash::pedersen_hash::<Bls12, _>( pedersen_hash::Personalization::MerkleTree(i), lhs.into_iter() .take(Fr::NUM_BITS as usize) .chain(rhs.into_iter().take(Fr::NUM_BITS as usize)), params, ) .to_xy() .0; if b { position |= 1 << i; } } let expected_nf = note.nf(&viewing_key, position, params); let expected_nf = multipack::bytes_to_bits_le(&expected_nf); let expected_nf = multipack::compute_multipacking::<Bls12>(&expected_nf); assert_eq!(expected_nf.len(), 2); let mut cs = TestConstraintSystem::<Bls12>::new(); let instance = Spend { params: params, value_commitment: Some(value_commitment.clone()), proof_generation_key: Some(proof_generation_key.clone()), payment_address: Some(payment_address.clone()), commitment_randomness: Some(commitment_randomness), ar: Some(ar), auth_path: auth_path.clone(), anchor: Some(cur), }; instance.synthesize(&mut cs).unwrap(); assert!(cs.is_satisfied()); assert_eq!(cs.num_constraints(), 98777); assert_eq!( cs.hash(), "d37c738e83df5d9b0bb6495ac96abf21bcb2697477e2c15c2c7916ff7a3b6a89" ); assert_eq!(cs.get("randomization of note commitment/x3/num"), cm); assert_eq!(cs.num_inputs(), 8); assert_eq!(cs.get_input(0, "ONE"), Fr::one()); assert_eq!(cs.get_input(1, "rk/x/input variable"), rk.0); assert_eq!(cs.get_input(2, "rk/y/input variable"), rk.1); assert_eq!( cs.get_input(3, "value commitment/commitment point/x/input variable"), expected_value_cm.0 ); assert_eq!( cs.get_input(4, "value commitment/commitment point/y/input variable"), expected_value_cm.1 ); assert_eq!(cs.get_input(5, "anchor/input variable"), cur); assert_eq!(cs.get_input(6, "pack nullifier/input 0"), expected_nf[0]); assert_eq!(cs.get_input(7, "pack nullifier/input 1"), expected_nf[1]); } } } #[test] fn 
test_output_circuit_with_bls12_381() { use bellman::gadgets::test::*; use ff::Field; use pairing::bls12_381::*; use rand_core::{RngCore, SeedableRng}; use rand_xorshift::XorShiftRng; use zcash_primitives::{ jubjub::{edwards, fs, JubjubBls12}, primitives::{Diversifier, ProofGenerationKey}, }; let params = &JubjubBls12::new(); let rng = &mut XorShiftRng::from_seed([ 0x58, 0x62, 0xbe, 0x3d, 0x76, 0x3d, 0x31, 0x8d, 0x17, 0xdb, 0x37, 0x32, 0x54, 0x06, 0xbc, 0xe5, ]); for _ in 0..100 { let value_commitment = ValueCommitment { value: rng.next_u64(), randomness: fs::Fs::random(rng), }; let nsk = fs::Fs::random(rng); let ak = edwards::Point::rand(rng, params).mul_by_cofactor(params); let proof_generation_key = ProofGenerationKey { ak: ak.clone(), nsk: nsk.clone(), }; let viewing_key = proof_generation_key.to_viewing_key(params); let payment_address; loop { let diversifier = { let mut d = [0; 11]; rng.fill_bytes(&mut d); Diversifier(d) }; if let Some(p) = viewing_key.to_payment_address(diversifier, params) { payment_address = p; break; } } let commitment_randomness = fs::Fs::random(rng); let esk = fs::Fs::random(rng); { let mut cs = TestConstraintSystem::<Bls12>::new(); let instance = Output { params, value_commitment: Some(value_commitment.clone()), payment_address: Some(payment_address.clone()), commitment_randomness: Some(commitment_randomness), esk: Some(esk.clone()), }; instance.synthesize(&mut cs).unwrap(); assert!(cs.is_satisfied()); assert_eq!(cs.num_constraints(), 7827); assert_eq!( cs.hash(), "c26d5cdfe6ccd65c03390902c02e11393ea6bb96aae32a7f2ecb12eb9103faee" ); let expected_cm = payment_address .create_note(value_commitment.value, commitment_randomness, params) .expect("should be valid") .cm(params); let expected_value_cm = value_commitment.cm(params).to_xy(); let expected_epk = payment_address .g_d(params) .expect("should be valid") .mul(esk, params); let expected_epk_xy = expected_epk.to_xy(); assert_eq!(cs.num_inputs(), 6); assert_eq!(cs.get_input(0, "ONE"), 
Fr::one()); assert_eq!( cs.get_input(1, "value commitment/commitment point/x/input variable"), expected_value_cm.0 ); assert_eq!( cs.get_input(2, "value commitment/commitment point/y/input variable"), expected_value_cm.1 ); assert_eq!(cs.get_input(3, "epk/x/input variable"), expected_epk_xy.0); assert_eq!(cs.get_input(4, "epk/y/input variable"), expected_epk_xy.1); assert_eq!(cs.get_input(5, "commitment/input variable"), expected_cm); } } }
35.88786
97
0.56271
092151613d1b5b1309853265b8d810ced83179da
3,121
use std::sync::Arc; use anyhow::Result; use futures::prelude::*; use maplit::btreeset; use openraft::Config; use openraft::LeaderId; use openraft::LogId; use openraft::SnapshotPolicy; use openraft::State; use crate::fixtures::RaftRouter; /// Client write tests. /// /// What does this test do? /// /// - create a stable 3-node cluster. /// - write a lot of data to it. /// - assert that the cluster stayed stable and has all of the expected data. #[tokio::test(flavor = "multi_thread", worker_threads = 4)] async fn client_writes() -> Result<()> { let (_log_guard, ut_span) = init_ut!(); let _ent = ut_span.enter(); // Setup test dependencies. let config = Arc::new( Config { snapshot_policy: SnapshotPolicy::LogsSinceLast(2000), // The write load is heavy in this test, need a relatively long timeout. election_timeout_min: 500, election_timeout_max: 1000, ..Default::default() } .validate()?, ); let mut router = RaftRouter::new(config.clone()); router.new_raft_node(0).await; router.new_raft_node(1).await; router.new_raft_node(2).await; let mut log_index = 0; // Assert all nodes are in learner state & have no entries. router.wait_for_log(&btreeset![0, 1, 2], None, None, "empty").await?; router.wait_for_state(&btreeset![0, 1, 2], State::Learner, None, "empty").await?; router.assert_pristine_cluster().await; // Initialize the cluster, then assert that a stable cluster was formed & held. tracing::info!("--- initializing cluster"); router.initialize_from_single_node(0).await?; log_index += 1; router.wait_for_log(&btreeset![0, 1, 2], Some(log_index), None, "leader init log").await?; router.wait_for_state(&btreeset![0], State::Leader, None, "cluster leader").await?; router.wait_for_state(&btreeset![1, 2], State::Follower, None, "cluster follower").await?; router.assert_stable_cluster(Some(1), Some(log_index)).await; // Write a bunch of data and assert that the cluster stayes stable. 
let leader = router.leader().expect("leader not found"); let mut clients = futures::stream::FuturesUnordered::new(); clients.push(router.client_request_many(leader, "0", 500)); clients.push(router.client_request_many(leader, "1", 500)); clients.push(router.client_request_many(leader, "2", 500)); clients.push(router.client_request_many(leader, "3", 500)); clients.push(router.client_request_many(leader, "4", 500)); clients.push(router.client_request_many(leader, "5", 500)); while clients.next().await.is_some() {} log_index += 500 * 6; router.wait_for_log(&btreeset![0, 1, 2], Some(log_index), None, "sync logs").await?; router.assert_stable_cluster(Some(1), Some(log_index)).await; // The extra 1 is from the leader's initial commit entry. router .assert_storage_state( 1, log_index, Some(0), LogId::new(LeaderId::new(1, 0), log_index), Some(((1999..2100).into(), 1)), ) .await?; Ok(()) }
35.465909
123
0.652675
089908a7ad95f6d1b5ec8e8f640057c7527f907f
9,537
#![deny(rust_2018_idioms)] #![warn( missing_copy_implementations, missing_debug_implementations, missing_docs, clippy::explicit_iter_loop, clippy::use_self )] //! # influxdb2_client //! //! This is a Rust client to InfluxDB using the [2.0 API][2api]. //! //! [2api]: https://v2.docs.influxdata.com/v2.0/reference/api/ //! //! ## Work Remaining //! //! - Query //! - optional sync client //! - Influx 1.x API? //! - Other parts of the API //! - Pick the best name to use on crates.io and publish //! //! ## Quick start //! //! This example creates a client to an InfluxDB server running at `http://localhost:8888`, creates //! a bucket with the name "mybucket" in the organization with name "myorg" and //! ID "0000111100001111", builds two points, and writes the points to the //! bucket. //! //! ``` //! async fn example() -> Result<(), Box<dyn std::error::Error>> { //! use influxdb2_client::{Client, DataPoint}; //! use futures::stream; //! //! let org = "myorg"; //! let org_id = "0000111100001111"; //! let bucket = "mybucket"; //! //! let client = Client::new("http://localhost:8888", "some-token"); //! //! client.create_bucket(org_id, bucket).await?; //! //! let points = vec![ //! DataPoint::builder("cpu") //! .tag("host", "server01") //! .field("usage", 0.5) //! .build()?, //! DataPoint::builder("cpu") //! .tag("host", "server01") //! .tag("region", "us-west") //! .field("usage", 0.87) //! .build()?, //! ]; //! //! client.write(org, bucket, stream::iter(points)).await?; //! Ok(()) //! } //! ``` use bytes::BufMut; use futures::{Stream, StreamExt}; use reqwest::{Body, Method}; use serde::Serialize; use snafu::{ResultExt, Snafu}; use std::{ fmt, io::{self, Write}, }; pub mod data_point; pub use data_point::{DataPoint, FieldValue, WriteDataPoint}; /// Errors that occur while making requests to the Influx server. 
#[derive(Debug, Snafu)] pub enum RequestError { /// While making a request to the Influx server, the underlying `reqwest` /// library returned an error that was not an HTTP 400 or 500. #[snafu(display("Error while processing the HTTP request: {}", source))] ReqwestProcessing { /// The underlying error object from `reqwest`. source: reqwest::Error, }, /// The underlying `reqwest` library returned an HTTP error with code 400 /// (meaning a client error) or 500 (meaning a server error). #[snafu(display("HTTP request returned an error: {}, `{}`", status, text))] Http { /// The `StatusCode` returned from the request status: reqwest::StatusCode, /// Any text data returned from the request text: String, }, /// While serializing data as JSON to send in a request, the underlying /// `serde_json` library returned an error. #[snafu(display("Error while serializing to JSON: {}", source))] Serializing { /// The underlying error object from `serde_json`. source: serde_json::error::Error, }, } /// Client to a server supporting the InfluxData 2.0 API. #[derive(Debug, Clone)] pub struct Client { /// The base URL this client sends requests to pub url: String, auth_header: String, reqwest: reqwest::Client, } impl Client { /// Create a new client pointing to the URL specified in /// `protocol://server:port` format and using the specified token for /// authorization. /// /// # Example /// /// ``` /// let client = influxdb2_client::Client::new("http://localhost:8888", "my-token"); /// ``` pub fn new(url: impl Into<String>, auth_token: impl fmt::Display) -> Self { Self { url: url.into(), auth_header: format!("Token {}", auth_token), reqwest: reqwest::Client::new(), } } /// Consolidate common request building code fn request(&self, method: Method, url: &str) -> reqwest::RequestBuilder { self.reqwest .request(method, url) .header("Authorization", &self.auth_header) } /// Write line protocol data to the specified organization and bucket. 
pub async fn write_line_protocol( &self, org: &str, bucket: &str, body: impl Into<Body>, ) -> Result<(), RequestError> { let body = body.into(); let write_url = format!("{}/api/v2/write", self.url); let response = self .request(Method::POST, &write_url) .query(&[("bucket", bucket), ("org", org)]) .body(body) .send() .await .context(ReqwestProcessing)?; if !response.status().is_success() { let status = response.status(); let text = response.text().await.context(ReqwestProcessing)?; Http { status, text }.fail()?; } Ok(()) } /// Write a `Stream` of `DataPoint`s to the specified organization and /// bucket. pub async fn write( &self, org: &str, bucket: &str, body: impl Stream<Item = impl WriteDataPoint> + Send + Sync + 'static, ) -> Result<(), RequestError> { let mut buffer = bytes::BytesMut::new(); let body = body.map(move |point| { let mut w = (&mut buffer).writer(); point.write_data_point_to(&mut w)?; w.flush()?; Ok::<_, io::Error>(buffer.split().freeze()) }); let body = Body::wrap_stream(body); Ok(self.write_line_protocol(org, bucket, body).await?) } /// Create a new bucket in the organization specified by the 16-digit /// hexadecimal `org_id` and with the bucket name `bucket`. pub async fn create_bucket(&self, org_id: &str, bucket: &str) -> Result<(), RequestError> { let create_bucket_url = format!("{}/api/v2/buckets", self.url); #[derive(Serialize, Debug, Default)] struct CreateBucketInfo { #[serde(rename = "orgID")] org_id: String, name: String, #[serde(rename = "retentionRules")] // The type of `retentionRules` isn't `String`; this is included and always set to // an empty vector to be compatible with the Influx 2.0 API where `retentionRules` is // a required parameter. InfluxDB IOx ignores this parameter. retention_rules: Vec<String>, } let body = CreateBucketInfo { org_id: org_id.into(), name: bucket.into(), ..Default::default() }; let response = self .request(Method::POST, &create_bucket_url) .body(serde_json::to_string(&body).context(Serializing)?) 
.send() .await .context(ReqwestProcessing)?; if !response.status().is_success() { let status = response.status(); let text = response.text().await.context(ReqwestProcessing)?; Http { status, text }.fail()?; } Ok(()) } } #[cfg(test)] mod tests { use super::*; use futures::stream; use mockito::mock; type Error = Box<dyn std::error::Error>; type Result<T = (), E = Error> = std::result::Result<T, E>; #[tokio::test] async fn writing_points() -> Result { let org = "some-org"; let bucket = "some-bucket"; let token = "some-token"; let mock_server = mock( "POST", format!("/api/v2/write?bucket={}&org={}", bucket, org).as_str(), ) .match_header("Authorization", format!("Token {}", token).as_str()) .match_body( "\ cpu,host=server01 usage=0.5 cpu,host=server01,region=us-west usage=0.87 ", ) .create(); let client = Client::new(&mockito::server_url(), token); let points = vec![ DataPoint::builder("cpu") .tag("host", "server01") .field("usage", 0.5) .build()?, DataPoint::builder("cpu") .tag("host", "server01") .tag("region", "us-west") .field("usage", 0.87) .build()?, ]; // If the requests made are incorrect, Mockito returns status 501 and `write` // will return an error, which causes the test to fail here instead of // when we assert on mock_server. The error messages that Mockito // provides are much clearer for explaining why a test failed than just // that the server returned 501, so don't use `?` here. 
let _result = client.write(org, bucket, stream::iter(points)).await; mock_server.assert(); Ok(()) } #[tokio::test] async fn create_bucket() -> Result { let org_id = "0000111100001111"; let bucket = "some-bucket"; let token = "some-token"; let mock_server = mock("POST", "/api/v2/buckets") .match_header("Authorization", format!("Token {}", token).as_str()) .match_body( format!( r#"{{"orgID":"{}","name":"{}","retentionRules":[]}}"#, org_id, bucket ) .as_str(), ) .create(); let client = Client::new(&mockito::server_url(), token); let _result = client.create_bucket(org_id, bucket).await; mock_server.assert(); Ok(()) } }
31.371711
99
0.559505
3801efd8f1248581d733ecea889da62f6a026d53
5,160
/* * Copyright 2020 Fluence Labs Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use super::behaviour::ServerBehaviour; use config_utils::to_peer_id; use fluence_libp2p::{build_transport, types::OneshotOutlet}; use server_config::{BehaviourConfig, ServerConfig}; use trust_graph::TrustGraph; use anyhow::Context; use async_std::task; use futures::{channel::oneshot, future::BoxFuture, select, stream::StreamExt, FutureExt}; use libp2p::{ core::{multiaddr::Protocol, Multiaddr}, identity::ed25519::Keypair, Swarm, TransportError, }; use prometheus::Registry; use std::io; use std::net::SocketAddr; // TODO: documentation pub struct Server { swarm: Swarm<ServerBehaviour>, config: ServerConfig, registry: Registry, } impl Server { pub fn new(key_pair: Keypair, config: ServerConfig) -> anyhow::Result<Box<Self>> { let ServerConfig { socket_timeout, .. 
} = config; let local_peer_id = to_peer_id(&key_pair); log::info!("server peer id = {}", local_peer_id); let trust_graph = TrustGraph::new(config.root_weights()); let registry = Registry::new(); let mut swarm = { let config = BehaviourConfig::new(trust_graph, Some(&registry), key_pair.clone(), &config); let behaviour = ServerBehaviour::new(config).context("failed to crate ServerBehaviour")?; let key_pair = libp2p::identity::Keypair::Ed25519(key_pair); let transport = build_transport(key_pair, socket_timeout); Swarm::new(transport, behaviour, local_peer_id) }; // Add external addresses to Swarm config .external_addresses() .into_iter() .for_each(|addr| Swarm::add_external_address(&mut swarm, addr)); let node_service = Self { swarm, config, registry, }; Ok(Box::new(node_service)) } /// Starts node service pub fn start(mut self: Box<Self>) -> OneshotOutlet<()> { let (exit_outlet, exit_inlet) = oneshot::channel(); let mut exit_inlet = exit_inlet.into_stream().fuse(); self.listen().expect("Error on starting node listener"); self.swarm.dial_bootstrap_nodes(); task::spawn(async move { let mut metrics = Self::start_metrics_endpoint( self.registry, SocketAddr::new(self.config.listen_ip, self.config.prometheus_port), ) .fuse(); loop { select!( _ = self.swarm.select_next_some() => {}, _ = metrics => {}, _ = exit_inlet.next() => { break } ) } }); exit_outlet } pub fn start_metrics_endpoint( registry: Registry, listen_addr: SocketAddr, ) -> BoxFuture<'static, io::Result<()>> { use prometheus::{Encoder, TextEncoder}; use tide::{Error, StatusCode::InternalServerError}; let mut app = tide::with_state(registry); app.at("/metrics") .get(|req: tide::Request<Registry>| async move { let mut buffer = vec![]; let encoder = TextEncoder::new(); let metric_families = req.state().gather(); encoder .encode(&metric_families, &mut buffer) .map_err(|err| { let msg = format!("Error encoding prometheus metrics: {:?}", err); log::warn!("{}", msg); Error::from_str(InternalServerError, msg) })?; 
String::from_utf8(buffer).map_err(|err| { let msg = format!("Error encoding prometheus metrics: {:?}", err); log::warn!("{}", msg); Error::from_str(InternalServerError, msg) }) }); app.listen(listen_addr).boxed() } /// Starts node service listener. #[inline] fn listen(&mut self) -> Result<(), TransportError<io::Error>> { let mut tcp = Multiaddr::from(self.config.listen_ip); tcp.push(Protocol::Tcp(self.config.tcp_port)); let mut ws = Multiaddr::from(self.config.listen_ip); ws.push(Protocol::Tcp(self.config.websocket_port)); ws.push(Protocol::Ws("/".into())); log::info!("Fluence listening on {} and {}", tcp, ws); Swarm::listen_on(&mut self.swarm, tcp)?; Swarm::listen_on(&mut self.swarm, ws)?; Ok(()) } }
33.076923
94
0.57655
d71f024ce6ff32e2f62eb635e8af0af21df57ade
45
mod nu; mod tree; pub use tree::TreeViewer;
9
25
0.711111
230c28e62445a772446a596f3d5f41a5c2b21864
1,156
use std::{ fs, path::{Path, PathBuf}, }; use crate::{lexer::run_lexer, parser::parse_tokens, GroupsMap, TsmlResult}; #[derive(Debug, Clone)] pub struct Groups { pub map: GroupsMap, pub info: GroupsInfo, } // Collect the start of the file to retrieve as GroupsInfo fn get_file_header(text: &str) -> String { text.lines().take_while(|line| line.starts_with("//") || line.is_empty()).collect::<String>() } impl Groups { pub fn from_text(text: &str) -> TsmlResult<Self> { let tokens = run_lexer(text); parse_tokens(tokens, text).map(|(map, groups_order)| Groups { map, info: GroupsInfo { file_path: None, file_header: get_file_header(text), groups_order }, }) } pub fn from_path(path: impl AsRef<Path>) -> TsmlResult<Self> { let text = fs::read_to_string(path.as_ref())?; let mut result = Self::from_text(&text)?; result.info.file_path = Some(path.as_ref().to_path_buf()); Ok(result) } } #[derive(Debug, Clone)] pub struct GroupsInfo { pub file_path: Option<PathBuf>, pub file_header: String, pub groups_order: Vec<String>, }
27.52381
99
0.633218
d6f3cfd06e0d55ee0d631a5caf404c200ab76a4a
2,811
use std::io::{Read, Seek, SeekFrom, Write}; use serde::{Serialize}; use crate::mp4box::*; use crate::mp4box::{hdlr::HdlrBox, mdhd::MdhdBox, minf::MinfBox}; #[derive(Debug, Clone, PartialEq, Default, Serialize)] pub struct MdiaBox { pub mdhd: MdhdBox, pub hdlr: HdlrBox, pub minf: MinfBox, } impl MdiaBox { pub fn get_type(&self) -> BoxType { BoxType::MdiaBox } pub fn get_size(&self) -> u64 { HEADER_SIZE + self.mdhd.box_size() + self.hdlr.box_size() + self.minf.box_size() } } impl Mp4Box for MdiaBox { fn box_type(&self) -> BoxType { return self.get_type(); } fn box_size(&self) -> u64 { return self.get_size(); } fn to_json(&self) -> Result<String> { Ok(serde_json::to_string(&self).unwrap()) } fn summary(&self) -> Result<String> { let s = format!(""); Ok(s) } } impl<R: Read + Seek> ReadBox<&mut R> for MdiaBox { fn read_box(reader: &mut R, size: u64) -> Result<Self> { let start = box_start(reader)?; let mut mdhd = None; let mut hdlr = None; let mut minf = None; let mut current = reader.seek(SeekFrom::Current(0))?; let end = start + size; while current < end { // Get box header. 
let header = BoxHeader::read(reader)?; let BoxHeader { name, size: s } = header; match name { BoxType::MdhdBox => { mdhd = Some(MdhdBox::read_box(reader, s)?); } BoxType::HdlrBox => { hdlr = Some(HdlrBox::read_box(reader, s)?); } BoxType::MinfBox => { minf = Some(MinfBox::read_box(reader, s)?); } _ => { // XXX warn!() skip_box(reader, s)?; } } current = reader.seek(SeekFrom::Current(0))?; } if mdhd.is_none() { return Err(Error::BoxNotFound(BoxType::MdhdBox)); } if hdlr.is_none() { return Err(Error::BoxNotFound(BoxType::HdlrBox)); } if minf.is_none() { return Err(Error::BoxNotFound(BoxType::MinfBox)); } skip_bytes_to(reader, start + size)?; Ok(MdiaBox { mdhd: mdhd.unwrap(), hdlr: hdlr.unwrap(), minf: minf.unwrap(), }) } } impl<W: Write> WriteBox<&mut W> for MdiaBox { fn write_box(&self, writer: &mut W) -> Result<u64> { let size = self.box_size(); BoxHeader::new(self.box_type(), size).write(writer)?; self.mdhd.write_box(writer)?; self.hdlr.write_box(writer)?; self.minf.write_box(writer)?; Ok(size) } }
25.788991
88
0.506937
72f792fbdf71ce72e91c250b333018b4a6fc1ab2
1,107
// Copyright 2016 PingCAP, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // See the License for the specific language governing permissions and // limitations under the License. pub use util::codec::{Error, Result}; const TEN_POW: &'static [u32] = &[ 1, 10, 100, 1_000, 10_000, 100_000, 1_000_000, 10_000_000, 100_000_000, 1_000_000_000, ]; /// A shortcut to box an error. macro_rules! invalid_type { ($e:expr) => ({ use util::codec::Error; Error::InvalidDataType(($e).into()) }); ($f:tt, $($arg:expr),+) => ({ use util::codec::Error; Error::InvalidDataType(format!($f, $($arg),+)) }); } pub mod datum; pub mod table; pub mod convert; pub mod mysql; pub use self::datum::Datum;
23.553191
70
0.638663
646f0fca12af337cb10209ec0d9d31c9d29616e4
3,511
use std::cell::RefCell; use std::fmt::Display; use std::rc::Rc; type NodeRef<T> = Rc<RefCell<Node<T>>>; struct LinkedList<T> { head: Option<NodeRef<T>>, } struct Node<T> { data: T, next: Option<NodeRef<T>>, } struct Iter<T> { next: Option<NodeRef<T>>, } impl<T> Node<T> { fn tail(node: &NodeRef<T>) -> Option<NodeRef<T>> { if let Some(cur) = node.borrow().next.as_ref().cloned() { return Node::tail(&cur.clone()); } Some(node.clone()) } } impl<T> LinkedList<T> where T: std::cmp::Eq, T: std::hash::Hash, T: std::clone::Clone, { fn new() -> Self { Self { head: None } } fn append(&mut self, new_value: T) { if let Some(tail) = self.tail() { tail.borrow_mut().next = Some(Rc::new(RefCell::new(Node { data: new_value, next: None, }))); } else { self.head = Some(Rc::new(RefCell::new(Node { data: new_value, next: None, }))); } } fn tail(&self) -> Option<NodeRef<T>> { if let Some(cur) = self.head.as_ref().cloned() { if cur.borrow().next.is_none() { return Some(cur.clone()); } else { return Node::tail(&cur.clone()); } } None } fn remove(&mut self, node_to_remove: &NodeRef<T>) { for node in self.iter() { let mut borrowed_node = node.borrow_mut(); if let Some(next) = borrowed_node.next.as_ref().cloned() { if Rc::ptr_eq(&next, node_to_remove) { borrowed_node.next = node_to_remove.borrow_mut().next.take() } } } } fn iter(&self) -> Iter<T> { Iter { next: self.head.as_ref().cloned(), } } } impl<'a, T> Iterator for Iter<T> { type Item = NodeRef<T>; fn next(&mut self) -> Option<Self::Item> { if let Some(cur) = self.next.as_ref().cloned() { self.next = cur.borrow().next.clone(); return Some(cur.clone()); } None } } impl<T: Display> Display for LinkedList<T> { fn fmt(&self, w: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> { write!(w, "[")?; let mut node = self.head.clone(); while let Some(n) = node { write!(w, "{}", n.borrow().data)?; node = n.borrow().next.clone(); if node.is_some() { write!(w, ", ")?; } } write!(w, "]") } } #[cfg(test)] mod tests { use super::*; 
#[test] fn test_remove_node() { let mut list1 = LinkedList::<String>::new(); list1.append(String::from("item1")); list1.append(String::from("item2")); list1.append(String::from("item3")); list1.append(String::from("item4")); list1.append(String::from("item5")); for (n, node) in list1.iter().enumerate() { if n == 3 { let to_remove = Some(node.clone()); list1.remove(&to_remove.unwrap()); } } for node in list1.iter() { assert_ne!(node.borrow().data, "item4"); } } } fn main() { let mut list = LinkedList::<String>::new(); list.append(String::from("item1")); list.append(String::from("item2")); for node in list.iter() { list.remove(&node.clone()); } }
24.552448
92
0.482199
e6fc913dc7a8b5c19d3f1e7c7d8d14bb1d057d6a
114
mod cpu; mod opcode; mod render; mod screen; pub use cpu::Cpu; pub use render::Renderer; pub use screen::Screen;
12.666667
25
0.72807
bf1969aab32fa9ce9d17c0f6baea6beaa218cc56
1,225
// Copyright (c) 2016 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::path::Path; use common::ui::UI; use hcore::crypto::BoxKeyPair; use hcore::service::ServiceGroup; use error::Result; pub fn start(ui: &mut UI, org: &str, service_group: &ServiceGroup, cache: &Path) -> Result<()> { try!(ui.begin(format!( "Generating service key for {} in {}", &service_group, org ))); let pair = try!(BoxKeyPair::generate_pair_for_service( org, &service_group.to_string(), cache, )); try!(ui.end(format!( "Generated service key pair {}.", &pair.name_with_rev() ))); Ok(()) }
30.625
96
0.665306
abca73fe6fbf54568353f68b52dc63f1fce6c7e3
2,496
#[doc = "Reader of register INTRAWS"] pub type R = crate::R<u32, super::INTRAWS>; #[doc = "Reader of field `INTRAWS0`"] pub type INTRAWS0_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS1`"] pub type INTRAWS1_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS2`"] pub type INTRAWS2_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS3`"] pub type INTRAWS3_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS4`"] pub type INTRAWS4_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS5`"] pub type INTRAWS5_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS6`"] pub type INTRAWS6_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS7`"] pub type INTRAWS7_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS8`"] pub type INTRAWS8_R = crate::R<bool, bool>; #[doc = "Reader of field `INTRAWS9`"] pub type INTRAWS9_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - INTRAWS0"] #[inline(always)] pub fn intraws0(&self) -> INTRAWS0_R { INTRAWS0_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - INTRAWS1"] #[inline(always)] pub fn intraws1(&self) -> INTRAWS1_R { INTRAWS1_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - INTRAWS2"] #[inline(always)] pub fn intraws2(&self) -> INTRAWS2_R { INTRAWS2_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - INTRAWS3"] #[inline(always)] pub fn intraws3(&self) -> INTRAWS3_R { INTRAWS3_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - INTRAWS4"] #[inline(always)] pub fn intraws4(&self) -> INTRAWS4_R { INTRAWS4_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - INTRAWS5"] #[inline(always)] pub fn intraws5(&self) -> INTRAWS5_R { INTRAWS5_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - INTRAWS6"] #[inline(always)] pub fn intraws6(&self) -> INTRAWS6_R { INTRAWS6_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - INTRAWS7"] #[inline(always)] pub fn intraws7(&self) -> INTRAWS7_R { INTRAWS7_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 
- INTRAWS8"] #[inline(always)] pub fn intraws8(&self) -> INTRAWS8_R { INTRAWS8_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - INTRAWS9"] #[inline(always)] pub fn intraws9(&self) -> INTRAWS9_R { INTRAWS9_R::new(((self.bits >> 9) & 0x01) != 0) } }
33.28
55
0.56851
89eb63a5f5386fe7cdb11ff5f925abfadc3d316a
62
use bevy::prelude::*; fn main() { App::build().run(); }
8.857143
23
0.5
1c429e7252ee9bd84469782234aa017dafe3a7f1
363
use ast; use core; pub fn to_qualified_name(name: ast::Name) -> Option<core::Name> { let components = match name.repr { ast::NameKind::Qualified(components) => components, ast::NameKind::Unqualified(s) => vec![s], _ => return None, }; Some(core::Name::Qual { components: components, span: name.span, }) }
22.6875
65
0.584022
bbbeba97af9159d05fb7130e7aa7568755190163
24,738
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::ANSELB { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct ANSB0R { bits: bool, } impl ANSB0R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB1R { bits: bool, } impl ANSB1R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB2R { bits: bool, } impl ANSB2R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn 
bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB3R { bits: bool, } impl ANSB3R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB4R { bits: bool, } impl ANSB4R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB5R { bits: bool, } impl ANSB5R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB6R { bits: bool, } impl ANSB6R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB7R { bits: bool, } impl ANSB7R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn 
bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB8R { bits: bool, } impl ANSB8R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB9R { bits: bool, } impl ANSB9R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB10R { bits: bool, } impl ANSB10R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB11R { bits: bool, } impl ANSB11R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB12R { bits: bool, } impl ANSB12R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn 
bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB13R { bits: bool, } impl ANSB13R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB14R { bits: bool, } impl ANSB14R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Value of the field"] pub struct ANSB15R { bits: bool, } impl ANSB15R { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bit(&self) -> bool { self.bits } #[doc = r" Returns `true` if the bit is clear (0)"] #[inline] pub fn bit_is_clear(&self) -> bool { !self.bit() } #[doc = r" Returns `true` if the bit is set (1)"] #[inline] pub fn bit_is_set(&self) -> bool { self.bit() } } #[doc = r" Proxy"] pub struct _ANSB0W<'a> { w: &'a mut W, } impl<'a> _ANSB0W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB1W<'a> { w: &'a mut W, } impl<'a> _ANSB1W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { 
self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 1; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB2W<'a> { w: &'a mut W, } impl<'a> _ANSB2W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 2; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB3W<'a> { w: &'a mut W, } impl<'a> _ANSB3W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 3; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB4W<'a> { w: &'a mut W, } impl<'a> _ANSB4W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 4; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB5W<'a> { w: &'a mut W, } impl<'a> _ANSB5W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn 
clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 5; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB6W<'a> { w: &'a mut W, } impl<'a> _ANSB6W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 6; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB7W<'a> { w: &'a mut W, } impl<'a> _ANSB7W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 7; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB8W<'a> { w: &'a mut W, } impl<'a> _ANSB8W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 8; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB9W<'a> { w: &'a mut W, } impl<'a> _ANSB9W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" 
Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 9; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB10W<'a> { w: &'a mut W, } impl<'a> _ANSB10W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 10; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB11W<'a> { w: &'a mut W, } impl<'a> _ANSB11W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 11; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB12W<'a> { w: &'a mut W, } impl<'a> _ANSB12W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 12; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB13W<'a> { w: &'a mut W, } impl<'a> _ANSB13W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a 
mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 13; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB14W<'a> { w: &'a mut W, } impl<'a> _ANSB14W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 14; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } #[doc = r" Proxy"] pub struct _ANSB15W<'a> { w: &'a mut W, } impl<'a> _ANSB15W<'a> { #[doc = r" Sets the field bit"] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r" Clears the field bit"] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r" Writes raw bits to the field"] #[inline] pub fn bit(self, value: bool) -> &'a mut W { const MASK: bool = true; const OFFSET: u8 = 15; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bit 0"] #[inline] pub fn ansb0(&self) -> ANSB0R { let bits = { const MASK: bool = true; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB0R { bits } } #[doc = "Bit 1"] #[inline] pub fn ansb1(&self) -> ANSB1R { let bits = { const MASK: bool = true; const OFFSET: u8 = 1; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB1R { bits } } #[doc = "Bit 2"] #[inline] pub fn ansb2(&self) -> ANSB2R { let bits = { const MASK: bool = true; const OFFSET: u8 = 2; ((self.bits >> OFFSET) & 
MASK as u32) != 0 }; ANSB2R { bits } } #[doc = "Bit 3"] #[inline] pub fn ansb3(&self) -> ANSB3R { let bits = { const MASK: bool = true; const OFFSET: u8 = 3; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB3R { bits } } #[doc = "Bit 4"] #[inline] pub fn ansb4(&self) -> ANSB4R { let bits = { const MASK: bool = true; const OFFSET: u8 = 4; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB4R { bits } } #[doc = "Bit 5"] #[inline] pub fn ansb5(&self) -> ANSB5R { let bits = { const MASK: bool = true; const OFFSET: u8 = 5; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB5R { bits } } #[doc = "Bit 6"] #[inline] pub fn ansb6(&self) -> ANSB6R { let bits = { const MASK: bool = true; const OFFSET: u8 = 6; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB6R { bits } } #[doc = "Bit 7"] #[inline] pub fn ansb7(&self) -> ANSB7R { let bits = { const MASK: bool = true; const OFFSET: u8 = 7; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB7R { bits } } #[doc = "Bit 8"] #[inline] pub fn ansb8(&self) -> ANSB8R { let bits = { const MASK: bool = true; const OFFSET: u8 = 8; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB8R { bits } } #[doc = "Bit 9"] #[inline] pub fn ansb9(&self) -> ANSB9R { let bits = { const MASK: bool = true; const OFFSET: u8 = 9; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB9R { bits } } #[doc = "Bit 10"] #[inline] pub fn ansb10(&self) -> ANSB10R { let bits = { const MASK: bool = true; const OFFSET: u8 = 10; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB10R { bits } } #[doc = "Bit 11"] #[inline] pub fn ansb11(&self) -> ANSB11R { let bits = { const MASK: bool = true; const OFFSET: u8 = 11; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB11R { bits } } #[doc = "Bit 12"] #[inline] pub fn ansb12(&self) -> ANSB12R { let bits = { const MASK: bool = true; const OFFSET: u8 = 12; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB12R { bits } } #[doc = "Bit 13"] #[inline] pub fn ansb13(&self) -> ANSB13R { let bits = { const MASK: bool = true; const OFFSET: 
u8 = 13; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB13R { bits } } #[doc = "Bit 14"] #[inline] pub fn ansb14(&self) -> ANSB14R { let bits = { const MASK: bool = true; const OFFSET: u8 = 14; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB14R { bits } } #[doc = "Bit 15"] #[inline] pub fn ansb15(&self) -> ANSB15R { let bits = { const MASK: bool = true; const OFFSET: u8 = 15; ((self.bits >> OFFSET) & MASK as u32) != 0 }; ANSB15R { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 65535 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bit 0"] #[inline] pub fn ansb0(&mut self) -> _ANSB0W { _ANSB0W { w: self } } #[doc = "Bit 1"] #[inline] pub fn ansb1(&mut self) -> _ANSB1W { _ANSB1W { w: self } } #[doc = "Bit 2"] #[inline] pub fn ansb2(&mut self) -> _ANSB2W { _ANSB2W { w: self } } #[doc = "Bit 3"] #[inline] pub fn ansb3(&mut self) -> _ANSB3W { _ANSB3W { w: self } } #[doc = "Bit 4"] #[inline] pub fn ansb4(&mut self) -> _ANSB4W { _ANSB4W { w: self } } #[doc = "Bit 5"] #[inline] pub fn ansb5(&mut self) -> _ANSB5W { _ANSB5W { w: self } } #[doc = "Bit 6"] #[inline] pub fn ansb6(&mut self) -> _ANSB6W { _ANSB6W { w: self } } #[doc = "Bit 7"] #[inline] pub fn ansb7(&mut self) -> _ANSB7W { _ANSB7W { w: self } } #[doc = "Bit 8"] #[inline] pub fn ansb8(&mut self) -> _ANSB8W { _ANSB8W { w: self } } #[doc = "Bit 9"] #[inline] pub fn ansb9(&mut self) -> _ANSB9W { _ANSB9W { w: self } } #[doc = "Bit 10"] #[inline] pub fn ansb10(&mut self) -> _ANSB10W { _ANSB10W { w: self } } #[doc = "Bit 11"] #[inline] pub fn ansb11(&mut self) -> _ANSB11W { _ANSB11W { w: self } } #[doc = "Bit 12"] #[inline] pub fn ansb12(&mut self) -> _ANSB12W { _ANSB12W { w: self } } #[doc = "Bit 13"] #[inline] pub fn ansb13(&mut self) -> _ANSB13W { _ANSB13W { w: self } } #[doc = "Bit 14"] #[inline] pub fn ansb14(&mut self) -> _ANSB14W { _ANSB14W { w: 
self } } #[doc = "Bit 15"] #[inline] pub fn ansb15(&mut self) -> _ANSB15W { _ANSB15W { w: self } } }
24.517344
59
0.48472
2f6f83b85ddcf73437813fdce3cb9c7e23adeba0
6,181
use crate::b2_math::*; use crate::b2_settings::*; use crate::b2_time_step::*; use crate::joints::b2_motor_joint::*; // Point-to-point constraint // cdot = v2 - v1 // = v2 + cross(w2, r2) - v1 - cross(w1, r1) // J = [-i -r1_skew i r2_skew ] // Identity used: // w k % (rx i + ry j) = w * (-ry i + rx j) // // r1 = offset - c1 // r2 = -c2 // Angle constraint // cdot = w2 - w1 // J = [0 0 -1 0 0 1] // k = invI1 + invI2 pub(crate) fn init_velocity_constraints<D: UserDataType>( this: &mut B2motorJoint<D>, data: &mut B2solverData, positions: &mut [B2position], velocities: &mut [B2velocity], ) { { let m_body_a = this.base.m_body_a.borrow(); let m_body_b = this.base.m_body_b.borrow(); this.m_index_a = m_body_a.m_island_index; this.m_index_b = m_body_b.m_island_index; this.m_local_center_a = m_body_a.m_sweep.local_center; this.m_local_center_b = m_body_b.m_sweep.local_center; this.m_inv_mass_a = m_body_a.m_inv_mass; this.m_inv_mass_b = m_body_b.m_inv_mass; this.m_inv_ia = m_body_a.m_inv_i; this.m_inv_ib = m_body_b.m_inv_i; } let c_a: B2vec2 = positions[this.m_index_a as usize].c; let a_a: f32 = positions[this.m_index_a as usize].a; let mut v_a: B2vec2 = velocities[this.m_index_a as usize].v; let mut w_a: f32 = velocities[this.m_index_a as usize].w; let c_b: B2vec2 = positions[this.m_index_b as usize].c; let a_b: f32 = positions[this.m_index_b as usize].a; let mut v_b: B2vec2 = velocities[this.m_index_b as usize].v; let mut w_b: f32 = velocities[this.m_index_b as usize].w; let (q_a, q_b) = (B2Rot::new(a_a), B2Rot::new(a_b)); // Compute the effective mass matrix. 
this.m_r_a = b2_mul_rot_by_vec2(q_a, this.m_linear_offset - this.m_local_center_a); this.m_r_b = b2_mul_rot_by_vec2(q_b, -this.m_local_center_b); // J = [-i -r1_skew i r2_skew] // r_skew = [-ry; rx] // Matlab // k = [ m_a+r1y^2*i_a+m_b+r2y^2*i_b, -r1y*i_a*r1x-r2y*i_b*r2x, -r1y*i_a-r2y*i_b] // [ -r1y*i_a*r1x-r2y*i_b*r2x, m_a+r1x^2*i_a+m_b+r2x^2*i_b, r1x*i_a+r2x*i_b] // [ -r1y*i_a-r2y*i_b, r1x*i_a+r2x*i_b, i_a+i_b] let m_a: f32 = this.m_inv_mass_a; let m_b: f32 = this.m_inv_mass_b; let i_a: f32 = this.m_inv_ia; let i_b: f32 = this.m_inv_ib; // Upper 2 by 2 of k for point to point let mut k = B2Mat22::default(); k.ex.x = m_a + m_b + i_a * this.m_r_a.y * this.m_r_a.y + i_b * this.m_r_b.y * this.m_r_b.y; k.ex.y = -i_a * this.m_r_a.x * this.m_r_a.y - i_b * this.m_r_b.x * this.m_r_b.y; k.ey.x = k.ex.y; k.ey.y = m_a + m_b + i_a * this.m_r_a.x * this.m_r_a.x + i_b * this.m_r_b.x * this.m_r_b.x; this.m_linear_mass = k.get_inverse(); this.m_angular_mass = i_a + i_b; if this.m_angular_mass > 0.0 { this.m_angular_mass = 1.0 / this.m_angular_mass; } this.m_linear_error = c_b + this.m_r_b - c_a - this.m_r_a; this.m_angular_error = a_b - a_a - this.m_angular_offset; if data.step.warm_starting { // Scale impulses to support a variable time step. 
this.m_linear_impulse *= data.step.dt_ratio; this.m_angular_impulse *= data.step.dt_ratio; let p = B2vec2::new(this.m_linear_impulse.x, this.m_linear_impulse.y); v_a -= m_a * p; w_a -= i_a * (b2_cross(this.m_r_a, p) + this.m_angular_impulse); v_b += m_b * p; w_b += i_b * (b2_cross(this.m_r_b, p) + this.m_angular_impulse); } else { this.m_linear_impulse.set_zero(); this.m_angular_impulse = 0.0; } velocities[this.m_index_a as usize].v = v_a; velocities[this.m_index_a as usize].w = w_a; velocities[this.m_index_b as usize].v = v_b; velocities[this.m_index_b as usize].w = w_b; } pub(crate) fn solve_velocity_constraints<D: UserDataType>( this: &mut B2motorJoint<D>, data: &mut B2solverData, velocities: &mut [B2velocity], ) { let B2velocity { v: mut v_a, w: mut w_a, } = velocities[this.m_index_a as usize]; // let mut v_a: B2vec2 =velocities[this.m_index_a as usize].v; // let mut w_a: f32 =velocities[this.m_index_a as usize].w; //TODO_humman - optimize others like this let B2velocity { v: mut v_b, w: mut w_b, } = velocities[this.m_index_b as usize]; // let mut v_b: B2vec2 =velocities[this.m_index_b as usize].v; // let mut w_b: f32 =velocities[this.m_index_b as usize].w; //TODO_humman - optimize others like this let B2motorJoint { m_inv_mass_a: m_a, m_inv_mass_b: m_b, m_inv_ia: i_a, m_inv_ib: i_b, .. 
} = *this; // let m_a:f32 = this.m_inv_mass_a; // let m_b: f32 =this.m_inv_mass_b; // let i_a: f32 = this.m_inv_ia; // let i_b: f32 =this.m_inv_ib; let h: f32 = data.step.dt; let inv_h: f32 = data.step.inv_dt; // solve angular friction { let cdot: f32 = w_b - w_a + inv_h * this.m_correction_factor * this.m_angular_error; let mut impulse: f32 = -this.m_angular_mass * cdot; let old_impulse: f32 = this.m_angular_impulse; let max_impulse: f32 = h * this.m_max_torque; this.m_angular_impulse = b2_clamp(this.m_angular_impulse + impulse, -max_impulse, max_impulse); impulse = this.m_angular_impulse - old_impulse; w_a -= i_a * impulse; w_b += i_b * impulse; } // solve linear friction { let cdot: B2vec2 = v_b + b2_cross_scalar_by_vec(w_b, this.m_r_b) - v_a - b2_cross_scalar_by_vec(w_a, this.m_r_a) + inv_h * this.m_correction_factor * this.m_linear_error; let mut impulse: B2vec2 = -b2_mul(this.m_linear_mass, cdot); let old_impulse: B2vec2 = this.m_linear_impulse; this.m_linear_impulse += impulse; let max_impulse: f32 = h * this.m_max_force; if this.m_linear_impulse.length_squared() > max_impulse * max_impulse { this.m_linear_impulse.normalize(); this.m_linear_impulse *= max_impulse; } impulse = this.m_linear_impulse - old_impulse; v_a -= m_a * impulse; w_a -= i_a * b2_cross(this.m_r_a, impulse); v_b += m_b * impulse; w_b += i_b * b2_cross(this.m_r_b, impulse); } velocities[this.m_index_a as usize].v = v_a; velocities[this.m_index_a as usize].w = w_a; velocities[this.m_index_b as usize].v = v_b; velocities[this.m_index_b as usize].w = w_b; } pub(crate) fn solve_position_constraints<D: UserDataType>( _this: &B2motorJoint<D>, data: &mut B2solverData, _positions: &mut [B2position], ) -> bool { b2_not_used(data); return true; }
31.217172
92
0.682737
0a9370e6f5ac79493dc6f9b4280cfd1af5deeb4b
1,702
// run-pass #![feature(arbitrary_self_types)] use std::ptr; trait Foo { fn foo(self: *const Self) -> &'static str; unsafe fn bar(self: *const Self) -> i64; unsafe fn complicated(self: *const *const Self) -> i64 where Self: Sized { (*self).bar() } } impl Foo for i32 { fn foo(self: *const Self) -> &'static str { "I'm an i32!" } unsafe fn bar(self: *const Self) -> i64 { *self as i64 } } impl Foo for u32 { fn foo(self: *const Self) -> &'static str { "I'm a u32!" } unsafe fn bar(self: *const Self) -> i64 { *self as i64 } } fn main() { let null_i32 = ptr::null::<i32>() as *const dyn Foo; let null_u32 = ptr::null::<u32>() as *const dyn Foo; assert_eq!("I'm an i32!", null_i32.foo()); assert_eq!("I'm a u32!", null_u32.foo()); let valid_i32 = 5i32; let valid_i32_thin = &valid_i32 as *const i32; assert_eq!("I'm an i32!", valid_i32_thin.foo()); assert_eq!(5, unsafe { valid_i32_thin.bar() }); assert_eq!(5, unsafe { (&valid_i32_thin as *const *const i32).complicated() }); let valid_i32_fat = valid_i32_thin as *const dyn Foo; assert_eq!("I'm an i32!", valid_i32_fat.foo()); assert_eq!(5, unsafe { valid_i32_fat.bar() }); let valid_u32 = 18u32; let valid_u32_thin = &valid_u32 as *const u32; assert_eq!("I'm a u32!", valid_u32_thin.foo()); assert_eq!(18, unsafe { valid_u32_thin.bar() }); assert_eq!(18, unsafe { (&valid_u32_thin as *const *const u32).complicated() }); let valid_u32_fat = valid_u32_thin as *const dyn Foo; assert_eq!("I'm a u32!", valid_u32_fat.foo()); assert_eq!(18, unsafe { valid_u32_fat.bar() }); }
27.451613
84
0.60047
649cccc36c346e88c9542912250db9762bdd9c02
74,441
//! A pass that qualifies constness of temporaries in constants, //! static initializers and functions and also drives promotion. //! //! The Qualif flags below can be used to also provide better //! diagnostics as to why a constant rvalue wasn't promoted. use rustc_data_structures::bit_set::BitSet; use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::fx::FxHashSet; use rustc_target::spec::abi::Abi; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::traits::{self, TraitEngine}; use rustc::ty::{self, TyCtxt, Ty, TypeFoldable}; use rustc::ty::cast::CastTy; use rustc::ty::query::Providers; use rustc::mir::*; use rustc::mir::interpret::ConstValue; use rustc::mir::traversal::ReversePostorder; use rustc::mir::visit::{PlaceContext, Visitor, MutatingUseContext, NonMutatingUseContext}; use rustc::middle::lang_items; use rustc::session::config::nightly_options; use syntax::ast::LitKind; use syntax::feature_gate::{emit_feature_err, GateIssue}; use syntax::symbol::sym; use syntax_pos::{Span, DUMMY_SP}; use std::fmt; use std::ops::{Deref, Index, IndexMut}; use std::usize; use crate::transform::{MirPass, MirSource}; use super::promote_consts::{self, Candidate, TempState}; /// What kind of item we are in. #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Mode { /// A `static` item. Static, /// A `static mut` item. StaticMut, /// A `const fn` item. ConstFn, /// A `const` item or an anonymous constant (e.g. in array lengths). Const, /// Other type of `fn`. NonConstFn, } impl Mode { /// Determine whether we have to do full const-checking because syntactically, we /// are required to be "const". 
#[inline] fn requires_const_checking(self) -> bool { self != Mode::NonConstFn } } impl fmt::Display for Mode { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Mode::Const => write!(f, "constant"), Mode::Static | Mode::StaticMut => write!(f, "static"), Mode::ConstFn => write!(f, "constant function"), Mode::NonConstFn => write!(f, "function") } } } const QUALIF_COUNT: usize = 4; // FIXME(eddyb) once we can use const generics, replace this array with // something like `IndexVec` but for fixed-size arrays (`IndexArray`?). #[derive(Copy, Clone, Default)] struct PerQualif<T>([T; QUALIF_COUNT]); impl<T: Clone> PerQualif<T> { fn new(x: T) -> Self { PerQualif([x.clone(), x.clone(), x.clone(), x]) } } impl<T> PerQualif<T> { fn as_mut(&mut self) -> PerQualif<&mut T> { let [x0, x1, x2, x3] = &mut self.0; PerQualif([x0, x1, x2, x3]) } fn zip<U>(self, other: PerQualif<U>) -> PerQualif<(T, U)> { let [x0, x1, x2, x3] = self.0; let [y0, y1, y2, y3] = other.0; PerQualif([(x0, y0), (x1, y1), (x2, y2), (x3, y3)]) } } impl PerQualif<bool> { fn encode_to_bits(self) -> u8 { self.0.iter().enumerate().fold(0, |bits, (i, &qualif)| { bits | ((qualif as u8) << i) }) } fn decode_from_bits(bits: u8) -> Self { let mut qualifs = Self::default(); for (i, qualif) in qualifs.0.iter_mut().enumerate() { *qualif = (bits & (1 << i)) != 0; } qualifs } } impl<Q: Qualif, T> Index<Q> for PerQualif<T> { type Output = T; fn index(&self, _: Q) -> &T { &self.0[Q::IDX] } } impl<Q: Qualif, T> IndexMut<Q> for PerQualif<T> { fn index_mut(&mut self, _: Q) -> &mut T { &mut self.0[Q::IDX] } } struct ConstCx<'a, 'tcx> { tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>, mode: Mode, body: &'a Body<'tcx>, per_local: PerQualif<BitSet<Local>>, } impl<'a, 'tcx> ConstCx<'a, 'tcx> { fn is_const_panic_fn(&self, def_id: DefId) -> bool { Some(def_id) == self.tcx.lang_items().panic_fn() || Some(def_id) == self.tcx.lang_items().begin_panic_fn() } } #[derive(Copy, Clone, Debug)] enum ValueSource<'a, 'tcx> { 
Rvalue(&'a Rvalue<'tcx>), DropAndReplace(&'a Operand<'tcx>), Call { callee: &'a Operand<'tcx>, args: &'a [Operand<'tcx>], return_ty: Ty<'tcx>, }, } /// A "qualif"(-ication) is a way to look for something "bad" in the MIR that would disqualify some /// code for promotion or prevent it from evaluating at compile time. So `return true` means /// "I found something bad, no reason to go on searching". `false` is only returned if we /// definitely cannot find anything bad anywhere. /// /// The default implementations proceed structurally. trait Qualif { const IDX: usize; /// Return the qualification that is (conservatively) correct for any value /// of the type, or `None` if the qualification is not value/type-based. fn in_any_value_of_ty(_cx: &ConstCx<'_, 'tcx>, _ty: Ty<'tcx>) -> Option<bool> { None } /// Return a mask for the qualification, given a type. This is `false` iff /// no value of that type can have the qualification. fn mask_for_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool { Self::in_any_value_of_ty(cx, ty).unwrap_or(true) } fn in_local(cx: &ConstCx<'_, '_>, local: Local) -> bool { cx.per_local.0[Self::IDX].contains(local) } fn in_static(_cx: &ConstCx<'_, 'tcx>, _static: &Static<'tcx>) -> bool { // FIXME(eddyb) should we do anything here for value properties? false } fn in_projection_structurally( cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>, ) -> bool { let proj = place.projection.as_ref().unwrap(); let base_qualif = Self::in_place(cx, PlaceRef { base: place.base, projection: &proj.base, }); let qualif = base_qualif && Self::mask_for_ty( cx, Place::ty_from(place.base, &proj.base, cx.body, cx.tcx) .projection_ty(cx.tcx, &proj.elem) .ty, ); match proj.elem { ProjectionElem::Deref | ProjectionElem::Subslice { .. } | ProjectionElem::Field(..) | ProjectionElem::ConstantIndex { .. } | ProjectionElem::Downcast(..) 
=> qualif, ProjectionElem::Index(local) => qualif || Self::in_local(cx, local), } } fn in_projection( cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>, ) -> bool { Self::in_projection_structurally(cx, place) } fn in_place(cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>) -> bool { match place { PlaceRef { base: PlaceBase::Local(local), projection: None, } => Self::in_local(cx, *local), PlaceRef { base: PlaceBase::Static(box Static { kind: StaticKind::Promoted(_), .. }), projection: None, } => bug!("qualifying already promoted MIR"), PlaceRef { base: PlaceBase::Static(static_), projection: None, } => { Self::in_static(cx, static_) }, PlaceRef { base: _, projection: Some(_), } => Self::in_projection(cx, place), } } fn in_operand(cx: &ConstCx<'_, 'tcx>, operand: &Operand<'tcx>) -> bool { match *operand { Operand::Copy(ref place) | Operand::Move(ref place) => Self::in_place(cx, place.as_ref()), Operand::Constant(ref constant) => { if let ConstValue::Unevaluated(def_id, _) = constant.literal.val { // Don't peek inside trait associated constants. if cx.tcx.trait_of_item(def_id).is_some() { Self::in_any_value_of_ty(cx, constant.literal.ty).unwrap_or(false) } else { let (bits, _) = cx.tcx.at(constant.span).mir_const_qualif(def_id); let qualif = PerQualif::decode_from_bits(bits).0[Self::IDX]; // Just in case the type is more specific than // the definition, e.g., impl associated const // with type parameters, take it into account. qualif && Self::mask_for_ty(cx, constant.literal.ty) } } else { false } } } } fn in_rvalue_structurally(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool { match *rvalue { Rvalue::NullaryOp(..) 
=> false, Rvalue::Discriminant(ref place) | Rvalue::Len(ref place) => Self::in_place(cx, place.as_ref()), Rvalue::Use(ref operand) | Rvalue::Repeat(ref operand, _) | Rvalue::UnaryOp(_, ref operand) | Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, operand), Rvalue::BinaryOp(_, ref lhs, ref rhs) | Rvalue::CheckedBinaryOp(_, ref lhs, ref rhs) => { Self::in_operand(cx, lhs) || Self::in_operand(cx, rhs) } Rvalue::Ref(_, _, ref place) => { // Special-case reborrows to be more like a copy of the reference. if let Some(ref proj) = place.projection { if let ProjectionElem::Deref = proj.elem { let base_ty = Place::ty_from(&place.base, &proj.base, cx.body, cx.tcx).ty; if let ty::Ref(..) = base_ty.sty { return Self::in_place(cx, PlaceRef { base: &place.base, projection: &proj.base, }); } } } Self::in_place(cx, place.as_ref()) } Rvalue::Aggregate(_, ref operands) => { operands.iter().any(|o| Self::in_operand(cx, o)) } } } fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool { Self::in_rvalue_structurally(cx, rvalue) } fn in_call( cx: &ConstCx<'_, 'tcx>, _callee: &Operand<'tcx>, _args: &[Operand<'tcx>], return_ty: Ty<'tcx>, ) -> bool { // Be conservative about the returned value of a const fn. Self::in_any_value_of_ty(cx, return_ty).unwrap_or(false) } fn in_value(cx: &ConstCx<'_, 'tcx>, source: ValueSource<'_, 'tcx>) -> bool { match source { ValueSource::Rvalue(rvalue) => Self::in_rvalue(cx, rvalue), ValueSource::DropAndReplace(source) => Self::in_operand(cx, source), ValueSource::Call { callee, args, return_ty } => { Self::in_call(cx, callee, args, return_ty) } } } } /// Constant containing interior mutability (`UnsafeCell<T>`). /// This must be ruled out to make sure that evaluating the constant at compile-time /// and at *any point* during the run-time would produce the same result. In particular, /// promotion of temporaries must not change program behavior; if the promoted could be /// written to, that would be a problem. 
struct HasMutInterior; impl Qualif for HasMutInterior { const IDX: usize = 0; fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> { Some(!ty.is_freeze(cx.tcx, cx.param_env, DUMMY_SP)) } fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool { match *rvalue { // Returning `true` for `Rvalue::Ref` indicates the borrow isn't // allowed in constants (and the `Checker` will error), and/or it // won't be promoted, due to `&mut ...` or interior mutability. Rvalue::Ref(_, kind, ref place) => { let ty = place.ty(cx.body, cx.tcx).ty; if let BorrowKind::Mut { .. } = kind { // In theory, any zero-sized value could be borrowed // mutably without consequences. However, only &mut [] // is allowed right now, and only in functions. if cx.mode == Mode::StaticMut { // Inside a `static mut`, &mut [...] is also allowed. match ty.sty { ty::Array(..) | ty::Slice(_) => {} _ => return true, } } else if let ty::Array(_, len) = ty.sty { // FIXME(eddyb) the `cx.mode == Mode::NonConstFn` condition // seems unnecessary, given that this is merely a ZST. match len.try_eval_usize(cx.tcx, cx.param_env) { Some(0) if cx.mode == Mode::NonConstFn => {}, _ => return true, } } else { return true; } } } Rvalue::Aggregate(ref kind, _) => { if let AggregateKind::Adt(def, ..) = **kind { if Some(def.did) == cx.tcx.lang_items().unsafe_cell_type() { let ty = rvalue.ty(cx.body, cx.tcx); assert_eq!(Self::in_any_value_of_ty(cx, ty), Some(true)); return true; } } } _ => {} } Self::in_rvalue_structurally(cx, rvalue) } } /// Constant containing an ADT that implements `Drop`. /// This must be ruled out (a) because we cannot run `Drop` during compile-time /// as that might not be a `const fn`, and (b) because implicit promotion would /// remove side-effects that occur as part of dropping that value. 
struct NeedsDrop; impl Qualif for NeedsDrop { const IDX: usize = 1; fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> { Some(ty.needs_drop(cx.tcx, cx.param_env)) } fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool { if let Rvalue::Aggregate(ref kind, _) = *rvalue { if let AggregateKind::Adt(def, ..) = **kind { if def.has_dtor(cx.tcx) { return true; } } } Self::in_rvalue_structurally(cx, rvalue) } } /// Not promotable at all - non-`const fn` calls, `asm!`, /// pointer comparisons, ptr-to-int casts, etc. /// Inside a const context all constness rules apply, so promotion simply has to follow the regular /// constant rules (modulo interior mutability or `Drop` rules which are handled `HasMutInterior` /// and `NeedsDrop` respectively). Basically this duplicates the checks that the const-checking /// visitor enforces by emitting errors when working in const context. struct IsNotPromotable; impl Qualif for IsNotPromotable { const IDX: usize = 2; fn in_static(cx: &ConstCx<'_, 'tcx>, static_: &Static<'tcx>) -> bool { match static_.kind { StaticKind::Promoted(_) => unreachable!(), StaticKind::Static(def_id) => { // Only allow statics (not consts) to refer to other statics. let allowed = cx.mode == Mode::Static || cx.mode == Mode::StaticMut; !allowed || cx.tcx.get_attrs(def_id).iter().any( |attr| attr.check_name(sym::thread_local) ) } } } fn in_projection( cx: &ConstCx<'_, 'tcx>, place: PlaceRef<'_, 'tcx>, ) -> bool { let proj = place.projection.as_ref().unwrap(); match proj.elem { ProjectionElem::Deref | ProjectionElem::Downcast(..) => return true, ProjectionElem::ConstantIndex {..} | ProjectionElem::Subslice {..} | ProjectionElem::Index(_) => {} ProjectionElem::Field(..) => { if cx.mode == Mode::NonConstFn { let base_ty = Place::ty_from(place.base, &proj.base, cx.body, cx.tcx).ty; if let Some(def) = base_ty.ty_adt_def() { // No promotion of union field accesses. 
if def.is_union() { return true; } } } } } Self::in_projection_structurally(cx, place) } fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool { match *rvalue { Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::NonConstFn => { let operand_ty = operand.ty(cx.body, cx.tcx); let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); match (cast_in, cast_out) { (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => { // in normal functions, mark such casts as not promotable return true; } _ => {} } } Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::NonConstFn => { if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.body, cx.tcx).sty { assert!(op == BinOp::Eq || op == BinOp::Ne || op == BinOp::Le || op == BinOp::Lt || op == BinOp::Ge || op == BinOp::Gt || op == BinOp::Offset); // raw pointer operations are not allowed inside promoteds return true; } } Rvalue::NullaryOp(NullOp::Box, _) => return true, _ => {} } Self::in_rvalue_structurally(cx, rvalue) } fn in_call( cx: &ConstCx<'_, 'tcx>, callee: &Operand<'tcx>, args: &[Operand<'tcx>], _return_ty: Ty<'tcx>, ) -> bool { let fn_ty = callee.ty(cx.body, cx.tcx); match fn_ty.sty { ty::FnDef(def_id, _) => { match cx.tcx.fn_sig(def_id).abi() { Abi::RustIntrinsic | Abi::PlatformIntrinsic => { assert!(!cx.tcx.is_const_fn(def_id)); match &cx.tcx.item_name(def_id).as_str()[..] 
{ | "size_of" | "min_align_of" | "needs_drop" | "type_id" | "bswap" | "bitreverse" | "ctpop" | "cttz" | "cttz_nonzero" | "ctlz" | "ctlz_nonzero" | "wrapping_add" | "wrapping_sub" | "wrapping_mul" | "unchecked_shl" | "unchecked_shr" | "rotate_left" | "rotate_right" | "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" | "saturating_add" | "saturating_sub" | "transmute" => return true, _ => {} } } _ => { let is_const_fn = cx.tcx.is_const_fn(def_id) || cx.tcx.is_unstable_const_fn(def_id).is_some() || cx.is_const_panic_fn(def_id); if !is_const_fn { return true; } } } } _ => return true, } Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg)) } } /// Refers to temporaries which cannot be promoted *implicitly*. /// Explicit promotion happens e.g. for constant arguments declared via `rustc_args_required_const`. /// Implicit promotion has almost the same rules, except that disallows `const fn` except for /// those marked `#[rustc_promotable]`. This is to avoid changing a legitimate run-time operation /// into a failing compile-time operation e.g. due to addresses being compared inside the function. struct IsNotImplicitlyPromotable; impl Qualif for IsNotImplicitlyPromotable { const IDX: usize = 3; fn in_call( cx: &ConstCx<'_, 'tcx>, callee: &Operand<'tcx>, args: &[Operand<'tcx>], _return_ty: Ty<'tcx>, ) -> bool { if cx.mode == Mode::NonConstFn { if let ty::FnDef(def_id, _) = callee.ty(cx.body, cx.tcx).sty { // Never promote runtime `const fn` calls of // functions without `#[rustc_promotable]`. if !cx.tcx.is_promotable_const_fn(def_id) { return true; } } } Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg)) } } // Ensure the `IDX` values are sequential (`0..QUALIF_COUNT`). macro_rules! 
static_assert_seq_qualifs { ($i:expr => $first:ident $(, $rest:ident)*) => { static_assert!({ static_assert_seq_qualifs!($i + 1 => $($rest),*); $first::IDX == $i }); }; ($i:expr =>) => { static_assert!(QUALIF_COUNT == $i); }; } static_assert_seq_qualifs!( 0 => HasMutInterior, NeedsDrop, IsNotPromotable, IsNotImplicitlyPromotable ); impl ConstCx<'_, 'tcx> { fn qualifs_in_any_value_of_ty(&self, ty: Ty<'tcx>) -> PerQualif<bool> { let mut qualifs = PerQualif::default(); qualifs[HasMutInterior] = HasMutInterior::in_any_value_of_ty(self, ty).unwrap_or(false); qualifs[NeedsDrop] = NeedsDrop::in_any_value_of_ty(self, ty).unwrap_or(false); qualifs[IsNotPromotable] = IsNotPromotable::in_any_value_of_ty(self, ty).unwrap_or(false); qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_any_value_of_ty(self, ty).unwrap_or(false); qualifs } fn qualifs_in_local(&self, local: Local) -> PerQualif<bool> { let mut qualifs = PerQualif::default(); qualifs[HasMutInterior] = HasMutInterior::in_local(self, local); qualifs[NeedsDrop] = NeedsDrop::in_local(self, local); qualifs[IsNotPromotable] = IsNotPromotable::in_local(self, local); qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_local(self, local); qualifs } fn qualifs_in_value(&self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> { let mut qualifs = PerQualif::default(); qualifs[HasMutInterior] = HasMutInterior::in_value(self, source); qualifs[NeedsDrop] = NeedsDrop::in_value(self, source); qualifs[IsNotPromotable] = IsNotPromotable::in_value(self, source); qualifs[IsNotImplicitlyPromotable] = IsNotImplicitlyPromotable::in_value(self, source); qualifs } } /// Checks MIR for being admissible as a compile-time constant, using `ConstCx` /// for value qualifications, and accumulates writes of /// rvalue/call results to locals, in `local_qualif`. /// It also records candidates for promotion in `promotion_candidates`, /// both in functions and const/static items. 
struct Checker<'a, 'tcx> { cx: ConstCx<'a, 'tcx>, span: Span, def_id: DefId, rpo: ReversePostorder<'a, 'tcx>, temp_promotion_state: IndexVec<Local, TempState>, promotion_candidates: Vec<Candidate>, } macro_rules! unleash_miri { ($this:expr) => {{ if $this.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { $this.tcx.sess.span_warn($this.span, "skipping const checks"); return; } }} } impl Deref for Checker<'a, 'tcx> { type Target = ConstCx<'a, 'tcx>; fn deref(&self) -> &Self::Target { &self.cx } } impl<'a, 'tcx> Checker<'a, 'tcx> { fn new(tcx: TyCtxt<'tcx>, def_id: DefId, body: &'a Body<'tcx>, mode: Mode) -> Self { assert!(def_id.is_local()); let mut rpo = traversal::reverse_postorder(body); let temps = promote_consts::collect_temps(body, &mut rpo); rpo.reset(); let param_env = tcx.param_env(def_id); let mut cx = ConstCx { tcx, param_env, mode, body, per_local: PerQualif::new(BitSet::new_empty(body.local_decls.len())), }; for (local, decl) in body.local_decls.iter_enumerated() { if let LocalKind::Arg = body.local_kind(local) { let qualifs = cx.qualifs_in_any_value_of_ty(decl.ty); for (per_local, qualif) in &mut cx.per_local.as_mut().zip(qualifs).0 { if *qualif { per_local.insert(local); } } } if !temps[local].is_promotable() { cx.per_local[IsNotPromotable].insert(local); } if let LocalKind::Var = body.local_kind(local) { // Sanity check to prevent implicit and explicit promotion of // named locals assert!(cx.per_local[IsNotPromotable].contains(local)); } } Checker { cx, span: body.span, def_id, rpo, temp_promotion_state: temps, promotion_candidates: vec![] } } // FIXME(eddyb) we could split the errors into meaningful // categories, but enabling full miri would make that // slightly pointless (even with feature-gating). 
fn not_const(&mut self) { unleash_miri!(self); if self.mode.requires_const_checking() { let mut err = struct_span_err!( self.tcx.sess, self.span, E0019, "{} contains unimplemented expression type", self.mode ); if self.tcx.sess.teach(&err.get_code().unwrap()) { err.note("A function call isn't allowed in the const's initialization expression \ because the expression's value must be known at compile-time."); err.note("Remember: you can't use a function call inside a const's initialization \ expression! However, you can use it anywhere else."); } err.emit(); } } /// Assigns an rvalue/call qualification to the given destination. fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location: Location) { trace!("assign: {:?} <- {:?}", dest, source); let mut qualifs = self.qualifs_in_value(source); match source { ValueSource::Rvalue(&Rvalue::Ref(_, kind, ref place)) => { // Getting `true` from `HasMutInterior::in_rvalue` means // the borrowed place is disallowed from being borrowed, // due to either a mutable borrow (with some exceptions), // or an shared borrow of a value with interior mutability. // Then `HasMutInterior` is replaced with `IsNotPromotable`, // to avoid duplicate errors (e.g. from reborrowing). if qualifs[HasMutInterior] { qualifs[HasMutInterior] = false; qualifs[IsNotPromotable] = true; if self.mode.requires_const_checking() { if !self.tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { if let BorrowKind::Mut { .. 
} = kind { let mut err = struct_span_err!(self.tcx.sess, self.span, E0017, "references in {}s may only refer \ to immutable values", self.mode); err.span_label(self.span, format!("{}s require immutable values", self.mode)); if self.tcx.sess.teach(&err.get_code().unwrap()) { err.note("References in statics and constants may only refer \ to immutable values.\n\n\ Statics are shared everywhere, and if they refer to \ mutable data one might violate memory safety since \ holding multiple mutable references to shared data \ is not allowed.\n\n\ If you really want global mutable state, try using \ static mut or a global UnsafeCell."); } err.emit(); } else { span_err!(self.tcx.sess, self.span, E0492, "cannot borrow a constant which may contain \ interior mutability, create a static instead"); } } } } else if let BorrowKind::Mut { .. } | BorrowKind::Shared = kind { // Don't promote BorrowKind::Shallow borrows, as they don't // reach codegen. // We might have a candidate for promotion. let candidate = Candidate::Ref(location); // Start by traversing to the "base", with non-deref projections removed. let mut place_projection = &place.projection; while let Some(proj) = place_projection { if proj.elem == ProjectionElem::Deref { break; } place_projection = &proj.base; } debug!( "qualify_consts: promotion candidate: place={:?} {:?}", place.base, place_projection ); // We can only promote interior borrows of promotable temps (non-temps // don't get promoted anyway). // (If we bailed out of the loop due to a `Deref` above, we will definitely // not enter the conditional here.) if let (PlaceBase::Local(local), None) = (&place.base, place_projection) { if self.body.local_kind(*local) == LocalKind::Temp { debug!("qualify_consts: promotion candidate: local={:?}", local); // The borrowed place doesn't have `HasMutInterior` // (from `in_rvalue`), so we can safely ignore // `HasMutInterior` from the local's qualifications. 
// This allows borrowing fields which don't have // `HasMutInterior`, from a type that does, e.g.: // `let _: &'static _ = &(Cell::new(1), 2).1;` let mut local_qualifs = self.qualifs_in_local(*local); // Any qualifications, except HasMutInterior (see above), disqualify // from promotion. // This is, in particular, the "implicit promotion" version of // the check making sure that we don't run drop glue during const-eval. local_qualifs[HasMutInterior] = false; if !local_qualifs.0.iter().any(|&qualif| qualif) { debug!("qualify_consts: promotion candidate: {:?}", candidate); self.promotion_candidates.push(candidate); } } } } }, ValueSource::Rvalue(&Rvalue::Repeat(ref operand, _)) => { let candidate = Candidate::Repeat(location); let not_promotable = IsNotImplicitlyPromotable::in_operand(self, operand) || IsNotPromotable::in_operand(self, operand); debug!("assign: self.def_id={:?} operand={:?}", self.def_id, operand); if !not_promotable && self.tcx.features().const_in_array_repeat_expressions { debug!("assign: candidate={:?}", candidate); self.promotion_candidates.push(candidate); } }, _ => {}, } let mut dest_projection = &dest.projection; let index = loop { match (&dest.base, dest_projection) { // We treat all locals equal in constants (&PlaceBase::Local(index), None) => break index, // projections are transparent for assignments // we qualify the entire destination at once, even if just a field would have // stricter qualification (base, Some(proj)) => { // Catch more errors in the destination. `visit_place` also checks various // projection rules like union field access and raw pointer deref let context = PlaceContext::MutatingUse(MutatingUseContext::Store); self.visit_place_base(base, context, location); self.visit_projection(base, proj, context, location); dest_projection = &proj.base; }, (&PlaceBase::Static(box Static { kind: StaticKind::Promoted(_), .. }), None) => bug!("promoteds don't exist yet during promotion"), (&PlaceBase::Static(box Static{ kind: _, .. 
}), None) => { // Catch more errors in the destination. `visit_place` also checks that we // do not try to access statics from constants or try to mutate statics let context = PlaceContext::MutatingUse(MutatingUseContext::Store); self.visit_place_base(&dest.base, context, location); return; } } }; let kind = self.body.local_kind(index); debug!("store to {:?} {:?}", kind, index); // Only handle promotable temps in non-const functions. if self.mode == Mode::NonConstFn { if kind != LocalKind::Temp || !self.temp_promotion_state[index].is_promotable() { return; } } // this is overly restrictive, because even full assignments do not clear the qualif // While we could special case full assignments, this would be inconsistent with // aggregates where we overwrite all fields via assignments, which would not get // that feature. for (per_local, qualif) in &mut self.cx.per_local.as_mut().zip(qualifs).0 { if *qualif { per_local.insert(index); } } // Ensure the `IsNotPromotable` qualification is preserved. // NOTE(eddyb) this is actually unnecessary right now, as // we never replace the local's qualif, but we might in // the future, and so it serves to catch changes that unset // important bits (in which case, asserting `contains` could // be replaced with calling `insert` to re-set the bit). if kind == LocalKind::Temp { if !self.temp_promotion_state[index].is_promotable() { assert!(self.cx.per_local[IsNotPromotable].contains(index)); } } } /// Check a whole const, static initializer or const fn. fn check_const(&mut self) -> (u8, &'tcx BitSet<Local>) { debug!("const-checking {} {:?}", self.mode, self.def_id); let body = self.body; let mut seen_blocks = BitSet::new_empty(body.basic_blocks().len()); let mut bb = START_BLOCK; loop { seen_blocks.insert(bb.index()); self.visit_basic_block_data(bb, &body[bb]); let target = match body[bb].terminator().kind { TerminatorKind::Goto { target } | TerminatorKind::FalseUnwind { real_target: target, .. } | TerminatorKind::Drop { target, .. 
} | TerminatorKind::DropAndReplace { target, .. } | TerminatorKind::Assert { target, .. } | TerminatorKind::Call { destination: Some((_, target)), .. } => { Some(target) } // Non-terminating calls cannot produce any value. TerminatorKind::Call { destination: None, .. } => { break; } TerminatorKind::SwitchInt {..} | TerminatorKind::Resume | TerminatorKind::Abort | TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } | TerminatorKind::Unreachable | TerminatorKind::FalseEdges { .. } => None, TerminatorKind::Return => { break; } }; match target { // No loops allowed. Some(target) if !seen_blocks.contains(target.index()) => { bb = target; } _ => { self.not_const(); break; } } } // Collect all the temps we need to promote. let mut promoted_temps = BitSet::new_empty(self.temp_promotion_state.len()); debug!("qualify_const: promotion_candidates={:?}", self.promotion_candidates); for candidate in &self.promotion_candidates { match *candidate { Candidate::Repeat(Location { block: bb, statement_index: stmt_idx }) => { if let StatementKind::Assign(_, box Rvalue::Repeat( Operand::Move(Place { base: PlaceBase::Local(index), projection: None, }), _ )) = self.body[bb].statements[stmt_idx].kind { promoted_temps.insert(index); } } Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => { if let StatementKind::Assign( _, box Rvalue::Ref(_, _, Place { base: PlaceBase::Local(index), projection: None, }) ) = self.body[bb].statements[stmt_idx].kind { promoted_temps.insert(index); } } Candidate::Argument { .. } => {} } } let mut qualifs = self.qualifs_in_local(RETURN_PLACE); // Account for errors in consts by using the // conservative type qualification instead. 
if qualifs[IsNotPromotable] { qualifs = self.qualifs_in_any_value_of_ty(body.return_ty()); } (qualifs.encode_to_bits(), self.tcx.arena.alloc(promoted_temps)) } } impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { fn visit_place_base( &mut self, place_base: &PlaceBase<'tcx>, context: PlaceContext, location: Location, ) { self.super_place_base(place_base, context, location); match place_base { PlaceBase::Local(_) => {} PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. }) => { unreachable!() } PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. }) => { if self.tcx .get_attrs(*def_id) .iter() .any(|attr| attr.check_name(sym::thread_local)) { if self.mode.requires_const_checking() { span_err!(self.tcx.sess, self.span, E0625, "thread-local statics cannot be \ accessed at compile-time"); } return; } // Only allow statics (not consts) to refer to other statics. if self.mode == Mode::Static || self.mode == Mode::StaticMut { if self.mode == Mode::Static && context.is_mutating_use() { // this is not strictly necessary as miri will also bail out // For interior mutability we can't really catch this statically as that // goes through raw pointers and intermediate temporaries, so miri has // to catch this anyway self.tcx.sess.span_err( self.span, "cannot mutate statics in the initializer of another static", ); } return; } unleash_miri!(self); if self.mode.requires_const_checking() { let mut err = struct_span_err!(self.tcx.sess, self.span, E0013, "{}s cannot refer to statics, use \ a constant instead", self.mode); if self.tcx.sess.teach(&err.get_code().unwrap()) { err.note( "Static and const variables can refer to other const variables. \ But a const variable cannot refer to a static variable." ); err.help( "To fix this, the value can be extracted as a const and then used." 
); } err.emit() } } } } fn visit_projection( &mut self, place_base: &PlaceBase<'tcx>, proj: &Projection<'tcx>, context: PlaceContext, location: Location, ) { debug!( "visit_place_projection: proj={:?} context={:?} location={:?}", proj, context, location, ); self.super_projection(place_base, proj, context, location); match proj.elem { ProjectionElem::Deref => { if context.is_mutating_use() { // `not_const` errors out in const contexts self.not_const() } let base_ty = Place::ty_from(place_base, &proj.base, self.body, self.tcx).ty; match self.mode { Mode::NonConstFn => {}, _ => { if let ty::RawPtr(_) = base_ty.sty { if !self.tcx.features().const_raw_ptr_deref { emit_feature_err( &self.tcx.sess.parse_sess, sym::const_raw_ptr_deref, self.span, GateIssue::Language, &format!( "dereferencing raw pointers in {}s is unstable", self.mode, ), ); } } } } } ProjectionElem::ConstantIndex {..} | ProjectionElem::Subslice {..} | ProjectionElem::Field(..) | ProjectionElem::Index(_) => { let base_ty = Place::ty_from(place_base, &proj.base, self.body, self.tcx).ty; if let Some(def) = base_ty.ty_adt_def() { if def.is_union() { match self.mode { Mode::ConstFn => { if !self.tcx.features().const_fn_union { emit_feature_err( &self.tcx.sess.parse_sess, sym::const_fn_union, self.span, GateIssue::Language, "unions in const fn are unstable", ); } }, | Mode::NonConstFn | Mode::Static | Mode::StaticMut | Mode::Const => {}, } } } } ProjectionElem::Downcast(..) => { self.not_const() } } } fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) { debug!("visit_operand: operand={:?} location={:?}", operand, location); self.super_operand(operand, location); match *operand { Operand::Move(ref place) => { // Mark the consumed locals to indicate later drops are noops. 
if let Place { base: PlaceBase::Local(local), projection: None, } = *place { self.cx.per_local[NeedsDrop].remove(local); } } Operand::Copy(_) | Operand::Constant(_) => {} } } fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { debug!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location); // Check nested operands and places. if let Rvalue::Ref(_, kind, ref place) = *rvalue { // Special-case reborrows. let mut reborrow_place = None; if let Some(ref proj) = place.projection { if let ProjectionElem::Deref = proj.elem { let base_ty = Place::ty_from(&place.base, &proj.base, self.body, self.tcx).ty; if let ty::Ref(..) = base_ty.sty { reborrow_place = Some(&proj.base); } } } if let Some(proj) = reborrow_place { let ctx = match kind { BorrowKind::Shared => PlaceContext::NonMutatingUse( NonMutatingUseContext::SharedBorrow, ), BorrowKind::Shallow => PlaceContext::NonMutatingUse( NonMutatingUseContext::ShallowBorrow, ), BorrowKind::Unique => PlaceContext::NonMutatingUse( NonMutatingUseContext::UniqueBorrow, ), BorrowKind::Mut { .. } => PlaceContext::MutatingUse( MutatingUseContext::Borrow, ), }; self.visit_place_base(&place.base, ctx, location); if let Some(proj) = proj { self.visit_projection(&place.base, proj, ctx, location); } } else { self.super_rvalue(rvalue, location); } } else { self.super_rvalue(rvalue, location); } match *rvalue { Rvalue::Use(_) | Rvalue::Repeat(..) | Rvalue::UnaryOp(UnOp::Neg, _) | Rvalue::UnaryOp(UnOp::Not, _) | Rvalue::NullaryOp(NullOp::SizeOf, _) | Rvalue::CheckedBinaryOp(..) | Rvalue::Cast(CastKind::Pointer(_), ..) | Rvalue::Discriminant(..) | Rvalue::Len(_) | Rvalue::Ref(..) | Rvalue::Aggregate(..) 
=> {} Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => { let operand_ty = operand.ty(self.body, self.tcx); let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); match (cast_in, cast_out) { (CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::NonConstFn => { unleash_miri!(self); if !self.tcx.features().const_raw_ptr_to_usize_cast { // in const fn and constants require the feature gate // FIXME: make it unsafe inside const fn and constants emit_feature_err( &self.tcx.sess.parse_sess, sym::const_raw_ptr_to_usize_cast, self.span, GateIssue::Language, &format!( "casting pointers to integers in {}s is unstable", self.mode, ), ); } } _ => {} } } Rvalue::BinaryOp(op, ref lhs, _) => { if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(self.body, self.tcx).sty { assert!(op == BinOp::Eq || op == BinOp::Ne || op == BinOp::Le || op == BinOp::Lt || op == BinOp::Ge || op == BinOp::Gt || op == BinOp::Offset); unleash_miri!(self); if self.mode.requires_const_checking() && !self.tcx.features().const_compare_raw_pointers { // require the feature gate inside constants and const fn // FIXME: make it unsafe to use these operations emit_feature_err( &self.tcx.sess.parse_sess, sym::const_compare_raw_pointers, self.span, GateIssue::Language, &format!("comparing raw pointers inside {}", self.mode), ); } } } Rvalue::NullaryOp(NullOp::Box, _) => { unleash_miri!(self); if self.mode.requires_const_checking() { let mut err = struct_span_err!(self.tcx.sess, self.span, E0010, "allocations are not allowed in {}s", self.mode); err.span_label(self.span, format!("allocation not allowed in {}s", self.mode)); if self.tcx.sess.teach(&err.get_code().unwrap()) { err.note( "The value of statics and constants must be known at compile time, \ and they live for the entire lifetime of a program. 
Creating a boxed \ value allocates memory on the heap at runtime, and therefore cannot \ be done at compile time." ); } err.emit(); } } } } fn visit_terminator_kind(&mut self, kind: &TerminatorKind<'tcx>, location: Location) { debug!("visit_terminator_kind: kind={:?} location={:?}", kind, location); if let TerminatorKind::Call { ref func, ref args, ref destination, .. } = *kind { if let Some((ref dest, _)) = *destination { self.assign(dest, ValueSource::Call { callee: func, args, return_ty: dest.ty(self.body, self.tcx).ty, }, location); } let fn_ty = func.ty(self.body, self.tcx); let mut callee_def_id = None; let mut is_shuffle = false; match fn_ty.sty { ty::FnDef(def_id, _) => { callee_def_id = Some(def_id); match self.tcx.fn_sig(def_id).abi() { Abi::RustIntrinsic | Abi::PlatformIntrinsic => { assert!(!self.tcx.is_const_fn(def_id)); match &self.tcx.item_name(def_id).as_str()[..] { // special intrinsic that can be called diretly without an intrinsic // feature gate needs a language feature gate "transmute" => { if self.mode.requires_const_checking() { // const eval transmute calls only with the feature gate if !self.tcx.features().const_transmute { emit_feature_err( &self.tcx.sess.parse_sess, sym::const_transmute, self.span, GateIssue::Language, &format!("The use of std::mem::transmute() \ is gated in {}s", self.mode)); } } } name if name.starts_with("simd_shuffle") => { is_shuffle = true; } // no need to check feature gates, intrinsics are only callable // from the libstd or with forever unstable feature gates _ => {} } } _ => { // In normal functions no calls are feature-gated. if self.mode.requires_const_checking() { let unleash_miri = self .tcx .sess .opts .debugging_opts .unleash_the_miri_inside_of_you; if self.tcx.is_const_fn(def_id) || unleash_miri { // stable const fns or unstable const fns // with their feature gate active // FIXME(eddyb) move stability checks from `is_const_fn` here. 
} else if self.is_const_panic_fn(def_id) { // Check the const_panic feature gate. // FIXME: cannot allow this inside `allow_internal_unstable` // because that would make `panic!` insta stable in constants, // since the macro is marked with the attribute. if !self.tcx.features().const_panic { // Don't allow panics in constants without the feature gate. emit_feature_err( &self.tcx.sess.parse_sess, sym::const_panic, self.span, GateIssue::Language, &format!("panicking in {}s is unstable", self.mode), ); } } else if let Some(feature) = self.tcx.is_unstable_const_fn(def_id) { // Check `#[unstable]` const fns or `#[rustc_const_unstable]` // functions without the feature gate active in this crate in // order to report a better error message than the one below. if !self.span.allows_unstable(feature) { let mut err = self.tcx.sess.struct_span_err(self.span, &format!("`{}` is not yet stable as a const fn", self.tcx.def_path_str(def_id))); if nightly_options::is_nightly_build() { help!(&mut err, "add `#![feature({})]` to the \ crate attributes to enable", feature); } err.emit(); } } else { let mut err = struct_span_err!( self.tcx.sess, self.span, E0015, "calls in {}s are limited to constant functions, \ tuple structs and tuple variants", self.mode, ); err.emit(); } } } } } ty::FnPtr(_) => { if self.mode.requires_const_checking() { let mut err = self.tcx.sess.struct_span_err( self.span, &format!("function pointers are not allowed in const fn")); err.emit(); } } _ => { self.not_const(); } } // No need to do anything in constants and statics, as everything is "constant" anyway // so promotion would be useless. 
if self.mode != Mode::Static && self.mode != Mode::Const { let constant_args = callee_def_id.and_then(|id| { args_required_const(self.tcx, id) }).unwrap_or_default(); for (i, arg) in args.iter().enumerate() { if !(is_shuffle && i == 2 || constant_args.contains(&i)) { continue; } let candidate = Candidate::Argument { bb: location.block, index: i }; // Since the argument is required to be constant, // we care about constness, not promotability. // If we checked for promotability, we'd miss out on // the results of function calls (which are never promoted // in runtime code). // This is not a problem, because the argument explicitly // requests constness, in contrast to regular promotion // which happens even without the user requesting it. // We can error out with a hard error if the argument is not // constant here. if !IsNotPromotable::in_operand(self, arg) { debug!("visit_terminator_kind: candidate={:?}", candidate); self.promotion_candidates.push(candidate); } else { if is_shuffle { span_err!(self.tcx.sess, self.span, E0526, "shuffle indices are not constant"); } else { self.tcx.sess.span_err(self.span, &format!("argument {} is required to be a constant", i + 1)); } } } } // Check callee and argument operands. self.visit_operand(func, location); for arg in args { self.visit_operand(arg, location); } } else if let TerminatorKind::Drop { location: ref place, .. } | TerminatorKind::DropAndReplace { location: ref place, .. } = *kind { match *kind { TerminatorKind::DropAndReplace { .. } => {} _ => self.super_terminator_kind(kind, location), } // Deny *any* live drops anywhere other than functions. if self.mode.requires_const_checking() { unleash_miri!(self); // HACK(eddyb): emulate a bit of dataflow analysis, // conservatively, that drop elaboration will do. 
let needs_drop = if let Place { base: PlaceBase::Local(local), projection: None, } = *place { if NeedsDrop::in_local(self, local) { Some(self.body.local_decls[local].source_info.span) } else { None } } else { Some(self.span) }; if let Some(span) = needs_drop { // Double-check the type being dropped, to minimize false positives. let ty = place.ty(self.body, self.tcx).ty; if ty.needs_drop(self.tcx, self.param_env) { struct_span_err!(self.tcx.sess, span, E0493, "destructors cannot be evaluated at compile-time") .span_label(span, format!("{}s cannot evaluate destructors", self.mode)) .emit(); } } } match *kind { TerminatorKind::DropAndReplace { ref value, .. } => { self.assign(place, ValueSource::DropAndReplace(value), location); self.visit_operand(value, location); } _ => {} } } else { // Qualify any operands inside other terminators. self.super_terminator_kind(kind, location); } } fn visit_assign(&mut self, dest: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) { debug!("visit_assign: dest={:?} rvalue={:?} location={:?}", dest, rvalue, location); self.assign(dest, ValueSource::Rvalue(rvalue), location); self.visit_rvalue(rvalue, location); } fn visit_source_info(&mut self, source_info: &SourceInfo) { debug!("visit_source_info: source_info={:?}", source_info); self.span = source_info.span; } fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { debug!("visit_statement: statement={:?} location={:?}", statement, location); match statement.kind { StatementKind::Assign(..) => { self.super_statement(statement, location); } StatementKind::FakeRead(FakeReadCause::ForMatchedPlace, _) => { self.not_const(); } // FIXME(eddyb) should these really do nothing? StatementKind::FakeRead(..) | StatementKind::SetDiscriminant { .. } | StatementKind::StorageLive(_) | StatementKind::StorageDead(_) | StatementKind::InlineAsm {..} | StatementKind::Retag { .. } | StatementKind::AscribeUserType(..) 
| StatementKind::Nop => {} } } } pub fn provide(providers: &mut Providers<'_>) { *providers = Providers { mir_const_qualif, ..*providers }; } fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> (u8, &BitSet<Local>) { // N.B., this `borrow()` is guaranteed to be valid (i.e., the value // cannot yet be stolen), because `mir_validated()`, which steals // from `mir_const(), forces this query to execute before // performing the steal. let body = &tcx.mir_const(def_id).borrow(); if body.return_ty().references_error() { tcx.sess.delay_span_bug(body.span, "mir_const_qualif: MIR had errors"); return (1 << IsNotPromotable::IDX, tcx.arena.alloc(BitSet::new_empty(0))); } Checker::new(tcx, def_id, body, Mode::Const).check_const() } pub struct QualifyAndPromoteConstants; impl MirPass for QualifyAndPromoteConstants { fn run_pass<'tcx>(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) { // There's not really any point in promoting errorful MIR. if body.return_ty().references_error() { tcx.sess.delay_span_bug(body.span, "QualifyAndPromoteConstants: MIR had errors"); return; } if src.promoted.is_some() { return; } let def_id = src.def_id(); let id = tcx.hir().as_local_hir_id(def_id).unwrap(); let mut const_promoted_temps = None; let mode = match tcx.hir().body_owner_kind(id) { hir::BodyOwnerKind::Closure => Mode::NonConstFn, hir::BodyOwnerKind::Fn => { if tcx.is_const_fn(def_id) { Mode::ConstFn } else { Mode::NonConstFn } } hir::BodyOwnerKind::Const => { const_promoted_temps = Some(tcx.mir_const_qualif(def_id).1); Mode::Const } hir::BodyOwnerKind::Static(hir::MutImmutable) => Mode::Static, hir::BodyOwnerKind::Static(hir::MutMutable) => Mode::StaticMut, }; debug!("run_pass: mode={:?}", mode); if mode == Mode::NonConstFn || mode == Mode::ConstFn { // This is ugly because Checker holds onto mir, // which can't be mutated until its scope ends. 
let (temps, candidates) = { let mut checker = Checker::new(tcx, def_id, body, mode); if mode == Mode::ConstFn { if tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you { checker.check_const(); } else if tcx.is_min_const_fn(def_id) { // enforce `min_const_fn` for stable const fns use super::qualify_min_const_fn::is_min_const_fn; if let Err((span, err)) = is_min_const_fn(tcx, def_id, body) { let mut diag = struct_span_err!( tcx.sess, span, E0723, "{}", err, ); diag.note("for more information, see issue \ https://github.com/rust-lang/rust/issues/57563"); diag.help( "add `#![feature(const_fn)]` to the crate attributes to enable", ); diag.emit(); } else { // this should not produce any errors, but better safe than sorry // FIXME(#53819) checker.check_const(); } } else { // Enforce a constant-like CFG for `const fn`. checker.check_const(); } } else { while let Some((bb, data)) = checker.rpo.next() { checker.visit_basic_block_data(bb, data); } } (checker.temp_promotion_state, checker.promotion_candidates) }; // Do the actual promotion, now that we know what's viable. promote_consts::promote_candidates(body, tcx, temps, candidates); } else { if !body.control_flow_destroyed.is_empty() { let mut locals = body.vars_iter(); if let Some(local) = locals.next() { let span = body.local_decls[local].source_info.span; let mut error = tcx.sess.struct_span_err( span, &format!( "new features like let bindings are not permitted in {}s \ which also use short circuiting operators", mode, ), ); for (span, kind) in body.control_flow_destroyed.iter() { error.span_note( *span, &format!("use of {} here does not actually short circuit due to \ the const evaluator presently not being able to do control flow. 
\ See https://github.com/rust-lang/rust/issues/49146 for more \ information.", kind), ); } for local in locals { let span = body.local_decls[local].source_info.span; error.span_note( span, "more locals defined here", ); } error.emit(); } } let promoted_temps = if mode == Mode::Const { // Already computed by `mir_const_qualif`. const_promoted_temps.unwrap() } else { Checker::new(tcx, def_id, body, mode).check_const().1 }; // In `const` and `static` everything without `StorageDead` // is `'static`, we don't have to create promoted MIR fragments, // just remove `Drop` and `StorageDead` on "promoted" locals. debug!("run_pass: promoted_temps={:?}", promoted_temps); for block in body.basic_blocks_mut() { block.statements.retain(|statement| { match statement.kind { StatementKind::StorageDead(index) => { !promoted_temps.contains(index) } _ => true } }); let terminator = block.terminator_mut(); match terminator.kind { TerminatorKind::Drop { location: Place { base: PlaceBase::Local(index), projection: None, }, target, .. } => { if promoted_temps.contains(index) { terminator.kind = TerminatorKind::Goto { target, }; } } _ => {} } } } // Statics must be Sync. if mode == Mode::Static { // `#[thread_local]` statics don't have to be `Sync`. for attr in &tcx.get_attrs(def_id)[..] 
{ if attr.check_name(sym::thread_local) { return; } } let ty = body.return_ty(); tcx.infer_ctxt().enter(|infcx| { let param_env = ty::ParamEnv::empty(); let cause = traits::ObligationCause::new(body.span, id, traits::SharedStatic); let mut fulfillment_cx = traits::FulfillmentContext::new(); fulfillment_cx.register_bound(&infcx, param_env, ty, tcx.require_lang_item(lang_items::SyncTraitLangItem), cause); if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) { infcx.report_fulfillment_errors(&err, None, false); } }); } } } fn args_required_const(tcx: TyCtxt<'_>, def_id: DefId) -> Option<FxHashSet<usize>> { let attrs = tcx.get_attrs(def_id); let attr = attrs.iter().find(|a| a.check_name(sym::rustc_args_required_const))?; let mut ret = FxHashSet::default(); for meta in attr.meta_item_list()? { match meta.literal()?.node { LitKind::Int(a, _) => { ret.insert(a as usize); } _ => return None, } } Some(ret) }
42.200113
100
0.471152
2126a6141bbb85a7f14d59bfeb92b1eeb66daa13
7,969
#![cfg(all(test, feature = "test_e2e"))] use azure_core::Context; use azure_cosmos::prelude::{CreateDocumentOptions, GetDocumentOptions}; use serde::{Deserialize, Serialize}; mod setup; use azure_core::prelude::*; use azure_cosmos::prelude::*; use collection::*; #[derive(Serialize, Deserialize, Debug, PartialEq)] struct MyDocument { id: String, hello: u32, } impl<'a> azure_cosmos::CosmosEntity<'a> for MyDocument { type Entity = &'a str; fn partition_key(&'a self) -> Self::Entity { self.id.as_ref() } } #[tokio::test] async fn create_and_delete_document() { const DATABASE_NAME: &str = "test-cosmos-db-create-and-delete-document"; const COLLECTION_NAME: &str = "test-collection-create-and-delete-document"; const DOCUMENT_NAME: &str = "test-document-name-create-and-delete-document"; let client = setup::initialize().unwrap(); client .create_database( azure_core::Context::new(), DATABASE_NAME, CreateDatabaseOptions::new(), ) .await .unwrap(); let database_client = client.into_database_client(DATABASE_NAME); // create a new collection let indexing_policy = IndexingPolicy { automatic: true, indexing_mode: IndexingMode::Consistent, included_paths: vec![], excluded_paths: vec![], }; let options = CreateCollectionOptions::new("/id") .offer(Offer::Throughput(400)) .indexing_policy(indexing_policy); database_client .create_collection(Context::new(), COLLECTION_NAME, options) .await .unwrap(); let collection_client = database_client .clone() .into_collection_client(COLLECTION_NAME); // create a new document let document_data = MyDocument { id: DOCUMENT_NAME.to_owned(), hello: 42, }; collection_client .create_document(Context::new(), &document_data, CreateDocumentOptions::new()) .await .unwrap(); let documents = collection_client .list_documents() .execute::<MyDocument>() .await .unwrap() .documents; assert!(documents.len() == 1); // try to get the contents of the previously created document let document_client = collection_client .clone() .into_document_client(DOCUMENT_NAME, 
&DOCUMENT_NAME) .unwrap(); let document_after_get = document_client .get_document::<MyDocument>(Context::new(), GetDocumentOptions::new()) .await .unwrap(); if let GetDocumentResponse::Found(document) = document_after_get { assert_eq!(document.document.document, document_data); } else { panic!("document not found"); } // delete document document_client.delete_document().execute().await.unwrap(); let documents = collection_client .list_documents() .execute::<MyDocument>() .await .unwrap() .documents; assert!(documents.len() == 0); database_client.delete_database().execute().await.unwrap(); } #[tokio::test] async fn query_documents() { const DATABASE_NAME: &str = "test-cosmos-db-query-documents"; const COLLECTION_NAME: &str = "test-collection-query-documents"; const DOCUMENT_NAME: &str = "test-document-name-query-documents"; let client = setup::initialize().unwrap(); client .create_database( azure_core::Context::new(), DATABASE_NAME, CreateDatabaseOptions::new(), ) .await .unwrap(); let database_client = client.into_database_client(DATABASE_NAME); // create a new collection let indexing_policy = IndexingPolicy { automatic: true, indexing_mode: IndexingMode::Consistent, included_paths: vec![], excluded_paths: vec![], }; let options = CreateCollectionOptions::new("/id") .indexing_policy(indexing_policy) .offer(Offer::S2); database_client .create_collection(Context::new(), COLLECTION_NAME, options) .await .unwrap(); let collection_client = database_client .clone() .into_collection_client(COLLECTION_NAME); // create a new document let document_data = MyDocument { id: DOCUMENT_NAME.to_owned(), hello: 42, }; collection_client .create_document(Context::new(), &document_data, CreateDocumentOptions::new()) .await .unwrap(); let documents = collection_client .list_documents() .execute::<MyDocument>() .await .unwrap() .documents; assert!(documents.len() == 1); // now query all documents and see if we get the correct result let query_result = collection_client .query_documents() 
.query_cross_partition(true) .execute::<MyDocument, _>("SELECT * FROM c") .await .unwrap() .into_documents() .unwrap() .results; assert!(query_result.len() == 1); assert!(query_result[0].document_attributes.rid() == documents[0].document_attributes.rid()); assert_eq!(query_result[0].result, document_data); database_client.delete_database().execute().await.unwrap(); } #[tokio::test] async fn replace_document() { const DATABASE_NAME: &str = "test-cosmos-db-replace-documents"; const COLLECTION_NAME: &str = "test-collection-replace-documents"; const DOCUMENT_NAME: &str = "test-document-name-replace-documents"; let client = setup::initialize().unwrap(); client .create_database( azure_core::Context::new(), DATABASE_NAME, CreateDatabaseOptions::new(), ) .await .unwrap(); let database_client = client.into_database_client(DATABASE_NAME); // create a new collection let indexing_policy = IndexingPolicy { automatic: true, indexing_mode: IndexingMode::Consistent, included_paths: vec![], excluded_paths: vec![], }; let options = CreateCollectionOptions::new("/id") .indexing_policy(indexing_policy) .offer(Offer::S2); database_client .create_collection(Context::new(), COLLECTION_NAME, options) .await .unwrap(); let collection_client = database_client .clone() .into_collection_client(COLLECTION_NAME); // create a new document let mut document_data = MyDocument { id: DOCUMENT_NAME.to_owned(), hello: 42, }; collection_client .create_document(Context::new(), &document_data, CreateDocumentOptions::new()) .await .unwrap(); let documents = collection_client .list_documents() .execute::<MyDocument>() .await .unwrap(); assert!(documents.documents.len() == 1); // replace document with optimistic concurrency and session token document_data.hello = 190; collection_client .clone() .into_document_client(document_data.id.clone(), &document_data.id) .unwrap() .replace_document() .consistency_level(ConsistencyLevel::from(&documents)) .if_match_condition(IfMatchCondition::Match( 
&documents.documents[0].document_attributes.etag(), )) .execute(&document_data) .await .unwrap(); // now get the replaced document let document_client = collection_client .into_document_client(DOCUMENT_NAME, &DOCUMENT_NAME) .unwrap(); let document_after_get = document_client .get_document::<MyDocument>(Context::new(), GetDocumentOptions::new()) .await .unwrap(); if let GetDocumentResponse::Found(document) = document_after_get { assert!(document.document.document.hello == 190); } else { panic!("document not found"); } database_client.delete_database().execute().await.unwrap(); }
28.978182
97
0.632576
2649489c82f6ebb78bcaa67375282299e42f436e
11,199
#![allow(unused_imports)] use error_chain::bail; use fxhash::FxHashMap; use serde::{de::DeserializeOwned, Serialize}; use std::net::SocketAddr; #[cfg(not(feature = "enable_dns"))] use std::net::ToSocketAddrs; use std::result::Result; use std::sync::Arc; use std::sync::Mutex as StdMutex; #[cfg(feature = "enable_full")] use tokio::net::TcpStream; use tokio::sync::broadcast; use tokio::sync::Mutex; use tokio::time::Duration; use tracing::{debug, error, info, instrument, span, trace, warn, Level}; use tracing_futures::{Instrument, WithSubscriber}; #[allow(unused_imports)] use crate::conf::*; use crate::crypto::*; use crate::engine::TaskEngine; use crate::spec::*; use crate::{comms::NodeId, error::*}; use super::hello; use super::helper::*; use super::key_exchange; use super::metrics::*; use super::rx_tx::*; use super::throttle::*; use super::CertificateValidation; use super::{conf::*, hello::HelloMetadata}; #[allow(unused_imports)] use { super::Stream, super::StreamProtocol, super::StreamRx, super::StreamTx, super::StreamTxChannel, }; pub(crate) async fn connect<M, C>( conf: &MeshConfig, hello_path: String, node_id: NodeId, inbox: impl InboxProcessor<M, C> + 'static, metrics: Arc<StdMutex<Metrics>>, throttle: Arc<StdMutex<Throttle>>, exit: broadcast::Receiver<()>, ) -> Result<Tx, CommsError> where M: Send + Sync + Serialize + DeserializeOwned + Default + Clone + 'static, C: Send + Sync + Default + 'static, { // Create all the outbound connections if let Some(target) = &conf.connect_to { // Perform the connect operation let inbox = Box::new(inbox); let upstream = mesh_connect_to::<M, C>( target.clone(), hello_path.clone(), node_id, conf.cfg_mesh.domain_name.clone(), inbox, conf.cfg_mesh.wire_protocol, conf.cfg_mesh.wire_encryption, conf.cfg_mesh.connect_timeout, conf.cfg_mesh.fail_fast, conf.cfg_mesh.certificate_validation.clone(), Arc::clone(&metrics), Arc::clone(&throttle), exit, ) .await?; // Return the mesh Ok(Tx { direction: TxDirection::Upcast(upstream), 
hello_path: hello_path.clone(), wire_format: conf.cfg_mesh.wire_format, relay: None, metrics: Arc::clone(&metrics), throttle: Arc::clone(&throttle), exit_dependencies: Vec::new(), }) } else { bail!(CommsErrorKind::NoAddress); } } pub(super) async fn mesh_connect_to<M, C>( addr: MeshConnectAddr, hello_path: String, node_id: NodeId, domain: String, inbox: Box<dyn InboxProcessor<M, C>>, wire_protocol: StreamProtocol, wire_encryption: Option<KeySize>, timeout: Duration, fail_fast: bool, validation: CertificateValidation, metrics: Arc<StdMutex<super::metrics::Metrics>>, throttle: Arc<StdMutex<super::throttle::Throttle>>, exit: broadcast::Receiver<()>, ) -> Result<Upstream, CommsError> where M: Send + Sync + Serialize + DeserializeOwned + Clone + Default + 'static, C: Send + Sync + Default + 'static, { // Make the connection trace!("prepare connect (path={})", hello_path); let worker_connect = mesh_connect_prepare( addr.clone(), hello_path, node_id, domain, wire_protocol, wire_encryption, fail_fast, ); let (mut worker_connect, mut stream_tx) = crate::engine::timeout(timeout, worker_connect).await??; let wire_format = worker_connect.hello_metadata.wire_format; let server_id = worker_connect.hello_metadata.server_id; // If we are using wire encryption then exchange secrets let ek = match wire_encryption { Some(key_size) => Some( key_exchange::mesh_key_exchange_sender( &mut worker_connect.stream_rx, &mut stream_tx, key_size, validation, ) .await?, ), None => None, }; // background thread - connects and then runs inbox and outbox threads // if the upstream object signals a termination event it will exit trace!("spawning connect worker"); TaskEngine::spawn(mesh_connect_worker::<M, C>( worker_connect, addr, ek, node_id, server_id, inbox, metrics, throttle, exit, )); trace!("building upstream with tx channel"); let stream_tx = StreamTxChannel::new(stream_tx, ek); Ok(Upstream { id: node_id, outbox: stream_tx, wire_format, }) } struct MeshConnectContext { addr: MeshConnectAddr, 
stream_rx: StreamRx, hello_metadata: HelloMetadata, } #[allow(unused_variables)] async fn mesh_connect_prepare( addr: MeshConnectAddr, hello_path: String, node_id: NodeId, domain: String, wire_protocol: StreamProtocol, wire_encryption: Option<KeySize>, #[allow(unused_variables)] fail_fast: bool, ) -> Result<(MeshConnectContext, StreamTx), CommsError> { async move { #[allow(unused_mut)] let mut exp_backoff = Duration::from_millis(100); loop { // If we have a factory then use it #[allow(unused_mut)] let mut stream = { let mut factory = crate::mesh::GLOBAL_COMM_FACTORY.lock().await; if let Some(factory) = factory.as_mut() { let create_client = Arc::clone(&factory); drop(factory); create_client(addr.clone()).await } else { None } }; // If no stream yet exists then create one #[cfg(feature = "enable_full")] if stream.is_none() { stream = { #[cfg(not(feature = "enable_dns"))] let addr = { match format!("{}:{}", addr.host, addr.port) .to_socket_addrs()? .next() { Some(a) => a, None => { bail!(CommsErrorKind::InvalidDomainName); } } }; let stream = match TcpStream::connect(addr.clone()).await { Err(err) if match err.kind() { std::io::ErrorKind::ConnectionRefused => { if fail_fast { bail!(CommsErrorKind::Refused); } true } std::io::ErrorKind::ConnectionReset => true, std::io::ErrorKind::ConnectionAborted => true, _ => false, } => { debug!( "connect failed: reason={}, backoff={}s", err, exp_backoff.as_secs_f32() ); crate::engine::sleep(exp_backoff).await; exp_backoff *= 2; if exp_backoff > Duration::from_secs(10) { exp_backoff = Duration::from_secs(10); } continue; } a => a?, }; // Setup the TCP stream setup_tcp_stream(&stream)?; // Convert the TCP stream into the right protocol let stream = Stream::Tcp(stream); let stream = stream.upgrade_client(wire_protocol).await?; Some(stream) }; #[cfg(all(feature = "enable_web_sys", not(feature = "enable_full")))] bail!(CommsErrorKind::InternalError( "Web based clients require a GLOBAL_COMM_FACTORY".to_string() )); } let stream = match 
stream { Some(a) => a, None => { bail!(CommsErrorKind::InternalError( "Failed to create a client stream".to_string() )); } }; // Build the stream trace!("splitting stream into rx/tx"); let (mut stream_rx, mut stream_tx) = stream.split(); // Say hello let hello_metadata = hello::mesh_hello_exchange_sender( &mut stream_rx, &mut stream_tx, node_id, hello_path.clone(), domain.clone(), wire_encryption, ) .await?; // Return the result return Ok(( MeshConnectContext { addr, stream_rx, hello_metadata, }, stream_tx, )); } } .instrument(tracing::info_span!("connect")) .await } async fn mesh_connect_worker<M, C>( connect: MeshConnectContext, sock_addr: MeshConnectAddr, wire_encryption: Option<EncryptKey>, node_id: NodeId, peer_id: NodeId, inbox: Box<dyn InboxProcessor<M, C>>, metrics: Arc<StdMutex<super::metrics::Metrics>>, throttle: Arc<StdMutex<super::throttle::Throttle>>, exit: broadcast::Receiver<()>, ) where M: Send + Sync + Serialize + DeserializeOwned + Clone + Default + 'static, C: Send + Sync + Default + 'static, { let span = span!( Level::DEBUG, "client", id = node_id.to_short_string().as_str(), peer = peer_id.to_short_string().as_str() ); let wire_format = connect.hello_metadata.wire_format; let context = Arc::new(C::default()); match process_inbox::<M, C>( connect.stream_rx, inbox, metrics, throttle, node_id, peer_id, sock_addr, context, wire_format, wire_encryption, exit, ) .instrument(span.clone()) .await { Ok(_) => {} Err(CommsError(CommsErrorKind::IO(err), _)) if match err.kind() { std::io::ErrorKind::BrokenPipe => true, std::io::ErrorKind::UnexpectedEof => true, std::io::ErrorKind::ConnectionReset => true, std::io::ErrorKind::ConnectionAborted => true, _ => false, } => {} Err(err) => { warn!("connection-failed: {}", err.to_string()); } }; let _span = span.enter(); //#[cfg(feature = "enable_verbose")] debug!("disconnected-inbox: {}", connect.addr.to_string()); }
31.815341
99
0.514689
e2f3f35bf883aecc5a6345ab7340594e7b573e8b
1,723
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT #[cfg(any(feature = "v2_42", feature = "dox"))] use glib::object::Cast; use glib::object::IsA; use glib::translate::*; use glib::GString; use glib::StaticType; use glib::Value; use gobject_sys; use soup_sys; use std::fmt; use CookieJar; glib_wrapper! { pub struct CookieJarDB(Object<soup_sys::SoupCookieJarDB, soup_sys::SoupCookieJarDBClass, CookieJarDBClass>) @extends CookieJar; match fn { get_type => || soup_sys::soup_cookie_jar_db_get_type(), } } impl CookieJarDB { #[cfg(any(feature = "v2_42", feature = "dox"))] pub fn new(filename: &str, read_only: bool) -> CookieJarDB { assert_initialized_main_thread!(); unsafe { CookieJar::from_glib_full(soup_sys::soup_cookie_jar_db_new(filename.to_glib_none().0, read_only.to_glib())).unsafe_cast() } } } pub const NONE_COOKIE_JAR_DB: Option<&CookieJarDB> = None; pub trait CookieJarDBExt: 'static { fn get_property_filename(&self) -> Option<GString>; } impl<O: IsA<CookieJarDB>> CookieJarDBExt for O { fn get_property_filename(&self) -> Option<GString> { unsafe { let mut value = Value::from_type(<GString as StaticType>::static_type()); gobject_sys::g_object_get_property(self.to_glib_none().0 as *mut gobject_sys::GObject, b"filename\0".as_ptr() as *const _, value.to_glib_none_mut().0); value.get().expect("Return Value for property `filename` getter") } } } impl fmt::Display for CookieJarDB { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "CookieJarDB") } }
30.767857
163
0.670342
f4a2d84968e237b60e6dd48dc1323037866a9d79
1,350
extern crate cow_utils; use cow_utils::CowUtils; /// To extend types which implement `AsRef<str>` to have a `ends_with_ignore_case` method. pub trait EndsWithIgnoreCase { /// Returns `true` if the given string slice case-insensitively (using case-folding) matches a suffix of this string slice. /// /// NOTE: This method may allocate heap memory. fn ends_with_ignore_case<S: AsRef<str>>(&self, b: S) -> bool; } impl<T: AsRef<str>> EndsWithIgnoreCase for T { #[inline] fn ends_with_ignore_case<S: AsRef<str>>(&self, b: S) -> bool { let a = self.as_ref(); let b = b.as_ref(); if b.is_empty() { return true; } { let au = a.cow_to_uppercase(); let bu = b.cow_to_uppercase(); let au_length = au.len(); let bu_length = bu.len(); if au_length >= bu_length && unsafe { au.get_unchecked((au_length - bu_length)..) == bu } { return true; } } let al = a.cow_to_lowercase(); let bl = b.cow_to_lowercase(); let al_length = al.len(); let bl_length = bl.len(); if al_length >= bl_length { unsafe { al.get_unchecked((al_length - bl_length)..) == bl } } else { false } } }
27
127
0.543704
b9a76b58915d0e5a09ff216297acb07e94ac09c8
1,349
macro_rules! syscall_a { ($id:expr, $( $reg:tt = $val:expr),*) => {{ let rv: usize; asm!("svc #0" : "={x0}" (rv) : "{x12}" ($id as usize) $(, $reg ($val as usize))* : "x0","x1","x2","x3", "x4", "x5", "x6", "x7" : "volatile" ); rv as u64 }}; } // SAVE x1, x2, x3, x4, x5, r6 #[inline(always)] pub unsafe fn syscall_0(id: u32) -> u64 { syscall_a!(id, ) } #[inline(always)] pub unsafe fn syscall_1(id: u32, a1: usize) -> u64 { syscall_a!(id, "{x0}"=a1) } #[inline(always)] pub unsafe fn syscall_2(id: u32, a1: usize, a2: usize) -> u64 { syscall_a!(id, "{x0}"=a1, "{x1}"=a2) } #[inline(always)] pub unsafe fn syscall_3(id: u32, a1: usize, a2: usize, a3: usize) -> u64 { syscall_a!(id, "{x0}"=a1, "{x1}"=a2, "{x2}"=a3) } #[inline(always)] pub unsafe fn syscall_4(id: u32, a1: usize, a2: usize, a3: usize, a4: usize) -> u64 { syscall_a!(id, "{x0}"=a1, "{x1}"=a2, "{x2}"=a3, "{x3}"=a4) } #[inline(always)] pub unsafe fn syscall_5(id: u32, a1: usize, a2: usize, a3: usize, a4: usize, a5: usize) -> u64 { syscall_a!(id, "{x0}"=a1, "{x1}"=a2, "{x2}"=a3, "{x3}"=a4, "{x4}"=a5) } #[inline(always)] pub unsafe fn syscall_6(id: u32, a1: usize, a2: usize, a3: usize, a4: usize, a5: usize, a6: usize) -> u64 { syscall_a!(id, "{x0}"=a1, "{x1}"=a2, "{x2}"=a3, "{x3}"=a4, "{x4}"=a5, "{x5}"=a6) }
31.372093
108
0.532987
032189dca4877323f6c6a9d6c79982192e3a1eec
6,415
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use rustc; use rustc::{driver, middle}; use rustc::middle::{privacy, ty}; use rustc::lint; use rustc::back::link; use syntax::ast; use syntax::parse::token; use syntax; use std::cell::RefCell; use std::gc::GC; use std::os; use std::collections::{HashMap, HashSet}; use visit_ast::RustdocVisitor; use clean; use clean::Clean; /// Are we generating documentation (`Typed`) or tests (`NotTyped`)? pub enum MaybeTyped { Typed(middle::ty::ctxt), NotTyped(driver::session::Session) } pub type ExternalPaths = RefCell<Option<HashMap<ast::DefId, (Vec<String>, clean::TypeKind)>>>; pub struct DocContext { pub krate: ast::Crate, pub maybe_typed: MaybeTyped, pub src: Path, pub external_paths: ExternalPaths, pub external_traits: RefCell<Option<HashMap<ast::DefId, clean::Trait>>>, pub external_typarams: RefCell<Option<HashMap<ast::DefId, String>>>, pub inlined: RefCell<Option<HashSet<ast::DefId>>>, pub populated_crate_impls: RefCell<HashSet<ast::CrateNum>>, } impl DocContext { pub fn sess<'a>(&'a self) -> &'a driver::session::Session { match self.maybe_typed { Typed(ref tcx) => &tcx.sess, NotTyped(ref sess) => sess } } pub fn tcx_opt<'a>(&'a self) -> Option<&'a ty::ctxt> { match self.maybe_typed { Typed(ref tcx) => Some(tcx), NotTyped(_) => None } } pub fn tcx<'a>(&'a self) -> &'a ty::ctxt { let tcx_opt = self.tcx_opt(); tcx_opt.expect("tcx not present") } } pub struct CrateAnalysis { pub exported_items: privacy::ExportedItems, pub public_items: privacy::PublicItems, pub external_paths: ExternalPaths, pub external_traits: 
RefCell<Option<HashMap<ast::DefId, clean::Trait>>>, pub external_typarams: RefCell<Option<HashMap<ast::DefId, String>>>, pub inlined: RefCell<Option<HashSet<ast::DefId>>>, } pub type Externs = HashMap<String, Vec<String>>; /// Parses, resolves, and typechecks the given crate fn get_ast_and_resolve(cpath: &Path, libs: HashSet<Path>, cfgs: Vec<String>, externs: Externs, triple: Option<String>) -> (DocContext, CrateAnalysis) { use syntax::codemap::dummy_spanned; use rustc::driver::driver::{FileInput, phase_1_parse_input, phase_2_configure_and_expand, phase_3_run_analysis_passes}; use rustc::driver::config::build_configuration; let input = FileInput(cpath.clone()); let warning_lint = lint::builtin::WARNINGS.name_lower(); let sessopts = driver::config::Options { maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()), addl_lib_search_paths: RefCell::new(libs), crate_types: vec!(driver::config::CrateTypeRlib), lint_opts: vec!((warning_lint, lint::Allow)), externs: externs, target_triple: triple.unwrap_or(driver::driver::host_triple().to_string()), ..rustc::driver::config::basic_options().clone() }; let codemap = syntax::codemap::CodeMap::new(); let diagnostic_handler = syntax::diagnostic::default_handler(syntax::diagnostic::Auto, None); let span_diagnostic_handler = syntax::diagnostic::mk_span_handler(diagnostic_handler, codemap); let sess = driver::session::build_session_(sessopts, Some(cpath.clone()), span_diagnostic_handler); let mut cfg = build_configuration(&sess); for cfg_ in cfgs.move_iter() { let cfg_ = token::intern_and_get_ident(cfg_.as_slice()); cfg.push(box(GC) dummy_spanned(ast::MetaWord(cfg_))); } let krate = phase_1_parse_input(&sess, cfg, &input); let name = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), &input); let (krate, ast_map) = phase_2_configure_and_expand(&sess, krate, name.as_slice(), None) .expect("phase_2_configure_and_expand aborted in rustdoc!"); let driver::driver::CrateAnalysis { exported_items, public_items, ty_cx, .. 
} = phase_3_run_analysis_passes(sess, &krate, ast_map, name); debug!("crate: {:?}", krate); (DocContext { krate: krate, maybe_typed: Typed(ty_cx), src: cpath.clone(), external_traits: RefCell::new(Some(HashMap::new())), external_typarams: RefCell::new(Some(HashMap::new())), external_paths: RefCell::new(Some(HashMap::new())), inlined: RefCell::new(Some(HashSet::new())), populated_crate_impls: RefCell::new(HashSet::new()), }, CrateAnalysis { exported_items: exported_items, public_items: public_items, external_paths: RefCell::new(None), external_traits: RefCell::new(None), external_typarams: RefCell::new(None), inlined: RefCell::new(None), }) } pub fn run_core(libs: HashSet<Path>, cfgs: Vec<String>, externs: Externs, path: &Path, triple: Option<String>) -> (clean::Crate, CrateAnalysis) { let (ctxt, analysis) = get_ast_and_resolve(path, libs, cfgs, externs, triple); let ctxt = box(GC) ctxt; super::ctxtkey.replace(Some(ctxt)); let krate = { let mut v = RustdocVisitor::new(&*ctxt, Some(&analysis)); v.visit(&ctxt.krate); v.clean() }; let external_paths = ctxt.external_paths.borrow_mut().take(); *analysis.external_paths.borrow_mut() = external_paths; let map = ctxt.external_traits.borrow_mut().take(); *analysis.external_traits.borrow_mut() = map; let map = ctxt.external_typarams.borrow_mut().take(); *analysis.external_typarams.borrow_mut() = map; let map = ctxt.inlined.borrow_mut().take(); *analysis.inlined.borrow_mut() = map; (krate, analysis) }
35.837989
97
0.633983
ebedb893bd2903e086ce87071c19393258b37ddd
47,147
use crate::cx::*; use crate::cx_xlib::*; use makepad_glx_sys as glx_sys; use makepad_x11_sys as X11_sys; use std::ffi::{CStr, CString}; use std::os::raw::{c_ulong, c_void}; use std::ptr; use std::mem; use makepad_shader_compiler::generate_glsl; impl Cx { pub fn render_view( &mut self, pass_id: usize, view_id: usize, scroll: Vec2, clip: (Vec2, Vec2), full_repaint: bool, view_rect: &Rect, opengl_cx: &OpenglCx, zbias: &mut f32, zbias_step: f32 ) { // tad ugly otherwise the borrow checker locks 'self' and we can't recur let draw_calls_len = self.views[view_id].draw_calls_len; if !full_repaint && !view_rect.intersects(self.views[view_id].get_scrolled_rect()) { return } self.views[view_id].uniform_view_transform(&Mat4::identity()); self.views[view_id].parent_scroll = scroll; let local_scroll = self.views[view_id].get_local_scroll(); let clip = self.views[view_id].intersect_clip(clip); for draw_call_id in 0..draw_calls_len { let sub_view_id = self.views[view_id].draw_calls[draw_call_id].sub_view_id; if sub_view_id != 0 { self.render_view( pass_id, sub_view_id, Vec2 {x: local_scroll.x + scroll.x, y: local_scroll.y + scroll.y}, clip, full_repaint, view_rect, opengl_cx, zbias, zbias_step ); } else { let cxview = &mut self.views[view_id]; //view.platform.uni_vw.update_with_f32_data(device, &view.uniforms); let draw_call = &mut cxview.draw_calls[draw_call_id]; let sh = &self.shaders[draw_call.shader_id]; let shp = sh.platform.as_ref().unwrap(); if draw_call.instance_dirty { draw_call.instance_dirty = false; draw_call.platform.inst_vbuf.update_with_f32_data(opengl_cx, &draw_call.instance); } draw_call.platform.check_vao(draw_call.shader_id, &shp); draw_call.set_zbias(*zbias); draw_call.set_local_scroll(scroll, local_scroll); draw_call.set_clip(clip); *zbias += zbias_step; if draw_call.uniforms_dirty { draw_call.uniforms_dirty = false; } unsafe { gl::UseProgram(shp.program); gl::BindVertexArray(draw_call.platform.vao.unwrap()); let instances = draw_call.instance.len() / 
sh.mapping.instance_props.total_slots; let indices = sh.shader_gen.geometry.indices.len(); let pass_uniforms = self.passes[pass_id].pass_uniforms.as_slice(); let view_uniforms = cxview.view_uniforms.as_slice(); let draw_uniforms = draw_call.draw_uniforms.as_slice(); opengl_cx.set_uniform_buffer(&shp.pass_uniforms, pass_uniforms); opengl_cx.set_uniform_buffer(&shp.view_uniforms, view_uniforms); opengl_cx.set_uniform_buffer(&shp.draw_uniforms, draw_uniforms); opengl_cx.set_uniform_buffer(&shp.uniforms, &draw_call.uniforms); if let Some(ct) = &sh.mapping.const_table{ opengl_cx.set_uniform_array(&shp.const_table_uniform, ct); } // lets set our textures for (i, texture_id) in draw_call.textures_2d.iter().enumerate() { let cxtexture = &mut self.textures[*texture_id as usize]; if cxtexture.update_image { opengl_cx.update_platform_texture_image2d(cxtexture); } // get the loc gl::ActiveTexture(gl::TEXTURE0 + i as u32); if let Some(texture) = cxtexture.platform.gl_texture { gl::BindTexture(gl::TEXTURE_2D, texture); } else { gl::BindTexture(gl::TEXTURE_2D, 0); } } gl::DrawElementsInstanced( gl::TRIANGLES, indices as i32, gl::UNSIGNED_INT, ptr::null(), instances as i32 ); } } } } pub fn calc_dirty_bounds(&mut self, pass_id: usize, view_id: usize, view_bounds: &mut ViewBounds) { let draw_calls_len = self.views[view_id].draw_calls_len; for draw_call_id in 0..draw_calls_len { let sub_view_id = self.views[view_id].draw_calls[draw_call_id].sub_view_id; if sub_view_id != 0 { self.calc_dirty_bounds(pass_id, sub_view_id, view_bounds) } else { let cxview = &mut self.views[view_id]; let draw_call = &mut cxview.draw_calls[draw_call_id]; //let sh = &self.shaders[draw_call.shader_id]; //let shp = sh.platform.as_ref().unwrap(); if draw_call.instance_dirty || draw_call.uniforms_dirty { view_bounds.add_rect(&cxview.get_inverse_scrolled_rect()); } } } } pub fn set_default_depth_and_blend_mode() { unsafe { gl::Enable(gl::DEPTH_TEST); gl::DepthFunc(gl::LEQUAL); 
gl::BlendEquationSeparate(gl::FUNC_ADD, gl::FUNC_ADD); gl::BlendFuncSeparate(gl::ONE, gl::ONE_MINUS_SRC_ALPHA, gl::ONE, gl::ONE_MINUS_SRC_ALPHA); gl::Enable(gl::BLEND); } } pub fn draw_pass_to_window( &mut self, pass_id: usize, dpi_factor: f32, opengl_window: &mut OpenglWindow, opengl_cx: &OpenglCx, _force_full_repaint: bool, ) -> bool { let view_id = self.passes[pass_id].main_view_id.unwrap(); let mut view_bounds = ViewBounds::new(); let mut init_repaint = false; self.calc_dirty_bounds(pass_id, view_id, &mut view_bounds); let full_repaint = true;/*force_full_repaint || view_bounds.max_x - view_bounds.min_x > opengl_window.window_geom.inner_size.x - 100. && view_bounds.max_y - view_bounds.min_y > opengl_window.window_geom.inner_size.y - 100. || opengl_window.opening_repaint_count < 10;*/ if opengl_window.opening_repaint_count < 10 { // for some reason the first repaint doesn't arrive on the window opengl_window.opening_repaint_count += 1; init_repaint = true; } let window; let view_rect; if full_repaint { opengl_window.xlib_window.hide_child_windows(); window = opengl_window.xlib_window.window.unwrap(); let pass_size = self.passes[pass_id].pass_size; self.passes[pass_id].set_ortho_matrix(Vec2::default(), pass_size); let pix_width = opengl_window.window_geom.inner_size.x * opengl_window.window_geom.dpi_factor; let pix_height = opengl_window.window_geom.inner_size.y * opengl_window.window_geom.dpi_factor; unsafe { glx_sys::glXMakeCurrent(opengl_cx.display, window, opengl_cx.context); gl::Viewport(0, 0, pix_width as i32, pix_height as i32); } view_rect = Rect::default(); } else { if view_bounds.max_x == std::f32::NEG_INFINITY || view_bounds.max_y == std::f32::NEG_INFINITY || view_bounds.min_x == std::f32::INFINITY || view_bounds.min_x == std::f32::INFINITY || view_bounds.min_x == view_bounds.max_x || view_bounds.min_y == view_bounds.max_y { return false } /* unsafe { glx_sys::glXMakeCurrent(xlib_app.display, opengl_window.xlib_window.window.unwrap(), 
opengl_cx.context); gl::Viewport( 0, 0, (opengl_window.window_geom.inner_size.x * opengl_window.window_geom.dpi_factor) as i32, (opengl_window.window_geom.inner_size.y * opengl_window.window_geom.dpi_factor) as i32 ); gl::ClearColor(0.0, 1.0, 0.0, 0.0); gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT); glx_sys::glXSwapBuffers(xlib_app.display, opengl_window.xlib_window.window.unwrap()); }*/ let pix_width = (view_bounds.max_x - view_bounds.min_x) * opengl_window.window_geom.dpi_factor; let pix_height = (view_bounds.max_y - view_bounds.min_y) * opengl_window.window_geom.dpi_factor; window = opengl_window.xlib_window.alloc_child_window( (view_bounds.min_x * opengl_window.window_geom.dpi_factor) as i32, (view_bounds.min_y * opengl_window.window_geom.dpi_factor) as i32, pix_width as u32, pix_height as u32 ).unwrap(); //let pass_size = self.passes[pass_id].pass_size; self.passes[pass_id].set_ortho_matrix( Vec2 {x: view_bounds.min_x, y: view_bounds.min_y}, Vec2 {x: pix_width / opengl_window.window_geom.dpi_factor, y: pix_height / opengl_window.window_geom.dpi_factor} ); unsafe { glx_sys::glXMakeCurrent(opengl_cx.display, window, opengl_cx.context); gl::Viewport(0, 0, pix_width as i32, pix_height as i32); } view_rect = Rect {x: view_bounds.min_x, y: view_bounds.min_y, w: view_bounds.max_x - view_bounds.min_x, h: view_bounds.max_y - view_bounds.min_y} } self.passes[pass_id].uniform_camera_view(&Mat4::identity()); self.passes[pass_id].set_dpi_factor(dpi_factor); // set up the let clear_color = if self.passes[pass_id].color_textures.len() == 0 { Color::default() } else { match self.passes[pass_id].color_textures[0].clear_color { ClearColor::InitWith(color) => color, ClearColor::ClearWith(color) => color } }; let clear_depth = match self.passes[pass_id].clear_depth { ClearDepth::InitWith(depth) => depth, ClearDepth::ClearWith(depth) => depth }; unsafe { gl::BindFramebuffer(gl::FRAMEBUFFER, 0); gl::ClearDepth(clear_depth); gl::ClearColor(clear_color.r, clear_color.g, 
clear_color.b, clear_color.a); gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT); } Self::set_default_depth_and_blend_mode(); let mut zbias = 0.0; let zbias_step = self.passes[pass_id].zbias_step; self.render_view( pass_id, view_id, Vec2::default(), (Vec2 {x: -50000., y: -50000.}, Vec2 {x: 50000., y: 50000.}), full_repaint, &view_rect, &opengl_cx, &mut zbias, zbias_step ); unsafe { glx_sys::glXSwapBuffers(opengl_cx.display, window); } return init_repaint; } pub fn draw_pass_to_texture( &mut self, pass_id: usize, inherit_dpi_factor: f32, opengl_cx: &OpenglCx, ) { let pass_size = self.passes[pass_id].pass_size; self.passes[pass_id].set_ortho_matrix(Vec2::default(), pass_size); self.passes[pass_id].uniform_camera_view(&Mat4::identity()); self.passes[pass_id].paint_dirty = false; let dpi_factor = if let Some(override_dpi_factor) = self.passes[pass_id].override_dpi_factor { override_dpi_factor } else { inherit_dpi_factor }; self.passes[pass_id].set_dpi_factor(dpi_factor); let mut clear_color = Color::default(); let mut clear_depth = 1.0; let mut clear_flags = 0; // make a framebuffer if self.passes[pass_id].platform.gl_framebuffer.is_none() { unsafe { let mut gl_framebuffer = std::mem::MaybeUninit::uninit(); gl::GenFramebuffers(1, gl_framebuffer.as_mut_ptr()); self.passes[pass_id].platform.gl_framebuffer = Some(gl_framebuffer.assume_init()); } } // bind the framebuffer unsafe { gl::BindFramebuffer(gl::FRAMEBUFFER, self.passes[pass_id].platform.gl_framebuffer.unwrap()); } for (index, color_texture) in self.passes[pass_id].color_textures.iter().enumerate() { match color_texture.clear_color { ClearColor::InitWith(color) => { if opengl_cx.update_platform_render_target(&mut self.textures[color_texture.texture_id], dpi_factor, pass_size, false) { clear_color = color; clear_flags = gl::COLOR_BUFFER_BIT; } }, ClearColor::ClearWith(color) => { opengl_cx.update_platform_render_target(&mut self.textures[color_texture.texture_id], dpi_factor, pass_size, false); clear_color = 
color; clear_flags = gl::COLOR_BUFFER_BIT; } } if let Some(gl_texture) = self.textures[color_texture.texture_id].platform.gl_texture { unsafe { gl::FramebufferTexture2D(gl::FRAMEBUFFER, gl::COLOR_ATTACHMENT0 + index as u32, gl::TEXTURE_2D, gl_texture, 0); } } } // attach/clear depth buffers, if any if let Some(depth_texture_id) = self.passes[pass_id].depth_texture { match self.passes[pass_id].clear_depth { ClearDepth::InitWith(depth_clear) => { if opengl_cx.update_platform_render_target(&mut self.textures[depth_texture_id], dpi_factor, pass_size, true) { clear_depth = depth_clear; clear_flags = gl::DEPTH_BUFFER_BIT; } }, ClearDepth::ClearWith(depth_clear) => { opengl_cx.update_platform_render_target(&mut self.textures[depth_texture_id], dpi_factor, pass_size, true); clear_depth = depth_clear; clear_flags = gl::COLOR_BUFFER_BIT; } } if let Some(gl_texture) = self.textures[depth_texture_id].platform.gl_texture { unsafe { gl::FramebufferTexture2D(gl::FRAMEBUFFER, gl::DEPTH_STENCIL_ATTACHMENT, gl::TEXTURE_2D, gl_texture, 0); } } } unsafe { gl::Viewport(0, 0, (pass_size.x * dpi_factor) as i32, (pass_size.y * dpi_factor) as i32); } if clear_flags != 0 { unsafe { gl::ClearDepth(clear_depth); gl::ClearColor(clear_color.r, clear_color.g, clear_color.b, clear_color.a); gl::Clear(clear_flags); } } Self::set_default_depth_and_blend_mode(); let mut zbias = 0.0; let zbias_step = self.passes[pass_id].zbias_step; let view_id = self.passes[pass_id].main_view_id.unwrap(); self.render_view( pass_id, view_id, Vec2::default(), (Vec2 {x: -50000., y: -50000.}, Vec2 {x: 50000., y: 50000.}), true, &Rect::default(), &opengl_cx, &mut zbias, zbias_step ); } //let view_id = self.passes[pass_id].main_view_id.unwrap(); //let _pass_size = self.passes[pass_id].pass_size; /* for (index, color_texture) in self.passes[pass_id].color_textures.iter().enumerate() { let cxtexture = &mut self.textures[color_texture.texture_id]; metal_cx.update_platform_render_target(cxtexture, dpi_factor, pass_size, 
false); let color_attachment = render_pass_descriptor.color_attachments().object_at(index).unwrap(); if let Some(mtltex) = &cxtexture.platform.mtltexture { color_attachment.set_texture(Some(&mtltex)); } else { println!("draw_pass_to_texture invalid render target"); } color_attachment.set_store_action(MTLStoreAction::Store); if let Some(color) = color_texture.clear_color { color_attachment.set_load_action(MTLLoadAction::Clear); color_attachment.set_clear_color(MTLClearColor::new(color.r as f64, color.g as f64, color.b as f64, color.a as f64)); } else { color_attachment.set_load_action(MTLLoadAction::Load); } } */ //self.render_view(pass_id, view_id, true, &Rect::zero(), &opengl_cx); // commit //} pub fn opengl_compile_all_shaders(&mut self, opengl_cx: &OpenglCx) { unsafe { glx_sys::glXMakeCurrent(opengl_cx.display, opengl_cx.hidden_window, opengl_cx.context); } for (index, sh) in self.shaders.iter_mut().enumerate() { let result = Self::opengl_compile_shader(index, false, sh, opengl_cx, &mut self.shader_inherit_cache); if let ShaderCompileResult::Fail{err, ..} = result { panic!("{}", err); } }; } pub fn opengl_get_info_log(compile: bool,shader: usize, source: &str) -> String { unsafe{ let mut length = 0; if compile { gl::GetShaderiv(shader as u32, gl::INFO_LOG_LENGTH, &mut length); } else { gl::GetProgramiv(shader as u32, gl::INFO_LOG_LENGTH, &mut length); } let mut log = Vec::with_capacity(length as usize); if compile { gl::GetShaderInfoLog(shader as u32, length, ptr::null_mut(), log.as_mut_ptr()); } else { gl::GetProgramInfoLog(shader as u32, length, ptr::null_mut(), log.as_mut_ptr()); } log.set_len(length as usize); let mut r = "".to_string(); r.push_str(CStr::from_ptr(log.as_ptr()).to_str().unwrap()); r.push_str("\n"); let split = source.split("\n"); for (line, chunk) in split.enumerate() { r.push_str(&(line + 1).to_string()); r.push_str(":"); r.push_str(chunk); r.push_str("\n"); } r } } pub fn opengl_has_shader_error(compile: bool, shader: usize, source: &str) 
-> Option<String> { //None unsafe { let mut success = i32::from(gl::FALSE); if compile { gl::GetShaderiv(shader as u32, gl::COMPILE_STATUS, &mut success); } else { gl::GetProgramiv(shader as u32, gl::LINK_STATUS, &mut success); }; if success != i32::from(gl::TRUE) { Some(Self::opengl_get_info_log(compile, shader, source)) } else { None } } } pub fn ceil_div4(base: usize) -> usize { let r = base >> 2; if base & 3 != 0 { return r + 1 } r } pub fn opengl_get_attributes(program: u32, prefix: &str, slots: usize) -> Vec<OpenglAttribute> { let mut attribs = Vec::new(); let stride = (slots * mem::size_of::<f32>()) as i32; let num_attr = Self::ceil_div4(slots); for i in 0..num_attr { let mut name0 = prefix.to_string(); name0.push_str(&i.to_string()); name0.push_str("\0"); let mut size = ((slots - i * 4)) as i32; if size > 4 { size = 4; } unsafe { attribs.push( OpenglAttribute { loc: { let loc = gl::GetAttribLocation(program, name0.as_ptr() as *const _) as u32; loc }, offset: (i * 4 * mem::size_of::<f32>()) as usize, size: size, stride: stride } ) } } attribs } pub fn opengl_get_uniforms(program: u32, unis: &Vec<PropDef>) -> Vec<OpenglUniform> { let mut gl_uni = Vec::new(); for uni in unis { gl_uni.push( Self::opengl_get_uniform(program, &uni.name, uni.prop_id.shader_ty().size()) ); } gl_uni } pub fn opengl_get_uniform(program: u32, name:&str, size:usize) -> OpenglUniform { let mut name0 = String::new(); name0.push_str(name); name0.push_str("\0"); unsafe { OpenglUniform { loc:gl::GetUniformLocation(program, name0.as_ptr() as *const _), name: name.to_string(), size: size } } } pub fn opengl_get_texture_slots(program: u32, texture_slots: &Vec<PropDef>) -> Vec<OpenglUniform> { let mut gl_texture_slots = Vec::new(); for slot in texture_slots { let mut name0 = "".to_string(); name0.push_str(&slot.name); name0.push_str("\0"); unsafe { gl_texture_slots.push(OpenglUniform { loc: gl::GetUniformLocation(program, name0.as_ptr() as *const _), name: slot.name.clone(), size: 0 
//,sampler:sam.sampler.clone() }) } } gl_texture_slots } pub fn opengl_compile_shader(shader_id:usize, use_const_table:bool, sh: &mut CxShader, opengl_cx: &OpenglCx, inherit_cache: &mut ShaderInheritCache) -> ShaderCompileResult { // lets compile. let shader_ast = sh.shader_gen.lex_parse_analyse(true, use_const_table, inherit_cache); let shader_ast = match shader_ast{ ShaderGenResult::Error(err)=>{ return ShaderCompileResult::Fail{id:shader_id, err:err} }, ShaderGenResult::PatchedConstTable(const_table)=>{ sh.mapping.const_table = Some(const_table); return ShaderCompileResult::Nop{id:shader_id} }, ShaderGenResult::ShaderAst(shader_ast)=>{ shader_ast } }; // lets generate the vertexshader let vertex = generate_glsl::generate_vertex_shader(&shader_ast, use_const_table); let fragment = generate_glsl::generate_fragment_shader(&shader_ast, use_const_table); let mapping = CxShaderMapping::from_shader_gen(&sh.shader_gen, shader_ast.const_table.borrow_mut().take()); let vertex = format!(" #version 100 precision highp float; precision highp int; vec4 sample2d(sampler2D sampler, vec2 pos){{return texture2D(sampler, vec2(pos.x, 1.0-pos.y));}} {}\0", vertex); let fragment = format!(" #version 100 #extension GL_OES_standard_derivatives : enable precision highp float; precision highp int; vec4 sample2d(sampler2D sampler, vec2 pos){{return texture2D(sampler, vec2(pos.x, 1.0-pos.y));}} {}\0", fragment); if shader_ast.debug{ println!("--------------- Vertex shader {} --------------- \n{}\n---------------\n", shader_id, vertex); println!("--------------- Fragment shader {} --------------- \n{}\n---------------\n", shader_id, fragment); } if let Some(sh_platform) = &sh.platform{ if sh_platform.vertex == vertex && sh_platform.fragment == fragment{ sh.mapping = mapping; return ShaderCompileResult::Nop{id:shader_id} } } //println!("{} {} {}", sh.name, vertex, fragment); unsafe { let vs = gl::CreateShader(gl::VERTEX_SHADER); gl::ShaderSource(vs, 1, [vertex.as_ptr() as *const _].as_ptr(), 
ptr::null()); gl::CompileShader(vs); //println!("{}", Self::opengl_get_info_log(true, vs as usize, &vertex)); if let Some(error) = Self::opengl_has_shader_error(true, vs as usize, &vertex) { if use_const_table{ println!("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n{}", error); return ShaderCompileResult::Nop{id:shader_id} } panic!("ERROR::SHADER::VERTEX::COMPILATION_FAILED\n{}", error); } let fs = gl::CreateShader(gl::FRAGMENT_SHADER); gl::ShaderSource(fs, 1, [fragment.as_ptr() as *const _].as_ptr(), ptr::null()); gl::CompileShader(fs); //println!("{}", Self::opengl_get_info_log(true, fs as usize, &fragment)); if let Some(error) = Self::opengl_has_shader_error(true, fs as usize, &fragment) { if use_const_table{ println!("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n{}", error); return ShaderCompileResult::Nop{id:shader_id} } panic!("ERROR::SHADER::FRAGMENT::COMPILATION_FAILED\n{}", error); } let program = gl::CreateProgram(); gl::AttachShader(program, vs); gl::AttachShader(program, fs); gl::LinkProgram(program); if let Some(error) = Self::opengl_has_shader_error(false, program as usize, "") { if use_const_table{ println!("ERROR::SHADER::LINK::COMPILATION_FAILED\n{}", error); return ShaderCompileResult::Nop{id:shader_id} } panic!("ERROR::SHADER::LINK::COMPILATION_FAILED\n{}", error); } gl::DeleteShader(vs); gl::DeleteShader(fs); let geometries = Self::opengl_get_attributes(program, "mpsc_packed_geometry_", mapping.geometry_props.total_slots); let instances = Self::opengl_get_attributes(program, "mpsc_packed_instance_", mapping.instance_props.total_slots); // lets fetch the uniform positions for our uniforms sh.platform = Some(CxPlatformShader { program: program, geom_ibuf: { let mut buf = OpenglBuffer::default(); buf.update_with_u32_data(opengl_cx, &sh.shader_gen.geometry.indices); buf }, geom_vbuf: { let mut buf = OpenglBuffer::default(); buf.update_with_f32_data(opengl_cx, &sh.shader_gen.geometry.vertices); buf }, geometries, instances, vertex, fragment, 
pass_uniforms: Self::opengl_get_uniforms(program, &mapping.pass_uniforms), view_uniforms: Self::opengl_get_uniforms(program, &mapping.view_uniforms), draw_uniforms: Self::opengl_get_uniforms(program, &mapping.draw_uniforms), const_table_uniform: Self::opengl_get_uniform(program, "mpsc_const_table", 1), uniforms: Self::opengl_get_uniforms(program, &mapping.uniforms), }); sh.mapping = mapping; return ShaderCompileResult::Ok{id:shader_id}; } } } #[derive(Clone, PartialEq)] pub struct ViewBounds { pub min_x: f32, pub min_y: f32, pub max_x: f32, pub max_y: f32 } impl ViewBounds { fn new() -> ViewBounds { ViewBounds { min_x: std::f32::INFINITY, min_y: std::f32::INFINITY, max_x: std::f32::NEG_INFINITY, max_y: std::f32::NEG_INFINITY, } } fn add_rect(&mut self, rect: &Rect) { if rect.x < self.min_x { self.min_x = rect.x; } if rect.x + rect.w > self.max_x { self.max_x = rect.x + rect.w; } if rect.y < self.min_y { self.min_y = rect.y; } if rect.y + rect.h > self.max_y { self.max_y = rect.y + rect.h; } } } pub struct OpenglCx { pub display: *mut glx_sys::Display, pub context: glx_sys::GLXContext, pub visual_info: glx_sys::XVisualInfo, pub hidden_window: glx_sys::Window, } impl OpenglCx { pub fn new(display: *mut X11_sys::Display) -> OpenglCx { unsafe { let display = display as *mut glx_sys::Display; // Query GLX version. let mut major = 0; let mut minor = 0; assert!( glx_sys::glXQueryVersion(display, &mut major, &mut minor) >= 0, "can't query GLX version" ); // Check that GLX version number is 1.4 or higher. 
assert!( major > 1 || major == 1 && minor >= 4, "GLX version must be 1.4 or higher, got {}.{}", major, minor, ); let screen = glx_sys::XDefaultScreen(display); // Query extensions string let supported_extensions = glx_sys::glXQueryExtensionsString(display, screen); assert!( !supported_extensions.is_null(), "can't query GLX extensions string" ); let supported_extensions = CStr::from_ptr(supported_extensions).to_str().unwrap(); // Check that required extensions are supported. let required_extensions = &["GLX_ARB_get_proc_address", "GLX_ARB_create_context"]; for required_extension in required_extensions { assert!( supported_extensions.contains(required_extension), "extension {} is required, but not supported", required_extension, ); } // Load GLX function pointers. #[allow(non_snake_case)] let glXCreateContextAttribsARB = mem::transmute::< _, glx_sys::PFNGLXCREATECONTEXTATTRIBSARBPROC, >(glx_sys::glXGetProcAddressARB( CString::new("glXCreateContextAttribsARB") .unwrap() .to_bytes_with_nul() .as_ptr(), )) .expect("can't load glXCreateContextAttribsARB function pointer"); // Load GL function pointers. gl::load_with(|symbol| { glx_sys::glXGetProcAddressARB( CString::new(symbol).unwrap().to_bytes_with_nul().as_ptr(), ) .map_or(ptr::null(), |ptr| ptr as *const c_void) }); // Choose framebuffer configuration. let config_attribs = &[ glx_sys::GLX_DOUBLEBUFFER as i32, glx_sys::True as i32, glx_sys::GLX_RED_SIZE as i32, 8, glx_sys::GLX_GREEN_SIZE as i32, 8, glx_sys::GLX_BLUE_SIZE as i32, 8, glx_sys::GLX_ALPHA_SIZE as i32, 8, glx_sys::None as i32, ]; let mut config_count = 0; let configs = glx_sys::glXChooseFBConfig( display, glx_sys::XDefaultScreen(display), config_attribs.as_ptr(), &mut config_count, ); if configs.is_null() { panic!("can't choose framebuffer configuration"); } let config = *configs; glx_sys::XFree(configs as *mut c_void); // Create GLX context. 
let context_attribs = &[ glx_sys::GLX_CONTEXT_MAJOR_VERSION_ARB as i32, 3, glx_sys::GLX_CONTEXT_MINOR_VERSION_ARB as i32, 0, glx_sys::GLX_CONTEXT_PROFILE_MASK_ARB as i32, glx_sys::GLX_CONTEXT_ES_PROFILE_BIT_EXT as i32, glx_sys::None as i32 ]; let context = glXCreateContextAttribsARB( display, config, ptr::null_mut(), glx_sys::True as i32, context_attribs.as_ptr(), ); // Get visual from framebuffer configuration. let visual_info_ptr = glx_sys::glXGetVisualFromFBConfig(display, config); assert!( !visual_info_ptr.is_null(), "can't get visual from framebuffer configuration" ); let visual_info = *visual_info_ptr; glx_sys::XFree(visual_info_ptr as *mut c_void); let root_window = glx_sys::XRootWindow(display, screen); // Create hidden window compatible with visual // // We need a hidden window because we sometimes want to create OpenGL resources, such as // shaders, when Makepad does not have any windows open. In cases such as these, we need // *some* window to make the OpenGL context current on. let mut attributes = mem::zeroed::<glx_sys::XSetWindowAttributes>(); // We need a color map that is compatible with our visual. Otherwise, the call to // XCreateWindow below will fail. attributes.colormap = glx_sys::XCreateColormap( display, root_window, visual_info.visual, glx_sys::AllocNone as i32 ); let hidden_window = glx_sys::XCreateWindow( display, root_window, 0, 0, 16, 16, 0, visual_info.depth, glx_sys::InputOutput as u32, visual_info.visual, glx_sys::CWColormap as c_ulong, &mut attributes, ); // To make sure the window stays hidden, we simply never call XMapWindow on it. 
OpenglCx { display, context, visual_info, hidden_window, } } } pub fn set_uniform_array(&self, loc: &OpenglUniform, array: &[f32]) { unsafe{ gl::Uniform1fv(loc.loc as i32, array.len() as i32, array.as_ptr()); } } pub fn set_uniform_buffer(&self, locs: &Vec<OpenglUniform>, uni: &[f32]) { let mut o = 0; for loc in locs { if o + loc.size > uni.len() { return } if (o & 3) != 0 && (o & 3) + loc.size > 4 { // goes over the boundary o += 4 - (o & 3); // make jump to new slot } if loc.loc >= 0 { unsafe { match loc.size { 1 => { gl::Uniform1f(loc.loc as i32, uni[o]); }, 2 => gl::Uniform2f(loc.loc as i32, uni[o], uni[o + 1]), 3 => gl::Uniform3f(loc.loc as i32, uni[o], uni[o + 1], uni[o + 2]), 4 => { gl::Uniform4f(loc.loc as i32, uni[o], uni[o + 1], uni[o + 2], uni[o + 3]); }, 16 => { gl::UniformMatrix4fv(loc.loc as i32, 1, 0, uni.as_ptr().offset((o) as isize)); }, _ => () } } }; o = o + loc.size; } } pub fn update_platform_texture_image2d(&self, cxtexture: &mut CxTexture) { if cxtexture.desc.width.is_none() || cxtexture.desc.height.is_none() { println!("update_platform_texture_image2d without width/height"); return; } let width = cxtexture.desc.width.unwrap(); let height = cxtexture.desc.height.unwrap(); // allocate new texture if descriptor change if cxtexture.platform.alloc_desc != cxtexture.desc { cxtexture.platform.alloc_desc = cxtexture.desc.clone(); cxtexture.platform.width = width as u64; cxtexture.platform.height = height as u64; let gl_texture = match cxtexture.platform.gl_texture { None => { unsafe { let mut gl_texture = std::mem::MaybeUninit::uninit(); gl::GenTextures(1, gl_texture.as_mut_ptr()); let gl_texture = gl_texture.assume_init(); cxtexture.platform.gl_texture = Some(gl_texture); gl_texture } } Some(gl_texture_old) => { gl_texture_old } }; unsafe { gl::BindTexture(gl::TEXTURE_2D, gl_texture); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32); 
gl::TexImage2D(gl::TEXTURE_2D, 0, gl::RGBA as i32, width as i32, height as i32, 0, gl::RGBA, gl::UNSIGNED_BYTE, cxtexture.image_u32.as_ptr() as *const _); gl::BindTexture(gl::TEXTURE_2D, 0); } } cxtexture.update_image = false; } pub fn update_platform_render_target(&self, cxtexture: &mut CxTexture, dpi_factor: f32, size: Vec2, is_depth: bool) -> bool { let width = if let Some(width) = cxtexture.desc.width {width as u64} else {(size.x * dpi_factor) as u64}; let height = if let Some(height) = cxtexture.desc.height {height as u64} else {(size.y * dpi_factor) as u64}; if cxtexture.platform.width == width && cxtexture.platform.height == height && cxtexture.platform.alloc_desc == cxtexture.desc { return false } unsafe { if let Some(gl_texture) = cxtexture.platform.gl_texture { gl::DeleteTextures(1, &gl_texture); } let mut gl_texture = std::mem::MaybeUninit::uninit(); gl::GenTextures(1, gl_texture.as_mut_ptr()); let gl_texture = gl_texture.assume_init(); gl::BindTexture(gl::TEXTURE_2D, gl_texture); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MIN_FILTER, gl::LINEAR as i32); gl::TexParameteri(gl::TEXTURE_2D, gl::TEXTURE_MAG_FILTER, gl::LINEAR as i32); cxtexture.platform.alloc_desc = cxtexture.desc.clone(); cxtexture.platform.width = width; cxtexture.platform.height = height; cxtexture.platform.gl_texture = Some(gl_texture); if !is_depth { match cxtexture.desc.format { TextureFormat::Default | TextureFormat::RenderBGRA => { gl::TexImage2D(gl::TEXTURE_2D, 0, gl::RGBA as i32, width as i32, height as i32, 0, gl::RGBA, gl::UNSIGNED_BYTE, ptr::null()); }, _ => { println!("update_platform_render_target unsupported texture format"); return false; } } } else { match cxtexture.desc.format { TextureFormat::Default | TextureFormat::Depth32Stencil8 => { println!("Depth stencil texture!"); }, _ => { println!("update_platform_render_targete unsupported texture format"); return false; } } } } return true; } } #[derive(Clone)] pub struct CxPlatformShader { pub program: u32, pub vertex: 
String, pub fragment: String, pub geom_vbuf: OpenglBuffer, pub geom_ibuf: OpenglBuffer, pub geometries: Vec<OpenglAttribute>, pub instances: Vec<OpenglAttribute>, pub pass_uniforms: Vec<OpenglUniform>, pub view_uniforms: Vec<OpenglUniform>, pub draw_uniforms: Vec<OpenglUniform>, pub const_table_uniform: OpenglUniform, pub uniforms: Vec<OpenglUniform> } #[derive(Clone)] pub struct OpenglWindow { pub first_draw: bool, pub window_id: usize, pub window_geom: WindowGeom, pub opening_repaint_count: u32, pub cal_size: Vec2, pub xlib_window: XlibWindow, } impl OpenglWindow { pub fn new(window_id: usize, opengl_cx: &OpenglCx, xlib_app: &mut XlibApp, inner_size: Vec2, position: Option<Vec2>, title: &str) -> OpenglWindow { let mut xlib_window = XlibWindow::new(xlib_app, window_id); let visual_info = unsafe { mem::transmute(opengl_cx.visual_info) }; xlib_window.init(title, inner_size, position, visual_info); OpenglWindow { first_draw: true, window_id, opening_repaint_count: 0, cal_size: Vec2::default(), window_geom: xlib_window.get_window_geom(), xlib_window } } pub fn resize_framebuffer(&mut self, _opengl_cx: &OpenglCx) -> bool { let cal_size = Vec2 { x: self.window_geom.inner_size.x * self.window_geom.dpi_factor, y: self.window_geom.inner_size.y * self.window_geom.dpi_factor }; if self.cal_size != cal_size { self.cal_size = cal_size; // resize the framebuffer true } else { false } } } #[derive(Default, Clone)] pub struct OpenglAttribute { pub loc: u32, pub size: i32, pub offset: usize, pub stride: i32 } #[derive(Debug, Default, Clone)] pub struct OpenglUniform { pub loc: i32, pub name: String, pub size: usize } /* #[derive(Default, Clone)] pub struct OpenglTextureSlot { pub loc: isize, pub name: String } */ #[derive(Clone, Default)] pub struct CxPlatformView { } #[derive(Default, Clone)] pub struct CxPlatformDrawCall { pub inst_vbuf: OpenglBuffer, pub vao_shader_id: Option<usize>, pub vao: Option<u32> } impl CxPlatformDrawCall { pub fn check_vao(&mut self, shader_id: usize, 
shp: &CxPlatformShader) { if self.vao_shader_id.is_none() || self.vao_shader_id.unwrap() != shader_id { self.free_vao(); // create the VAO unsafe { let mut vao = std::mem::MaybeUninit::uninit(); gl::GenVertexArrays(1, vao.as_mut_ptr()); let vao = vao.assume_init(); gl::BindVertexArray(vao); // bind the vertex and indexbuffers gl::BindBuffer(gl::ARRAY_BUFFER, shp.geom_vbuf.gl_buffer.unwrap()); for attr in &shp.geometries { gl::VertexAttribPointer(attr.loc, attr.size, gl::FLOAT, 0, attr.stride, attr.offset as *const () as *const _); gl::EnableVertexAttribArray(attr.loc); } gl::BindBuffer(gl::ARRAY_BUFFER, self.inst_vbuf.gl_buffer.unwrap()); for attr in &shp.instances { gl::VertexAttribPointer(attr.loc, attr.size, gl::FLOAT, 0, attr.stride, attr.offset as *const () as *const _); gl::EnableVertexAttribArray(attr.loc); gl::VertexAttribDivisor(attr.loc, 1 as gl::types::GLuint); } // bind the indexbuffer gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, shp.geom_ibuf.gl_buffer.unwrap()); gl::BindVertexArray(0); self.vao_shader_id = Some(shader_id); self.vao = Some(vao); } } } fn free_vao(&mut self) { unsafe { if let Some(mut vao) = self.vao { gl::DeleteVertexArrays(1, &mut vao); self.vao = None; } } } } #[derive(Default, Clone)] pub struct CxPlatformTexture { pub alloc_desc: TextureDesc, pub width: u64, pub height: u64, pub gl_texture: Option<u32>, } #[derive(Default, Clone)] pub struct CxPlatformPass { pub gl_framebuffer: Option<u32> } #[derive(Default, Clone)] pub struct OpenglBuffer { pub gl_buffer: Option<u32> } impl OpenglBuffer { pub fn alloc_gl_buffer(&mut self) { unsafe { let mut gl_buffer = std::mem::MaybeUninit::uninit(); gl::GenBuffers(1, gl_buffer.as_mut_ptr()); self.gl_buffer = Some(gl_buffer.assume_init()); } } pub fn update_with_f32_data(&mut self, _opengl_cx: &OpenglCx, data: &Vec<f32>) { if self.gl_buffer.is_none() { self.alloc_gl_buffer(); } unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, self.gl_buffer.unwrap()); gl::BufferData(gl::ARRAY_BUFFER, (data.len() * 
mem::size_of::<f32>()) as gl::types::GLsizeiptr, data.as_ptr() as *const _, gl::STATIC_DRAW); } } pub fn update_with_u32_data(&mut self, _opengl_cx: &OpenglCx, data: &Vec<u32>) { if self.gl_buffer.is_none() { self.alloc_gl_buffer(); } unsafe { gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.gl_buffer.unwrap()); gl::BufferData(gl::ELEMENT_ARRAY_BUFFER, (data.len() * mem::size_of::<u32>()) as gl::types::GLsizeiptr, data.as_ptr() as *const _, gl::STATIC_DRAW); } } }
38.868096
177
0.529366
292b38ff1cf04a3e29aba66fb72e619b2aca277a
1,212
// structs1.rs
// Exercise solution demonstrating Rust's three struct flavors:
// classic (named fields), tuple (positional fields), and unit (no fields).

/// Classic C-style struct: a color identified by name and hex code.
struct ColorClassicStruct {
    name: String,
    hex: String,
}

/// Tuple struct: the same color data, addressed by position instead of name.
struct ColorTupleStruct(String, String);

/// Unit struct: carries no data; `Debug` is derived so it can be formatted.
#[derive(Debug)]
struct UnitStruct;

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn classic_c_structs() {
        // Instantiate with explicit field values rather than field-init shorthand.
        let green = ColorClassicStruct {
            name: String::from("green"),
            hex: String::from("#00FF00"),
        };

        assert_eq!(green.name, "green");
        assert_eq!(green.hex, "#00FF00");
    }

    #[test]
    fn tuple_structs() {
        // Construct the tuple struct directly; fields are read via .0 / .1.
        let green = ColorTupleStruct(String::from("green"), String::from("#00FF00"));

        assert_eq!(green.0, "green");
        assert_eq!(green.1, "#00FF00");
    }

    #[test]
    fn unit_structs() {
        // A unit struct is instantiated by naming it; Debug prints its type name.
        let unit_struct = UnitStruct;
        let message = format!("{:?}s are fun!", unit_struct);

        assert_eq!(message, "UnitStructs are fun!");
    }
}
23.307692
61
0.592409
39fedcd48962292446f6792f7e2d1a605a6a478d
4,836
//! # tui-realm
//!
//! tui-realm is a **framework** for [tui](https://github.com/fdehau/tui-rs) to simplify the implementation of terminal
//! user interfaces adding the possibility to work with re-usable components with properties and states,
//! as you'd do in React.
//! But that's not all: the components communicate with the ui engine via a system based on **Messages** and **Events**,
//! providing you with the possibility to implement `update` routines as happens in Elm.
//! In addition, the components are organized inside the **View**, which manages mounting/unmounting,
//! focus and event forwarding for you.
//!
//! tui-realm also comes with a standard library of components, which can be added to your dependencies,
//! that you may find very useful.
//!
//! ## Get started 🏁
//!
//! > ⚠️ Warning: currently tui-realm supports these backends: crossterm, termion
//!
//! ### Add tui-realm to your Cargo.toml 🦀
//!
//! If you want the default features, just add tuirealm 1.x version:
//!
//! ```toml
//! tuirealm = "^1.3.0"
//! ```
//!
//! otherwise you can specify the features you want to add:
//!
//! ```toml
//! tuirealm = { version = "^1.3.0", default-features = false, features = [ "derive", "with-termion" ] }
//! ```
//!
//! Supported features are:
//!
//! - `derive` (*default*): add the `#[derive(MockComponent)]` proc macro to automatically implement `MockComponent` for `Component`. [Read more](https://github.com/veeso/tuirealm_derive).
//! - `with-crossterm` (*default*): use [crossterm](https://github.com/crossterm-rs/crossterm) as backend for tui.
//! - `with-termion` (*default*): use [termion](https://github.com/redox-os/termion) as backend for tui.
//!
//! > ⚠️ You can enable only one backend at the time and at least one must be enabled in order to build.
//! > ❗ You don't need tui as a dependency, since you can access to tui types via `use tuirealm::tui::`
//!
//! ### Create a tui-realm application 🪂
//!
//! You can read the guide to get started with tui-realm on [Github](https://github.com/veeso/tui-realm/blob/main/docs/en/get-started.md)
//!
//! ### Run examples 🔍
//!
//! Still confused about how tui-realm works? Don't worry, try with the examples:
//!
//! - [demo](https://github.com/veeso/tui-realm/blob/main/examples/demo.rs): a simple application which shows how tui-realm works
//!
//! ```sh
//! cargo run --example demo
//! ```
//!

#![doc(html_playground_url = "https://play.rust-lang.org")]
#![doc(
    html_favicon_url = "https://raw.githubusercontent.com/veeso/tui-realm/main/docs/images/cargo/tui-realm-128.png"
)]
#![doc(
    html_logo_url = "https://raw.githubusercontent.com/veeso/tui-realm/main/docs/images/cargo/tui-realm-512.png"
)]

/**
 * MIT License
 *
 * tui-realm - Copyright (C) 2021 Christian Visintin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
// External crate declarations (pre-2018-edition style kept for macro imports).
extern crate bitflags;
#[macro_use]
extern crate lazy_static;
extern crate regex;
// `extern crate self` lets the derive macro refer to this crate as `tuirealm`.
extern crate self as tuirealm;
extern crate thiserror;
extern crate tui as tuirs;
#[cfg(feature = "derive")]
#[allow(unused_imports)]
#[macro_use]
extern crate tuirealm_derive;

// -- modules
pub mod adapter;
mod core;
pub mod listener;
#[cfg(test)]
pub mod mock;
pub mod terminal;
pub mod tui;
pub mod utils;
// -- export
// Flatten the private `core` module into the crate's public API surface.
pub use self::core::application::{self, Application, ApplicationError, PollStrategy};
pub use self::core::command;
pub use self::core::event::{self, Event, NoUserEvent};
pub use self::core::props::{self, AttrValue, Attribute, Props};
pub use self::core::subscription::{EventClause as SubEventClause, Sub, SubClause};
pub use self::core::{Component, MockComponent, State, StateValue, Update, ViewError};
pub use adapter::{Frame, Terminal};
pub use listener::{EventListenerCfg, ListenerError};
// -- derive
#[cfg(feature = "derive")]
#[doc(hidden)]
pub use tuirealm_derive::*;
39.966942
188
0.711332
f901801757e81cf004d65bf7d66f3d1e290c77bc
1,375
// Register access module for JTAGUSERCODE.
// NOTE(review): this file matches the svd2rust generated-code pattern — if it is
// regenerated from the SVD, manual edits here will be overwritten.
#[doc = "Reader of register JTAGUSERCODE"]
pub type R = crate::R<u32, super::JTAGUSERCODE>;
#[doc = "Writer for register JTAGUSERCODE"]
pub type W = crate::W<u32, super::JTAGUSERCODE>;
#[doc = "Register JTAGUSERCODE `reset()`'s with value 0x0b99_a02f"]
impl crate::ResetValue for super::JTAGUSERCODE {
    type Type = u32;
    // Hardware reset value of the register, per the device's SVD description.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x0b99_a02f
    }
}
#[doc = "Reader of field `USER_CODE`"]
pub type USER_CODE_R = crate::R<u32, u32>;
#[doc = "Write proxy for field `USER_CODE`"]
pub struct USER_CODE_W<'a> {
    // Borrowed writer whose bits this proxy mutates.
    w: &'a mut W,
}
impl<'a> USER_CODE_W<'a> {
    // Unsafe by generated convention: the caller is responsible for writing a
    // value that is valid for the hardware field.
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // Field occupies bits 0..=31, so the mask covers the full word.
        self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:31 - 31:0\\] 32-bit JTAG USERCODE register feeding main JTAG TAP Note: This field can be locked by LOCKCFG.LOCK"]
    #[inline(always)]
    pub fn user_code(&self) -> USER_CODE_R {
        // Extract the (full-width) USER_CODE field from the raw register value.
        USER_CODE_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:31 - 31:0\\] 32-bit JTAG USERCODE register feeding main JTAG TAP Note: This field can be locked by LOCKCFG.LOCK"]
    #[inline(always)]
    pub fn user_code(&mut self) -> USER_CODE_W {
        // Hand out a write proxy scoped to the USER_CODE field.
        USER_CODE_W { w: self }
    }
}
31.976744
100
0.628364
216f81be10458a45477597589f6724b8a699b4f6
442
use crate::{ElementID, Key, OpID};

/// Builds a map key from a string slice (takes an owned copy of the string).
impl From<&str> for Key {
    fn from(s: &str) -> Self {
        Key::Map(s.into())
    }
}

/// Builds a sequence key from an owned operation id.
impl From<OpID> for Key {
    fn from(id: OpID) -> Self {
        Key::Seq(ElementID::ID(id))
    }
}

/// Builds a sequence key from a borrowed operation id.
// NOTE(review): clones the id; if `OpID` were `Copy` this could dereference
// instead — confirm against the `OpID` definition before changing.
impl From<&OpID> for Key {
    fn from(id: &OpID) -> Self {
        Key::Seq(ElementID::ID(id.clone()))
    }
}

/// Wraps an element id directly as a sequence key.
impl From<ElementID> for Key {
    fn from(id: ElementID) -> Self {
        Key::Seq(id)
    }
}
17
43
0.5181
1d4ab279c0278c2b574338d7a7af3a4b0a4660df
661
// Benchmark driver for the BW6-761 pairing-friendly curve.
// All benchmark bodies are expanded from the crate-local `ec_bench!`,
// `f_bench!`, and `pairing_bench!` macros; this file only selects the types.
use rand::SeedableRng;
use rand_xorshift::XorShiftRng;
use std::ops::{AddAssign, MulAssign, SubAssign};

use algebra::{
    biginteger::{BigInteger384 as FrRepr, BigInteger768 as FqRepr},
    bw6::{G1Prepared, G2Prepared},
    bw6_761::{
        fq::Fq, fq3::Fq3, fr::Fr, Fq6, G1Affine, G1Projective as G1, G2Affine,
        G2Projective as G2, Parameters, BW6_761,
    },
    BigInteger, Field, PairingEngine, PrimeField, ProjectiveCurve, SquareRootField,
    UniformRand,
};

// Group-operation benchmarks for G1/G2 (uses the curve types imported above).
ec_bench!();
// Extension-field benchmarks: Fq3 (cubic) and Fq6 (sextic) towers.
f_bench!(1, Fq3, Fq3, fq3);
f_bench!(2, Fq6, Fq6, fq6);
// Base/scalar prime-field benchmarks with their bigint representations.
f_bench!(Fq, Fq, FqRepr, FqRepr, fq);
f_bench!(Fr, Fr, FrRepr, FrRepr, fr);
// Full pairing benchmark (Miller loop + final exponentiation into Fq6).
pairing_bench!(BW6_761, Fq6, prepared_v);
31.47619
98
0.694402
8af9cfb951401d73040fd3130429d016a866fbab
222,847
// DO NOT EDIT ! // This file was generated automatically from 'src/mako/cli/main.rs.mako' // DO NOT EDIT ! #![allow(unused_variables, unused_imports, dead_code, unused_mut)] #[macro_use] extern crate clap; extern crate yup_oauth2 as oauth2; extern crate yup_hyper_mock as mock; extern crate hyper_rustls; extern crate serde; extern crate serde_json; extern crate hyper; extern crate mime; extern crate strsim; extern crate google_adexchangebuyer1d4 as api; use std::env; use std::io::{self, Write}; use clap::{App, SubCommand, Arg}; mod cmn; use cmn::{InvalidOptionsError, CLIError, JsonTokenStorage, arg_from_str, writer_from_opts, parse_kv_arg, input_file_from_opts, input_mime_from_opts, FieldCursor, FieldError, CallType, UploadProtocol, calltype_from_str, remove_json_null_values, ComplexType, JsonType, JsonTypeInfo}; use std::default::Default; use std::str::FromStr; use oauth2::{Authenticator, DefaultAuthenticatorDelegate, FlowType}; use serde_json as json; use clap::ArgMatches; enum DoitError { IoError(String, io::Error), ApiError(api::Error), } struct Engine<'n> { opt: ArgMatches<'n>, hub: api::AdExchangeBuyer<hyper::Client, Authenticator<DefaultAuthenticatorDelegate, JsonTokenStorage, hyper::Client>>, gp: Vec<&'static str>, gpm: Vec<(&'static str, &'static str)>, } impl<'n> Engine<'n> { fn _accounts_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let id: i32 = arg_from_str(&opt.value_of("id").unwrap_or(""), err, "<id>", "integer"); let mut call = self.hub.accounts().get(id); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = 
Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.accounts().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() 
} { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "maximum-total-qps" => Some(("maximumTotalQps", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "maximum-active-creatives" => Some(("maximumActiveCreatives", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "apply-pretargeting-to-non-guaranteed-deals" => Some(("applyPretargetingToNonGuaranteedDeals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cookie-matching-nid" => Some(("cookieMatchingNid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "number-active-creatives" => Some(("numberActiveCreatives", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cookie-matching-url" => Some(("cookieMatchingUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["apply-pretargeting-to-non-guaranteed-deals", "cookie-matching-nid", "cookie-matching-url", "id", "kind", "maximum-active-creatives", "maximum-total-qps", "number-active-creatives"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Account = json::value::from_value(object).unwrap(); let id: i32 = arg_from_str(&opt.value_of("id").unwrap_or(""), err, "<id>", "integer"); let mut call = self.hub.accounts().patch(request, id); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "confirm-unsafe-account-change" => { call = call.confirm_unsafe_account_change(arg_from_str(value.unwrap_or("false"), err, 
"confirm-unsafe-account-change", "boolean")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["confirm-unsafe-account-change"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _accounts_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match 
&temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "maximum-total-qps" => Some(("maximumTotalQps", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "maximum-active-creatives" => Some(("maximumActiveCreatives", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "apply-pretargeting-to-non-guaranteed-deals" => Some(("applyPretargetingToNonGuaranteedDeals", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "cookie-matching-nid" => Some(("cookieMatchingNid", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "number-active-creatives" => Some(("numberActiveCreatives", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "cookie-matching-url" => Some(("cookieMatchingUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["apply-pretargeting-to-non-guaranteed-deals", "cookie-matching-nid", "cookie-matching-url", "id", "kind", "maximum-active-creatives", "maximum-total-qps", "number-active-creatives"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Account = json::value::from_value(object).unwrap(); let id: i32 = arg_from_str(&opt.value_of("id").unwrap_or(""), err, "<id>", "integer"); let mut call = self.hub.accounts().update(request, id); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "confirm-unsafe-account-change" => { call = 
call.confirm_unsafe_account_change(arg_from_str(value.unwrap_or("false"), err, "confirm-unsafe-account-change", "boolean")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["confirm-unsafe-account-change"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _billing_info_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.billing_info().get(account_id); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _billing_info_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.billing_info().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), 
}; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _budget_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.budget().get(opt.value_of("account-id").unwrap_or(""), opt.value_of("billing-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _budget_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut 
InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget-amount" => Some(("budgetAmount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "currency-code" => Some(("currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "billing-id" => Some(("billingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "billing-id", "budget-amount", "currency-code", "id", "kind"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Budget = json::value::from_value(object).unwrap(); let mut call = self.hub.budget().patch(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("billing-id").unwrap_or("")); for parg in 
opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _budget_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match 
&temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "budget-amount" => Some(("budgetAmount", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "currency-code" => Some(("currencyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "billing-id" => Some(("billingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "id" => Some(("id", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "billing-id", "budget-amount", "currency-code", "id", "kind"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Budget = json::value::from_value(object).unwrap(); let mut call = self.hub.budget().update(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("billing-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = 
call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _creatives_add_deal(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.creatives().add_deal(account_id, opt.value_of("buyer-creative-id").unwrap_or(""), opt.value_of("deal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } fn _creatives_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let 
account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.creatives().get(account_id, opt.value_of("buyer-creative-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _creatives_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { 
err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "attribute" => Some(("attribute", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), "height" => Some(("height", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "advertiser-name" => Some(("advertiserName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "html-snippet" => Some(("HTMLSnippet", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "open-auction-status" => Some(("openAuctionStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "buyer-creative-id" => Some(("buyerCreativeId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ad-choices-destination-url" => Some(("adChoicesDestinationUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "impression-tracking-url" => Some(("impressionTrackingUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "account-id" => Some(("accountId", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "video-url" => Some(("videoURL", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "video-vast-xml" => Some(("videoVastXML", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "click-through-url" => Some(("clickThroughUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "languages" => Some(("languages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "width" => Some(("width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.body" => Some(("nativeAd.body", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.advertiser" => Some(("nativeAd.advertiser", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "native-ad.video-url" => Some(("nativeAd.videoURL", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.headline" => Some(("nativeAd.headline", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.image.url" => Some(("nativeAd.image.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.image.width" => Some(("nativeAd.image.width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.image.height" => Some(("nativeAd.image.height", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.star-rating" => Some(("nativeAd.starRating", JsonTypeInfo { jtype: JsonType::Float, ctype: ComplexType::Pod })), "native-ad.call-to-action" => Some(("nativeAd.callToAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.logo.url" => Some(("nativeAd.logo.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.logo.width" => Some(("nativeAd.logo.width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.logo.height" => Some(("nativeAd.logo.height", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.app-icon.url" => Some(("nativeAd.appIcon.url", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.app-icon.width" => Some(("nativeAd.appIcon.width", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.app-icon.height" => Some(("nativeAd.appIcon.height", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "native-ad.impression-tracking-url" => Some(("nativeAd.impressionTrackingUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "native-ad.price" => Some(("nativeAd.price", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.click-tracking-url" => Some(("nativeAd.clickTrackingUrl", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "native-ad.click-link-url" => Some(("nativeAd.clickLinkUrl", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "version" => Some(("version", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "api-upload-timestamp" => Some(("apiUploadTimestamp", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "vendor-type" => Some(("vendorType", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), "sensitive-categories" => Some(("sensitiveCategories", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), "product-categories" => Some(("productCategories", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), "agency-id" => Some(("agencyId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "ad-technology-providers.has-unidentified-provider" => Some(("adTechnologyProviders.hasUnidentifiedProvider", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "ad-technology-providers.detected-provider-ids" => Some(("adTechnologyProviders.detectedProviderIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "detected-domains" => Some(("detectedDomains", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creative-status-identity-type" => Some(("creativeStatusIdentityType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deals-status" => Some(("dealsStatus", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "advertiser-id" => Some(("advertiserId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "restricted-categories" => Some(("restrictedCategories", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Vec })), "filtering-reasons.date" => Some(("filteringReasons.date", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["html-snippet", "account-id", "ad-choices-destination-url", "ad-technology-providers", "advertiser", "advertiser-id", "advertiser-name", "agency-id", "api-upload-timestamp", "app-icon", "attribute", "body", "buyer-creative-id", "call-to-action", "click-link-url", "click-through-url", "click-tracking-url", "creative-status-identity-type", "date", "deals-status", "detected-domains", "detected-provider-ids", "filtering-reasons", "has-unidentified-provider", "headline", "height", "image", "impression-tracking-url", "kind", "languages", "logo", "native-ad", "open-auction-status", "price", "product-categories", "restricted-categories", "sensitive-categories", "star-rating", "url", "vendor-type", "version", "video-url", "video-vast-xml", "width"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Creative = json::value::from_value(object).unwrap(); let mut call = self.hub.creatives().insert(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = 
call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// --- end of the `creatives insert` handler (its `fn` line is above this region) ---
// NOTE(review): this file looks machine-generated (google-apis-rs CLI style) —
// presumably regenerated from the API discovery document; confirm before hand-editing.

// `creatives list`: builds hub.creatives().list(), applies the recognized `-v key=value`
// query parameters (page-token, open-auction-status-filter, max-results,
// deals-status-filter, buyer-creative-id, account-id — the last two are repeatable via
// add_*), falls back to the global parameters in self.gp/self.gpm, and records unknown
// keys as CLIError::UnknownParameter. Unless dry_run, it asserts no issues accumulated,
// adds any `url` scopes, executes the call, strips JSON nulls from the response schema
// and pretty-prints it to the writer selected by `--out` (stdout when "-").
fn _creatives_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.creatives().list(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "open-auction-status-filter" => { call = call.open_auction_status_filter(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, "deals-status-filter" => { call = call.deals_status_filter(value.unwrap_or("")); }, "buyer-creative-id" => { call = call.add_buyer_creative_id(value.unwrap_or("")); }, "account-id" => { call = call.add_account_id(arg_from_str(value.unwrap_or("-0"), err, "account-id", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["open-auction-status-filter", "max-results", "page-token", "buyer-creative-id", "deals-status-filter", "account-id"].iter().map(|v|*v)); v } )); } } } } let protocol = 
CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `creatives list-deals`: parses positional <account-id> as i32 (errors collected in
// `err`) and calls hub.creatives().list_deals(account_id, <buyer-creative-id>). Only
// global `-v` parameters are recognized here; the JSON response is pretty-printed.
fn _creatives_list_deals(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.creatives().list_deals(account_id, opt.value_of("buyer-creative-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `creatives remove-deal`: calls hub.creatives().remove_deal(account_id,
// <buyer-creative-id>, <deal-id>). The response carries no schema — its body is
// discarded (Ok(mut response) => Ok(())) and nothing is written to `--out`.
fn _creatives_remove_deal(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.creatives().remove_deal(account_id, opt.value_of("buyer-creative-id").unwrap_or(""), opt.value_of("deal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } }
// `marketplacedeals delete`: assembles an api::DeleteOrderDealsRequest from `-r`/kv
// arguments via a FieldCursor (recognized fields — see the match table on the next
// line: proposal-revision-number, update-action, deal-ids), then calls
// hub.marketplacedeals().delete(request, <proposal-id>) and prints the JSON response.
fn _marketplacedeals_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg 
// (continuation of `_marketplacedeals_delete`: kv-argument parsing, request build, call execution)
// A key with no value only moves the field cursor; the spurious error pushed by
// temp_cursor.set() is removed again (err.issues.remove(last_errc)).
in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "proposal-revision-number" => Some(("proposalRevisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-action" => Some(("updateAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "deal-ids" => Some(("dealIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["deal-ids", "proposal-revision-number", "update-action"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::DeleteOrderDealsRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.marketplacedeals().delete(request, opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = 
CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplacedeals insert`: builds an api::AddOrderDealsRequest from kv arguments
// (recognized fields: proposal-revision-number, update-action) and calls
// hub.marketplacedeals().insert(request, <proposal-id>); prints the JSON response.
fn _marketplacedeals_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "proposal-revision-number" => Some(("proposalRevisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-action" => Some(("updateAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["proposal-revision-number", "update-action"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::AddOrderDealsRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.marketplacedeals().insert(request, opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplacedeals list`: hub.marketplacedeals().list(<proposal-id>); the only
// method-specific `-v` parameter is pql-query. JSON response is pretty-printed.
fn _marketplacedeals_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.marketplacedeals().list(opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "pql-query" => { call = call.pql_query(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["pql-query"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplacedeals update`: builds an api::EditAllOrderDealsRequest from kv arguments
// (the large proposal.* field table follows below) and calls
// hub.marketplacedeals().update(request, <proposal-id>); prints the JSON response.
fn _marketplacedeals_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); 
// (continuation of `_marketplacedeals_update`: kv parsing with the proposal.* field
// table mapping CLI kebab-case keys to camelCase JSON fields, then request build,
// call execution and JSON output)
for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "proposal.kind" => Some(("proposal.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.proposal-state" => Some(("proposal.proposalState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.name" => Some(("proposal.name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.revision-time-ms" => Some(("proposal.revisionTimeMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.private-auction-id" => Some(("proposal.privateAuctionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.buyer-private-data.reference-id" => Some(("proposal.buyerPrivateData.referenceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.buyer-private-data.reference-payload" => Some(("proposal.buyerPrivateData.referencePayload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.billed-buyer.account-id" => Some(("proposal.billedBuyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.originator-role" => Some(("proposal.originatorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.seller.sub-account-id" => Some(("proposal.seller.subAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.seller.account-id" => Some(("proposal.seller.accountId", JsonTypeInfo { jtype: 
JsonType::String, ctype: ComplexType::Pod })), "proposal.negotiation-id" => Some(("proposal.negotiationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.proposal-id" => Some(("proposal.proposalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.inventory-source" => Some(("proposal.inventorySource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.last-updater-or-commentor-role" => Some(("proposal.lastUpdaterOrCommentorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.has-seller-signed-off" => Some(("proposal.hasSellerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "proposal.is-renegotiating" => Some(("proposal.isRenegotiating", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "proposal.buyer.account-id" => Some(("proposal.buyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal.has-buyer-signed-off" => Some(("proposal.hasBuyerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "proposal.dbm-advertiser-ids" => Some(("proposal.dbmAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "proposal.is-setup-complete" => Some(("proposal.isSetupComplete", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "proposal.revision-number" => Some(("proposal.revisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-revision-number" => Some(("proposalRevisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-action" => Some(("updateAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "billed-buyer", "buyer", "buyer-private-data", "dbm-advertiser-ids", "has-buyer-signed-off", "has-seller-signed-off", 
"inventory-source", "is-renegotiating", "is-setup-complete", "kind", "last-updater-or-commentor-role", "name", "negotiation-id", "originator-role", "private-auction-id", "proposal", "proposal-id", "proposal-revision-number", "proposal-state", "reference-id", "reference-payload", "revision-number", "revision-time-ms", "seller", "sub-account-id", "update-action"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::EditAllOrderDealsRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.marketplacedeals().update(request, opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = 
json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplacenotes insert`: builds an api::AddOrderNotesRequest from kv arguments.
// NOTE(review): the field-table match has only the catch-all arm (and did_you_mean is
// given an empty list), so every `-r` field is reported unknown — this mirrors the
// generated API surface; confirm against the discovery document before "fixing".
// Calls hub.marketplacenotes().insert(request, <proposal-id>) and prints the response.
fn _marketplacenotes_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { _ => { let suggestion = FieldCursor::did_you_mean(key, &vec![]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::AddOrderNotesRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.marketplacenotes().insert(request, opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); 
// (continuation of `_marketplacenotes_insert`: unknown-parameter reporting and call execution)
v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplacenotes list`: hub.marketplacenotes().list(<proposal-id>); the only
// method-specific `-v` parameter is pql-query. JSON response is pretty-printed.
fn _marketplacenotes_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.marketplacenotes().list(opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "pql-query" => { call = call.pql_query(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["pql-query"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return 
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } }
// `marketplaceprivateauction updateproposal`: builds an
// api::UpdatePrivateAuctionProposalRequest from kv arguments (fields: note.*,
// external-deal-id, proposal-revision-number, update-action — table follows) and calls
// hub.marketplaceprivateauction().updateproposal(request, <private-auction-id>).
fn _marketplaceprivateauction_updateproposal(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
{ "note.kind" => Some(("note.kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.proposal-revision-number" => Some(("note.proposalRevisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.deal-id" => Some(("note.dealId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.note" => Some(("note.note", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.proposal-id" => Some(("note.proposalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.creator-role" => Some(("note.creatorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.note-id" => Some(("note.noteId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "note.timestamp-ms" => Some(("note.timestampMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "external-deal-id" => Some(("externalDealId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-revision-number" => Some(("proposalRevisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "update-action" => Some(("updateAction", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["creator-role", "deal-id", "external-deal-id", "kind", "note", "note-id", "proposal-id", "proposal-revision-number", "timestamp-ms", "update-action"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::UpdatePrivateAuctionProposalRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.marketplaceprivateauction().updateproposal(request, 
opt.value_of("private-auction-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } fn _performance_report_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.performance_report().list(opt.value_of("account-id").unwrap_or(""), opt.value_of("end-date-time").unwrap_or(""), opt.value_of("start-date-time").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "page-token" => { call = call.page_token(value.unwrap_or("")); }, "max-results" => { call = call.max_results(arg_from_str(value.unwrap_or("-0"), err, "max-results", "integer")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["page-token", "max-results"].iter().map(|v|*v)); v } )); } } } } 
let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _pretargeting_config_delete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.pretargeting_config().delete(opt.value_of("account-id").unwrap_or(""), opt.value_of("config-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } fn _pretargeting_config_get(&self, opt: 
// `_pretargeting_config_get` (continued from the previous line): fetches a
// single pretargeting config and pretty-prints it. `_pretargeting_config_insert`
// (starts mid-line) assembles a request body from repeated `-kv` cursor-path
// arguments: a value-less key only moves the field cursor; a key=value pair is
// resolved through the type table below and written into the JSON object.
&ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.pretargeting_config().get(opt.value_of("account-id").unwrap_or(""), opt.value_of("config-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _pretargeting_config_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = 
// Lookup table mapping kebab-case CLI field names to the camelCase JSON
// fields of api::PretargetingConfig, with each field's JSON type
// (String/Boolean/Int) and shape (Pod scalar vs Vec list).
temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "verticals" => Some(("verticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "platforms" => Some(("platforms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-id" => Some(("configId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-verticals" => Some(("excludedVerticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-name" => Some(("configName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-geo-criteria-ids" => Some(("excludedGeoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-content-labels" => Some(("excludedContentLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "languages" => Some(("languages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-devices" => Some(("mobileDevices", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-carriers" => Some(("mobileCarriers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "billing-id" => Some(("billingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "geo-criteria-ids" => Some(("geoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-operating-system-versions" => Some(("mobileOperatingSystemVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-user-lists" => Some(("excludedUserLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "is-active" => Some(("isActive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), 
// Unknown field names produce a did-you-mean suggestion built from the full
// field list; recognized ones are written into the JSON object at the cursor
// path. The finished object is deserialized into the request type and the
// `insert` call is built with the `account-id` positional.
"minimum-viewability-decile" => Some(("minimumViewabilityDecile", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "supported-creative-attributes" => Some(("supportedCreativeAttributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-identifier-data-required" => Some(("userIdentifierDataRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "vendor-types" => Some(("vendorTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-lists" => Some(("userLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creative-type" => Some(("creativeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["billing-id", "config-id", "config-name", "creative-type", "excluded-content-labels", "excluded-geo-criteria-ids", "excluded-user-lists", "excluded-verticals", "geo-criteria-ids", "is-active", "kind", "languages", "minimum-viewability-decile", "mobile-carriers", "mobile-devices", "mobile-operating-system-versions", "platforms", "supported-creative-attributes", "user-identifier-data-required", "user-lists", "vendor-types", "verticals"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::PretargetingConfig = json::value::from_value(object).unwrap(); let mut call = self.hub.pretargeting_config().insert(request, opt.value_of("account-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = 
// Execute the insert and pretty-print the returned schema.
// `_pretargeting_config_list` (starts mid-line) takes only `account-id` plus
// the generic `-v` parameter loop.
false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _pretargeting_config_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.pretargeting_config().list(opt.value_of("account-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in 
// Tail of list's output handling, then `_pretargeting_config_patch` begins:
// the same cursor/JSON-object request-assembly machinery as insert, feeding
// the `patch` hub method.
self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _pretargeting_config_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
// Field table for the PATCH request body — same api::PretargetingConfig
// mapping (kebab-case CLI key -> camelCase JSON field + type/shape) as the
// insert handler.
{ "verticals" => Some(("verticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "platforms" => Some(("platforms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-id" => Some(("configId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-verticals" => Some(("excludedVerticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-name" => Some(("configName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-geo-criteria-ids" => Some(("excludedGeoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-content-labels" => Some(("excludedContentLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "languages" => Some(("languages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-devices" => Some(("mobileDevices", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-carriers" => Some(("mobileCarriers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "billing-id" => Some(("billingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "geo-criteria-ids" => Some(("geoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-operating-system-versions" => Some(("mobileOperatingSystemVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-user-lists" => Some(("excludedUserLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "is-active" => Some(("isActive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "minimum-viewability-decile" => Some(("minimumViewabilityDecile", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "supported-creative-attributes" => 
// Did-you-mean fallback for unknown fields, request deserialization, and the
// `patch` call built with `account-id`/`config-id` positionals, followed by
// the generic `-v` parameter loop.
Some(("supportedCreativeAttributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-identifier-data-required" => Some(("userIdentifierDataRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "vendor-types" => Some(("vendorTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-lists" => Some(("userLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creative-type" => Some(("creativeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["billing-id", "config-id", "config-name", "creative-type", "excluded-content-labels", "excluded-geo-criteria-ids", "excluded-user-lists", "excluded-verticals", "geo-criteria-ids", "is-active", "kind", "languages", "minimum-viewability-decile", "mobile-carriers", "mobile-devices", "mobile-operating-system-versions", "platforms", "supported-creative-attributes", "user-identifier-data-required", "user-lists", "vendor-types", "verticals"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::PretargetingConfig = json::value::from_value(object).unwrap(); let mut call = self.hub.pretargeting_config().patch(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("config-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
// Execute the patch and pretty-print the response.
// `_pretargeting_config_update` (starts mid-line) repeats the same
// cursor/JSON-object request assembly, feeding the `update` hub method.
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _pretargeting_config_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
// Field table for the update (PUT-style) request body — identical to the
// insert/patch api::PretargetingConfig mapping.
{ "verticals" => Some(("verticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "platforms" => Some(("platforms", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-id" => Some(("configId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-verticals" => Some(("excludedVerticals", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "config-name" => Some(("configName", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "excluded-geo-criteria-ids" => Some(("excludedGeoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-content-labels" => Some(("excludedContentLabels", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "languages" => Some(("languages", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-devices" => Some(("mobileDevices", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-carriers" => Some(("mobileCarriers", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "billing-id" => Some(("billingId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "geo-criteria-ids" => Some(("geoCriteriaIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "mobile-operating-system-versions" => Some(("mobileOperatingSystemVersions", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "excluded-user-lists" => Some(("excludedUserLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "is-active" => Some(("isActive", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "minimum-viewability-decile" => Some(("minimumViewabilityDecile", JsonTypeInfo { jtype: JsonType::Int, ctype: ComplexType::Pod })), "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "supported-creative-attributes" => 
// Fallback/suggestion handling, request deserialization, construction of the
// `update` call with `account-id`/`config-id` positionals, and the generic
// `-v` parameter loop.
Some(("supportedCreativeAttributes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-identifier-data-required" => Some(("userIdentifierDataRequired", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "vendor-types" => Some(("vendorTypes", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "user-lists" => Some(("userLists", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "creative-type" => Some(("creativeType", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["billing-id", "config-id", "config-name", "creative-type", "excluded-content-labels", "excluded-geo-criteria-ids", "excluded-user-lists", "excluded-verticals", "geo-criteria-ids", "is-active", "kind", "languages", "minimum-viewability-decile", "mobile-carriers", "mobile-devices", "mobile-operating-system-versions", "platforms", "supported-creative-attributes", "user-identifier-data-required", "user-lists", "vendor-types", "verticals"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::PretargetingConfig = json::value::from_value(object).unwrap(); let mut call = self.hub.pretargeting_config().update(request, opt.value_of("account-id").unwrap_or(""), opt.value_of("config-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { 
// Execute the update and pretty-print the response. `_products_get` (starts
// mid-line) fetches a single product by its `product-id` positional; only
// generic params apply.
err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _products_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.products().get(opt.value_of("product-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return 
// Tail of products-get output handling. `_products_search` (starts mid-line)
// issues a search over products, optionally constrained via the `pql-query`
// parameter.
Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _products_search(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.products().search(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "pql-query" => { call = call.pql_query(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["pql-query"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); 
ostream.flush().unwrap(); Ok(()) } } } } fn _proposals_get(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.proposals().get(opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn _proposals_insert(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let 
Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "web-property-code" => Some(("webPropertyCode", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["web-property-code"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::CreateOrdersRequest = json::value::from_value(object).unwrap(); let mut call = self.hub.proposals().insert(request); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, 
output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } /* `proposals patch`: builds an api::Proposal from repeated --kv flags (keys resolved through a FieldCursor into a JSON object), then issues proposals().patch(...) with the proposal-id / revision-number / update-action positional args and pretty-prints the JSON response with nulls stripped. */ fn _proposals_patch(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } /* a key with no value only moves the cursor; drop any error pushed for it */ if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } /* CLI key -> (JSON field path, type) table for api::Proposal */ let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-state" => Some(("proposalState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "revision-time-ms" => Some(("revisionTimeMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-auction-id" => Some(("privateAuctionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "buyer-private-data.reference-id" => Some(("buyerPrivateData.referenceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "buyer-private-data.reference-payload" => Some(("buyerPrivateData.referencePayload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "billed-buyer.account-id" => Some(("billedBuyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "originator-role" => Some(("originatorRole", 
JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "seller.sub-account-id" => Some(("seller.subAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "seller.account-id" => Some(("seller.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "negotiation-id" => Some(("negotiationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-id" => Some(("proposalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inventory-source" => Some(("inventorySource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-updater-or-commentor-role" => Some(("lastUpdaterOrCommentorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "has-seller-signed-off" => Some(("hasSellerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "is-renegotiating" => Some(("isRenegotiating", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "buyer.account-id" => Some(("buyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "has-buyer-signed-off" => Some(("hasBuyerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "dbm-advertiser-ids" => Some(("dbmAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "is-setup-complete" => Some(("isSetupComplete", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "revision-number" => Some(("revisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), /* unknown key: record a FieldError with a nearest-match suggestion */ _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "billed-buyer", "buyer", "buyer-private-data", "dbm-advertiser-ids", "has-buyer-signed-off", "has-seller-signed-off", "inventory-source", "is-renegotiating", "is-setup-complete", "kind", "last-updater-or-commentor-role", "name", "negotiation-id", "originator-role", "private-auction-id", "proposal-id", 
"proposal-state", "reference-id", "reference-payload", "revision-number", "revision-time-ms", "seller", "sub-account-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Proposal = json::value::from_value(object).unwrap(); let mut call = self.hub.proposals().patch(request, opt.value_of("proposal-id").unwrap_or(""), opt.value_of("revision-number").unwrap_or(""), opt.value_of("update-action").unwrap_or("")); /* generic -v key=value flags map onto query parameters via self.gp / self.gpm */ for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } fn 
/* `proposals search`: supports an optional --pql-query filter plus the generic -v params; pretty-prints the JSON result. */ _proposals_search(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.proposals().search(); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { "pql-query" => { call = call.pql_query(value.unwrap_or("")); }, _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v.extend(["pql-query"].iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } /* `proposals setupcomplete`: fires proposals().setupcomplete(proposal-id); the call yields only a response with no decodable body, so nothing is written out. */ fn _proposals_setupcomplete(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut call = self.hub.proposals().setupcomplete(opt.value_of("proposal-id").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == 
*param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok(mut response) => { Ok(()) } } } } /* `proposals update`: assembles an api::Proposal from --kv flags exactly like the patch handler, but invokes proposals().update(...) instead. */ fn _proposals_update(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let mut field_cursor = FieldCursor::default(); let mut object = json::value::Value::Object(Default::default()); for kvarg in opt.values_of("kv").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let last_errc = err.issues.len(); let (key, value) = parse_kv_arg(&*kvarg, err, false); let mut temp_cursor = field_cursor.clone(); if let Err(field_err) = temp_cursor.set(&*key) { err.issues.push(field_err); } /* value-less key only repositions the cursor; discard its error */ if value.is_none() { field_cursor = temp_cursor.clone(); if err.issues.len() > last_errc { err.issues.remove(last_errc); } continue; } let type_info: Option<(&'static str, JsonTypeInfo)> = match &temp_cursor.to_string()[..] 
/* (continuation of _proposals_update) CLI key -> (JSON field path, type) mapping for api::Proposal fields. */ { "kind" => Some(("kind", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-state" => Some(("proposalState", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "name" => Some(("name", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "revision-time-ms" => Some(("revisionTimeMs", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "private-auction-id" => Some(("privateAuctionId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "buyer-private-data.reference-id" => Some(("buyerPrivateData.referenceId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "buyer-private-data.reference-payload" => Some(("buyerPrivateData.referencePayload", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "billed-buyer.account-id" => Some(("billedBuyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "originator-role" => Some(("originatorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "seller.sub-account-id" => Some(("seller.subAccountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "seller.account-id" => Some(("seller.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "negotiation-id" => Some(("negotiationId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "proposal-id" => Some(("proposalId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "inventory-source" => Some(("inventorySource", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "last-updater-or-commentor-role" => Some(("lastUpdaterOrCommentorRole", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "has-seller-signed-off" => Some(("hasSellerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "is-renegotiating" => Some(("isRenegotiating", JsonTypeInfo { jtype: 
JsonType::Boolean, ctype: ComplexType::Pod })), "buyer.account-id" => Some(("buyer.accountId", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), "has-buyer-signed-off" => Some(("hasBuyerSignedOff", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "dbm-advertiser-ids" => Some(("dbmAdvertiserIds", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Vec })), "is-setup-complete" => Some(("isSetupComplete", JsonTypeInfo { jtype: JsonType::Boolean, ctype: ComplexType::Pod })), "revision-number" => Some(("revisionNumber", JsonTypeInfo { jtype: JsonType::String, ctype: ComplexType::Pod })), /* unknown key: push a FieldError carrying a nearest-match suggestion */ _ => { let suggestion = FieldCursor::did_you_mean(key, &vec!["account-id", "billed-buyer", "buyer", "buyer-private-data", "dbm-advertiser-ids", "has-buyer-signed-off", "has-seller-signed-off", "inventory-source", "is-renegotiating", "is-setup-complete", "kind", "last-updater-or-commentor-role", "name", "negotiation-id", "originator-role", "private-auction-id", "proposal-id", "proposal-state", "reference-id", "reference-payload", "revision-number", "revision-time-ms", "seller", "sub-account-id"]); err.issues.push(CLIError::Field(FieldError::Unknown(temp_cursor.to_string(), suggestion, value.map(|v| v.to_string())))); None } }; if let Some((field_cursor_str, type_info)) = type_info { FieldCursor::from(field_cursor_str).set_json_value(&mut object, value.unwrap(), type_info, err, &temp_cursor); } } let mut request: api::Proposal = json::value::from_value(object).unwrap(); let mut call = self.hub.proposals().update(request, opt.value_of("proposal-id").unwrap_or(""), opt.value_of("revision-number").unwrap_or(""), opt.value_of("update-action").unwrap_or("")); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == 
key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } /* `pubprofiles list`: account-id is parsed as i32 (parse failures are recorded on `err`); lists publisher profiles for that account and pretty-prints the JSON. */ fn _pubprofiles_list(&self, opt: &ArgMatches<'n>, dry_run: bool, err: &mut InvalidOptionsError) -> Result<(), DoitError> { let account_id: i32 = arg_from_str(&opt.value_of("account-id").unwrap_or(""), err, "<account-id>", "integer"); let mut call = self.hub.pubprofiles().list(account_id); for parg in opt.values_of("v").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { let (key, value) = parse_kv_arg(&*parg, err, false); match key { _ => { let mut found = false; for param in &self.gp { if key == *param { found = true; call = call.param(self.gpm.iter().find(|t| t.0 == key).unwrap_or(&("", key)).1, value.unwrap_or("unset")); break; } } if !found { err.issues.push(CLIError::UnknownParameter(key.to_string(), {let mut v = Vec::new(); v.extend(self.gp.iter().map(|v|*v)); v } )); } } } } let protocol = CallType::Standard; if dry_run { Ok(()) } else { assert!(err.issues.len() == 0); for scope in self.opt.values_of("url").map(|i|i.collect()).unwrap_or(Vec::new()).iter() { call = 
call.add_scope(scope); } let mut ostream = match writer_from_opts(opt.value_of("out")) { Ok(mut f) => f, Err(io_err) => return Err(DoitError::IoError(opt.value_of("out").unwrap_or("-").to_string(), io_err)), }; match match protocol { CallType::Standard => call.doit(), _ => unreachable!() } { Err(api_err) => Err(DoitError::ApiError(api_err)), Ok((mut response, output_schema)) => { let mut value = json::value::to_value(&output_schema).expect("serde to work"); remove_json_null_values(&mut value); json::to_writer_pretty(&mut ostream, &value).unwrap(); ostream.flush().unwrap(); Ok(()) } } } } /* Dispatcher: routes the parsed (resource, method) subcommand pair to the matching _<resource>_<method> handler. With dry_run = true the handlers only validate options; accumulated issues are returned as Err(Some(err)) instead of executing any call. */ fn _doit(&self, dry_run: bool) -> Result<Result<(), DoitError>, Option<InvalidOptionsError>> { let mut err = InvalidOptionsError::new(); let mut call_result: Result<(), DoitError> = Ok(()); let mut err_opt: Option<InvalidOptionsError> = None; match self.opt.subcommand() { ("accounts", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._accounts_get(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._accounts_list(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._accounts_patch(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._accounts_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("accounts".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("billing-info", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._billing_info_get(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._billing_info_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("billing-info".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("budget", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._budget_get(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._budget_patch(opt, dry_run, &mut 
err); }, ("update", Some(opt)) => { call_result = self._budget_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("budget".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("creatives", Some(opt)) => { match opt.subcommand() { ("add-deal", Some(opt)) => { call_result = self._creatives_add_deal(opt, dry_run, &mut err); }, ("get", Some(opt)) => { call_result = self._creatives_get(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._creatives_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._creatives_list(opt, dry_run, &mut err); }, ("list-deals", Some(opt)) => { call_result = self._creatives_list_deals(opt, dry_run, &mut err); }, ("remove-deal", Some(opt)) => { call_result = self._creatives_remove_deal(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("creatives".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("marketplacedeals", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._marketplacedeals_delete(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._marketplacedeals_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._marketplacedeals_list(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._marketplacedeals_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("marketplacedeals".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("marketplacenotes", Some(opt)) => { match opt.subcommand() { ("insert", Some(opt)) => { call_result = self._marketplacenotes_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._marketplacenotes_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("marketplacenotes".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, 
("marketplaceprivateauction", Some(opt)) => { match opt.subcommand() { ("updateproposal", Some(opt)) => { call_result = self._marketplaceprivateauction_updateproposal(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("marketplaceprivateauction".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("performance-report", Some(opt)) => { match opt.subcommand() { ("list", Some(opt)) => { call_result = self._performance_report_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("performance-report".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("pretargeting-config", Some(opt)) => { match opt.subcommand() { ("delete", Some(opt)) => { call_result = self._pretargeting_config_delete(opt, dry_run, &mut err); }, ("get", Some(opt)) => { call_result = self._pretargeting_config_get(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { call_result = self._pretargeting_config_insert(opt, dry_run, &mut err); }, ("list", Some(opt)) => { call_result = self._pretargeting_config_list(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._pretargeting_config_patch(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._pretargeting_config_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("pretargeting-config".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("products", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._products_get(opt, dry_run, &mut err); }, ("search", Some(opt)) => { call_result = self._products_search(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("products".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("proposals", Some(opt)) => { match opt.subcommand() { ("get", Some(opt)) => { call_result = self._proposals_get(opt, dry_run, &mut err); }, ("insert", Some(opt)) => { 
call_result = self._proposals_insert(opt, dry_run, &mut err); }, ("patch", Some(opt)) => { call_result = self._proposals_patch(opt, dry_run, &mut err); }, ("search", Some(opt)) => { call_result = self._proposals_search(opt, dry_run, &mut err); }, ("setupcomplete", Some(opt)) => { call_result = self._proposals_setupcomplete(opt, dry_run, &mut err); }, ("update", Some(opt)) => { call_result = self._proposals_update(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("proposals".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, ("pubprofiles", Some(opt)) => { match opt.subcommand() { ("list", Some(opt)) => { call_result = self._pubprofiles_list(opt, dry_run, &mut err); }, _ => { err.issues.push(CLIError::MissingMethodError("pubprofiles".to_string())); writeln!(io::stderr(), "{}\n", opt.usage()).ok(); } } }, _ => { err.issues.push(CLIError::MissingCommandError); writeln!(io::stderr(), "{}\n", self.opt.usage()).ok(); } } if dry_run { if err.issues.len() > 0 { err_opt = Some(err); } Err(err_opt) } else { Ok(call_result) } } /* Engine construction: resolves the config directory, loads or materializes the application secret, wires the OAuth2 authenticator and API hub (with optional tee connectors when --debug/--debug-auth are set), then dry-runs _doit once so option errors surface before any network call. */ // Please note that this call will fail if any part of the opt can't be handled fn new(opt: ArgMatches<'n>) -> Result<Engine<'n>, InvalidOptionsError> { let (config_dir, secret) = { let config_dir = match cmn::assure_config_dir_exists(opt.value_of("folder").unwrap_or("~/.google-service-cli")) { Err(e) => return Err(InvalidOptionsError::single(e, 3)), Ok(p) => p, }; match cmn::application_secret_from_directory(&config_dir, "adexchangebuyer1d4-secret.json", 
/* (continuation of Engine::new) bundled installed-app OAuth2 client-secret JSON handed to application_secret_from_directory — presumably the fallback when no secret file exists yet; verify against cmn. */ "{\"installed\":{\"auth_uri\":\"https://accounts.google.com/o/oauth2/auth\",\"client_secret\":\"hCsslbCUyfehWMmbkG8vTYxG\",\"token_uri\":\"https://accounts.google.com/o/oauth2/token\",\"client_email\":\"\",\"redirect_uris\":[\"urn:ietf:wg:oauth:2.0:oob\",\"oob\"],\"client_x509_cert_url\":\"\",\"client_id\":\"620010449518-9ngf7o4dhs0dka470npqvor6dc5lqb9b.apps.googleusercontent.com\",\"auth_provider_x509_cert_url\":\"https://www.googleapis.com/oauth2/v1/certs\"}}") { Ok(secret) => (config_dir, secret), Err(e) => return Err(InvalidOptionsError::single(e, 4)) } }; let auth = Authenticator::new( &secret, DefaultAuthenticatorDelegate, if opt.is_present("debug-auth") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }, JsonTokenStorage { program_name: "adexchangebuyer1d4", db_dir: config_dir.clone(), }, Some(FlowType::InstalledRedirect(54324))); let client = if opt.is_present("debug") { hyper::Client::with_connector(mock::TeeConnector { connector: hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new()) }) } else { hyper::Client::with_connector(hyper::net::HttpsConnector::new(hyper_rustls::TlsClient::new())) }; let engine = Engine { opt: opt, hub: api::AdExchangeBuyer::new(client, auth), gp: vec!["alt", "fields", "key", "oauth-token", "pretty-print", "quota-user", "user-ip"], gpm: vec![ ("oauth-token", "oauth_token"), ("pretty-print", "prettyPrint"), ("quota-user", "quotaUser"), ("user-ip", "userIp"), ] }; match engine._doit(true) { Err(Some(err)) => Err(err), Err(None) => Ok(engine), Ok(_) => unreachable!(), } } /* Executes the previously validated invocation for real (dry_run = false); _doit(false) always returns Ok in that mode, so Err is unreachable here. */ fn doit(&self) -> Result<(), DoitError> { match self._doit(false) { Ok(res) => res, Err(_) => unreachable!(), } } } /* CLI entry point: static (resource, description, methods) table consumed by the clap setup; the declaration continues beyond this view. */ fn main() { let mut exit_status = 0i32; let arg_data = [ ("accounts", "methods: 'get', 'list', 'patch' and 'update'", vec![ ("get", Some(r##"Gets one account 
by ID."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/accounts_get", vec![ (Some(r##"id"##), None, Some(r##"The account id"##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Retrieves the authenticated user's list of accounts."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/accounts_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Updates an existing account. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/accounts_patch", vec![ (Some(r##"id"##), None, Some(r##"The account id"##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Updates an existing account."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/accounts_update", vec![ (Some(r##"id"##), None, Some(r##"The account id"##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), 
(Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("billing-info", "methods: 'get' and 'list'", vec![ ("get", Some(r##"Returns the billing information for one account specified by account ID."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/billing-info_get", vec![ (Some(r##"account-id"##), None, Some(r##"The account id."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Retrieves a list of billing information for all accounts of the authenticated user."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/billing-info_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("budget", "methods: 'get', 'patch' and 'update'", vec![ ("get", Some(r##"Returns the budget information for the adgroup specified by the accountId and billingId."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/budget_get", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to get the budget information for."##), Some(true), Some(false)), (Some(r##"billing-id"##), None, Some(r##"The billing id to get the budget information for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), 
Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/budget_patch", vec![ (Some(r##"account-id"##), None, Some(r##"The account id associated with the budget being updated."##), Some(true), Some(false)), (Some(r##"billing-id"##), None, Some(r##"The billing id associated with the budget being updated."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Updates the budget amount for the budget of the adgroup specified by the accountId and billingId, with the budget amount in the request."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/budget_update", vec![ (Some(r##"account-id"##), None, Some(r##"The account id associated with the budget being updated."##), Some(true), Some(false)), (Some(r##"billing-id"##), None, Some(r##"The billing id associated with the budget being updated."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the 
file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("creatives", "methods: 'add-deal', 'get', 'insert', 'list', 'list-deals' and 'remove-deal'", vec![ ("add-deal", Some(r##"Add a deal id association for the creative."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_add-deal", vec![ (Some(r##"account-id"##), None, Some(r##"The id for the account that will serve this creative."##), Some(true), Some(false)), (Some(r##"buyer-creative-id"##), None, Some(r##"The buyer-specific id for this creative."##), Some(true), Some(false)), (Some(r##"deal-id"##), None, Some(r##"The id of the deal id to associate with this creative."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ("get", Some(r##"Gets the status for a single creative. A creative will be available 30-40 minutes after submission."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_get", vec![ (Some(r##"account-id"##), None, Some(r##"The id for the account that will serve this creative."##), Some(true), Some(false)), (Some(r##"buyer-creative-id"##), None, Some(r##"The buyer-specific id for this creative."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Submit a new creative."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_insert", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching 
the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Retrieves a list of the authenticated user's active creatives. A creative will be available 30-40 minutes after submission."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_list", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list-deals", Some(r##"Lists the external deal ids associated with the creative."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_list-deals", vec![ (Some(r##"account-id"##), None, Some(r##"The id for the account that will serve this creative."##), Some(true), Some(false)), (Some(r##"buyer-creative-id"##), None, Some(r##"The buyer-specific id for this creative."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("remove-deal", Some(r##"Remove a deal id associated with the creative."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/creatives_remove-deal", vec![ (Some(r##"account-id"##), None, Some(r##"The id for the account that will serve this creative."##), Some(true), Some(false)), (Some(r##"buyer-creative-id"##), None, Some(r##"The buyer-specific id for this creative."##), Some(true), Some(false)), (Some(r##"deal-id"##), None, Some(r##"The id of the deal id to disassociate with this creative."##), Some(true), Some(false)), 
(Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ]), ("marketplacedeals", "methods: 'delete', 'insert', 'list' and 'update'", vec![ ("delete", Some(r##"Delete the specified deals from the proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacedeals_delete", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposalId to delete deals from."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Add new deals for the specified proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacedeals_insert", vec![ (Some(r##"proposal-id"##), None, Some(r##"proposalId for which deals need to be added."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"List all the deals for a given proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacedeals_list", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposalId to get deals for. 
To search across all proposals specify order_id = '-' as part of the URL."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Replaces all the deals in the proposal with the passed in deals"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacedeals_update", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposalId to edit deals on."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("marketplacenotes", "methods: 'insert' and 'list'", vec![ ("insert", Some(r##"Add notes to the proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacenotes_insert", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposalId to add notes for."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Get all the notes associated with a proposal"##), "Details at 
http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplacenotes_list", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposalId to get notes for. To search across all proposals specify order_id = '-' as part of the URL."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("marketplaceprivateauction", "methods: 'updateproposal'", vec![ ("updateproposal", Some(r##"Update a given private auction proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/marketplaceprivateauction_updateproposal", vec![ (Some(r##"private-auction-id"##), None, Some(r##"The private auction id to be updated."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ]), ("performance-report", "methods: 'list'", vec![ ("list", Some(r##"Retrieves the authenticated user's list of performance metrics."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/performance-report_list", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to get the reports."##), Some(true), Some(false)), (Some(r##"end-date-time"##), None, Some(r##"The end time of the report in ISO 8601 timestamp format using UTC."##), Some(true), Some(false)), (Some(r##"start-date-time"##), None, Some(r##"The start time of the report in ISO 8601 timestamp format using UTC."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), 
Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("pretargeting-config", "methods: 'delete', 'get', 'insert', 'list', 'patch' and 'update'", vec![ ("delete", Some(r##"Deletes an existing pretargeting config."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_delete", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to delete the pretargeting config for."##), Some(true), Some(false)), (Some(r##"config-id"##), None, Some(r##"The specific id of the configuration to delete."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ("get", Some(r##"Gets a specific pretargeting configuration"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_get", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to get the pretargeting config for."##), Some(true), Some(false)), (Some(r##"config-id"##), None, Some(r##"The specific id of the configuration to retrieve."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Inserts a new pretargeting configuration."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_insert", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to insert the pretargeting config for."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), 
Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("list", Some(r##"Retrieves a list of the authenticated user's pretargeting configurations."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_list", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to get the pretargeting configs for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Updates an existing pretargeting config. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_patch", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to update the pretargeting config for."##), Some(true), Some(false)), (Some(r##"config-id"##), None, Some(r##"The specific id of the configuration to update."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("update", Some(r##"Updates an existing pretargeting config."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pretargeting-config_update", vec![ (Some(r##"account-id"##), None, Some(r##"The account id to update the pretargeting config 
for."##), Some(true), Some(false)), (Some(r##"config-id"##), None, Some(r##"The specific id of the configuration to update."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("products", "methods: 'get' and 'search'", vec![ ("get", Some(r##"Gets the requested product by id."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/products_get", vec![ (Some(r##"product-id"##), None, Some(r##"The id for the product to get the head revision for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("search", Some(r##"Gets the requested product."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/products_search", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("proposals", "methods: 'get', 'insert', 'patch', 'search', 'setupcomplete' and 'update'", vec![ ("get", Some(r##"Get a proposal given its id"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_get", vec![ (Some(r##"proposal-id"##), None, Some(r##"Id of the proposal to retrieve."##), Some(true), Some(false)), (Some(r##"v"##), 
Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("insert", Some(r##"Create the given list of proposals"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_insert", vec![ (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("patch", Some(r##"Update the given proposal. This method supports patch semantics."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_patch", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposal id to update."##), Some(true), Some(false)), (Some(r##"revision-number"##), None, Some(r##"The last known revision number to update. If the head revision in the marketplace database has since changed, an error will be thrown. The caller should then fetch the latest proposal at head revision and retry the update at that revision."##), Some(true), Some(false)), (Some(r##"update-action"##), None, Some(r##"The proposed action to take on the proposal. 
This field is required and it must be set when updating a proposal."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("search", Some(r##"Search for proposals using pql query"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_search", vec![ (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ("setupcomplete", Some(r##"Update the given proposal to indicate that setup has been completed."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_setupcomplete", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposal id for which the setup is complete"##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), ]), ("update", Some(r##"Update the given proposal"##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/proposals_update", vec![ (Some(r##"proposal-id"##), None, Some(r##"The proposal id to update."##), Some(true), Some(false)), (Some(r##"revision-number"##), None, Some(r##"The last known revision number to update. If the head revision in the marketplace database has since changed, an error will be thrown. 
The caller should then fetch the latest proposal at head revision and retry the update at that revision."##), Some(true), Some(false)), (Some(r##"update-action"##), None, Some(r##"The proposed action to take on the proposal. This field is required and it must be set when updating a proposal."##), Some(true), Some(false)), (Some(r##"kv"##), Some(r##"r"##), Some(r##"Set various fields of the request structure, matching the key=value form"##), Some(true), Some(true)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ("pubprofiles", "methods: 'list'", vec![ ("list", Some(r##"Gets the requested publisher profile(s) by publisher accountId."##), "Details at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli/pubprofiles_list", vec![ (Some(r##"account-id"##), None, Some(r##"The accountId of the publisher to get profiles for."##), Some(true), Some(false)), (Some(r##"v"##), Some(r##"p"##), Some(r##"Set various optional parameters, matching the key=value form"##), Some(false), Some(true)), (Some(r##"out"##), Some(r##"o"##), Some(r##"Specify the file into which to write the program's output"##), Some(false), Some(false)), ]), ]), ]; let mut app = App::new("adexchangebuyer1d4") .author("Sebastian Thiel <[email protected]>") .version("1.0.11+20190614") .about("Accesses your bidding-account information, submits creatives for validation, finds available direct deals, and retrieves performance reports.") .after_help("All documentation details can be found at http://byron.github.io/google-apis-rs/google_adexchangebuyer1d4_cli") .arg(Arg::with_name("url") .long("scope") .help("Specify the authentication a method should be executed in. 
Each scope requires the user to grant this application permission to use it.If unset, it defaults to the shortest scope url for a particular method.") .multiple(true) .takes_value(true)) .arg(Arg::with_name("folder") .long("config-dir") .help("A directory into which we will store our persistent data. Defaults to a user-writable directory that we will create during the first invocation.[default: ~/.google-service-cli") .multiple(false) .takes_value(true)) .arg(Arg::with_name("debug") .long("debug") .help("Output all server communication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)) .arg(Arg::with_name("debug-auth") .long("debug-auth") .help("Output all communication related to authentication to standard error. `tx` and `rx` are placed into the same stream.") .multiple(false) .takes_value(false)); for &(main_command_name, about, ref subcommands) in arg_data.iter() { let mut mcmd = SubCommand::with_name(main_command_name).about(about); for &(sub_command_name, ref desc, url_info, ref args) in subcommands { let mut scmd = SubCommand::with_name(sub_command_name); if let &Some(desc) = desc { scmd = scmd.about(desc); } scmd = scmd.after_help(url_info); for &(ref arg_name, ref flag, ref desc, ref required, ref multi) in args { let arg_name_str = match (arg_name, flag) { (&Some(an), _ ) => an, (_ , &Some(f)) => f, _ => unreachable!(), }; let mut arg = Arg::with_name(arg_name_str) .empty_values(false); if let &Some(short_flag) = flag { arg = arg.short(short_flag); } if let &Some(desc) = desc { arg = arg.help(desc); } if arg_name.is_some() && flag.is_some() { arg = arg.takes_value(true); } if let &Some(required) = required { arg = arg.required(required); } if let &Some(multi) = multi { arg = arg.multiple(multi); } scmd = scmd.arg(arg); } mcmd = mcmd.subcommand(scmd); } app = app.subcommand(mcmd); } let matches = app.get_matches(); let debug = matches.is_present("debug"); match Engine::new(matches) { Err(err) => { 
exit_status = err.exit_code; writeln!(io::stderr(), "{}", err).ok(); }, Ok(engine) => { if let Err(doit_err) = engine.doit() { exit_status = 1; match doit_err { DoitError::IoError(path, err) => { writeln!(io::stderr(), "Failed to open output file '{}': {}", path, err).ok(); }, DoitError::ApiError(err) => { if debug { writeln!(io::stderr(), "{:#?}", err).ok(); } else { writeln!(io::stderr(), "{}", err).ok(); } } } } } } std::process::exit(exit_status); }
53.223549
829
0.452162
ffa9cbe6bd45f54519ac23d5b097a71c1022996b
30,115
#![cfg(not(debug_assertions))] use beacon_chain::{ test_utils::{ AttestationStrategy, BeaconChainHarness, BlockStrategy, NullMigratorEphemeralHarnessType, }, BeaconChain, BeaconChainError, BeaconForkChoiceStore, ForkChoiceError, StateSkipConfig, }; use fork_choice::{ ForkChoiceStore, InvalidAttestation, InvalidBlock, QueuedAttestation, SAFE_SLOTS_TO_UPDATE_JUSTIFIED, }; use std::sync::Mutex; use store::{MemoryStore, StoreConfig}; use types::{ test_utils::{generate_deterministic_keypair, generate_deterministic_keypairs}, Epoch, EthSpec, IndexedAttestation, MainnetEthSpec, Slot, SubnetId, }; use types::{BeaconBlock, BeaconState, Hash256, SignedBeaconBlock}; pub type E = MainnetEthSpec; pub const VALIDATOR_COUNT: usize = 32; /// Defines some delay between when an attestation is created and when it is mutated. pub enum MutationDelay { /// No delay between creation and mutation. NoDelay, /// Create `n` blocks before mutating the attestation. Blocks(usize), } /// A helper struct to make testing fork choice more ergonomic and less repetitive. struct ForkChoiceTest { harness: BeaconChainHarness<NullMigratorEphemeralHarnessType<E>>, } impl ForkChoiceTest { /// Creates a new tester. pub fn new() -> Self { let harness = BeaconChainHarness::new_with_target_aggregators( MainnetEthSpec, generate_deterministic_keypairs(VALIDATOR_COUNT), // Ensure we always have an aggregator for each slot. u64::max_value(), StoreConfig::default(), ); Self { harness } } /// Get a value from the `ForkChoice` instantiation. fn get<T, U>(&self, func: T) -> U where T: Fn(&BeaconForkChoiceStore<E, MemoryStore<E>, MemoryStore<E>>) -> U, { func(&self.harness.chain.fork_choice.read().fc_store()) } /// Assert the epochs match. pub fn assert_finalized_epoch(self, epoch: u64) -> Self { assert_eq!( self.get(|fc_store| fc_store.finalized_checkpoint().epoch), Epoch::new(epoch), "finalized_epoch" ); self } /// Assert the epochs match. 
pub fn assert_justified_epoch(self, epoch: u64) -> Self { assert_eq!( self.get(|fc_store| fc_store.justified_checkpoint().epoch), Epoch::new(epoch), "justified_epoch" ); self } /// Assert the epochs match. pub fn assert_best_justified_epoch(self, epoch: u64) -> Self { assert_eq!( self.get(|fc_store| fc_store.best_justified_checkpoint().epoch), Epoch::new(epoch), "best_justified_epoch" ); self } /// Inspect the queued attestations in fork choice. pub fn inspect_queued_attestations<F>(self, mut func: F) -> Self where F: FnMut(&[QueuedAttestation]), { self.harness .chain .fork_choice .write() .update_time(self.harness.chain.slot().unwrap()) .unwrap(); func(self.harness.chain.fork_choice.read().queued_attestations()); self } /// Skip a slot, without producing a block. pub fn skip_slot(self) -> Self { self.harness.advance_slot(); self } /// Skips `count` slots, without producing a block. pub fn skip_slots(self, count: usize) -> Self { for _ in 0..count { self.harness.advance_slot(); } self } /// Build the chain whilst `predicate` returns `true`. pub fn apply_blocks_while<F>(mut self, mut predicate: F) -> Self where F: FnMut(&BeaconBlock<E>, &BeaconState<E>) -> bool, { self.harness.advance_slot(); let mut state = self.harness.get_current_state(); let validators = self.harness.get_all_validators(); loop { let slot = self.harness.get_current_slot(); let (block, state_) = self.harness.make_block(state, slot); state = state_; if !predicate(&block.message, &state) { break; } let block_hash = self.harness.process_block(slot, block.clone()); self.harness .attest_block(&state, block_hash, &block, &validators); self.harness.advance_slot(); } self } /// Apply `count` blocks to the chain (with attestations). pub fn apply_blocks(mut self, count: usize) -> Self { self.harness.advance_slot(); self.harness.extend_chain( count, BlockStrategy::OnCanonicalHead, AttestationStrategy::AllValidators, ); self } /// Apply `count` blocks to the chain (without attestations). 
pub fn apply_blocks_without_new_attestations(mut self, count: usize) -> Self { self.harness.advance_slot(); self.harness.extend_chain( count, BlockStrategy::OnCanonicalHead, AttestationStrategy::SomeValidators(vec![]), ); self } /// Moves to the next slot that is *outside* the `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` range. /// /// If the chain is presently in an unsafe period, transition through it and the following safe /// period. pub fn move_to_next_unsafe_period(self) -> Self { self.move_inside_safe_to_update() .move_outside_safe_to_update() } /// Moves to the next slot that is *outside* the `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` range. pub fn move_outside_safe_to_update(self) -> Self { while is_safe_to_update(self.harness.chain.slot().unwrap()) { self.harness.advance_slot() } self } /// Moves to the next slot that is *inside* the `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` range. pub fn move_inside_safe_to_update(self) -> Self { while !is_safe_to_update(self.harness.chain.slot().unwrap()) { self.harness.advance_slot() } self } /// Applies a block directly to fork choice, bypassing the beacon chain. /// /// Asserts the block was applied successfully. pub fn apply_block_directly_to_fork_choice<F>(mut self, mut func: F) -> Self where F: FnMut(&mut BeaconBlock<E>, &mut BeaconState<E>), { let state = self .harness .chain .state_at_slot( self.harness.get_current_slot() - 1, StateSkipConfig::WithStateRoots, ) .unwrap(); let slot = self.harness.get_current_slot(); let (mut block, mut state) = self.harness.make_block(state, slot); func(&mut block.message, &mut state); let current_slot = self.harness.get_current_slot(); self.harness .chain .fork_choice .write() .on_block(current_slot, &block.message, block.canonical_root(), &state) .unwrap(); self } /// Applies a block directly to fork choice, bypassing the beacon chain. /// /// Asserts that an error occurred and allows inspecting it via `comparison_func`. 
pub fn apply_invalid_block_directly_to_fork_choice<F, G>( mut self, mut mutation_func: F, mut comparison_func: G, ) -> Self where F: FnMut(&mut BeaconBlock<E>, &mut BeaconState<E>), G: FnMut(ForkChoiceError), { let state = self .harness .chain .state_at_slot( self.harness.get_current_slot() - 1, StateSkipConfig::WithStateRoots, ) .unwrap(); let slot = self.harness.get_current_slot(); let (mut block, mut state) = self.harness.make_block(state, slot); mutation_func(&mut block.message, &mut state); let current_slot = self.harness.get_current_slot(); let err = self .harness .chain .fork_choice .write() .on_block(current_slot, &block.message, block.canonical_root(), &state) .err() .expect("on_block did not return an error"); comparison_func(err); self } /// Compares the justified balances in the `ForkChoiceStore` verses a direct lookup from the /// database. fn check_justified_balances(&self) { let harness = &self.harness; let fc = self.harness.chain.fork_choice.read(); let state_root = harness .chain .store .get_item::<SignedBeaconBlock<E>>(&fc.fc_store().justified_checkpoint().root) .unwrap() .unwrap() .message .state_root; let state = harness .chain .store .get_state(&state_root, None) .unwrap() .unwrap(); let balances = state .validators .into_iter() .map(|v| { if v.is_active_at(state.current_epoch()) { v.effective_balance } else { 0 } }) .collect::<Vec<_>>(); assert_eq!( &balances[..], fc.fc_store().justified_balances(), "balances should match" ) } /// Returns an attestation that is valid for some slot in the given `chain`. /// /// Also returns some info about who created it. 
fn apply_attestation_to_chain<F, G>( mut self, delay: MutationDelay, mut mutation_func: F, mut comparison_func: G, ) -> Self where F: FnMut(&mut IndexedAttestation<E>, &BeaconChain<NullMigratorEphemeralHarnessType<E>>), G: FnMut(Result<(), BeaconChainError>), { let head = self.harness.chain.head().expect("should get head"); let current_slot = self.harness.chain.slot().expect("should get slot"); let mut attestation = self .harness .chain .produce_unaggregated_attestation(current_slot, 0) .expect("should not error while producing attestation"); let validator_committee_index = 0; let validator_index = *head .beacon_state .get_beacon_committee(current_slot, attestation.data.index) .expect("should get committees") .committee .get(validator_committee_index) .expect("there should be an attesting validator"); let committee_count = head .beacon_state .get_committee_count_at_slot(current_slot) .expect("should not error while getting committee count"); let subnet_id = SubnetId::compute_subnet::<E>( current_slot, 0, committee_count, &self.harness.chain.spec, ) .expect("should compute subnet id"); let validator_sk = generate_deterministic_keypair(validator_index).sk; attestation .sign( &validator_sk, validator_committee_index, &head.beacon_state.fork, self.harness.chain.genesis_validators_root, &self.harness.chain.spec, ) .expect("should sign attestation"); let mut verified_attestation = self .harness .chain .verify_unaggregated_attestation_for_gossip(attestation, subnet_id) .expect("precondition: should gossip verify attestation"); if let MutationDelay::Blocks(slots) = delay { self.harness.advance_slot(); self.harness.extend_chain( slots, BlockStrategy::OnCanonicalHead, AttestationStrategy::SomeValidators(vec![]), ); } mutation_func( verified_attestation.__indexed_attestation_mut(), &self.harness.chain, ); let result = self .harness .chain .apply_attestation_to_fork_choice(&verified_attestation); comparison_func(result); self } /// Check to ensure that we can read the 
finalized block. This is a regression test. pub fn check_finalized_block_is_accessible(self) -> Self { self.harness .chain .fork_choice .write() .get_block( &self .harness .chain .head_info() .unwrap() .finalized_checkpoint .root, ) .unwrap(); self } } fn is_safe_to_update(slot: Slot) -> bool { slot % E::slots_per_epoch() < SAFE_SLOTS_TO_UPDATE_JUSTIFIED } /// - The new justified checkpoint descends from the current. /// - Current slot is within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` #[test] fn justified_checkpoint_updates_with_descendent_inside_safe_slots() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .move_inside_safe_to_update() .assert_justified_epoch(0) .apply_blocks(1) .assert_justified_epoch(2); } /// - The new justified checkpoint descends from the current. /// - Current slot is **not** within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` /// - This is **not** the first justification since genesis #[test] fn justified_checkpoint_updates_with_descendent_outside_safe_slots() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch <= 2) .move_outside_safe_to_update() .assert_justified_epoch(2) .assert_best_justified_epoch(2) .apply_blocks(1) .assert_justified_epoch(3); } /// - The new justified checkpoint descends from the current. /// - Current slot is **not** within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` /// - This is the first justification since genesis #[test] fn justified_checkpoint_updates_first_justification_outside_safe_to_update() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .move_to_next_unsafe_period() .assert_justified_epoch(0) .assert_best_justified_epoch(0) .apply_blocks(1) .assert_justified_epoch(2) .assert_best_justified_epoch(2); } /// - The new justified checkpoint **does not** descend from the current. /// - Current slot is within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` /// - Finalized epoch has **not** increased. 
#[test] fn justified_checkpoint_updates_with_non_descendent_inside_safe_slots_without_finality() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .apply_blocks(1) .move_inside_safe_to_update() .assert_justified_epoch(2) .apply_block_directly_to_fork_choice(|_, state| { // The finalized checkpoint should not change. state.finalized_checkpoint.epoch = Epoch::new(0); // The justified checkpoint has changed. state.current_justified_checkpoint.epoch = Epoch::new(3); // The new block should **not** include the current justified block as an ancestor. state.current_justified_checkpoint.root = *state .get_block_root(Epoch::new(1).start_slot(E::slots_per_epoch())) .unwrap(); }) .assert_justified_epoch(3) .assert_best_justified_epoch(3); } /// - The new justified checkpoint **does not** descend from the current. /// - Current slot is **not** within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED`. /// - Finalized epoch has **not** increased. #[test] fn justified_checkpoint_updates_with_non_descendent_outside_safe_slots_without_finality() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .apply_blocks(1) .move_to_next_unsafe_period() .assert_justified_epoch(2) .apply_block_directly_to_fork_choice(|_, state| { // The finalized checkpoint should not change. state.finalized_checkpoint.epoch = Epoch::new(0); // The justified checkpoint has changed. state.current_justified_checkpoint.epoch = Epoch::new(3); // The new block should **not** include the current justified block as an ancestor. state.current_justified_checkpoint.root = *state .get_block_root(Epoch::new(1).start_slot(E::slots_per_epoch())) .unwrap(); }) .assert_justified_epoch(2) .assert_best_justified_epoch(3); } /// - The new justified checkpoint **does not** descend from the current. /// - Current slot is **not** within `SAFE_SLOTS_TO_UPDATE_JUSTIFIED` /// - Finalized epoch has increased. 
#[test] fn justified_checkpoint_updates_with_non_descendent_outside_safe_slots_with_finality() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .apply_blocks(1) .move_to_next_unsafe_period() .assert_justified_epoch(2) .apply_block_directly_to_fork_choice(|_, state| { // The finalized checkpoint should change. state.finalized_checkpoint.epoch = Epoch::new(1); // The justified checkpoint has changed. state.current_justified_checkpoint.epoch = Epoch::new(3); // The new block should **not** include the current justified block as an ancestor. state.current_justified_checkpoint.root = *state .get_block_root(Epoch::new(1).start_slot(E::slots_per_epoch())) .unwrap(); }) .assert_justified_epoch(3) .assert_best_justified_epoch(3); } /// Check that the balances are obtained correctly. #[test] fn justified_balances() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.current_justified_checkpoint.epoch == 0) .apply_blocks(1) .assert_justified_epoch(2) .check_justified_balances() } macro_rules! assert_invalid_block { ($err: tt, $($error: pat) |+ $( if $guard: expr )?) => { assert!( matches!( $err, $( ForkChoiceError::InvalidBlock($error) ) |+ $( if $guard )? ), ); }; } /// Specification v0.12.1 /// /// assert block.parent_root in store.block_states #[test] fn invalid_block_unknown_parent() { let junk = Hash256::from_low_u64_be(42); ForkChoiceTest::new() .apply_blocks(2) .apply_invalid_block_directly_to_fork_choice( |block, _| { block.parent_root = junk; }, |err| { assert_invalid_block!( err, InvalidBlock::UnknownParent(parent) if parent == junk ) }, ); } /// Specification v0.12.1 /// /// assert get_current_slot(store) >= block.slot #[test] fn invalid_block_future_slot() { ForkChoiceTest::new() .apply_blocks(2) .apply_invalid_block_directly_to_fork_choice( |block, _| { block.slot = block.slot + 1; }, |err| { assert_invalid_block!( err, InvalidBlock::FutureSlot { .. 
} ) }, ); } /// Specification v0.12.1 /// /// assert block.slot > finalized_slot #[test] fn invalid_block_finalized_slot() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.finalized_checkpoint.epoch == 0) .apply_blocks(1) .apply_invalid_block_directly_to_fork_choice( |block, _| { block.slot = Epoch::new(2).start_slot(E::slots_per_epoch()) - 1; }, |err| { assert_invalid_block!( err, InvalidBlock::FinalizedSlot { finalized_slot, .. } if finalized_slot == Epoch::new(2).start_slot(E::slots_per_epoch()) ) }, ); } /// Specification v0.12.1 /// /// assert get_ancestor(store, hash_tree_root(block), finalized_slot) == /// store.finalized_checkpoint.root /// /// Note: we technically don't do this exact check, but an equivalent check. Reference: /// /// https://github.com/ethereum/eth2.0-specs/pull/1884 #[test] fn invalid_block_finalized_descendant() { let invalid_ancestor = Mutex::new(Hash256::zero()); ForkChoiceTest::new() .apply_blocks_while(|_, state| state.finalized_checkpoint.epoch == 0) .apply_blocks(1) .assert_finalized_epoch(2) .apply_invalid_block_directly_to_fork_choice( |block, state| { block.parent_root = *state .get_block_root(Epoch::new(1).start_slot(E::slots_per_epoch())) .unwrap(); *invalid_ancestor.lock().unwrap() = block.parent_root; }, |err| { assert_invalid_block!( err, InvalidBlock::NotFinalizedDescendant { block_ancestor, .. } if block_ancestor == Some(*invalid_ancestor.lock().unwrap()) ) }, ); } macro_rules! assert_invalid_attestation { ($err: tt, $($error: pat) |+ $( if $guard: expr )?) => { assert!( matches!( $err, $( Err(BeaconChainError::ForkChoiceError(ForkChoiceError::InvalidAttestation($error))) ) |+ $( if $guard )? ), "{:?}", $err ); }; } /// Ensure we can process a valid attestation. 
#[test] fn valid_attestation() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |_, _| {}, |result| assert_eq!(result.unwrap(), ()), ); } /// This test is not in the specification, however we reject an attestation with an empty /// aggregation bitfield since it has no purpose beyond wasting our time. #[test] fn invalid_attestation_empty_bitfield() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.attesting_indices = vec![].into(); }, |result| { assert_invalid_attestation!(result, InvalidAttestation::EmptyAggregationBitfield) }, ); } /// Specification v0.12.1: /// /// assert target.epoch in [expected_current_epoch, previous_epoch] /// /// (tests epoch after current epoch) #[test] fn invalid_attestation_future_epoch() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.data.target.epoch = Epoch::new(2); }, |result| { assert_invalid_attestation!( result, InvalidAttestation::FutureEpoch { attestation_epoch, current_epoch } if attestation_epoch == Epoch::new(2) && current_epoch == Epoch::new(0) ) }, ); } /// Specification v0.12.1: /// /// assert target.epoch in [expected_current_epoch, previous_epoch] /// /// (tests epoch prior to previous epoch) #[test] fn invalid_attestation_past_epoch() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(E::slots_per_epoch() as usize * 3 + 1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.data.target.epoch = Epoch::new(0); }, |result| { assert_invalid_attestation!( result, InvalidAttestation::PastEpoch { attestation_epoch, current_epoch } if attestation_epoch == Epoch::new(0) && current_epoch == Epoch::new(3) ) }, ); } /// Specification v0.12.1: /// /// assert target.epoch == compute_epoch_at_slot(attestation.data.slot) 
#[test] fn invalid_attestation_target_epoch() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(E::slots_per_epoch() as usize + 1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.data.slot = Slot::new(1); }, |result| { assert_invalid_attestation!( result, InvalidAttestation::BadTargetEpoch { target, slot } if target == Epoch::new(1) && slot == Slot::new(1) ) }, ); } /// Specification v0.12.1: /// /// assert target.root in store.blocks #[test] fn invalid_attestation_unknown_target_root() { let junk = Hash256::from_low_u64_be(42); ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.data.target.root = junk; }, |result| { assert_invalid_attestation!( result, InvalidAttestation::UnknownTargetRoot(root) if root == junk ) }, ); } /// Specification v0.12.1: /// /// assert attestation.data.beacon_block_root in store.blocks #[test] fn invalid_attestation_unknown_beacon_block_root() { let junk = Hash256::from_low_u64_be(42); ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _| { attestation.data.beacon_block_root = junk; }, |result| { assert_invalid_attestation!( result, InvalidAttestation::UnknownHeadBlock { beacon_block_root } if beacon_block_root == junk ) }, ); } /// Specification v0.12.1: /// /// assert store.blocks[attestation.data.beacon_block_root].slot <= attestation.data.slot #[test] fn invalid_attestation_future_block() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::Blocks(1), |attestation, chain| { attestation.data.beacon_block_root = chain .block_at_slot(chain.slot().unwrap()) .unwrap() .unwrap() .canonical_root(); }, |result| { assert_invalid_attestation!( result, InvalidAttestation::AttestsToFutureBlock { block, attestation } if block == 2 && attestation == 1 ) }, ); } /// 
Specification v0.12.1: /// /// assert target.root == get_ancestor(store, attestation.data.beacon_block_root, target_slot) #[test] fn invalid_attestation_inconsistent_ffg_vote() { let local_opt = Mutex::new(None); let attestation_opt = Mutex::new(None); ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, chain| { attestation.data.target.root = chain .block_at_slot(Slot::new(1)) .unwrap() .unwrap() .canonical_root(); *attestation_opt.lock().unwrap() = Some(attestation.data.target.root); *local_opt.lock().unwrap() = Some( chain .block_at_slot(Slot::new(0)) .unwrap() .unwrap() .canonical_root(), ); }, |result| { assert_invalid_attestation!( result, InvalidAttestation::InvalidTarget { attestation, local } if attestation == attestation_opt.lock().unwrap().unwrap() && local == local_opt.lock().unwrap().unwrap() ) }, ); } /// Specification v0.12.1: /// /// assert get_current_slot(store) >= attestation.data.slot + 1 #[test] fn invalid_attestation_delayed_slot() { ForkChoiceTest::new() .apply_blocks_without_new_attestations(1) .inspect_queued_attestations(|queue| assert_eq!(queue.len(), 0)) .apply_attestation_to_chain( MutationDelay::NoDelay, |_, _| {}, |result| assert_eq!(result.unwrap(), ()), ) .inspect_queued_attestations(|queue| assert_eq!(queue.len(), 1)) .skip_slot() .inspect_queued_attestations(|queue| assert_eq!(queue.len(), 0)); } /// Tests that the correct target root is used when the attested-to block is in a prior epoch to /// the attestation. 
#[test] fn valid_attestation_skip_across_epoch() { ForkChoiceTest::new() .apply_blocks(E::slots_per_epoch() as usize - 1) .skip_slots(2) .apply_attestation_to_chain( MutationDelay::NoDelay, |attestation, _chain| { assert_eq!( attestation.data.target.root, attestation.data.beacon_block_root ) }, |result| result.unwrap(), ); } #[test] fn can_read_finalized_block() { ForkChoiceTest::new() .apply_blocks_while(|_, state| state.finalized_checkpoint.epoch == 0) .apply_blocks(1) .check_finalized_block_is_accessible(); }
33.093407
123
0.585223
fb67bf4fd556fdb9f6c57fa1e723cb0cfdd6d20d
78,877
#![allow(non_snake_case, non_upper_case_globals)] #![allow(non_camel_case_types)] //! USB on the go high speed //! //! Used by: stm32f215, stm32f217 use crate::{RORegister, RWRegister}; #[cfg(not(feature = "nosync"))] use core::marker::PhantomData; /// OTG_HS device configuration register pub mod OTG_HS_DCFG { /// Device speed pub mod DSPD { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (2 bits: 0b11 << 0) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Nonzero-length status OUT handshake pub mod NZLSOHSK { /// Offset (2 bits) pub const offset: u32 = 2; /// Mask (1 bit: 1 << 2) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Device address pub mod DAD { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (7 bits: 0x7f << 4) pub const mask: u32 = 0x7f << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Periodic (micro)frame interval pub mod PFIVL { /// Offset (11 bits) pub const offset: u32 = 11; /// Mask (2 bits: 0b11 << 11) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Periodic scheduling interval pub mod PERSCHIVL { /// Offset (24 bits) pub const offset: u32 = 24; /// Mask (2 bits: 0b11 << 24) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device control register pub mod OTG_HS_DCTL { /// Remote wakeup signaling pub mod RWUSIG { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values 
(empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Soft disconnect pub mod SDIS { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Global IN NAK status pub mod GINSTS { /// Offset (2 bits) pub const offset: u32 = 2; /// Mask (1 bit: 1 << 2) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Global OUT NAK status pub mod GONSTS { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Test control pub mod TCTL { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (3 bits: 0b111 << 4) pub const mask: u32 = 0b111 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set global IN NAK pub mod SGINAK { /// Offset (7 bits) pub const offset: u32 = 7; /// Mask (1 bit: 1 << 7) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Clear global IN NAK pub mod CGINAK { /// Offset (8 bits) pub const offset: u32 = 8; /// Mask (1 bit: 1 << 8) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set global OUT NAK pub mod SGONAK { /// Offset (9 bits) pub const offset: u32 = 9; /// Mask (1 bit: 1 << 9) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) 
pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Clear global OUT NAK pub mod CGONAK { /// Offset (10 bits) pub const offset: u32 = 10; /// Mask (1 bit: 1 << 10) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Power-on programming done pub mod POPRGDNE { /// Offset (11 bits) pub const offset: u32 = 11; /// Mask (1 bit: 1 << 11) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device status register pub mod OTG_HS_DSTS { /// Suspend status pub mod SUSPSTS { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Enumerated speed pub mod ENUMSPD { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (2 bits: 0b11 << 1) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Erratic error pub mod EERR { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Frame number of the received SOF pub mod FNSOF { /// Offset (8 bits) pub const offset: u32 = 8; /// Mask (14 bits: 0x3fff << 8) pub const mask: u32 = 0x3fff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device IN endpoint common interrupt mask register pub mod OTG_HS_DIEPMSK { /// Transfer completed interrupt mask pub mod XFRCM { /// Offset (0 bits) pub const 
offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disabled interrupt mask pub mod EPDM { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Timeout condition mask (nonisochronous endpoints) pub mod TOM { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// IN token received when TxFIFO empty mask pub mod ITTXFEMSK { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (1 bit: 1 << 4) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// IN token received with EP mismatch mask pub mod INEPNMM { /// Offset (5 bits) pub const offset: u32 = 5; /// Mask (1 bit: 1 << 5) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// IN endpoint NAK effective mask pub mod INEPNEM { /// Offset (6 bits) pub const offset: u32 = 6; /// Mask (1 bit: 1 << 6) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// FIFO underrun mask pub mod TXFURM { /// Offset (8 bits) pub const offset: u32 = 8; /// Mask (1 bit: 1 << 8) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// BNA interrupt mask 
pub mod BIM { /// Offset (9 bits) pub const offset: u32 = 9; /// Mask (1 bit: 1 << 9) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device OUT endpoint common interrupt mask register pub mod OTG_HS_DOEPMSK { /// Transfer completed interrupt mask pub mod XFRCM { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disabled interrupt mask pub mod EPDM { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// SETUP phase done mask pub mod STUPM { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT token received when endpoint disabled mask pub mod OTEPDM { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (1 bit: 1 << 4) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Back-to-back SETUP packets received mask pub mod B2BSTUP { /// Offset (6 bits) pub const offset: u32 = 6; /// Mask (1 bit: 1 << 6) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT packet error mask pub mod OPEM { /// Offset (8 bits) pub const offset: u32 = 8; /// Mask (1 bit: 1 << 8) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} 
/// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// BNA interrupt mask pub mod BOIM { /// Offset (9 bits) pub const offset: u32 = 9; /// Mask (1 bit: 1 << 9) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device all endpoints interrupt register pub mod OTG_HS_DAINT { /// IN endpoint interrupt bits pub mod IEPINT { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (16 bits: 0xffff << 0) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT endpoint interrupt bits pub mod OEPINT { /// Offset (16 bits) pub const offset: u32 = 16; /// Mask (16 bits: 0xffff << 16) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS all endpoints interrupt mask register pub mod OTG_HS_DAINTMSK { /// IN EP interrupt mask bits pub mod IEPM { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (16 bits: 0xffff << 0) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT EP interrupt mask bits pub mod OEPM { /// Offset (16 bits) pub const offset: u32 = 16; /// Mask (16 bits: 0xffff << 16) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device VBUS discharge time register pub mod OTG_HS_DVBUSDIS { /// Device VBUS discharge time pub mod VBUSDT { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (16 bits: 0xffff << 0) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// 
Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device VBUS pulsing time register pub mod OTG_HS_DVBUSPULSE { /// Device VBUS pulsing time pub mod DVBUSP { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (12 bits: 0xfff << 0) pub const mask: u32 = 0xfff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS Device threshold control register pub mod OTG_HS_DTHRCTL { /// Nonisochronous IN endpoints threshold enable pub mod NONISOTHREN { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// ISO IN endpoint threshold enable pub mod ISOTHREN { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Transmit threshold length pub mod TXTHRLEN { /// Offset (2 bits) pub const offset: u32 = 2; /// Mask (9 bits: 0x1ff << 2) pub const mask: u32 = 0x1ff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Receive threshold enable pub mod RXTHREN { /// Offset (16 bits) pub const offset: u32 = 16; /// Mask (1 bit: 1 << 16) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Receive threshold length pub mod RXTHRLEN { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (9 bits: 0x1ff << 17) pub const mask: u32 = 0x1ff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub 
mod RW {} } /// Arbiter parking enable pub mod ARPEN { /// Offset (27 bits) pub const offset: u32 = 27; /// Mask (1 bit: 1 << 27) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device IN endpoint FIFO empty interrupt mask register pub mod OTG_HS_DIEPEMPMSK { /// IN EP Tx FIFO empty interrupt mask bits pub mod INEPTXFEM { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (16 bits: 0xffff << 0) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device each endpoint interrupt register pub mod OTG_HS_DEACHINT { /// IN endpoint 1interrupt bit pub mod IEP1INT { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT endpoint 1 interrupt bit pub mod OEP1INT { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (1 bit: 1 << 17) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device each endpoint interrupt register mask pub mod OTG_HS_DEACHINTMSK { /// IN Endpoint 1 interrupt mask bit pub mod IEP1INTM { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT Endpoint 1 interrupt mask bit pub mod OEP1INTM { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (1 bit: 1 << 17) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// 
Read-write values (empty) pub mod RW {} } }

/// OTG_HS device each in endpoint-1 interrupt register
pub mod OTG_HS_DIEPEACHMSK1 {

    /// Transfer completed interrupt mask
    pub mod XFRCM {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Endpoint disabled interrupt mask
    pub mod EPDM {
        /// Offset (1 bits)
        pub const offset: u32 = 1;
        /// Mask (1 bit: 1 << 1)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Timeout condition mask (nonisochronous endpoints)
    pub mod TOM {
        /// Offset (3 bits)
        pub const offset: u32 = 3;
        /// Mask (1 bit: 1 << 3)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN token received when TxFIFO empty mask
    pub mod ITTXFEMSK {
        /// Offset (4 bits)
        pub const offset: u32 = 4;
        /// Mask (1 bit: 1 << 4)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN token received with EP mismatch mask
    pub mod INEPNMM {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN endpoint NAK effective mask
    pub mod INEPNEM {
        /// Offset (6 bits)
        pub const offset: u32 = 6;
        /// Mask (1 bit: 1 << 6)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// FIFO underrun mask
    pub mod TXFURM {
        /// Offset (8 bits)
        pub const offset: u32 = 8;
        /// Mask (1 bit: 1 << 8)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// BNA interrupt mask
    pub mod BIM {
        /// Offset (9 bits)
        pub const offset: u32 = 9;
        /// Mask (1 bit: 1 << 9)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// NAK interrupt mask
    pub mod NAKM {
        /// Offset (13 bits)
        pub const offset: u32 = 13;
        /// Mask (1 bit: 1 << 13)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}

/// OTG_HS device each OUT endpoint-1 interrupt register
pub mod OTG_HS_DOEPEACHMSK1 {

    /// Transfer completed interrupt mask
    pub mod XFRCM {
        /// Offset (0 bits)
        pub const offset: u32 = 0;
        /// Mask (1 bit: 1 << 0)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Endpoint disabled interrupt mask
    pub mod EPDM {
        /// Offset (1 bits)
        pub const offset: u32 = 1;
        /// Mask (1 bit: 1 << 1)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Timeout condition mask
    pub mod TOM {
        /// Offset (3 bits)
        pub const offset: u32 = 3;
        /// Mask (1 bit: 1 << 3)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN token received when TxFIFO empty mask
    pub mod ITTXFEMSK {
        /// Offset (4 bits)
        pub const offset: u32 = 4;
        /// Mask (1 bit: 1 << 4)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN token received with EP mismatch mask
    pub mod INEPNMM {
        /// Offset (5 bits)
        pub const offset: u32 = 5;
        /// Mask (1 bit: 1 << 5)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// IN endpoint NAK effective mask
    pub mod INEPNEM {
        /// Offset (6 bits)
        pub const offset: u32 = 6;
        /// Mask (1 bit: 1 << 6)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// OUT packet error mask
    pub mod TXFURM {
        /// Offset (8 bits)
        pub const offset: u32 = 8;
        /// Mask (1 bit: 1 << 8)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// BNA interrupt mask
    pub mod BIM {
        /// Offset (9 bits)
        pub const offset: u32 = 9;
        /// Mask (1 bit: 1 << 9)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// Babble error interrupt mask
    pub mod BERRM {
        /// Offset (12 bits)
        pub const offset: u32 = 12;
        /// Mask (1 bit: 1 << 12)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// NAK interrupt mask
    pub mod NAKM {
        /// Offset (13 bits)
        pub const offset: u32 = 13;
        /// Mask (1 bit: 1 << 13)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }

    /// NYET interrupt mask
    pub mod NYETM {
        /// Offset (14 bits)
        pub const offset: u32 = 14;
        /// Mask (1 bit: 1 << 14)
        pub const mask: u32 = 1 << offset;
        /// Read-only values (empty)
        pub mod R {}
        /// Write-only values (empty)
        pub mod W {}
        /// Read-write values (empty)
        pub mod RW {}
    }
}
/// OTG device endpoint-0 control register pub mod OTG_HS_DIEPCTL0 { /// Maximum packet size pub mod MPSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (11 bits: 0x7ff << 0) pub const mask: u32 = 0x7ff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// USB active endpoint pub mod USBAEP { /// Offset (15 bits) pub const offset: u32 = 15; /// Mask (1 bit: 1 << 15) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Even/odd frame pub mod EONUM_DPID { /// Offset (16 bits) pub const offset: u32 = 16; /// Mask (1 bit: 1 << 16) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// NAK status pub mod NAKSTS { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (1 bit: 1 << 17) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint type pub mod EPTYP { /// Offset (18 bits) pub const offset: u32 = 18; /// Mask (2 bits: 0b11 << 18) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// STALL handshake pub mod Stall { /// Offset (21 bits) pub const offset: u32 = 21; /// Mask (1 bit: 1 << 21) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// TxFIFO number pub mod TXFNUM { /// Offset (22 bits) pub const offset: u32 = 22; /// Mask (4 bits: 0b1111 << 22) pub const mask: u32 = 0b1111 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// 
Read-write values (empty) pub mod RW {} } /// Clear NAK pub mod CNAK { /// Offset (26 bits) pub const offset: u32 = 26; /// Mask (1 bit: 1 << 26) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set NAK pub mod SNAK { /// Offset (27 bits) pub const offset: u32 = 27; /// Mask (1 bit: 1 << 27) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set DATA0 PID pub mod SD0PID_SEVNFRM { /// Offset (28 bits) pub const offset: u32 = 28; /// Mask (1 bit: 1 << 28) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set odd frame pub mod SODDFRM { /// Offset (29 bits) pub const offset: u32 = 29; /// Mask (1 bit: 1 << 29) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disable pub mod EPDIS { /// Offset (30 bits) pub const offset: u32 = 30; /// Mask (1 bit: 1 << 30) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint enable pub mod EPENA { /// Offset (31 bits) pub const offset: u32 = 31; /// Mask (1 bit: 1 << 31) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG device endpoint-1 control register pub mod OTG_HS_DIEPCTL1 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use 
super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-2 control register pub mod OTG_HS_DIEPCTL2 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-3 control register pub mod OTG_HS_DIEPCTL3 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-4 control register pub mod OTG_HS_DIEPCTL4 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use 
super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-5 control register pub mod OTG_HS_DIEPCTL5 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-6 control register pub mod OTG_HS_DIEPCTL6 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-7 control register pub mod OTG_HS_DIEPCTL7 { pub use super::OTG_HS_DIEPCTL0::Stall; pub use super::OTG_HS_DIEPCTL0::CNAK; pub use super::OTG_HS_DIEPCTL0::EONUM_DPID; pub use super::OTG_HS_DIEPCTL0::EPDIS; pub use super::OTG_HS_DIEPCTL0::EPENA; pub use super::OTG_HS_DIEPCTL0::EPTYP; pub use super::OTG_HS_DIEPCTL0::MPSIZ; pub use super::OTG_HS_DIEPCTL0::NAKSTS; pub use super::OTG_HS_DIEPCTL0::SD0PID_SEVNFRM; pub use super::OTG_HS_DIEPCTL0::SNAK; pub use super::OTG_HS_DIEPCTL0::SODDFRM; pub use super::OTG_HS_DIEPCTL0::TXFNUM; pub use super::OTG_HS_DIEPCTL0::USBAEP; } /// OTG device endpoint-0 interrupt register pub mod OTG_HS_DIEPINT0 { /// Transfer completed 
interrupt pub mod XFRC { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disabled interrupt pub mod EPDISD { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Timeout condition pub mod TOC { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// IN token received when TxFIFO is empty pub mod ITTXFE { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (1 bit: 1 << 4) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// IN endpoint NAK effective pub mod INEPNE { /// Offset (6 bits) pub const offset: u32 = 6; /// Mask (1 bit: 1 << 6) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Transmit FIFO empty pub mod TXFE { /// Offset (7 bits) pub const offset: u32 = 7; /// Mask (1 bit: 1 << 7) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Transmit Fifo Underrun pub mod TXFIFOUDRN { /// Offset (8 bits) pub const offset: u32 = 8; /// Mask (1 bit: 1 << 8) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Buffer not available 
interrupt pub mod BNA { /// Offset (9 bits) pub const offset: u32 = 9; /// Mask (1 bit: 1 << 9) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Packet dropped status pub mod PKTDRPSTS { /// Offset (11 bits) pub const offset: u32 = 11; /// Mask (1 bit: 1 << 11) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Babble error interrupt pub mod BERR { /// Offset (12 bits) pub const offset: u32 = 12; /// Mask (1 bit: 1 << 12) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// NAK interrupt pub mod NAK { /// Offset (13 bits) pub const offset: u32 = 13; /// Mask (1 bit: 1 << 13) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG device endpoint-1 interrupt register pub mod OTG_HS_DIEPINT1 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-2 interrupt register pub mod OTG_HS_DIEPINT2 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub 
use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-3 interrupt register pub mod OTG_HS_DIEPINT3 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-4 interrupt register pub mod OTG_HS_DIEPINT4 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-5 interrupt register pub mod OTG_HS_DIEPINT5 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-6 interrupt register pub mod OTG_HS_DIEPINT6 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use 
super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG device endpoint-7 interrupt register pub mod OTG_HS_DIEPINT7 { pub use super::OTG_HS_DIEPINT0::BERR; pub use super::OTG_HS_DIEPINT0::BNA; pub use super::OTG_HS_DIEPINT0::EPDISD; pub use super::OTG_HS_DIEPINT0::INEPNE; pub use super::OTG_HS_DIEPINT0::ITTXFE; pub use super::OTG_HS_DIEPINT0::NAK; pub use super::OTG_HS_DIEPINT0::PKTDRPSTS; pub use super::OTG_HS_DIEPINT0::TOC; pub use super::OTG_HS_DIEPINT0::TXFE; pub use super::OTG_HS_DIEPINT0::TXFIFOUDRN; pub use super::OTG_HS_DIEPINT0::XFRC; } /// OTG_HS device IN endpoint 0 transfer size register pub mod OTG_HS_DIEPTSIZ0 { /// Transfer size pub mod XFRSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (7 bits: 0x7f << 0) pub const mask: u32 = 0x7f << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Packet count pub mod PKTCNT { /// Offset (19 bits) pub const offset: u32 = 19; /// Mask (2 bits: 0b11 << 19) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint-1 DMA address register pub mod OTG_HS_DIEPDMA1 { /// DMA address pub mod DMAADDR { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (32 bits: 0xffffffff << 0) pub const mask: u32 = 0xffffffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint-2 DMA address register pub mod OTG_HS_DIEPDMA2 { pub use super::OTG_HS_DIEPDMA1::DMAADDR; } /// OTG_HS device endpoint-3 DMA address register pub mod OTG_HS_DIEPDMA3 { pub use super::OTG_HS_DIEPDMA1::DMAADDR; } /// OTG_HS device endpoint-4 DMA address register pub mod OTG_HS_DIEPDMA4 { pub use 
super::OTG_HS_DIEPDMA1::DMAADDR; } /// OTG_HS device endpoint-5 DMA address register pub mod OTG_HS_DIEPDMA5 { pub use super::OTG_HS_DIEPDMA1::DMAADDR; } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS0 { /// IN endpoint TxFIFO space avail pub mod INEPTFSAV { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (16 bits: 0xffff << 0) pub const mask: u32 = 0xffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS1 { pub use super::OTG_HS_DTXFSTS0::INEPTFSAV; } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS2 { pub use super::OTG_HS_DTXFSTS0::INEPTFSAV; } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS3 { pub use super::OTG_HS_DTXFSTS0::INEPTFSAV; } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS4 { pub use super::OTG_HS_DTXFSTS0::INEPTFSAV; } /// OTG_HS device IN endpoint transmit FIFO status register pub mod OTG_HS_DTXFSTS5 { pub use super::OTG_HS_DTXFSTS0::INEPTFSAV; } /// OTG_HS device endpoint transfer size register pub mod OTG_HS_DIEPTSIZ1 { /// Transfer size pub mod XFRSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (19 bits: 0x7ffff << 0) pub const mask: u32 = 0x7ffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Packet count pub mod PKTCNT { /// Offset (19 bits) pub const offset: u32 = 19; /// Mask (10 bits: 0x3ff << 19) pub const mask: u32 = 0x3ff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Multi count pub mod MCNT { /// Offset (29 bits) pub const offset: u32 = 29; /// Mask (2 bits: 0b11 << 29) pub const mask: u32 = 0b11 << offset; 
/// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint transfer size register pub mod OTG_HS_DIEPTSIZ2 { pub use super::OTG_HS_DIEPTSIZ1::MCNT; pub use super::OTG_HS_DIEPTSIZ1::PKTCNT; pub use super::OTG_HS_DIEPTSIZ1::XFRSIZ; } /// OTG_HS device endpoint transfer size register pub mod OTG_HS_DIEPTSIZ3 { pub use super::OTG_HS_DIEPTSIZ1::MCNT; pub use super::OTG_HS_DIEPTSIZ1::PKTCNT; pub use super::OTG_HS_DIEPTSIZ1::XFRSIZ; } /// OTG_HS device endpoint transfer size register pub mod OTG_HS_DIEPTSIZ4 { pub use super::OTG_HS_DIEPTSIZ1::MCNT; pub use super::OTG_HS_DIEPTSIZ1::PKTCNT; pub use super::OTG_HS_DIEPTSIZ1::XFRSIZ; } /// OTG_HS device endpoint transfer size register pub mod OTG_HS_DIEPTSIZ5 { pub use super::OTG_HS_DIEPTSIZ1::MCNT; pub use super::OTG_HS_DIEPTSIZ1::PKTCNT; pub use super::OTG_HS_DIEPTSIZ1::XFRSIZ; } /// OTG_HS device control OUT endpoint 0 control register pub mod OTG_HS_DOEPCTL0 { /// Maximum packet size pub mod MPSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (2 bits: 0b11 << 0) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// USB active endpoint pub mod USBAEP { /// Offset (15 bits) pub const offset: u32 = 15; /// Mask (1 bit: 1 << 15) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// NAK status pub mod NAKSTS { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (1 bit: 1 << 17) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint type pub mod EPTYP { /// Offset (18 bits) pub const offset: u32 = 18; /// Mask (2 bits: 0b11 << 18) pub const mask: u32 = 0b11 << 
offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Snoop mode pub mod SNPM { /// Offset (20 bits) pub const offset: u32 = 20; /// Mask (1 bit: 1 << 20) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// STALL handshake pub mod Stall { /// Offset (21 bits) pub const offset: u32 = 21; /// Mask (1 bit: 1 << 21) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Clear NAK pub mod CNAK { /// Offset (26 bits) pub const offset: u32 = 26; /// Mask (1 bit: 1 << 26) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set NAK pub mod SNAK { /// Offset (27 bits) pub const offset: u32 = 27; /// Mask (1 bit: 1 << 27) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disable pub mod EPDIS { /// Offset (30 bits) pub const offset: u32 = 30; /// Mask (1 bit: 1 << 30) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint enable pub mod EPENA { /// Offset (31 bits) pub const offset: u32 = 31; /// Mask (1 bit: 1 << 31) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG device endpoint-1 control register pub mod OTG_HS_DOEPCTL1 { /// Maximum packet size pub mod MPSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (11 bits: 0x7ff << 0) pub const mask: u32 = 0x7ff << 
offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// USB active endpoint pub mod USBAEP { /// Offset (15 bits) pub const offset: u32 = 15; /// Mask (1 bit: 1 << 15) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Even odd frame/Endpoint data PID pub mod EONUM_DPID { /// Offset (16 bits) pub const offset: u32 = 16; /// Mask (1 bit: 1 << 16) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// NAK status pub mod NAKSTS { /// Offset (17 bits) pub const offset: u32 = 17; /// Mask (1 bit: 1 << 17) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint type pub mod EPTYP { /// Offset (18 bits) pub const offset: u32 = 18; /// Mask (2 bits: 0b11 << 18) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Snoop mode pub mod SNPM { /// Offset (20 bits) pub const offset: u32 = 20; /// Mask (1 bit: 1 << 20) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// STALL handshake pub mod Stall { /// Offset (21 bits) pub const offset: u32 = 21; /// Mask (1 bit: 1 << 21) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Clear NAK pub mod CNAK { /// Offset (26 bits) pub const offset: u32 = 26; /// Mask (1 bit: 1 << 26) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod 
R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set NAK pub mod SNAK { /// Offset (27 bits) pub const offset: u32 = 27; /// Mask (1 bit: 1 << 27) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set DATA0 PID/Set even frame pub mod SD0PID_SEVNFRM { /// Offset (28 bits) pub const offset: u32 = 28; /// Mask (1 bit: 1 << 28) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Set odd frame pub mod SODDFRM { /// Offset (29 bits) pub const offset: u32 = 29; /// Mask (1 bit: 1 << 29) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disable pub mod EPDIS { /// Offset (30 bits) pub const offset: u32 = 30; /// Mask (1 bit: 1 << 30) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint enable pub mod EPENA { /// Offset (31 bits) pub const offset: u32 = 31; /// Mask (1 bit: 1 << 31) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG device endpoint-2 control register pub mod OTG_HS_DOEPCTL2 { pub use super::OTG_HS_DOEPCTL1::Stall; pub use super::OTG_HS_DOEPCTL1::CNAK; pub use super::OTG_HS_DOEPCTL1::EONUM_DPID; pub use super::OTG_HS_DOEPCTL1::EPDIS; pub use super::OTG_HS_DOEPCTL1::EPENA; pub use super::OTG_HS_DOEPCTL1::EPTYP; pub use super::OTG_HS_DOEPCTL1::MPSIZ; pub use super::OTG_HS_DOEPCTL1::NAKSTS; pub use super::OTG_HS_DOEPCTL1::SD0PID_SEVNFRM; pub use super::OTG_HS_DOEPCTL1::SNAK; pub use 
super::OTG_HS_DOEPCTL1::SNPM; pub use super::OTG_HS_DOEPCTL1::SODDFRM; pub use super::OTG_HS_DOEPCTL1::USBAEP; } /// OTG device endpoint-3 control register pub mod OTG_HS_DOEPCTL3 { pub use super::OTG_HS_DOEPCTL1::Stall; pub use super::OTG_HS_DOEPCTL1::CNAK; pub use super::OTG_HS_DOEPCTL1::EONUM_DPID; pub use super::OTG_HS_DOEPCTL1::EPDIS; pub use super::OTG_HS_DOEPCTL1::EPENA; pub use super::OTG_HS_DOEPCTL1::EPTYP; pub use super::OTG_HS_DOEPCTL1::MPSIZ; pub use super::OTG_HS_DOEPCTL1::NAKSTS; pub use super::OTG_HS_DOEPCTL1::SD0PID_SEVNFRM; pub use super::OTG_HS_DOEPCTL1::SNAK; pub use super::OTG_HS_DOEPCTL1::SNPM; pub use super::OTG_HS_DOEPCTL1::SODDFRM; pub use super::OTG_HS_DOEPCTL1::USBAEP; } /// OTG_HS device endpoint-0 interrupt register pub mod OTG_HS_DOEPINT0 { /// Transfer completed interrupt pub mod XFRC { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (1 bit: 1 << 0) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Endpoint disabled interrupt pub mod EPDISD { /// Offset (1 bits) pub const offset: u32 = 1; /// Mask (1 bit: 1 << 1) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// SETUP phase done pub mod STUP { /// Offset (3 bits) pub const offset: u32 = 3; /// Mask (1 bit: 1 << 3) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// OUT token received when endpoint disabled pub mod OTEPDIS { /// Offset (4 bits) pub const offset: u32 = 4; /// Mask (1 bit: 1 << 4) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Back-to-back SETUP packets received pub mod B2BSTUP { /// Offset 
(6 bits) pub const offset: u32 = 6; /// Mask (1 bit: 1 << 6) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// NYET interrupt pub mod NYET { /// Offset (14 bits) pub const offset: u32 = 14; /// Mask (1 bit: 1 << 14) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint-1 interrupt register pub mod OTG_HS_DOEPINT1 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-2 interrupt register pub mod OTG_HS_DOEPINT2 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-3 interrupt register pub mod OTG_HS_DOEPINT3 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-4 interrupt register pub mod OTG_HS_DOEPINT4 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-5 interrupt register pub mod OTG_HS_DOEPINT5 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use 
super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-6 interrupt register pub mod OTG_HS_DOEPINT6 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-7 interrupt register pub mod OTG_HS_DOEPINT7 { pub use super::OTG_HS_DOEPINT0::B2BSTUP; pub use super::OTG_HS_DOEPINT0::EPDISD; pub use super::OTG_HS_DOEPINT0::NYET; pub use super::OTG_HS_DOEPINT0::OTEPDIS; pub use super::OTG_HS_DOEPINT0::STUP; pub use super::OTG_HS_DOEPINT0::XFRC; } /// OTG_HS device endpoint-1 transfer size register pub mod OTG_HS_DOEPTSIZ0 { /// Transfer size pub mod XFRSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (7 bits: 0x7f << 0) pub const mask: u32 = 0x7f << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Packet count pub mod PKTCNT { /// Offset (19 bits) pub const offset: u32 = 19; /// Mask (1 bit: 1 << 19) pub const mask: u32 = 1 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// SETUP packet count pub mod STUPCNT { /// Offset (29 bits) pub const offset: u32 = 29; /// Mask (2 bits: 0b11 << 29) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint-2 transfer size register pub mod OTG_HS_DOEPTSIZ1 { /// Transfer size pub mod XFRSIZ { /// Offset (0 bits) pub const offset: u32 = 0; /// Mask (19 bits: 0x7ffff << 0) pub const mask: u32 = 0x7ffff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Packet 
count pub mod PKTCNT { /// Offset (19 bits) pub const offset: u32 = 19; /// Mask (10 bits: 0x3ff << 19) pub const mask: u32 = 0x3ff << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } /// Received data PID/SETUP packet count pub mod RXDPID_STUPCNT { /// Offset (29 bits) pub const offset: u32 = 29; /// Mask (2 bits: 0b11 << 29) pub const mask: u32 = 0b11 << offset; /// Read-only values (empty) pub mod R {} /// Write-only values (empty) pub mod W {} /// Read-write values (empty) pub mod RW {} } } /// OTG_HS device endpoint-3 transfer size register pub mod OTG_HS_DOEPTSIZ2 { pub use super::OTG_HS_DOEPTSIZ1::PKTCNT; pub use super::OTG_HS_DOEPTSIZ1::RXDPID_STUPCNT; pub use super::OTG_HS_DOEPTSIZ1::XFRSIZ; } /// OTG_HS device endpoint-4 transfer size register pub mod OTG_HS_DOEPTSIZ3 { pub use super::OTG_HS_DOEPTSIZ1::PKTCNT; pub use super::OTG_HS_DOEPTSIZ1::RXDPID_STUPCNT; pub use super::OTG_HS_DOEPTSIZ1::XFRSIZ; } /// OTG_HS device endpoint-5 transfer size register pub mod OTG_HS_DOEPTSIZ4 { pub use super::OTG_HS_DOEPTSIZ1::PKTCNT; pub use super::OTG_HS_DOEPTSIZ1::RXDPID_STUPCNT; pub use super::OTG_HS_DOEPTSIZ1::XFRSIZ; } #[repr(C)] pub struct RegisterBlock { /// OTG_HS device configuration register pub OTG_HS_DCFG: RWRegister<u32>, /// OTG_HS device control register pub OTG_HS_DCTL: RWRegister<u32>, /// OTG_HS device status register pub OTG_HS_DSTS: RORegister<u32>, _reserved1: [u32; 1], /// OTG_HS device IN endpoint common interrupt mask register pub OTG_HS_DIEPMSK: RWRegister<u32>, /// OTG_HS device OUT endpoint common interrupt mask register pub OTG_HS_DOEPMSK: RWRegister<u32>, /// OTG_HS device all endpoints interrupt register pub OTG_HS_DAINT: RORegister<u32>, /// OTG_HS all endpoints interrupt mask register pub OTG_HS_DAINTMSK: RWRegister<u32>, _reserved2: [u32; 2], /// OTG_HS device VBUS discharge time register pub OTG_HS_DVBUSDIS: RWRegister<u32>, /// OTG_HS device VBUS 
pulsing time register pub OTG_HS_DVBUSPULSE: RWRegister<u32>, /// OTG_HS Device threshold control register pub OTG_HS_DTHRCTL: RWRegister<u32>, /// OTG_HS device IN endpoint FIFO empty interrupt mask register pub OTG_HS_DIEPEMPMSK: RWRegister<u32>, /// OTG_HS device each endpoint interrupt register pub OTG_HS_DEACHINT: RWRegister<u32>, /// OTG_HS device each endpoint interrupt register mask pub OTG_HS_DEACHINTMSK: RWRegister<u32>, /// OTG_HS device each in endpoint-1 interrupt register pub OTG_HS_DIEPEACHMSK1: RWRegister<u32>, _reserved3: [u32; 15], /// OTG_HS device each OUT endpoint-1 interrupt register pub OTG_HS_DOEPEACHMSK1: RWRegister<u32>, _reserved4: [u32; 31], /// OTG device endpoint-0 control register pub OTG_HS_DIEPCTL0: RWRegister<u32>, _reserved5: [u32; 1], /// OTG device endpoint-0 interrupt register pub OTG_HS_DIEPINT0: RWRegister<u32>, _reserved6: [u32; 1], /// OTG_HS device IN endpoint 0 transfer size register pub OTG_HS_DIEPTSIZ0: RWRegister<u32>, /// OTG_HS device endpoint-1 DMA address register pub OTG_HS_DIEPDMA1: RWRegister<u32>, /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS0: RORegister<u32>, _reserved7: [u32; 1], /// OTG device endpoint-1 control register pub OTG_HS_DIEPCTL1: RWRegister<u32>, _reserved8: [u32; 1], /// OTG device endpoint-1 interrupt register pub OTG_HS_DIEPINT1: RWRegister<u32>, _reserved9: [u32; 1], /// OTG_HS device endpoint transfer size register pub OTG_HS_DIEPTSIZ1: RWRegister<u32>, /// OTG_HS device endpoint-2 DMA address register pub OTG_HS_DIEPDMA2: RWRegister<u32>, /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS1: RORegister<u32>, _reserved10: [u32; 1], /// OTG device endpoint-2 control register pub OTG_HS_DIEPCTL2: RWRegister<u32>, _reserved11: [u32; 1], /// OTG device endpoint-2 interrupt register pub OTG_HS_DIEPINT2: RWRegister<u32>, _reserved12: [u32; 1], /// OTG_HS device endpoint transfer size register pub OTG_HS_DIEPTSIZ2: RWRegister<u32>, /// 
OTG_HS device endpoint-3 DMA address register pub OTG_HS_DIEPDMA3: RWRegister<u32>, /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS2: RORegister<u32>, _reserved13: [u32; 1], /// OTG device endpoint-3 control register pub OTG_HS_DIEPCTL3: RWRegister<u32>, _reserved14: [u32; 1], /// OTG device endpoint-3 interrupt register pub OTG_HS_DIEPINT3: RWRegister<u32>, _reserved15: [u32; 1], /// OTG_HS device endpoint transfer size register pub OTG_HS_DIEPTSIZ3: RWRegister<u32>, /// OTG_HS device endpoint-4 DMA address register pub OTG_HS_DIEPDMA4: RWRegister<u32>, /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS3: RORegister<u32>, _reserved16: [u32; 1], /// OTG device endpoint-4 control register pub OTG_HS_DIEPCTL4: RWRegister<u32>, _reserved17: [u32; 1], /// OTG device endpoint-4 interrupt register pub OTG_HS_DIEPINT4: RWRegister<u32>, _reserved18: [u32; 1], /// OTG_HS device endpoint transfer size register pub OTG_HS_DIEPTSIZ4: RWRegister<u32>, /// OTG_HS device endpoint-5 DMA address register pub OTG_HS_DIEPDMA5: RWRegister<u32>, /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS4: RORegister<u32>, _reserved19: [u32; 1], /// OTG device endpoint-5 control register pub OTG_HS_DIEPCTL5: RWRegister<u32>, _reserved20: [u32; 1], /// OTG device endpoint-5 interrupt register pub OTG_HS_DIEPINT5: RWRegister<u32>, _reserved21: [u32; 1], /// OTG_HS device endpoint transfer size register pub OTG_HS_DIEPTSIZ5: RWRegister<u32>, _reserved22: [u32; 1], /// OTG_HS device IN endpoint transmit FIFO status register pub OTG_HS_DTXFSTS5: RORegister<u32>, _reserved23: [u32; 1], /// OTG device endpoint-6 control register pub OTG_HS_DIEPCTL6: RWRegister<u32>, _reserved24: [u32; 1], /// OTG device endpoint-6 interrupt register pub OTG_HS_DIEPINT6: RWRegister<u32>, _reserved25: [u32; 5], /// OTG device endpoint-7 control register pub OTG_HS_DIEPCTL7: RWRegister<u32>, _reserved26: [u32; 1], /// OTG device 
endpoint-7 interrupt register pub OTG_HS_DIEPINT7: RWRegister<u32>, _reserved27: [u32; 69], /// OTG_HS device control OUT endpoint 0 control register pub OTG_HS_DOEPCTL0: RWRegister<u32>, _reserved28: [u32; 1], /// OTG_HS device endpoint-0 interrupt register pub OTG_HS_DOEPINT0: RWRegister<u32>, _reserved29: [u32; 1], /// OTG_HS device endpoint-1 transfer size register pub OTG_HS_DOEPTSIZ0: RWRegister<u32>, _reserved30: [u32; 3], /// OTG device endpoint-1 control register pub OTG_HS_DOEPCTL1: RWRegister<u32>, _reserved31: [u32; 1], /// OTG_HS device endpoint-1 interrupt register pub OTG_HS_DOEPINT1: RWRegister<u32>, _reserved32: [u32; 1], /// OTG_HS device endpoint-2 transfer size register pub OTG_HS_DOEPTSIZ1: RWRegister<u32>, _reserved33: [u32; 3], /// OTG device endpoint-2 control register pub OTG_HS_DOEPCTL2: RWRegister<u32>, _reserved34: [u32; 1], /// OTG_HS device endpoint-2 interrupt register pub OTG_HS_DOEPINT2: RWRegister<u32>, _reserved35: [u32; 1], /// OTG_HS device endpoint-3 transfer size register pub OTG_HS_DOEPTSIZ2: RWRegister<u32>, _reserved36: [u32; 3], /// OTG device endpoint-3 control register pub OTG_HS_DOEPCTL3: RWRegister<u32>, _reserved37: [u32; 1], /// OTG_HS device endpoint-3 interrupt register pub OTG_HS_DOEPINT3: RWRegister<u32>, _reserved38: [u32; 1], /// OTG_HS device endpoint-4 transfer size register pub OTG_HS_DOEPTSIZ3: RWRegister<u32>, _reserved39: [u32; 5], /// OTG_HS device endpoint-4 interrupt register pub OTG_HS_DOEPINT4: RWRegister<u32>, _reserved40: [u32; 1], /// OTG_HS device endpoint-5 transfer size register pub OTG_HS_DOEPTSIZ4: RWRegister<u32>, _reserved41: [u32; 5], /// OTG_HS device endpoint-5 interrupt register pub OTG_HS_DOEPINT5: RWRegister<u32>, _reserved42: [u32; 7], /// OTG_HS device endpoint-6 interrupt register pub OTG_HS_DOEPINT6: RWRegister<u32>, _reserved43: [u32; 7], /// OTG_HS device endpoint-7 interrupt register pub OTG_HS_DOEPINT7: RWRegister<u32>, } pub struct ResetValues { pub OTG_HS_DCFG: u32, pub 
OTG_HS_DCTL: u32, pub OTG_HS_DSTS: u32, pub OTG_HS_DIEPMSK: u32, pub OTG_HS_DOEPMSK: u32, pub OTG_HS_DAINT: u32, pub OTG_HS_DAINTMSK: u32, pub OTG_HS_DVBUSDIS: u32, pub OTG_HS_DVBUSPULSE: u32, pub OTG_HS_DTHRCTL: u32, pub OTG_HS_DIEPEMPMSK: u32, pub OTG_HS_DEACHINT: u32, pub OTG_HS_DEACHINTMSK: u32, pub OTG_HS_DIEPEACHMSK1: u32, pub OTG_HS_DOEPEACHMSK1: u32, pub OTG_HS_DIEPCTL0: u32, pub OTG_HS_DIEPINT0: u32, pub OTG_HS_DIEPTSIZ0: u32, pub OTG_HS_DIEPDMA1: u32, pub OTG_HS_DTXFSTS0: u32, pub OTG_HS_DIEPCTL1: u32, pub OTG_HS_DIEPINT1: u32, pub OTG_HS_DIEPTSIZ1: u32, pub OTG_HS_DIEPDMA2: u32, pub OTG_HS_DTXFSTS1: u32, pub OTG_HS_DIEPCTL2: u32, pub OTG_HS_DIEPINT2: u32, pub OTG_HS_DIEPTSIZ2: u32, pub OTG_HS_DIEPDMA3: u32, pub OTG_HS_DTXFSTS2: u32, pub OTG_HS_DIEPCTL3: u32, pub OTG_HS_DIEPINT3: u32, pub OTG_HS_DIEPTSIZ3: u32, pub OTG_HS_DIEPDMA4: u32, pub OTG_HS_DTXFSTS3: u32, pub OTG_HS_DIEPCTL4: u32, pub OTG_HS_DIEPINT4: u32, pub OTG_HS_DIEPTSIZ4: u32, pub OTG_HS_DIEPDMA5: u32, pub OTG_HS_DTXFSTS4: u32, pub OTG_HS_DIEPCTL5: u32, pub OTG_HS_DIEPINT5: u32, pub OTG_HS_DIEPTSIZ5: u32, pub OTG_HS_DTXFSTS5: u32, pub OTG_HS_DIEPCTL6: u32, pub OTG_HS_DIEPINT6: u32, pub OTG_HS_DIEPCTL7: u32, pub OTG_HS_DIEPINT7: u32, pub OTG_HS_DOEPCTL0: u32, pub OTG_HS_DOEPINT0: u32, pub OTG_HS_DOEPTSIZ0: u32, pub OTG_HS_DOEPCTL1: u32, pub OTG_HS_DOEPINT1: u32, pub OTG_HS_DOEPTSIZ1: u32, pub OTG_HS_DOEPCTL2: u32, pub OTG_HS_DOEPINT2: u32, pub OTG_HS_DOEPTSIZ2: u32, pub OTG_HS_DOEPCTL3: u32, pub OTG_HS_DOEPINT3: u32, pub OTG_HS_DOEPTSIZ3: u32, pub OTG_HS_DOEPINT4: u32, pub OTG_HS_DOEPTSIZ4: u32, pub OTG_HS_DOEPINT5: u32, pub OTG_HS_DOEPINT6: u32, pub OTG_HS_DOEPINT7: u32, } #[cfg(not(feature = "nosync"))] pub struct Instance { pub(crate) addr: u32, pub(crate) _marker: PhantomData<*const RegisterBlock>, } #[cfg(not(feature = "nosync"))] impl ::core::ops::Deref for Instance { type Target = RegisterBlock; #[inline(always)] fn deref(&self) -> &RegisterBlock { unsafe { &*(self.addr as *const _) } } 
} #[cfg(feature = "rtic")] unsafe impl Send for Instance {}
27.98049
68
0.554775
3a9242f1e7e2558aae775b71b49346a2bdbc63ea
2,949
use crate::prelude::{RaknetPacket, RaknetPacketData, RaknetUInt24Le}; #[derive(Debug, PartialEq, Clone)] pub struct Ack { pub record: Vec<Record>, } impl RaknetPacket for Ack { const RANGE: std::ops::Range<u8> = 0xc0..0xc1; fn id(&self) -> u8 { 0xc0 } } impl RaknetPacketData for Ack { fn decode(reader: &mut impl crate::prelude::Reader) -> Option<Self> { let count = i16::decode(reader)?; Some(Self { record: (0..count) .map(|_| Record::decode(reader)) .collect::<Option<_>>()?, }) } fn encode(&self, writer: &mut impl crate::prelude::Writer) -> Option<()> { (self.record.len() as u16).encode(writer); for i in &self.record { i.encode(writer)?; } Some(()) } } // impl PacketDecoder for Ack { // fn read(iter: &mut crate::packets::traits::U8Iter) -> Option<Self> { // let count: i16 = iter.read()?; // Some(Self { // record: (0..count).map(|_| iter.read()).collect::<Option<_>>()?, // }) // } // fn write(self, vec: &mut Vec<u8>) -> Option<()> { // (self.record.len() as i16).write(vec); // for i in self.record.into_iter() { // i.write(vec)?; // } // Some(()) // } // } #[derive(Debug, PartialEq, Clone)] pub struct Record(pub u32, pub u32); // START - END impl RaknetPacketData for Record { fn decode(reader: &mut impl crate::prelude::Reader) -> Option<Self> { let (start, end) = if bool::decode(reader)? { let o = RaknetUInt24Le::decode(reader)?.0; (o, o) } else { ( RaknetUInt24Le::decode(reader)?.0, RaknetUInt24Le::decode(reader)?.0, ) }; Some(Record(start, end)) } fn encode(&self, writer: &mut impl crate::prelude::Writer) -> Option<()> { if self.0 == self.1 { writer.write(1)?; RaknetUInt24Le(self.0).encode(writer) } else { writer.write(0)?; RaknetUInt24Le(self.0.min(self.1)).encode(writer)?; RaknetUInt24Le(self.1.max(self.0)).encode(writer) } } } // impl PacketDecoder for Record { // fn read(iter: &mut crate::packets::traits::U8Iter) -> Option<Self> { // Some(Record(if iter.read()? { // Either::Left(iter.read()?) 
// } else { // Either::Right((iter.read()?, iter.read()?)) // })) // } // fn write(self, vec: &mut Vec<u8>) -> Option<()> { // match self.0 { // Either::Left(e) => { // true.write(vec)?; // e.write(vec)?; // } // Either::Right((x, y)) => { // false.write(vec)?; // x.write(vec)?; // y.write(vec)?; // } // } // Some(()) // } // }
27.820755
79
0.476772
7192c2e2243c359899617eeea4d62d13a7ad2b5c
1,021
use super::*; #[test] fn without_registered_returns_empty_list() { with_process_arc(|unregistered_process_arc| { assert_eq!( native( &unregistered_process_arc, unregistered_process_arc.pid_term(), item() ), Ok(Term::NIL) ); }); } #[test] fn with_registered_returns_empty_list() { with_process_arc(|registered_process_arc| { let registered_name = registered_name(); let registered_name_atom: Atom = registered_name.try_into().unwrap(); assert!(registry::put_atom_to_process( registered_name_atom, registered_process_arc.clone() )); assert_eq!( native( &registered_process_arc, registered_process_arc.pid_term(), item() ), Ok(registered_process_arc .tuple_from_slice(&[item(), registered_name]) .unwrap()) ); }); }
25.525
77
0.546523
765f68eba105071470581306044a0e86721cb377
13,848
#![cfg(test)] use crate::{ chaintypes::ConsensusType, config::ConfigParam, fragment::{config::ConfigParams, Fragment}, ledger::{ ledger::{ Block0Error, Error::{Block0, ExpectingInitialMessage}, }, Ledger, }, milli::Milli, testing::{ arbitrary::{AccountStatesVerifier, ArbitraryValidTransactionData, UtxoVerifier}, builders::{OldAddressBuilder, TestTxBuilder}, data::AddressDataValue, ledger::{ConfigBuilder, LedgerBuilder}, TestGen, }, }; use chain_addr::Discrimination; use quickcheck::TestResult; use quickcheck_macros::quickcheck; #[quickcheck] pub fn ledger_accepts_correct_transaction( faucet: AddressDataValue, receiver: AddressDataValue, ) -> TestResult { let mut ledger = LedgerBuilder::from_config(ConfigBuilder::new(0)) .initial_fund(&faucet) .build() .unwrap(); let fragment = TestTxBuilder::new(ledger.block0_hash) .move_funds(&mut ledger, &faucet, &receiver, faucet.value) .get_fragment(); let total_funds_before = ledger.total_funds(); let result = ledger.apply_transaction(fragment); if result.is_err() { return TestResult::error(format!("Error from ledger: {}", result.err().unwrap())); } let total_funds_after = ledger.total_funds(); if total_funds_before == total_funds_after { TestResult::passed() } else { TestResult::error(format!( "Total funds in ledger before and after transaction is not equal {} <> {} ", total_funds_before, total_funds_after )) } } #[quickcheck] pub fn total_funds_are_const_in_ledger( transaction_data: ArbitraryValidTransactionData, ) -> TestResult { let config = ConfigBuilder::new(0) .with_discrimination(Discrimination::Test) .with_fee(transaction_data.fee); let mut ledger = LedgerBuilder::from_config(config) .initial_funds(&transaction_data.addresses) .build() .unwrap(); let signed_tx = TestTxBuilder::new(ledger.block0_hash).move_funds_multiple( &mut ledger, &transaction_data.input_addresses, &transaction_data.output_addresses, ); let total_funds_before = ledger.total_funds(); let result = ledger.apply_transaction(signed_tx.get_fragment()); 
if result.is_err() { return TestResult::error(format!("Error from ledger: {:?}", result.err())); } let total_funds_after = ledger.total_funds(); if total_funds_before != total_funds_after { return TestResult::error(format!( "Total funds in ledger before and after transaction is not equal {} <> {}", total_funds_before, total_funds_after )); } let utxo_verifier = UtxoVerifier::new(transaction_data.clone()); let utxo_verification_result = utxo_verifier.verify(&ledger); if utxo_verification_result.is_err() { return TestResult::error(format!("{}", utxo_verification_result.err().unwrap())); } let account_state_verifier = AccountStatesVerifier::new(transaction_data); let account_state_verification_result = account_state_verifier.verify(ledger.accounts()); if account_state_verification_result.is_err() { return TestResult::error(format!( "{}", account_state_verification_result.err().unwrap() )); } TestResult::passed() } #[test] pub fn test_first_initial_fragment_empty() { let header_id = TestGen::hash(); let content = Vec::new(); assert_eq!( Ledger::new(header_id, content).err().unwrap(), Block0(Block0Error::InitialMessageMissing) ); } #[test] pub fn test_first_initial_fragment_wrong_type() { let header_id = TestGen::hash(); let fragment = Fragment::OldUtxoDeclaration(OldAddressBuilder::build_utxo_declaration(Some(1))); assert_eq!( Ledger::new(header_id, &vec![fragment]).err().unwrap(), ExpectingInitialMessage ); } #[test] pub fn ledger_new_no_block_start_time() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoDate) ); } #[test] pub fn 
ledger_new_dupicated_initial_fragments() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); assert_eq!( Ledger::new( header_id, vec![&Fragment::Initial(ie.clone()), &Fragment::Initial(ie)] ) .err() .unwrap(), Block0(Block0Error::InitialMessageMany) ); } #[test] pub fn ledger_new_duplicated_block0() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_duplicated_discrimination() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_duplicated_consensus_version() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); 
ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::ConsensusVersion(ConsensusType::Bft)); ie.push(ConfigParam::ConsensusVersion(ConsensusType::Bft)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_duplicated_slot_duration() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotDuration(11u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_duplicated_epoch_stability_depth() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::ConsensusVersion(ConsensusType::Bft)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::EpochStabilityDepth(10u32)); ie.push(ConfigParam::EpochStabilityDepth(11u32)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_duplicated_active_slots_coeff() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); 
ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::ConsensusVersion(ConsensusType::Bft)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::ConsensusGenesisPraosActiveSlotsCoeff( Milli::from_millis(500), )); ie.push(ConfigParam::ConsensusGenesisPraosActiveSlotsCoeff( Milli::from_millis(600), )); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); Ledger::new(header_id, vec![&Fragment::Initial(ie)]).unwrap(); } #[test] pub fn ledger_new_no_discrimination() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoDiscrimination) ); } #[test] pub fn ledger_new_no_slot_duration() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoSlotDuration) ); } #[test] pub fn ledger_new_no_slots_per_epoch() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); 
ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::KESUpdateSpeed(3600)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoSlotsPerEpoch) ); } #[test] pub fn ledger_new_no_kes_update_speed() { let leader_pair = TestGen::leader_pair(); let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoKesUpdateSpeed) ); } #[test] pub fn ledger_new_no_bft_leader() { let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); assert_eq!( Ledger::new(header_id, vec![&Fragment::Initial(ie)]) .err() .unwrap(), Block0(Block0Error::InitialMessageNoConsensusLeaderId) ); } #[quickcheck] pub fn wrong_fragment_at_block0(fragment: Fragment) -> TestResult { match fragment { Fragment::OldUtxoDeclaration(_) => return TestResult::discard(), Fragment::Transaction(_) => return TestResult::discard(), Fragment::StakeDelegation(_) => return TestResult::discard(), Fragment::PoolRegistration(_) => return TestResult::discard(), Fragment::VotePlan(_) => return TestResult::discard(), _ => (), }; let header_id = TestGen::hash(); let mut ie = ConfigParams::new(); let leader_pair = TestGen::leader_pair(); 
ie.push(ConfigParam::Block0Date(crate::config::Block0Date(0))); ie.push(ConfigParam::Discrimination(Discrimination::Test)); ie.push(ConfigParam::AddBftLeader(leader_pair.leader_id)); ie.push(ConfigParam::SlotDuration(10u8)); ie.push(ConfigParam::SlotsPerEpoch(10u32)); ie.push(ConfigParam::KESUpdateSpeed(3600)); TestResult::from_bool(Ledger::new(header_id, vec![&Fragment::Initial(ie), &fragment]).is_err()) }
35.41688
100
0.681615
4ac07de880cabc06074882cb4765be3616c109bc
1,778
extern crate sandbox_execution_environment; extern crate hex; use sandbox_execution_environment::{ Header, Setup }; use sp_version::{ApiId, RuntimeVersion}; use sp_core::{ traits::{ CallInWasm, MissingHostFunctions }}; use parity_scale_codec::{Encode, Decode}; use sp_runtime::{ RuntimeString }; use std::borrow::Cow; #[test] fn test_core_version() { let mut setup = Setup::new(); let test_api: Vec<(ApiId, u32)> = vec![([1,1,1,1,1,1,1,1], 10)]; let version = RuntimeVersion { spec_name: RuntimeString::Borrowed("node-template"), impl_name: RuntimeString::Borrowed("AssemblyScript"), authoring_version: 1, spec_version: 1, impl_version: 1, apis: Cow::<[([u8; 8], u32)]>::Owned(test_api), transaction_version: 1 }; let res = setup.executor.call_in_wasm( &setup.wasm_code_array, None, "Core_version", &[], &mut setup.ext.ext(), MissingHostFunctions::Allow).unwrap(); let wasm_version = <RuntimeVersion>::decode(&mut res.as_ref()); println!("{:?}", Some(&version)); assert_eq!(wasm_version.is_ok(), true); assert_eq!(wasm_version.iter().next(), Some(&version)); } #[test] fn test_core_initialize_block() { let mut setup = Setup::new(); let h = Header { parent_hash: [69u8; 32].into(), number: 1, state_root: Default::default(), extrinsics_root: Default::default(), digest: Default::default(), }; let result = setup.executor.call_in_wasm( &setup.wasm_code_array, None, "Core_initialize_block", &h.encode(), &mut setup.ext.ext(), MissingHostFunctions::Allow).unwrap(); println!("{:?}", result); assert_eq!(result, [0u8; 0]); }
31.192982
68
0.617548
1cf4bb8b67ea319a84d58644a4c570255c485551
4,787
//! Bitmap display object use crate::avm1; use crate::backend::render::BitmapHandle; use crate::context::{RenderContext, UpdateContext}; use crate::display_object::{DisplayObjectBase, TDisplayObject}; use crate::prelude::*; use crate::types::{Degrees, Percent}; use crate::vminterface::{AvmType, Instantiator}; use gc_arena::{Collect, Gc, GcCell}; /// A Bitmap display object is a raw bitamp on the stage. /// This can only be instanitated on the display list in SWFv9 AVM2 files. /// In AVM1, this is only a library symbol that is referenced by `Graphic`. /// Normally bitmaps are drawn in Flash as part of a Shape tag (`Graphic`), /// but starting in AVM2, a raw `Bitmap` display object can be created /// with the `PlaceObject3` tag. /// It can also be created in ActionScript using the `Bitmap` class. #[derive(Clone, Debug, Collect, Copy)] #[collect(no_drop)] pub struct Bitmap<'gc>(GcCell<'gc, BitmapData<'gc>>); #[derive(Clone, Debug, Collect)] #[collect(no_drop)] pub struct BitmapData<'gc> { base: DisplayObjectBase<'gc>, static_data: Gc<'gc, BitmapStatic>, bitmap_data: Option<GcCell<'gc, crate::bitmap::bitmap_data::BitmapData>>, smoothing: bool, } impl<'gc> Bitmap<'gc> { pub fn new_with_bitmap_data( context: &mut UpdateContext<'_, 'gc, '_>, id: CharacterId, bitmap_handle: BitmapHandle, width: u16, height: u16, bitmap_data: Option<GcCell<'gc, crate::bitmap::bitmap_data::BitmapData>>, smoothing: bool, ) -> Self { Bitmap(GcCell::allocate( context.gc_context, BitmapData { base: Default::default(), static_data: Gc::allocate( context.gc_context, BitmapStatic { id, bitmap_handle, width, height, }, ), bitmap_data, smoothing, }, )) } pub fn new( context: &mut UpdateContext<'_, 'gc, '_>, id: CharacterId, bitmap_handle: BitmapHandle, width: u16, height: u16, ) -> Self { Self::new_with_bitmap_data(context, id, bitmap_handle, width, height, None, true) } #[allow(dead_code)] pub fn bitmap_handle(self) -> BitmapHandle { self.0.read().static_data.bitmap_handle } pub fn width(self) -> 
u16 { self.0.read().static_data.width } pub fn height(self) -> u16 { self.0.read().static_data.height } } impl<'gc> TDisplayObject<'gc> for Bitmap<'gc> { impl_display_object!(base); fn id(&self) -> CharacterId { self.0.read().static_data.id } fn self_bounds(&self) -> BoundingBox { BoundingBox { x_min: Twips::ZERO, y_min: Twips::ZERO, x_max: Twips::from_pixels(Bitmap::width(*self).into()), y_max: Twips::from_pixels(Bitmap::height(*self).into()), valid: true, } } fn post_instantiation( &self, context: &mut UpdateContext<'_, 'gc, '_>, _display_object: DisplayObject<'gc>, _init_object: Option<avm1::Object<'gc>>, _instantiated_by: Instantiator, run_frame: bool, ) { if self.avm_type() == AvmType::Avm1 { context .avm1 .add_to_exec_list(context.gc_context, (*self).into()); } if run_frame { self.run_frame(context); } } fn run_frame(&self, context: &mut UpdateContext<'_, 'gc, '_>) { if let Some(bitmap_data) = &self.0.read().bitmap_data { let bd = bitmap_data.read(); if bd.dirty() { let _ = context.renderer.update_texture( self.0.read().static_data.bitmap_handle, bd.width(), bd.height(), bd.pixels_rgba(), ); drop(bd); bitmap_data.write(context.gc_context).set_dirty(false); } } } fn render_self(&self, context: &mut RenderContext) { if !self.world_bounds().intersects(&context.stage.view_bounds()) { // Off-screen; culled return; } let bitmap_data = self.0.read(); context.renderer.render_bitmap( bitmap_data.static_data.bitmap_handle, context.transform_stack.transform(), bitmap_data.smoothing, ); } } /// Static data shared between all instances of a bitmap. #[derive(Clone, Collect)] #[collect(no_drop)] struct BitmapStatic { id: CharacterId, bitmap_handle: BitmapHandle, width: u16, height: u16, }
29.732919
89
0.565072
1ef0f3fe4e6b5f3d3cf2ebea7c55fd91cccf7045
13,988
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 //! Constants for the binary format. //! //! Definition for the constants of the binary format, used by the serializer and the deserializer. //! This module also offers helpers for the serialization and deserialization of certain //! integer indexes. //! //! We use LEB128 for integer compression. LEB128 is a representation from the DWARF3 spec, //! http://dwarfstd.org/Dwarf3Std.php or https://en.wikipedia.org/wiki/LEB128. //! It's used to compress mostly indexes into the main binary tables. use crate::file_format::Bytecode; use anyhow::{bail, Result}; use std::{ io::{Cursor, Read}, mem::size_of, }; /// Constant values for the binary format header. /// /// The binary header is magic + version info + table count. pub enum BinaryConstants {} impl BinaryConstants { /// The blob that must start a binary. pub const LIBRA_MAGIC_SIZE: usize = 4; pub const LIBRA_MAGIC: [u8; BinaryConstants::LIBRA_MAGIC_SIZE] = [0xA1, 0x1C, 0xEB, 0x0B]; /// The `LIBRA_MAGIC` size, 1 byte for major version, 1 byte for minor version and 1 byte /// for table count. pub const HEADER_SIZE: usize = BinaryConstants::LIBRA_MAGIC_SIZE + 3; /// A (Table Type, Start Offset, Byte Count) size, which is 1 byte for the type and /// 4 bytes for the offset/count. 
pub const TABLE_HEADER_SIZE: u8 = size_of::<u32>() as u8 * 2 + 1; } pub const TABLE_COUNT_MAX: u64 = 255; pub const TABLE_OFFSET_MAX: u64 = 0xffff_ffff; pub const TABLE_SIZE_MAX: u64 = 0xffff_ffff; pub const TABLE_CONTENT_SIZE_MAX: u64 = 0xffff_ffff; pub const TABLE_INDEX_MAX: u64 = 65535; pub const SIGNATURE_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const ADDRESS_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const IDENTIFIER_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const MODULE_HANDLE_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const STRUCT_HANDLE_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const STRUCT_DEF_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const FUNCTION_HANDLE_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const FUNCTION_INST_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const FIELD_HANDLE_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const FIELD_INST_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const STRUCT_DEF_INST_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const CONSTANT_INDEX_MAX: u64 = TABLE_INDEX_MAX; pub const BYTECODE_COUNT_MAX: u64 = 65535; pub const BYTECODE_INDEX_MAX: u64 = 65535; pub const LOCAL_INDEX_MAX: u64 = 255; pub const IDENTIFIER_SIZE_MAX: u64 = 65535; pub const CONSTANT_SIZE_MAX: u64 = 65535; pub const SIGNATURE_SIZE_MAX: u64 = 255; pub const ACQUIRES_COUNT_MAX: u64 = 255; pub const FIELD_COUNT_MAX: u64 = 255; pub const FIELD_OFFSET_MAX: u64 = 255; pub const TYPE_PARAMETER_COUNT_MAX: u64 = 255; pub const TYPE_PARAMETER_INDEX_MAX: u64 = 65536; /// Constants for table types in the binary. /// /// The binary contains a subset of those tables. A table specification is a tuple (table type, /// start offset, byte count) for a given table. 
#[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum TableType { MODULE_HANDLES = 0x1, STRUCT_HANDLES = 0x2, FUNCTION_HANDLES = 0x3, FUNCTION_INST = 0x4, SIGNATURES = 0x5, CONSTANT_POOL = 0x6, IDENTIFIERS = 0x7, ADDRESS_IDENTIFIERS = 0x8, STRUCT_DEFS = 0xA, STRUCT_DEF_INST = 0xB, FUNCTION_DEFS = 0xC, FIELD_HANDLE = 0xD, FIELD_INST = 0xE, } /// Constants for signature blob values. #[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug)] pub enum SerializedType { BOOL = 0x1, U8 = 0x2, U64 = 0x3, U128 = 0x4, ADDRESS = 0x5, REFERENCE = 0x6, MUTABLE_REFERENCE = 0x7, STRUCT = 0x8, TYPE_PARAMETER = 0x9, VECTOR = 0xA, STRUCT_INST = 0xB, SIGNER = 0xC, } #[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug)] pub enum SerializedNominalResourceFlag { NOMINAL_RESOURCE = 0x1, NORMAL_STRUCT = 0x2, } #[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug)] pub enum SerializedKind { ALL = 0x1, COPYABLE = 0x2, RESOURCE = 0x3, } #[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug)] pub enum SerializedNativeStructFlag { NATIVE = 0x1, DECLARED = 0x2, } /// List of opcodes constants. 
#[rustfmt::skip] #[allow(non_camel_case_types)] #[repr(u8)] #[derive(Clone, Copy, Debug)] pub enum Opcodes { POP = 0x01, RET = 0x02, BR_TRUE = 0x03, BR_FALSE = 0x04, BRANCH = 0x05, LD_U64 = 0x06, LD_CONST = 0x07, LD_TRUE = 0x08, LD_FALSE = 0x09, COPY_LOC = 0x0A, MOVE_LOC = 0x0B, ST_LOC = 0x0C, MUT_BORROW_LOC = 0x0D, IMM_BORROW_LOC = 0x0E, MUT_BORROW_FIELD = 0x0F, IMM_BORROW_FIELD = 0x10, CALL = 0x11, PACK = 0x12, UNPACK = 0x13, READ_REF = 0x14, WRITE_REF = 0x15, ADD = 0x16, SUB = 0x17, MUL = 0x18, MOD = 0x19, DIV = 0x1A, BIT_OR = 0x1B, BIT_AND = 0x1C, XOR = 0x1D, OR = 0x1E, AND = 0x1F, NOT = 0x20, EQ = 0x21, NEQ = 0x22, LT = 0x23, GT = 0x24, LE = 0x25, GE = 0x26, ABORT = 0x27, GET_TXN_SENDER = 0x28, EXISTS = 0x29, MUT_BORROW_GLOBAL = 0x2A, IMM_BORROW_GLOBAL = 0x2B, MOVE_FROM = 0x2C, MOVE_TO_SENDER = 0x2D, FREEZE_REF = 0x2E, SHL = 0x2F, SHR = 0x30, LD_U8 = 0x31, LD_U128 = 0x32, CAST_U8 = 0x33, CAST_U64 = 0x34, CAST_U128 = 0x35, MUT_BORROW_FIELD_GENERIC = 0x36, IMM_BORROW_FIELD_GENERIC = 0x37, CALL_GENERIC = 0x38, PACK_GENERIC = 0x39, UNPACK_GENERIC = 0x3A, EXISTS_GENERIC = 0x3B, MUT_BORROW_GLOBAL_GENERIC = 0x3C, IMM_BORROW_GLOBAL_GENERIC = 0x3D, MOVE_FROM_GENERIC = 0x3E, MOVE_TO_SENDER_GENERIC = 0x3F, NOP = 0x40, MOVE_TO = 0x41, MOVE_TO_GENERIC = 0x42, } /// Upper limit on the binary size pub const BINARY_SIZE_LIMIT: usize = usize::max_value(); /// A wrapper for the binary vector #[derive(Default, Debug)] pub struct BinaryData { _binary: Vec<u8>, } /// The wrapper mirrors Vector operations but provides additional checks against overflow impl BinaryData { pub fn new() -> Self { BinaryData { _binary: Vec::new(), } } pub fn as_inner(&self) -> &[u8] { &self._binary } pub fn into_inner(self) -> Vec<u8> { self._binary } pub fn push(&mut self, item: u8) -> Result<()> { if self.len().checked_add(1).is_some() { self._binary.push(item); } else { bail!( "binary size ({}) + 1 is greater than limit ({})", self.len(), BINARY_SIZE_LIMIT, ); } Ok(()) } pub fn extend(&mut self, vec: 
&[u8]) -> Result<()> { let vec_len: usize = vec.len(); if self.len().checked_add(vec_len).is_some() { self._binary.extend(vec); } else { bail!( "binary size ({}) + {} is greater than limit ({})", self.len(), vec.len(), BINARY_SIZE_LIMIT, ); } Ok(()) } pub fn len(&self) -> usize { self._binary.len() } pub fn is_empty(&self) -> bool { self._binary.is_empty() } pub fn clear(&mut self) { self._binary.clear(); } } impl From<Vec<u8>> for BinaryData { fn from(vec: Vec<u8>) -> Self { BinaryData { _binary: vec } } } pub fn write_u64_as_uleb128(binary: &mut BinaryData, mut val: u64) -> Result<()> { loop { let cur = val & 0x7f; if cur != val { binary.push((cur | 0x80) as u8)?; val >>= 7; } else { binary.push(cur as u8)?; break; } } Ok(()) } /// Write a `u16` in Little Endian format. pub fn write_u16(binary: &mut BinaryData, value: u16) -> Result<()> { binary.extend(&value.to_le_bytes()) } /// Write a `u32` in Little Endian format. pub fn write_u32(binary: &mut BinaryData, value: u32) -> Result<()> { binary.extend(&value.to_le_bytes()) } /// Write a `u64` in Little Endian format. pub fn write_u64(binary: &mut BinaryData, value: u64) -> Result<()> { binary.extend(&value.to_le_bytes()) } /// Write a `u128` in Little Endian format. 
pub fn write_u128(binary: &mut BinaryData, value: u128) -> Result<()> { binary.extend(&value.to_le_bytes()) } pub fn read_u8(cursor: &mut Cursor<&[u8]>) -> Result<u8> { let mut buf = [0; 1]; cursor.read_exact(&mut buf)?; Ok(buf[0]) } pub fn read_uleb128_as_u64(cursor: &mut Cursor<&[u8]>) -> Result<u64> { let mut value: u64 = 0; let mut shift = 0; while let Ok(byte) = read_u8(cursor) { let cur = (byte & 0x7f) as u64; if (cur << shift) >> shift != cur { bail!("invalid ULEB128 repr for usize"); } value |= cur << shift; if (byte & 0x80) == 0 { if shift > 0 && cur == 0 { bail!("invalid ULEB128 repr for usize"); } return Ok(value); } shift += 7; if shift > size_of::<u64>() * 8 { break; } } bail!("invalid ULEB128 repr for usize"); } /// The encoding of the instruction is the serialized form of it, but disregarding the /// serialization of the instruction's argument(s). pub fn instruction_key(instruction: &Bytecode) -> u8 { use Bytecode::*; let opcode = match instruction { Pop => Opcodes::POP, Ret => Opcodes::RET, BrTrue(_) => Opcodes::BR_TRUE, BrFalse(_) => Opcodes::BR_FALSE, Branch(_) => Opcodes::BRANCH, LdU8(_) => Opcodes::LD_U8, LdU64(_) => Opcodes::LD_U64, LdU128(_) => Opcodes::LD_U128, CastU8 => Opcodes::CAST_U8, CastU64 => Opcodes::CAST_U64, CastU128 => Opcodes::CAST_U128, LdConst(_) => Opcodes::LD_CONST, LdTrue => Opcodes::LD_TRUE, LdFalse => Opcodes::LD_FALSE, CopyLoc(_) => Opcodes::COPY_LOC, MoveLoc(_) => Opcodes::MOVE_LOC, StLoc(_) => Opcodes::ST_LOC, Call(_) => Opcodes::CALL, CallGeneric(_) => Opcodes::CALL_GENERIC, Pack(_) => Opcodes::PACK, PackGeneric(_) => Opcodes::PACK_GENERIC, Unpack(_) => Opcodes::UNPACK, UnpackGeneric(_) => Opcodes::UNPACK_GENERIC, ReadRef => Opcodes::READ_REF, WriteRef => Opcodes::WRITE_REF, FreezeRef => Opcodes::FREEZE_REF, MutBorrowLoc(_) => Opcodes::MUT_BORROW_LOC, ImmBorrowLoc(_) => Opcodes::IMM_BORROW_LOC, MutBorrowField(_) => Opcodes::MUT_BORROW_FIELD, MutBorrowFieldGeneric(_) => Opcodes::MUT_BORROW_FIELD_GENERIC, 
ImmBorrowField(_) => Opcodes::IMM_BORROW_FIELD, ImmBorrowFieldGeneric(_) => Opcodes::IMM_BORROW_FIELD_GENERIC, MutBorrowGlobal(_) => Opcodes::MUT_BORROW_GLOBAL, MutBorrowGlobalGeneric(_) => Opcodes::MUT_BORROW_GLOBAL_GENERIC, ImmBorrowGlobal(_) => Opcodes::IMM_BORROW_GLOBAL, ImmBorrowGlobalGeneric(_) => Opcodes::IMM_BORROW_GLOBAL_GENERIC, Add => Opcodes::ADD, Sub => Opcodes::SUB, Mul => Opcodes::MUL, Mod => Opcodes::MOD, Div => Opcodes::DIV, BitOr => Opcodes::BIT_OR, BitAnd => Opcodes::BIT_AND, Xor => Opcodes::XOR, Shl => Opcodes::SHL, Shr => Opcodes::SHR, Or => Opcodes::OR, And => Opcodes::AND, Not => Opcodes::NOT, Eq => Opcodes::EQ, Neq => Opcodes::NEQ, Lt => Opcodes::LT, Gt => Opcodes::GT, Le => Opcodes::LE, Ge => Opcodes::GE, Abort => Opcodes::ABORT, GetTxnSenderAddress => Opcodes::GET_TXN_SENDER, Exists(_) => Opcodes::EXISTS, ExistsGeneric(_) => Opcodes::EXISTS_GENERIC, MoveFrom(_) => Opcodes::MOVE_FROM, MoveFromGeneric(_) => Opcodes::MOVE_FROM_GENERIC, MoveToSender(_) => Opcodes::MOVE_TO_SENDER, MoveToSenderGeneric(_) => Opcodes::MOVE_TO_SENDER_GENERIC, Nop => Opcodes::NOP, MoveTo(_) => Opcodes::MOVE_TO, MoveToGeneric(_) => Opcodes::MOVE_TO_GENERIC, }; opcode as u8 }
32.37963
99
0.545468
4a55f45e1586a8730060066efd944f0f61c86960
1,848
use crate::{ arch::cmos::{self, CMOS}, csh::{ExitCode, ShellArgs}, println, }; const DAYS_BEFORE_MONTH: [u64; 13] = [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365]; #[derive(Debug)] pub struct TimeStamp(u64); // NOTE: This clock is not monotonic pub fn realtime() -> TimeStamp { let rtc = CMOS::new().rtc(); // Assuming GMT let timestamp = 86400 * days_before_year(rtc.year as u64) + 86400 * days_before_month(rtc.year as u64, rtc.month as u64) + 86400 * (rtc.day - 1) as u64 + 3600 * rtc.hour as u64 + 60 * rtc.minute as u64 + rtc.second as u64; TimeStamp(timestamp) } fn days_before_year(year: u64) -> u64 { (1970..year).fold(0, |days, y| days + if is_leap_year(y) { 366 } else { 365 }) } fn days_before_month(year: u64, month: u64) -> u64 { let leap_day = is_leap_year(year) && month > 2; DAYS_BEFORE_MONTH[(month as usize) - 1] + if leap_day { 1 } else { 0 } } fn is_leap_year(year: u64) -> bool { if year % 4 != 0 { false } else if year % 100 != 0 { true } else if year % 400 != 0 { false } else { true } } pub fn time(_: ShellArgs) -> ExitCode { let rt = CMOS::new().rtc(); println!( "Time: {}:{}:{} - {}/{}/{} - Uptime: {}", rt.hour, rt.minute, rt.second, rt.day, rt.month, rt.year, seconds() ); ExitCode::Ok } static mut RTC_TICKS: usize = 0; static mut UPDATE_FREQ: usize = 1; pub fn rtc_tick() { unsafe { RTC_TICKS += 1 } } pub fn ticks() -> usize { unsafe { RTC_TICKS } } pub fn seconds() -> f64 { unsafe { (RTC_TICKS as f64) / (UPDATE_FREQ as f64) } } pub fn set_rate(rate: usize) { unsafe { UPDATE_FREQ = 8 << 2u32.pow(rate as u32); cmos::set_rate(rate as u8); } }
22.536585
98
0.551407
e5b18e468045d0119073e446161a7243436969a6
339
use quick_protobuf::*; use crate::rust_protobuf::hex::{encode_hex, decode_hex}; use super::basic::*; #[test] fn test_zeros_are_not_written() { let mut m = TestTypesSingular::default(); m.bool_field = Some(false); m.enum_field = Some(TestEnumDescriptor::UNKNOWN); m.fixed32_field = Some(0); test_serialize!("", &m); }
24.214286
56
0.681416
1466774259cd3bf4e31fd5a89e6de8b272fb9bdb
1,390
//! Peer database types pub mod in_memory; use std::collections::HashMap; use crate::{peer_addr::PeerAddr, peer_info::PeerInfo}; type PeerMap = HashMap<PeerAddr, PeerInfo>; /// PeerDatabase errors #[derive(PartialEq, Eq, Debug)] pub enum PeerDatabaseError { /// Peer was not found in the database NotFound, /// The peer had no address associated with it /// The node spec had no declared address and the node isn't using the LocalAddress peer feature NoPeerAddr, } /// Peer database trait /// Common operations for managing peers pub trait PeerDatabase { /// Get `PeerInfo` based on the address fn get_peer_by_addr(&self, addr: PeerAddr) -> Option<PeerInfo>; /// Add a peer to the database /// If a peer with the address already exists then it is updated and the old value is returned /// If the database did not contain the peer info then None is returned /// If the peer has no associated address PeerDatabaseError::NoPeerAddr is returned fn add_or_update_peer( &mut self, peer_info: PeerInfo, ) -> Result<Option<PeerInfo>, PeerDatabaseError>; /// Remove peer from the database /// Returns the peer if it existed in the database fn remove_peer(&mut self, addr: PeerAddr) -> Result<(), PeerDatabaseError>; /// Return a mapping of peer address -> peer info fn known_peers(&self) -> PeerMap; }
32.325581
100
0.7
764938c2a41c3a4bb9de991475acc2e02b606027
715
use core::fmt::{Arguments, Write}; use super::{DriverNames, GpuGraphics, DRIVER}; #[cfg(target_arch = "x86_64")] fn vga_print(x: usize, y: usize, color: usize, args: Arguments) { use vgag::{Color16, display::VgaDisplay}; let vga_color = super::color::convert_to_vga_color(color); let mut vga = VgaDisplay::new(x, y, vga_color); vga.write_fmt(args); } impl GpuGraphics { pub fn graphics_print(&mut self, x: usize, y: usize, color: usize, args: Arguments) { unsafe { if DRIVER == DriverNames::Vgag { #[cfg(target_arch = "x86_64")] vga_print(x, y, color, args); } else { return; } } } }
28.6
89
0.570629
09b90f032afac63d46b8da82624d051efcff4dd1
3,527
use super::*; struct Tokenizer<'a> { text: &'a str, last: usize, current: usize, } impl<'a> Tokenizer<'a> { fn new(text: &'a str) -> Self { Tokenizer { text, last: 0, current: 0, } } fn current_str(&self) -> &str { &self.text[self.last..self.current] } fn next_str(&self) -> &str { &self.text[self.current..] } fn next_char(&self) -> char { self.next_str().chars().next().unwrap_or('\0') } fn bump(&mut self) { self.current += self.next_char().len_utf8(); } fn skip(&mut self, len: usize) { self.current += len; } fn commit(&mut self, kind: Token) -> TokenData { let span = (self.last, self.current); self.last = self.current; TokenData::new(kind, span) } } pub(crate) fn tokenize(text: &str) -> Vec<TokenData> { let mut t = Tokenizer::new(text); let mut tokens = vec![]; static TABLE: &[(&str, Token)] = &[ ("(", Token::ParenL), (")", Token::ParenR), ("[", Token::BracketL), ("]", Token::BracketR), ("{", Token::BraceL), ("}", Token::BraceR), (":", Token::Colon), (",", Token::Comma), ("=", Token::Eq), (">", Token::Gt), ("-", Token::Hyphen), ("<", Token::Lt), ("+", Token::Plus), (";", Token::Semi), ("*", Token::Star), ]; static KEYWORDS: &[(&str, Token)] = &[ ("print", Token::Print), ("if", Token::If), ("else", Token::Else), ("while", Token::While), ("for", Token::For), ("in", Token::In), ("fn", Token::Fn), ("type", Token::Type), ("pub", Token::Pub), ]; loop { let c = t.next_char(); if c == '\0' { break; } if c.is_whitespace() { while t.next_char().is_whitespace() { t.bump(); } // NOTE: 空白はトークン列に含めない。 t.commit(Token::Eof); continue; } if t.next_str().starts_with("//") { while t.next_char() != '\n' && t.next_char() != '\0' { t.bump(); } // NOTE: コメント行はトークン列に含めない。 t.commit(Token::Eof); continue; } if c.is_ascii_digit() { while t.next_char().is_ascii_digit() { t.bump(); } tokens.push(t.commit(Token::Int)); continue; } if c.is_alphabetic() { while t.next_char().is_alphanumeric() { t.bump(); } let kind = KEYWORDS .iter() .filter_map(|&(word, kind)| { if t.current_str() == word { 
Some(kind) } else { None } }) .next() .unwrap_or(Token::Ident); tokens.push(t.commit(kind)); continue; } match TABLE .iter() .filter(|&(word, _)| t.next_str().starts_with(word)) .next() { Some(&(word, kind)) => { t.skip(word.len()); tokens.push(t.commit(kind)); continue; } None => {} } panic!("invalid char {:?}", c) } tokens.push(t.commit(Token::Eof)); tokens }
23.513333
66
0.402892
0103e4bd628d7e757dc70c14aa78639c53238755
92,204
//! Traits, helpers, and type definitions for core I/O functionality. //! //! The `std::io` module contains a number of common things you'll need //! when doing input and output. The most core part of this module is //! the [`Read`] and [`Write`] traits, which provide the //! most general interface for reading and writing input and output. //! //! # Read and Write //! //! Because they are traits, [`Read`] and [`Write`] are implemented by a number //! of other types, and you can implement them for your types too. As such, //! you'll see a few different types of I/O throughout the documentation in //! this module: [`File`]s, [`TcpStream`]s, and sometimes even [`Vec<T>`]s. For //! example, [`Read`] adds a [`read`][`Read::read`] method, which we can use on //! [`File`]s: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! use std::fs::File; //! //! fn main() -> io::Result<()> { //! let mut f = File::open("foo.txt")?; //! let mut buffer = [0; 10]; //! //! // read up to 10 bytes //! let n = f.read(&mut buffer)?; //! //! println!("The bytes: {:?}", &buffer[..n]); //! Ok(()) //! } //! ``` //! //! [`Read`] and [`Write`] are so important, implementors of the two traits have a //! nickname: readers and writers. So you'll sometimes see 'a reader' instead //! of 'a type that implements the [`Read`] trait'. Much easier! //! //! ## Seek and BufRead //! //! Beyond that, there are two important traits that are provided: [`Seek`] //! and [`BufRead`]. Both of these build on top of a reader to control //! how the reading happens. [`Seek`] lets you control where the next byte is //! coming from: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! use std::io::SeekFrom; //! use std::fs::File; //! //! fn main() -> io::Result<()> { //! let mut f = File::open("foo.txt")?; //! let mut buffer = [0; 10]; //! //! // skip to the last 10 bytes of the file //! f.seek(SeekFrom::End(-10))?; //! //! // read up to 10 bytes //! let n = f.read(&mut buffer)?; //! //! 
println!("The bytes: {:?}", &buffer[..n]); //! Ok(()) //! } //! ``` //! //! [`BufRead`] uses an internal buffer to provide a number of other ways to read, but //! to show it off, we'll need to talk about buffers in general. Keep reading! //! //! ## BufReader and BufWriter //! //! Byte-based interfaces are unwieldy and can be inefficient, as we'd need to be //! making near-constant calls to the operating system. To help with this, //! `std::io` comes with two structs, [`BufReader`] and [`BufWriter`], which wrap //! readers and writers. The wrapper uses a buffer, reducing the number of //! calls and providing nicer methods for accessing exactly what you want. //! //! For example, [`BufReader`] works with the [`BufRead`] trait to add extra //! methods to any reader: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! use std::io::BufReader; //! use std::fs::File; //! //! fn main() -> io::Result<()> { //! let f = File::open("foo.txt")?; //! let mut reader = BufReader::new(f); //! let mut buffer = String::new(); //! //! // read a line into buffer //! reader.read_line(&mut buffer)?; //! //! println!("{}", buffer); //! Ok(()) //! } //! ``` //! //! [`BufWriter`] doesn't add any new ways of writing; it just buffers every call //! to [`write`][`Write::write`]: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! use std::io::BufWriter; //! use std::fs::File; //! //! fn main() -> io::Result<()> { //! let f = File::create("foo.txt")?; //! { //! let mut writer = BufWriter::new(f); //! //! // write a byte to the buffer //! writer.write(&[42])?; //! //! } // the buffer is flushed once writer goes out of scope //! //! Ok(()) //! } //! ``` //! //! ## Standard input and output //! //! A very common source of input is standard input: //! //! ```no_run //! use std::io; //! //! fn main() -> io::Result<()> { //! let mut input = String::new(); //! //! io::stdin().read_line(&mut input)?; //! //! println!("You typed: {}", input.trim()); //! Ok(()) //! } //! 
``` //! //! Note that you cannot use the [`?` operator] in functions that do not return //! a [`Result<T, E>`][`Result`]. Instead, you can call [`.unwrap()`] //! or `match` on the return value to catch any possible errors: //! //! ```no_run //! use std::io; //! //! let mut input = String::new(); //! //! io::stdin().read_line(&mut input).unwrap(); //! ``` //! //! And a very common source of output is standard output: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! //! fn main() -> io::Result<()> { //! io::stdout().write(&[42])?; //! Ok(()) //! } //! ``` //! //! Of course, using [`io::stdout`] directly is less common than something like //! [`println!`]. //! //! ## Iterator types //! //! A large number of the structures provided by `std::io` are for various //! ways of iterating over I/O. For example, [`Lines`] is used to split over //! lines: //! //! ```no_run //! use std::io; //! use std::io::prelude::*; //! use std::io::BufReader; //! use std::fs::File; //! //! fn main() -> io::Result<()> { //! let f = File::open("foo.txt")?; //! let reader = BufReader::new(f); //! //! for line in reader.lines() { //! println!("{}", line?); //! } //! Ok(()) //! } //! ``` //! //! ## Functions //! //! There are a number of [functions][functions-list] that offer access to various //! features. For example, we can use three of these functions to copy everything //! from standard input to standard output: //! //! ```no_run //! use std::io; //! //! fn main() -> io::Result<()> { //! io::copy(&mut io::stdin(), &mut io::stdout())?; //! Ok(()) //! } //! ``` //! //! [functions-list]: #functions-1 //! //! ## io::Result //! //! Last, but certainly not least, is [`io::Result`]. This type is used //! as the return type of many `std::io` functions that can cause an error, and //! can be returned from your own functions as well. Many of the examples in this //! module use the [`?` operator]: //! //! ``` //! use std::io; //! //! fn read_input() -> io::Result<()> { //! 
let mut input = String::new(); //! //! io::stdin().read_line(&mut input)?; //! //! println!("You typed: {}", input.trim()); //! //! Ok(()) //! } //! ``` //! //! The return type of `read_input()`, [`io::Result<()>`][`io::Result`], is a very //! common type for functions which don't have a 'real' return value, but do want to //! return errors if they happen. In this case, the only purpose of this function is //! to read the line and print it, so we use `()`. //! //! ## Platform-specific behavior //! //! Many I/O functions throughout the standard library are documented to indicate //! what various library or syscalls they are delegated to. This is done to help //! applications both understand what's happening under the hood as well as investigate //! any possibly unclear semantics. Note, however, that this is informative, not a binding //! contract. The implementation of many of these functions are subject to change over //! time and may call fewer or more syscalls/library functions. //! //! [`Read`]: trait.Read.html //! [`Write`]: trait.Write.html //! [`Seek`]: trait.Seek.html //! [`BufRead`]: trait.BufRead.html //! [`File`]: ../fs/struct.File.html //! [`TcpStream`]: ../net/struct.TcpStream.html //! [`Vec<T>`]: ../vec/struct.Vec.html //! [`BufReader`]: struct.BufReader.html //! [`BufWriter`]: struct.BufWriter.html //! [`Write::write`]: trait.Write.html#tymethod.write //! [`io::stdout`]: fn.stdout.html //! [`println!`]: ../macro.println.html //! [`Lines`]: struct.Lines.html //! [`io::Result`]: type.Result.html //! [`?` operator]: ../../book/appendix-02-operators.html //! [`Read::read`]: trait.Read.html#tymethod.read //! [`Result`]: ../result/enum.Result.html //! 
[`.unwrap()`]: ../result/enum.Result.html#method.unwrap

#![stable(feature = "rust1", since = "1.0.0")]

use crate::cmp;
use crate::fmt;
use crate::memchr;
use crate::ops::{Deref, DerefMut};
use crate::ptr;
use crate::slice;
use crate::str;
use crate::sys;

#[stable(feature = "rust1", since = "1.0.0")]
pub use self::buffered::IntoInnerError;
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::buffered::{BufReader, BufWriter, LineWriter};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::cursor::Cursor;
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::error::{Error, ErrorKind, Result};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::stdio::{stderr, stdin, stdout, Stderr, Stdin, Stdout};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::stdio::{StderrLock, StdinLock, StdoutLock};
#[unstable(feature = "print_internals", issue = "none")]
pub use self::stdio::{_eprint, _print};
#[unstable(feature = "libstd_io_internals", issue = "42788")]
#[doc(no_inline, hidden)]
pub use self::stdio::{set_panic, set_print};
#[stable(feature = "rust1", since = "1.0.0")]
pub use self::util::{copy, empty, repeat, sink, Empty, Repeat, Sink};

mod buffered;
mod cursor;
mod error;
mod impls;
mod lazy;
pub mod prelude;
mod stdio;
mod util;

const DEFAULT_BUF_SIZE: usize = crate::sys_common::io::DEFAULT_BUF_SIZE;

/// RAII guard used by the `read_to_end`/`read_to_string` machinery below.
///
/// On drop it forces `buf`'s length back to `len`. Callers grow the vector
/// (possibly exposing uninitialized or not-yet-validated bytes past `len`)
/// while reading, and only bump `len` once the new bytes are known good; if a
/// panic or early return happens first, the drop impl ensures no byte beyond
/// `len` is ever observed as part of the vector's initialized contents.
struct Guard<'a> {
    // The buffer being protected.
    buf: &'a mut Vec<u8>,
    // The length to restore on drop; the "committed" prefix of `buf`.
    len: usize,
}

impl Drop for Guard<'_> {
    fn drop(&mut self) {
        // Restore the committed length so bytes past `len` (which may be
        // uninitialized or invalid UTF-8) are excluded from the vector.
        unsafe {
            self.buf.set_len(self.len);
        }
    }
}

// A few methods below (read_to_string, read_line) will append data into a
// `String` buffer, but we need to be pretty careful when doing this. The
// implementation will just call `.as_mut_vec()` and then delegate to a
// byte-oriented reading method, but we must ensure that when returning we never
// leave `buf` in a state such that it contains invalid UTF-8 in its bounds.
// // To this end, we use an RAII guard (to protect against panics) which updates // the length of the string when it is dropped. This guard initially truncates // the string to the prior length and only after we've validated that the // new contents are valid UTF-8 do we allow it to set a longer length. // // The unsafety in this function is twofold: // // 1. We're looking at the raw bytes of `buf`, so we take on the burden of UTF-8 // checks. // 2. We're passing a raw buffer to the function `f`, and it is expected that // the function only *appends* bytes to the buffer. We'll get undefined // behavior if existing bytes are overwritten to have non-UTF-8 data. fn append_to_string<F>(buf: &mut String, f: F) -> Result<usize> where F: FnOnce(&mut Vec<u8>) -> Result<usize>, { unsafe { let mut g = Guard { len: buf.len(), buf: buf.as_mut_vec() }; let ret = f(g.buf); if str::from_utf8(&g.buf[g.len..]).is_err() { ret.and_then(|_| { Err(Error::new(ErrorKind::InvalidData, "stream did not contain valid UTF-8")) }) } else { g.len = g.buf.len(); ret } } } // This uses an adaptive system to extend the vector when it fills. We want to // avoid paying to allocate and zero a huge chunk of memory if the reader only // has 4 bytes while still making large reads if the reader does have a ton // of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every // time is 4,500 times (!) slower than a default reservation size of 32 if the // reader has a very small amount of data to return. // // Because we're extending the buffer with uninitialized data for trusted // readers, we need to make sure to truncate that if any of this panics. 
// Default implementation of `Read::read_to_end`: delegates with a fixed
// minimum reservation of 32 bytes per growth step (see the adaptive-sizing
// rationale in the comment above).
fn read_to_end<R: Read + ?Sized>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize> {
    read_to_end_with_reservation(r, buf, |_| 32)
}

// Reads from `r` until EOF, appending into `buf`, growing the buffer by at
// least `reservation_size(r)` bytes whenever it is full. Returns the number of
// bytes appended by this call. The `Guard` ensures that on panic or error the
// buffer's length never covers uninitialized bytes.
fn read_to_end_with_reservation<R, F>(
    r: &mut R,
    buf: &mut Vec<u8>,
    mut reservation_size: F,
) -> Result<usize>
where
    R: Read + ?Sized,
    F: FnMut(&R) -> usize,
{
    let start_len = buf.len();
    let mut g = Guard { len: buf.len(), buf };
    let ret;
    loop {
        // Buffer is full: grow it and hand the reader fresh (possibly
        // uninitialized) capacity to fill.
        if g.len == g.buf.len() {
            unsafe {
                // FIXME(danielhenrymantilla): #42788
                //
                // - This creates a (mut) reference to a slice of
                //   _uninitialized_ integers, which is **undefined behavior**
                //
                // - Only the standard library gets to soundly "ignore" this,
                //   based on its privileged knowledge of unstable rustc
                //   internals;
                g.buf.reserve(reservation_size(r));
                let capacity = g.buf.capacity();
                g.buf.set_len(capacity);
                // Zero the new tail unless the reader promised (via a nop
                // `Initializer`) that it never reads from the buffer.
                r.initializer().initialize(&mut g.buf[g.len..]);
            }
        }

        match r.read(&mut g.buf[g.len..]) {
            // EOF: report how many bytes this call appended.
            Ok(0) => {
                ret = Ok(g.len - start_len);
                break;
            }
            // Commit the bytes just read so the guard keeps them on drop.
            Ok(n) => g.len += n,
            // Interrupted reads are non-fatal and simply retried.
            Err(ref e) if e.kind() == ErrorKind::Interrupted => {}
            Err(e) => {
                ret = Err(e);
                break;
            }
        }
    }

    ret
}

// Default `read_vectored` behavior: forward to the scalar `read` closure using
// the first nonempty buffer, or an empty one if all buffers are empty.
pub(crate) fn default_read_vectored<F>(read: F, bufs: &mut [IoSliceMut<'_>]) -> Result<usize>
where
    F: FnOnce(&mut [u8]) -> Result<usize>,
{
    let buf = bufs.iter_mut().find(|b| !b.is_empty()).map_or(&mut [][..], |b| &mut **b);
    read(buf)
}

// Default `write_vectored` behavior: forward to the scalar `write` closure
// using the first nonempty buffer, or an empty one if all buffers are empty.
pub(crate) fn default_write_vectored<F>(write: F, bufs: &[IoSlice<'_>]) -> Result<usize>
where
    F: FnOnce(&[u8]) -> Result<usize>,
{
    let buf = bufs.iter().find(|b| !b.is_empty()).map_or(&[][..], |b| &**b);
    write(buf)
}

/// The `Read` trait allows for reading bytes from a source.
///
/// Implementors of the `Read` trait are called 'readers'.
///
/// Readers are defined by one required method, [`read()`]. Each call to [`read()`]
/// will attempt to pull bytes from this source into a provided buffer. A
/// number of other methods are implemented in terms of [`read()`], giving
/// implementors a number of ways to read bytes while only needing to implement
/// a single method.
/// /// Readers are intended to be composable with one another. Many implementors /// throughout [`std::io`] take and provide types which implement the `Read` /// trait. /// /// Please note that each call to [`read()`] may involve a system call, and /// therefore, using something that implements [`BufRead`], such as /// [`BufReader`], will be more efficient. /// /// # Examples /// /// [`File`]s implement `Read`: /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = [0; 10]; /// /// // read up to 10 bytes /// f.read(&mut buffer)?; /// /// let mut buffer = Vec::new(); /// // read the whole file /// f.read_to_end(&mut buffer)?; /// /// // read into a String, so that you don't need to do the conversion. /// let mut buffer = String::new(); /// f.read_to_string(&mut buffer)?; /// /// // and more! See the other methods for more details. /// Ok(()) /// } /// ``` /// /// Read from [`&str`] because [`&[u8]`][slice] implements `Read`: /// /// ```no_run /// # use std::io; /// use std::io::prelude::*; /// /// fn main() -> io::Result<()> { /// let mut b = "This string will be read".as_bytes(); /// let mut buffer = [0; 10]; /// /// // read up to 10 bytes /// b.read(&mut buffer)?; /// /// // etc... it works exactly as a File does! /// Ok(()) /// } /// ``` /// /// [`read()`]: trait.Read.html#tymethod.read /// [`std::io`]: ../../std/io/index.html /// [`File`]: ../fs/struct.File.html /// [`BufRead`]: trait.BufRead.html /// [`BufReader`]: struct.BufReader.html /// [`&str`]: ../../std/primitive.str.html /// [slice]: ../../std/primitive.slice.html #[stable(feature = "rust1", since = "1.0.0")] #[doc(spotlight)] pub trait Read { /// Pull some bytes from this source into the specified buffer, returning /// how many bytes were read. 
/// /// This function does not provide any guarantees about whether it blocks /// waiting for data, but if an object needs to block for a read but cannot /// it will typically signal this via an [`Err`] return value. /// /// If the return value of this method is [`Ok(n)`], then it must be /// guaranteed that `0 <= n <= buf.len()`. A nonzero `n` value indicates /// that the buffer `buf` has been filled in with `n` bytes of data from this /// source. If `n` is `0`, then it can indicate one of two scenarios: /// /// 1. This reader has reached its "end of file" and will likely no longer /// be able to produce bytes. Note that this does not mean that the /// reader will *always* no longer be able to produce bytes. /// 2. The buffer specified was 0 bytes in length. /// /// No guarantees are provided about the contents of `buf` when this /// function is called, implementations cannot rely on any property of the /// contents of `buf` being true. It is recommended that *implementations* /// only write data to `buf` instead of reading its contents. /// /// Correspondingly, however, *callers* of this method may not assume any guarantees /// about how the implementation uses `buf`. The trait is safe to implement, /// so it is possible that the code that's supposed to write to the buffer might also read /// from it. It is your responsibility to make sure that `buf` is initialized /// before calling `read`. Calling `read` with an uninitialized `buf` (of the kind one /// obtains via [`MaybeUninit<T>`]) is not safe, and can lead to undefined behavior. /// /// [`MaybeUninit<T>`]: ../mem/union.MaybeUninit.html /// /// # Errors /// /// If this function encounters any form of I/O or other error, an error /// variant will be returned. If an error is returned then it must be /// guaranteed that no bytes were read. /// /// An error of the [`ErrorKind::Interrupted`] kind is non-fatal and the read /// operation should be retried if there is nothing else to do. 
/// /// # Examples /// /// [`File`]s implement `Read`: /// /// [`Err`]: ../../std/result/enum.Result.html#variant.Err /// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted /// [`File`]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = [0; 10]; /// /// // read up to 10 bytes /// let n = f.read(&mut buffer[..])?; /// /// println!("The bytes: {:?}", &buffer[..n]); /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn read(&mut self, buf: &mut [u8]) -> Result<usize>; /// Like `read`, except that it reads into a slice of buffers. /// /// Data is copied to fill each buffer in order, with the final buffer /// written to possibly being only partially filled. This method must behave /// as a single call to `read` with the buffers concatenated would. /// /// The default implementation calls `read` with either the first nonempty /// buffer provided, or an empty one if none exists. #[stable(feature = "iovec", since = "1.36.0")] fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result<usize> { default_read_vectored(|b| self.read(b), bufs) } /// Determines if this `Read`er can work with buffers of uninitialized /// memory. /// /// The default implementation returns an initializer which will zero /// buffers. /// /// If a `Read`er guarantees that it can work properly with uninitialized /// memory, it should call [`Initializer::nop()`]. See the documentation for /// [`Initializer`] for details. /// /// The behavior of this method must be independent of the state of the /// `Read`er - the method only takes `&self` so that it can be used through /// trait objects. 
/// /// # Safety /// /// This method is unsafe because a `Read`er could otherwise return a /// non-zeroing `Initializer` from another `Read` type without an `unsafe` /// block. /// /// [`Initializer::nop()`]: ../../std/io/struct.Initializer.html#method.nop /// [`Initializer`]: ../../std/io/struct.Initializer.html #[unstable(feature = "read_initializer", issue = "42788")] #[inline] unsafe fn initializer(&self) -> Initializer { Initializer::zeroing() } /// Read all bytes until EOF in this source, placing them into `buf`. /// /// All bytes read from this source will be appended to the specified buffer /// `buf`. This function will continuously call [`read()`] to append more data to /// `buf` until [`read()`] returns either [`Ok(0)`] or an error of /// non-[`ErrorKind::Interrupted`] kind. /// /// If successful, this function will return the total number of bytes read. /// /// # Errors /// /// If this function encounters an error of the kind /// [`ErrorKind::Interrupted`] then the error is ignored and the operation /// will continue. /// /// If any other read error is encountered then this function immediately /// returns. Any bytes which have already been read will be appended to /// `buf`. /// /// # Examples /// /// [`File`]s implement `Read`: /// /// [`read()`]: trait.Read.html#tymethod.read /// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted /// [`File`]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = Vec::new(); /// /// // read the whole file /// f.read_to_end(&mut buffer)?; /// Ok(()) /// } /// ``` /// /// (See also the [`std::fs::read`] convenience function for reading from a /// file.) 
/// /// [`std::fs::read`]: ../fs/fn.read.html #[stable(feature = "rust1", since = "1.0.0")] fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> { read_to_end(self, buf) } /// Read all bytes until EOF in this source, appending them to `buf`. /// /// If successful, this function returns the number of bytes which were read /// and appended to `buf`. /// /// # Errors /// /// If the data in this stream is *not* valid UTF-8 then an error is /// returned and `buf` is unchanged. /// /// See [`read_to_end`][readtoend] for other error semantics. /// /// [readtoend]: #method.read_to_end /// /// # Examples /// /// [`File`][file]s implement `Read`: /// /// [file]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = String::new(); /// /// f.read_to_string(&mut buffer)?; /// Ok(()) /// } /// ``` /// /// (See also the [`std::fs::read_to_string`] convenience function for /// reading from a file.) /// /// [`std::fs::read_to_string`]: ../fs/fn.read_to_string.html #[stable(feature = "rust1", since = "1.0.0")] fn read_to_string(&mut self, buf: &mut String) -> Result<usize> { // Note that we do *not* call `.read_to_end()` here. We are passing // `&mut Vec<u8>` (the raw contents of `buf`) into the `read_to_end` // method to fill it up. An arbitrary implementation could overwrite the // entire contents of the vector, not just append to it (which is what // we are expecting). // // To prevent extraneously checking the UTF-8-ness of the entire buffer // we pass it to our hardcoded `read_to_end` implementation which we // know is guaranteed to only read data into the end of the buffer. append_to_string(buf, |b| read_to_end(self, b)) } /// Read the exact number of bytes required to fill `buf`. /// /// This function reads as many bytes as necessary to completely fill the /// specified buffer `buf`. 
/// /// No guarantees are provided about the contents of `buf` when this /// function is called, implementations cannot rely on any property of the /// contents of `buf` being true. It is recommended that implementations /// only write data to `buf` instead of reading its contents. /// /// # Errors /// /// If this function encounters an error of the kind /// [`ErrorKind::Interrupted`] then the error is ignored and the operation /// will continue. /// /// If this function encounters an "end of file" before completely filling /// the buffer, it returns an error of the kind [`ErrorKind::UnexpectedEof`]. /// The contents of `buf` are unspecified in this case. /// /// If any other read error is encountered then this function immediately /// returns. The contents of `buf` are unspecified in this case. /// /// If this function returns an error, it is unspecified how many bytes it /// has read, but it will never read more than would be necessary to /// completely fill the buffer. /// /// # Examples /// /// [`File`]s implement `Read`: /// /// [`File`]: ../fs/struct.File.html /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted /// [`ErrorKind::UnexpectedEof`]: ../../std/io/enum.ErrorKind.html#variant.UnexpectedEof /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = [0; 10]; /// /// // read exactly 10 bytes /// f.read_exact(&mut buffer)?; /// Ok(()) /// } /// ``` #[stable(feature = "read_exact", since = "1.6.0")] fn read_exact(&mut self, mut buf: &mut [u8]) -> Result<()> { while !buf.is_empty() { match self.read(buf) { Ok(0) => break, Ok(n) => { let tmp = buf; buf = &mut tmp[n..]; } Err(ref e) if e.kind() == ErrorKind::Interrupted => {} Err(e) => return Err(e), } } if !buf.is_empty() { Err(Error::new(ErrorKind::UnexpectedEof, "failed to fill whole buffer")) } else { Ok(()) } } /// Creates a "by reference" 
adaptor for this instance of `Read`. /// /// The returned adaptor also implements `Read` and will simply borrow this /// current reader. /// /// # Examples /// /// [`File`][file]s implement `Read`: /// /// [file]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::Read; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = Vec::new(); /// let mut other_buffer = Vec::new(); /// /// { /// let reference = f.by_ref(); /// /// // read at most 5 bytes /// reference.take(5).read_to_end(&mut buffer)?; /// /// } // drop our &mut reference so we can use f again /// /// // original file still usable, read the rest /// f.read_to_end(&mut other_buffer)?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn by_ref(&mut self) -> &mut Self where Self: Sized, { self } /// Transforms this `Read` instance to an [`Iterator`] over its bytes. /// /// The returned type implements [`Iterator`] where the `Item` is /// [`Result`]`<`[`u8`]`, `[`io::Error`]`>`. /// The yielded item is [`Ok`] if a byte was successfully read and [`Err`] /// otherwise. EOF is mapped to returning [`None`] from this iterator. 
/// /// # Examples /// /// [`File`][file]s implement `Read`: /// /// [file]: ../fs/struct.File.html /// [`Iterator`]: ../../std/iter/trait.Iterator.html /// [`Result`]: ../../std/result/enum.Result.html /// [`io::Error`]: ../../std/io/struct.Error.html /// [`u8`]: ../../std/primitive.u8.html /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok /// [`Err`]: ../../std/result/enum.Result.html#variant.Err /// [`None`]: ../../std/option/enum.Option.html#variant.None /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// /// for byte in f.bytes() { /// println!("{}", byte.unwrap()); /// } /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn bytes(self) -> Bytes<Self> where Self: Sized, { Bytes { inner: self } } /// Creates an adaptor which will chain this stream with another. /// /// The returned `Read` instance will first read all bytes from this object /// until EOF is encountered. Afterwards the output is equivalent to the /// output of `next`. /// /// # Examples /// /// [`File`][file]s implement `Read`: /// /// [file]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f1 = File::open("foo.txt")?; /// let mut f2 = File::open("bar.txt")?; /// /// let mut handle = f1.chain(f2); /// let mut buffer = String::new(); /// /// // read the value into a String. We could use any Read method here, /// // this is just one example. /// handle.read_to_string(&mut buffer)?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn chain<R: Read>(self, next: R) -> Chain<Self, R> where Self: Sized, { Chain { first: self, second: next, done_first: false } } /// Creates an adaptor which will read at most `limit` bytes from it. 
/// /// This function returns a new instance of `Read` which will read at most /// `limit` bytes, after which it will always return EOF ([`Ok(0)`]). Any /// read errors will not count towards the number of bytes read and future /// calls to [`read()`] may succeed. /// /// # Examples /// /// [`File`]s implement `Read`: /// /// [`File`]: ../fs/struct.File.html /// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok /// [`read()`]: trait.Read.html#tymethod.read /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// let mut buffer = [0; 5]; /// /// // read at most five bytes /// let mut handle = f.take(5); /// /// handle.read(&mut buffer)?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn take(self, limit: u64) -> Take<Self> where Self: Sized, { Take { inner: self, limit } } } /// A buffer type used with `Read::read_vectored`. /// /// It is semantically a wrapper around an `&mut [u8]`, but is guaranteed to be /// ABI compatible with the `iovec` type on Unix platforms and `WSABUF` on /// Windows. #[stable(feature = "iovec", since = "1.36.0")] #[repr(transparent)] pub struct IoSliceMut<'a>(sys::io::IoSliceMut<'a>); #[stable(feature = "iovec", since = "1.36.0")] impl<'a> fmt::Debug for IoSliceMut<'a> { fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(self.0.as_slice(), fmt) } } impl<'a> IoSliceMut<'a> { /// Creates a new `IoSliceMut` wrapping a byte slice. /// /// # Panics /// /// Panics on Windows if the slice is larger than 4GB. #[stable(feature = "iovec", since = "1.36.0")] #[inline] pub fn new(buf: &'a mut [u8]) -> IoSliceMut<'a> { IoSliceMut(sys::io::IoSliceMut::new(buf)) } /// Advance the internal cursor of the slice. /// /// # Notes /// /// Elements in the slice may be modified if the cursor is not advanced to /// the end of the slice. 
For example if we have a slice of buffers with 2 /// `IoSliceMut`s, both of length 8, and we advance the cursor by 10 bytes /// the first `IoSliceMut` will be untouched however the second will be /// modified to remove the first 2 bytes (10 - 8). /// /// # Examples /// /// ``` /// #![feature(io_slice_advance)] /// /// use std::io::IoSliceMut; /// use std::ops::Deref; /// /// let mut buf1 = [1; 8]; /// let mut buf2 = [2; 16]; /// let mut buf3 = [3; 8]; /// let mut bufs = &mut [ /// IoSliceMut::new(&mut buf1), /// IoSliceMut::new(&mut buf2), /// IoSliceMut::new(&mut buf3), /// ][..]; /// /// // Mark 10 bytes as read. /// bufs = IoSliceMut::advance(bufs, 10); /// assert_eq!(bufs[0].deref(), [2; 14].as_ref()); /// assert_eq!(bufs[1].deref(), [3; 8].as_ref()); /// ``` #[unstable(feature = "io_slice_advance", issue = "62726")] #[inline] pub fn advance<'b>(bufs: &'b mut [IoSliceMut<'a>], n: usize) -> &'b mut [IoSliceMut<'a>] { // Number of buffers to remove. let mut remove = 0; // Total length of all the to be removed buffers. let mut accumulated_len = 0; for buf in bufs.iter() { if accumulated_len + buf.len() > n { break; } else { accumulated_len += buf.len(); remove += 1; } } let bufs = &mut bufs[remove..]; if !bufs.is_empty() { bufs[0].0.advance(n - accumulated_len) } bufs } } #[stable(feature = "iovec", since = "1.36.0")] impl<'a> Deref for IoSliceMut<'a> { type Target = [u8]; #[inline] fn deref(&self) -> &[u8] { self.0.as_slice() } } #[stable(feature = "iovec", since = "1.36.0")] impl<'a> DerefMut for IoSliceMut<'a> { #[inline] fn deref_mut(&mut self) -> &mut [u8] { self.0.as_mut_slice() } } /// A buffer type used with `Write::write_vectored`. /// /// It is semantically a wrapper around an `&[u8]`, but is guaranteed to be /// ABI compatible with the `iovec` type on Unix platforms and `WSABUF` on /// Windows. 
#[stable(feature = "iovec", since = "1.36.0")]
#[repr(transparent)]
pub struct IoSlice<'a>(sys::io::IoSlice<'a>);

#[stable(feature = "iovec", since = "1.36.0")]
impl<'a> fmt::Debug for IoSlice<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.0.as_slice(), fmt)
    }
}

impl<'a> IoSlice<'a> {
    /// Creates a new `IoSlice` wrapping a byte slice.
    ///
    /// # Panics
    ///
    /// Panics on Windows if the slice is larger than 4GB.
    #[stable(feature = "iovec", since = "1.36.0")]
    #[inline]
    pub fn new(buf: &'a [u8]) -> IoSlice<'a> {
        IoSlice(sys::io::IoSlice::new(buf))
    }

    /// Advance the internal cursor of the slice.
    ///
    /// # Notes
    ///
    /// Elements in the slice may be modified if the cursor is not advanced to
    /// the end of the slice. For example if we have a slice of buffers with 2
    /// `IoSlice`s, both of length 8, and we advance the cursor by 10 bytes the
    /// first `IoSlice` will be untouched however the second will be modified to
    /// remove the first 2 bytes (10 - 8).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(io_slice_advance)]
    ///
    /// use std::io::IoSlice;
    /// use std::ops::Deref;
    ///
    /// let buf1 = [1; 8];
    /// let buf2 = [2; 16];
    /// let buf3 = [3; 8];
    /// let mut bufs = &mut [
    ///     IoSlice::new(&buf1),
    ///     IoSlice::new(&buf2),
    ///     IoSlice::new(&buf3),
    /// ][..];
    ///
    /// // Mark 10 bytes as written.
    /// bufs = IoSlice::advance(bufs, 10);
    /// assert_eq!(bufs[0].deref(), [2; 14].as_ref());
    /// assert_eq!(bufs[1].deref(), [3; 8].as_ref());
    /// ```
    #[unstable(feature = "io_slice_advance", issue = "62726")]
    #[inline]
    pub fn advance<'b>(bufs: &'b mut [IoSlice<'a>], n: usize) -> &'b mut [IoSlice<'a>] {
        // Number of buffers to remove.
        let mut remove = 0;
        // Total length of all the to be removed buffers.
        let mut accumulated_len = 0;
        for buf in bufs.iter() {
            if accumulated_len + buf.len() > n {
                break;
            } else {
                accumulated_len += buf.len();
                remove += 1;
            }
        }

        // Drop the fully-consumed buffers, then advance the cursor within the
        // first remaining (partially-consumed) buffer, if any.
        let bufs = &mut bufs[remove..];
        if !bufs.is_empty() {
            bufs[0].0.advance(n - accumulated_len)
        }
        bufs
    }
}

#[stable(feature = "iovec", since = "1.36.0")]
impl<'a> Deref for IoSlice<'a> {
    type Target = [u8];

    #[inline]
    fn deref(&self) -> &[u8] {
        self.0.as_slice()
    }
}

/// A type used to conditionally initialize buffers passed to `Read` methods.
#[unstable(feature = "read_initializer", issue = "42788")]
#[derive(Debug)]
pub struct Initializer(bool);

impl Initializer {
    /// Returns a new `Initializer` which will zero out buffers.
    #[unstable(feature = "read_initializer", issue = "42788")]
    #[inline]
    pub fn zeroing() -> Initializer {
        Initializer(true)
    }

    /// Returns a new `Initializer` which will not zero out buffers.
    ///
    /// # Safety
    ///
    /// This may only be called by `Read`ers which guarantee that they will not
    /// read from buffers passed to `Read` methods, and that the return value of
    /// the method accurately reflects the number of bytes that have been
    /// written to the head of the buffer.
    #[unstable(feature = "read_initializer", issue = "42788")]
    #[inline]
    pub unsafe fn nop() -> Initializer {
        Initializer(false)
    }

    /// Indicates if a buffer should be initialized.
    #[unstable(feature = "read_initializer", issue = "42788")]
    #[inline]
    pub fn should_initialize(&self) -> bool {
        self.0
    }

    /// Initializes a buffer if necessary.
    #[unstable(feature = "read_initializer", issue = "42788")]
    #[inline]
    pub fn initialize(&self, buf: &mut [u8]) {
        if self.should_initialize() {
            unsafe { ptr::write_bytes(buf.as_mut_ptr(), 0, buf.len()) }
        }
    }
}

/// A trait for objects which are byte-oriented sinks.
///
/// Implementors of the `Write` trait are sometimes called 'writers'.
/// /// Writers are defined by two required methods, [`write`] and [`flush`]: /// /// * The [`write`] method will attempt to write some data into the object, /// returning how many bytes were successfully written. /// /// * The [`flush`] method is useful for adaptors and explicit buffers /// themselves for ensuring that all buffered data has been pushed out to the /// 'true sink'. /// /// Writers are intended to be composable with one another. Many implementors /// throughout [`std::io`] take and provide types which implement the `Write` /// trait. /// /// [`write`]: #tymethod.write /// [`flush`]: #tymethod.flush /// [`std::io`]: index.html /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let data = b"some bytes"; /// /// let mut pos = 0; /// let mut buffer = File::create("foo.txt")?; /// /// while pos < data.len() { /// let bytes_written = buffer.write(&data[pos..])?; /// pos += bytes_written; /// } /// Ok(()) /// } /// ``` /// /// The trait also provides convenience methods like [`write_all`], which calls /// `write` in a loop until its entire input has been written. /// /// [`write_all`]: #method.write_all #[stable(feature = "rust1", since = "1.0.0")] #[doc(spotlight)] pub trait Write { /// Write a buffer into this writer, returning how many bytes were written. /// /// This function will attempt to write the entire contents of `buf`, but /// the entire write may not succeed, or the write may also generate an /// error. A call to `write` represents *at most one* attempt to write to /// any wrapped object. /// /// Calls to `write` are not guaranteed to block waiting for data to be /// written, and a write which would otherwise block can be indicated through /// an [`Err`] variant. /// /// If the return value is [`Ok(n)`] then it must be guaranteed that /// `n <= buf.len()`. 
A return value of `0` typically means that the /// underlying object is no longer able to accept bytes and will likely not /// be able to in the future as well, or that the buffer provided is empty. /// /// # Errors /// /// Each call to `write` may generate an I/O error indicating that the /// operation could not be completed. If an error is returned then no bytes /// in the buffer were written to this writer. /// /// It is **not** considered an error if the entire buffer could not be /// written to this writer. /// /// An error of the [`ErrorKind::Interrupted`] kind is non-fatal and the /// write operation should be retried if there is nothing else to do. /// /// [`Err`]: ../../std/result/enum.Result.html#variant.Err /// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let mut buffer = File::create("foo.txt")?; /// /// // Writes some prefix of the byte string, not necessarily all of it. /// buffer.write(b"some bytes")?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn write(&mut self, buf: &[u8]) -> Result<usize>; /// Like `write`, except that it writes from a slice of buffers. /// /// Data is copied from each buffer in order, with the final buffer /// read from possibly being only partially consumed. This method must /// behave as a call to `write` with the buffers concatenated would. /// /// The default implementation calls `write` with either the first nonempty /// buffer provided, or an empty one if none exists. #[stable(feature = "iovec", since = "1.36.0")] fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> Result<usize> { default_write_vectored(|b| self.write(b), bufs) } /// Flush this output stream, ensuring that all intermediately buffered /// contents reach their destination. 
/// /// # Errors /// /// It is considered an error if not all bytes could be written due to /// I/O errors or EOF being reached. /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::io::BufWriter; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let mut buffer = BufWriter::new(File::create("foo.txt")?); /// /// buffer.write_all(b"some bytes")?; /// buffer.flush()?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn flush(&mut self) -> Result<()>; /// Attempts to write an entire buffer into this writer. /// /// This method will continuously call [`write`] until there is no more data /// to be written or an error of non-[`ErrorKind::Interrupted`] kind is /// returned. This method will not return until the entire buffer has been /// successfully written or such an error occurs. The first error that is /// not of [`ErrorKind::Interrupted`] kind generated from this method will be /// returned. /// /// If the buffer contains no data, this will never call [`write`]. /// /// # Errors /// /// This function will return the first error of /// non-[`ErrorKind::Interrupted`] kind that [`write`] returns. /// /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted /// [`write`]: #tymethod.write /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let mut buffer = File::create("foo.txt")?; /// /// buffer.write_all(b"some bytes")?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn write_all(&mut self, mut buf: &[u8]) -> Result<()> { while !buf.is_empty() { match self.write(buf) { Ok(0) => { return Err(Error::new(ErrorKind::WriteZero, "failed to write whole buffer")); } Ok(n) => buf = &buf[n..], Err(ref e) if e.kind() == ErrorKind::Interrupted => {} Err(e) => return Err(e), } } Ok(()) } /// Writes a formatted string into this writer, returning any error /// encountered. 
/// /// This method is primarily used to interface with the /// [`format_args!`][formatargs] macro, but it is rare that this should /// explicitly be called. The [`write!`][write] macro should be favored to /// invoke this method instead. /// /// [formatargs]: ../macro.format_args.html /// [write]: ../macro.write.html /// /// This function internally uses the [`write_all`][writeall] method on /// this trait and hence will continuously write data so long as no errors /// are received. This also means that partial writes are not indicated in /// this signature. /// /// [writeall]: #method.write_all /// /// # Errors /// /// This function will return any I/O error reported while formatting. /// /// # Examples /// /// ```no_run /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let mut buffer = File::create("foo.txt")?; /// /// // this call /// write!(buffer, "{:.*}", 2, 1.234567)?; /// // turns into this: /// buffer.write_fmt(format_args!("{:.*}", 2, 1.234567))?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> Result<()> { // Create a shim which translates a Write to a fmt::Write and saves // off I/O errors. instead of discarding them struct Adaptor<'a, T: ?Sized + 'a> { inner: &'a mut T, error: Result<()>, } impl<T: Write + ?Sized> fmt::Write for Adaptor<'_, T> { fn write_str(&mut self, s: &str) -> fmt::Result { match self.inner.write_all(s.as_bytes()) { Ok(()) => Ok(()), Err(e) => { self.error = Err(e); Err(fmt::Error) } } } } let mut output = Adaptor { inner: self, error: Ok(()) }; match fmt::write(&mut output, fmt) { Ok(()) => Ok(()), Err(..) => { // check if the error came from the underlying `Write` or not if output.error.is_err() { output.error } else { Err(Error::new(ErrorKind::Other, "formatter error")) } } } } /// Creates a "by reference" adaptor for this instance of `Write`. 
/// /// The returned adaptor also implements `Write` and will simply borrow this /// current writer. /// /// # Examples /// /// ```no_run /// use std::io::Write; /// use std::fs::File; /// /// fn main() -> std::io::Result<()> { /// let mut buffer = File::create("foo.txt")?; /// /// let reference = buffer.by_ref(); /// /// // we can use reference just like our original buffer /// reference.write_all(b"some bytes")?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn by_ref(&mut self) -> &mut Self where Self: Sized, { self } } /// The `Seek` trait provides a cursor which can be moved within a stream of /// bytes. /// /// The stream typically has a fixed size, allowing seeking relative to either /// end or the current offset. /// /// # Examples /// /// [`File`][file]s implement `Seek`: /// /// [file]: ../fs/struct.File.html /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// use std::io::SeekFrom; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// /// // move the cursor 42 bytes from the start of the file /// f.seek(SeekFrom::Start(42))?; /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub trait Seek { /// Seek to an offset, in bytes, in a stream. /// /// A seek beyond the end of a stream is allowed, but behavior is defined /// by the implementation. /// /// If the seek operation completed successfully, /// this method returns the new position from the start of the stream. /// That position can be used later with [`SeekFrom::Start`]. /// /// # Errors /// /// Seeking to a negative offset is considered an error. /// /// [`SeekFrom::Start`]: enum.SeekFrom.html#variant.Start #[stable(feature = "rust1", since = "1.0.0")] fn seek(&mut self, pos: SeekFrom) -> Result<u64>; /// Returns the length of this stream (in bytes). /// /// This method is implemented using up to three seek operations. 
If this /// method returns successfully, the seek position is unchanged (i.e. the /// position before calling this method is the same as afterwards). /// However, if this method returns an error, the seek position is /// unspecified. /// /// If you need to obtain the length of *many* streams and you don't care /// about the seek position afterwards, you can reduce the number of seek /// operations by simply calling `seek(SeekFrom::End(0))` and using its /// return value (it is also the stream length). /// /// Note that length of a stream can change over time (for example, when /// data is appended to a file). So calling this method multiple times does /// not necessarily return the same length each time. /// /// /// # Example /// /// ```no_run /// #![feature(seek_convenience)] /// use std::{ /// io::{self, Seek}, /// fs::File, /// }; /// /// fn main() -> io::Result<()> { /// let mut f = File::open("foo.txt")?; /// /// let len = f.stream_len()?; /// println!("The file is currently {} bytes long", len); /// Ok(()) /// } /// ``` #[unstable(feature = "seek_convenience", issue = "59359")] fn stream_len(&mut self) -> Result<u64> { let old_pos = self.stream_position()?; let len = self.seek(SeekFrom::End(0))?; // Avoid seeking a third time when we were already at the end of the // stream. The branch is usually way cheaper than a seek operation. if old_pos != len { self.seek(SeekFrom::Start(old_pos))?; } Ok(len) } /// Returns the current seek position from the start of the stream. /// /// This is equivalent to `self.seek(SeekFrom::Current(0))`. 
/// /// /// # Example /// /// ```no_run /// #![feature(seek_convenience)] /// use std::{ /// io::{self, BufRead, BufReader, Seek}, /// fs::File, /// }; /// /// fn main() -> io::Result<()> { /// let mut f = BufReader::new(File::open("foo.txt")?); /// /// let before = f.stream_position()?; /// f.read_line(&mut String::new())?; /// let after = f.stream_position()?; /// /// println!("The first line was {} bytes long", after - before); /// Ok(()) /// } /// ``` #[unstable(feature = "seek_convenience", issue = "59359")] fn stream_position(&mut self) -> Result<u64> { self.seek(SeekFrom::Current(0)) } } /// Enumeration of possible methods to seek within an I/O object. /// /// It is used by the [`Seek`] trait. /// /// [`Seek`]: trait.Seek.html #[derive(Copy, PartialEq, Eq, Clone, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum SeekFrom { /// Sets the offset to the provided number of bytes. #[stable(feature = "rust1", since = "1.0.0")] Start(#[stable(feature = "rust1", since = "1.0.0")] u64), /// Sets the offset to the size of this object plus the specified number of /// bytes. /// /// It is possible to seek beyond the end of an object, but it's an error to /// seek before byte 0. #[stable(feature = "rust1", since = "1.0.0")] End(#[stable(feature = "rust1", since = "1.0.0")] i64), /// Sets the offset to the current position plus the specified number of /// bytes. /// /// It is possible to seek beyond the end of an object, but it's an error to /// seek before byte 0. 
#[stable(feature = "rust1", since = "1.0.0")] Current(#[stable(feature = "rust1", since = "1.0.0")] i64), } fn read_until<R: BufRead + ?Sized>(r: &mut R, delim: u8, buf: &mut Vec<u8>) -> Result<usize> { let mut read = 0; loop { let (done, used) = { let available = match r.fill_buf() { Ok(n) => n, Err(ref e) if e.kind() == ErrorKind::Interrupted => continue, Err(e) => return Err(e), }; match memchr::memchr(delim, available) { Some(i) => { buf.extend_from_slice(&available[..=i]); (true, i + 1) } None => { buf.extend_from_slice(available); (false, available.len()) } } }; r.consume(used); read += used; if done || used == 0 { return Ok(read); } } } /// A `BufRead` is a type of `Read`er which has an internal buffer, allowing it /// to perform extra ways of reading. /// /// For example, reading line-by-line is inefficient without using a buffer, so /// if you want to read by line, you'll need `BufRead`, which includes a /// [`read_line`] method as well as a [`lines`] iterator. /// /// # Examples /// /// A locked standard input implements `BufRead`: /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// /// let stdin = io::stdin(); /// for line in stdin.lock().lines() { /// println!("{}", line.unwrap()); /// } /// ``` /// /// If you have something that implements [`Read`], you can use the [`BufReader` /// type][`BufReader`] to turn it into a `BufRead`. /// /// For example, [`File`] implements [`Read`], but not `BufRead`. /// [`BufReader`] to the rescue! 
/// /// [`BufReader`]: struct.BufReader.html /// [`File`]: ../fs/struct.File.html /// [`read_line`]: #method.read_line /// [`lines`]: #method.lines /// [`Read`]: trait.Read.html /// /// ```no_run /// use std::io::{self, BufReader}; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let f = File::open("foo.txt")?; /// let f = BufReader::new(f); /// /// for line in f.lines() { /// println!("{}", line.unwrap()); /// } /// /// Ok(()) /// } /// ``` /// #[stable(feature = "rust1", since = "1.0.0")] pub trait BufRead: Read { /// Returns the contents of the internal buffer, filling it with more data /// from the inner reader if it is empty. /// /// This function is a lower-level call. It needs to be paired with the /// [`consume`] method to function properly. When calling this /// method, none of the contents will be "read" in the sense that later /// calling `read` may return the same contents. As such, [`consume`] must /// be called with the number of bytes that are consumed from this buffer to /// ensure that the bytes are never returned twice. /// /// [`consume`]: #tymethod.consume /// /// An empty buffer returned indicates that the stream has reached EOF. /// /// # Errors /// /// This function will return an I/O error if the underlying reader was /// read, but returned an error. 
/// /// # Examples /// /// A locked standard input implements `BufRead`: /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// /// let stdin = io::stdin(); /// let mut stdin = stdin.lock(); /// /// let buffer = stdin.fill_buf().unwrap(); /// /// // work with buffer /// println!("{:?}", buffer); /// /// // ensure the bytes we worked with aren't returned again later /// let length = buffer.len(); /// stdin.consume(length); /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn fill_buf(&mut self) -> Result<&[u8]>; /// Tells this buffer that `amt` bytes have been consumed from the buffer, /// so they should no longer be returned in calls to `read`. /// /// This function is a lower-level call. It needs to be paired with the /// [`fill_buf`] method to function properly. This function does /// not perform any I/O, it simply informs this object that some amount of /// its buffer, returned from [`fill_buf`], has been consumed and should /// no longer be returned. As such, this function may do odd things if /// [`fill_buf`] isn't called before calling it. /// /// The `amt` must be `<=` the number of bytes in the buffer returned by /// [`fill_buf`]. /// /// # Examples /// /// Since `consume()` is meant to be used with [`fill_buf`], /// that method's example includes an example of `consume()`. /// /// [`fill_buf`]: #tymethod.fill_buf #[stable(feature = "rust1", since = "1.0.0")] fn consume(&mut self, amt: usize); /// Read all bytes into `buf` until the delimiter `byte` or EOF is reached. /// /// This function will read bytes from the underlying stream until the /// delimiter or EOF is found. Once found, all bytes up to, and including, /// the delimiter (if found) will be appended to `buf`. /// /// If successful, this function will return the total number of bytes read. /// /// # Errors /// /// This function will ignore all instances of [`ErrorKind::Interrupted`] and /// will otherwise return any errors returned by [`fill_buf`]. 
/// /// If an I/O error is encountered then all bytes read so far will be /// present in `buf` and its length will have been adjusted appropriately. /// /// [`fill_buf`]: #tymethod.fill_buf /// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted /// /// # Examples /// /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In /// this example, we use [`Cursor`] to read all the bytes in a byte slice /// in hyphen delimited segments: /// /// [`Cursor`]: struct.Cursor.html /// /// ``` /// use std::io::{self, BufRead}; /// /// let mut cursor = io::Cursor::new(b"lorem-ipsum"); /// let mut buf = vec![]; /// /// // cursor is at 'l' /// let num_bytes = cursor.read_until(b'-', &mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 6); /// assert_eq!(buf, b"lorem-"); /// buf.clear(); /// /// // cursor is at 'i' /// let num_bytes = cursor.read_until(b'-', &mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 5); /// assert_eq!(buf, b"ipsum"); /// buf.clear(); /// /// // cursor is at EOF /// let num_bytes = cursor.read_until(b'-', &mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 0); /// assert_eq!(buf, b""); /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> Result<usize> { read_until(self, byte, buf) } /// Read all bytes until a newline (the 0xA byte) is reached, and append /// them to the provided buffer. /// /// This function will read bytes from the underlying stream until the /// newline delimiter (the 0xA byte) or EOF is found. Once found, all bytes /// up to, and including, the delimiter (if found) will be appended to /// `buf`. /// /// If successful, this function will return the total number of bytes read. /// /// If this function returns `Ok(0)`, the stream has reached EOF. 
/// /// # Errors /// /// This function has the same error semantics as [`read_until`] and will /// also return an error if the read bytes are not valid UTF-8. If an I/O /// error is encountered then `buf` may contain some bytes already read in /// the event that all data read so far was valid UTF-8. /// /// [`read_until`]: #method.read_until /// /// # Examples /// /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In /// this example, we use [`Cursor`] to read all the lines in a byte slice: /// /// [`Cursor`]: struct.Cursor.html /// /// ``` /// use std::io::{self, BufRead}; /// /// let mut cursor = io::Cursor::new(b"foo\nbar"); /// let mut buf = String::new(); /// /// // cursor is at 'f' /// let num_bytes = cursor.read_line(&mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 4); /// assert_eq!(buf, "foo\n"); /// buf.clear(); /// /// // cursor is at 'b' /// let num_bytes = cursor.read_line(&mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 3); /// assert_eq!(buf, "bar"); /// buf.clear(); /// /// // cursor is at EOF /// let num_bytes = cursor.read_line(&mut buf) /// .expect("reading from cursor won't fail"); /// assert_eq!(num_bytes, 0); /// assert_eq!(buf, ""); /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn read_line(&mut self, buf: &mut String) -> Result<usize> { // Note that we are not calling the `.read_until` method here, but // rather our hardcoded implementation. For more details as to why, see // the comments in `read_to_end`. append_to_string(buf, |b| read_until(self, b'\n', b)) } /// Returns an iterator over the contents of this reader split on the byte /// `byte`. /// /// The iterator returned from this function will return instances of /// [`io::Result`]`<`[`Vec<u8>`]`>`. Each vector returned will *not* have /// the delimiter byte at the end. /// /// This function will yield errors whenever [`read_until`] would have /// also yielded an error. 
/// /// [`io::Result`]: type.Result.html /// [`Vec<u8>`]: ../vec/struct.Vec.html /// [`read_until`]: #method.read_until /// /// # Examples /// /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In /// this example, we use [`Cursor`] to iterate over all hyphen delimited /// segments in a byte slice /// /// [`Cursor`]: struct.Cursor.html /// /// ``` /// use std::io::{self, BufRead}; /// /// let cursor = io::Cursor::new(b"lorem-ipsum-dolor"); /// /// let mut split_iter = cursor.split(b'-').map(|l| l.unwrap()); /// assert_eq!(split_iter.next(), Some(b"lorem".to_vec())); /// assert_eq!(split_iter.next(), Some(b"ipsum".to_vec())); /// assert_eq!(split_iter.next(), Some(b"dolor".to_vec())); /// assert_eq!(split_iter.next(), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn split(self, byte: u8) -> Split<Self> where Self: Sized, { Split { buf: self, delim: byte } } /// Returns an iterator over the lines of this reader. /// /// The iterator returned from this function will yield instances of /// [`io::Result`]`<`[`String`]`>`. Each string returned will *not* have a newline /// byte (the 0xA byte) or CRLF (0xD, 0xA bytes) at the end. /// /// [`io::Result`]: type.Result.html /// [`String`]: ../string/struct.String.html /// /// # Examples /// /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In /// this example, we use [`Cursor`] to iterate over all the lines in a byte /// slice. 
/// /// [`Cursor`]: struct.Cursor.html /// /// ``` /// use std::io::{self, BufRead}; /// /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor"); /// /// let mut lines_iter = cursor.lines().map(|l| l.unwrap()); /// assert_eq!(lines_iter.next(), Some(String::from("lorem"))); /// assert_eq!(lines_iter.next(), Some(String::from("ipsum"))); /// assert_eq!(lines_iter.next(), Some(String::from("dolor"))); /// assert_eq!(lines_iter.next(), None); /// ``` /// /// # Errors /// /// Each line of the iterator has the same error semantics as [`BufRead::read_line`]. /// /// [`BufRead::read_line`]: trait.BufRead.html#method.read_line #[stable(feature = "rust1", since = "1.0.0")] fn lines(self) -> Lines<Self> where Self: Sized, { Lines { buf: self } } } /// Adaptor to chain together two readers. /// /// This struct is generally created by calling [`chain`] on a reader. /// Please see the documentation of [`chain`] for more details. /// /// [`chain`]: trait.Read.html#method.chain #[stable(feature = "rust1", since = "1.0.0")] pub struct Chain<T, U> { first: T, second: U, done_first: bool, } impl<T, U> Chain<T, U> { /// Consumes the `Chain`, returning the wrapped readers. /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut foo_file = File::open("foo.txt")?; /// let mut bar_file = File::open("bar.txt")?; /// /// let chain = foo_file.chain(bar_file); /// let (foo_file, bar_file) = chain.into_inner(); /// Ok(()) /// } /// ``` #[stable(feature = "more_io_inner_methods", since = "1.20.0")] pub fn into_inner(self) -> (T, U) { (self.first, self.second) } /// Gets references to the underlying readers in this `Chain`. 
/// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut foo_file = File::open("foo.txt")?; /// let mut bar_file = File::open("bar.txt")?; /// /// let chain = foo_file.chain(bar_file); /// let (foo_file, bar_file) = chain.get_ref(); /// Ok(()) /// } /// ``` #[stable(feature = "more_io_inner_methods", since = "1.20.0")] pub fn get_ref(&self) -> (&T, &U) { (&self.first, &self.second) } /// Gets mutable references to the underlying readers in this `Chain`. /// /// Care should be taken to avoid modifying the internal I/O state of the /// underlying readers as doing so may corrupt the internal state of this /// `Chain`. /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut foo_file = File::open("foo.txt")?; /// let mut bar_file = File::open("bar.txt")?; /// /// let mut chain = foo_file.chain(bar_file); /// let (foo_file, bar_file) = chain.get_mut(); /// Ok(()) /// } /// ``` #[stable(feature = "more_io_inner_methods", since = "1.20.0")] pub fn get_mut(&mut self) -> (&mut T, &mut U) { (&mut self.first, &mut self.second) } } #[stable(feature = "std_debug", since = "1.16.0")] impl<T: fmt::Debug, U: fmt::Debug> fmt::Debug for Chain<T, U> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Chain").field("t", &self.first).field("u", &self.second).finish() } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Read, U: Read> Read for Chain<T, U> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { if !self.done_first { match self.first.read(buf)? { 0 if !buf.is_empty() => self.done_first = true, n => return Ok(n), } } self.second.read(buf) } fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> Result<usize> { if !self.done_first { match self.first.read_vectored(bufs)? 
{ 0 if bufs.iter().any(|b| !b.is_empty()) => self.done_first = true, n => return Ok(n), } } self.second.read_vectored(bufs) } unsafe fn initializer(&self) -> Initializer { let initializer = self.first.initializer(); if initializer.should_initialize() { initializer } else { self.second.initializer() } } } #[stable(feature = "chain_bufread", since = "1.9.0")] impl<T: BufRead, U: BufRead> BufRead for Chain<T, U> { fn fill_buf(&mut self) -> Result<&[u8]> { if !self.done_first { match self.first.fill_buf()? { buf if buf.is_empty() => { self.done_first = true; } buf => return Ok(buf), } } self.second.fill_buf() } fn consume(&mut self, amt: usize) { if !self.done_first { self.first.consume(amt) } else { self.second.consume(amt) } } } /// Reader adaptor which limits the bytes read from an underlying reader. /// /// This struct is generally created by calling [`take`] on a reader. /// Please see the documentation of [`take`] for more details. /// /// [`take`]: trait.Read.html#method.take #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Take<T> { inner: T, limit: u64, } impl<T> Take<T> { /// Returns the number of bytes that can be read before this instance will /// return EOF. /// /// # Note /// /// This instance may reach `EOF` after reading fewer bytes than indicated by /// this method if the underlying [`Read`] instance reaches EOF. /// /// [`Read`]: ../../std/io/trait.Read.html /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let f = File::open("foo.txt")?; /// /// // read at most five bytes /// let handle = f.take(5); /// /// println!("limit: {}", handle.limit()); /// Ok(()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn limit(&self) -> u64 { self.limit } /// Sets the number of bytes that can be read before this instance will /// return EOF. 
This is the same as constructing a new `Take` instance, so /// the amount of bytes read and the previous limit value don't matter when /// calling this method. /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let f = File::open("foo.txt")?; /// /// // read at most five bytes /// let mut handle = f.take(5); /// handle.set_limit(10); /// /// assert_eq!(handle.limit(), 10); /// Ok(()) /// } /// ``` #[stable(feature = "take_set_limit", since = "1.27.0")] pub fn set_limit(&mut self, limit: u64) { self.limit = limit; } /// Consumes the `Take`, returning the wrapped reader. /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut file = File::open("foo.txt")?; /// /// let mut buffer = [0; 5]; /// let mut handle = file.take(5); /// handle.read(&mut buffer)?; /// /// let file = handle.into_inner(); /// Ok(()) /// } /// ``` #[stable(feature = "io_take_into_inner", since = "1.15.0")] pub fn into_inner(self) -> T { self.inner } /// Gets a reference to the underlying reader. /// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut file = File::open("foo.txt")?; /// /// let mut buffer = [0; 5]; /// let mut handle = file.take(5); /// handle.read(&mut buffer)?; /// /// let file = handle.get_ref(); /// Ok(()) /// } /// ``` #[stable(feature = "more_io_inner_methods", since = "1.20.0")] pub fn get_ref(&self) -> &T { &self.inner } /// Gets a mutable reference to the underlying reader. /// /// Care should be taken to avoid modifying the internal I/O state of the /// underlying reader as doing so may corrupt the internal limit of this /// `Take`. 
/// /// # Examples /// /// ```no_run /// use std::io; /// use std::io::prelude::*; /// use std::fs::File; /// /// fn main() -> io::Result<()> { /// let mut file = File::open("foo.txt")?; /// /// let mut buffer = [0; 5]; /// let mut handle = file.take(5); /// handle.read(&mut buffer)?; /// /// let file = handle.get_mut(); /// Ok(()) /// } /// ``` #[stable(feature = "more_io_inner_methods", since = "1.20.0")] pub fn get_mut(&mut self) -> &mut T { &mut self.inner } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: Read> Read for Take<T> { fn read(&mut self, buf: &mut [u8]) -> Result<usize> { // Don't call into inner reader at all at EOF because it may still block if self.limit == 0 { return Ok(0); } let max = cmp::min(buf.len() as u64, self.limit) as usize; let n = self.inner.read(&mut buf[..max])?; self.limit -= n as u64; Ok(n) } unsafe fn initializer(&self) -> Initializer { self.inner.initializer() } fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> { // Pass in a reservation_size closure that respects the current value // of limit for each read. If we hit the read limit, this prevents the // final zero-byte read from allocating again. read_to_end_with_reservation(self, buf, |self_| cmp::min(self_.limit, 32) as usize) } } #[stable(feature = "rust1", since = "1.0.0")] impl<T: BufRead> BufRead for Take<T> { fn fill_buf(&mut self) -> Result<&[u8]> { // Don't call into inner reader at all at EOF because it may still block if self.limit == 0 { return Ok(&[]); } let buf = self.inner.fill_buf()?; let cap = cmp::min(buf.len() as u64, self.limit) as usize; Ok(&buf[..cap]) } fn consume(&mut self, amt: usize) { // Don't let callers reset the limit by passing an overlarge value let amt = cmp::min(amt as u64, self.limit) as usize; self.limit -= amt as u64; self.inner.consume(amt); } } /// An iterator over `u8` values of a reader. /// /// This struct is generally created by calling [`bytes`] on a reader. 
/// Please see the documentation of [`bytes`] for more details. /// /// [`bytes`]: trait.Read.html#method.bytes #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Bytes<R> { inner: R, } #[stable(feature = "rust1", since = "1.0.0")] impl<R: Read> Iterator for Bytes<R> { type Item = Result<u8>; fn next(&mut self) -> Option<Result<u8>> { let mut byte = 0; loop { return match self.inner.read(slice::from_mut(&mut byte)) { Ok(0) => None, Ok(..) => Some(Ok(byte)), Err(ref e) if e.kind() == ErrorKind::Interrupted => continue, Err(e) => Some(Err(e)), }; } } } /// An iterator over the contents of an instance of `BufRead` split on a /// particular byte. /// /// This struct is generally created by calling [`split`] on a `BufRead`. /// Please see the documentation of [`split`] for more details. /// /// [`split`]: trait.BufRead.html#method.split #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Split<B> { buf: B, delim: u8, } #[stable(feature = "rust1", since = "1.0.0")] impl<B: BufRead> Iterator for Split<B> { type Item = Result<Vec<u8>>; fn next(&mut self) -> Option<Result<Vec<u8>>> { let mut buf = Vec::new(); match self.buf.read_until(self.delim, &mut buf) { Ok(0) => None, Ok(_n) => { if buf[buf.len() - 1] == self.delim { buf.pop(); } Some(Ok(buf)) } Err(e) => Some(Err(e)), } } } /// An iterator over the lines of an instance of `BufRead`. /// /// This struct is generally created by calling [`lines`] on a `BufRead`. /// Please see the documentation of [`lines`] for more details. 
/// /// [`lines`]: trait.BufRead.html#method.lines #[stable(feature = "rust1", since = "1.0.0")] #[derive(Debug)] pub struct Lines<B> { buf: B, } #[stable(feature = "rust1", since = "1.0.0")] impl<B: BufRead> Iterator for Lines<B> { type Item = Result<String>; fn next(&mut self) -> Option<Result<String>> { let mut buf = String::new(); match self.buf.read_line(&mut buf) { Ok(0) => None, Ok(_n) => { if buf.ends_with('\n') { buf.pop(); if buf.ends_with('\r') { buf.pop(); } } Some(Ok(buf)) } Err(e) => Some(Err(e)), } } } #[cfg(test)] mod tests { use super::{repeat, Cursor, SeekFrom}; use crate::cmp; use crate::io::prelude::*; use crate::io::{self, IoSlice, IoSliceMut}; use crate::ops::Deref; #[test] #[cfg_attr(target_os = "emscripten", ignore)] fn read_until() { let mut buf = Cursor::new(&b"12"[..]); let mut v = Vec::new(); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 2); assert_eq!(v, b"12"); let mut buf = Cursor::new(&b"1233"[..]); let mut v = Vec::new(); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 3); assert_eq!(v, b"123"); v.truncate(0); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 1); assert_eq!(v, b"3"); v.truncate(0); assert_eq!(buf.read_until(b'3', &mut v).unwrap(), 0); assert_eq!(v, []); } #[test] fn split() { let buf = Cursor::new(&b"12"[..]); let mut s = buf.split(b'3'); assert_eq!(s.next().unwrap().unwrap(), vec![b'1', b'2']); assert!(s.next().is_none()); let buf = Cursor::new(&b"1233"[..]); let mut s = buf.split(b'3'); assert_eq!(s.next().unwrap().unwrap(), vec![b'1', b'2']); assert_eq!(s.next().unwrap().unwrap(), vec![]); assert!(s.next().is_none()); } #[test] fn read_line() { let mut buf = Cursor::new(&b"12"[..]); let mut v = String::new(); assert_eq!(buf.read_line(&mut v).unwrap(), 2); assert_eq!(v, "12"); let mut buf = Cursor::new(&b"12\n\n"[..]); let mut v = String::new(); assert_eq!(buf.read_line(&mut v).unwrap(), 3); assert_eq!(v, "12\n"); v.truncate(0); assert_eq!(buf.read_line(&mut v).unwrap(), 1); assert_eq!(v, "\n"); 
v.truncate(0); assert_eq!(buf.read_line(&mut v).unwrap(), 0); assert_eq!(v, ""); } #[test] fn lines() { let buf = Cursor::new(&b"12\r"[..]); let mut s = buf.lines(); assert_eq!(s.next().unwrap().unwrap(), "12\r".to_string()); assert!(s.next().is_none()); let buf = Cursor::new(&b"12\r\n\n"[..]); let mut s = buf.lines(); assert_eq!(s.next().unwrap().unwrap(), "12".to_string()); assert_eq!(s.next().unwrap().unwrap(), "".to_string()); assert!(s.next().is_none()); } #[test] fn read_to_end() { let mut c = Cursor::new(&b""[..]); let mut v = Vec::new(); assert_eq!(c.read_to_end(&mut v).unwrap(), 0); assert_eq!(v, []); let mut c = Cursor::new(&b"1"[..]); let mut v = Vec::new(); assert_eq!(c.read_to_end(&mut v).unwrap(), 1); assert_eq!(v, b"1"); let cap = 1024 * 1024; let data = (0..cap).map(|i| (i / 3) as u8).collect::<Vec<_>>(); let mut v = Vec::new(); let (a, b) = data.split_at(data.len() / 2); assert_eq!(Cursor::new(a).read_to_end(&mut v).unwrap(), a.len()); assert_eq!(Cursor::new(b).read_to_end(&mut v).unwrap(), b.len()); assert_eq!(v, data); } #[test] fn read_to_string() { let mut c = Cursor::new(&b""[..]); let mut v = String::new(); assert_eq!(c.read_to_string(&mut v).unwrap(), 0); assert_eq!(v, ""); let mut c = Cursor::new(&b"1"[..]); let mut v = String::new(); assert_eq!(c.read_to_string(&mut v).unwrap(), 1); assert_eq!(v, "1"); let mut c = Cursor::new(&b"\xff"[..]); let mut v = String::new(); assert!(c.read_to_string(&mut v).is_err()); } #[test] fn read_exact() { let mut buf = [0; 4]; let mut c = Cursor::new(&b""[..]); assert_eq!(c.read_exact(&mut buf).unwrap_err().kind(), io::ErrorKind::UnexpectedEof); let mut c = Cursor::new(&b"123"[..]).chain(Cursor::new(&b"456789"[..])); c.read_exact(&mut buf).unwrap(); assert_eq!(&buf, b"1234"); c.read_exact(&mut buf).unwrap(); assert_eq!(&buf, b"5678"); assert_eq!(c.read_exact(&mut buf).unwrap_err().kind(), io::ErrorKind::UnexpectedEof); } #[test] fn read_exact_slice() { let mut buf = [0; 4]; let mut c = &b""[..]; 
assert_eq!(c.read_exact(&mut buf).unwrap_err().kind(), io::ErrorKind::UnexpectedEof); let mut c = &b"123"[..]; assert_eq!(c.read_exact(&mut buf).unwrap_err().kind(), io::ErrorKind::UnexpectedEof); // make sure the optimized (early returning) method is being used assert_eq!(&buf, &[0; 4]); let mut c = &b"1234"[..]; c.read_exact(&mut buf).unwrap(); assert_eq!(&buf, b"1234"); let mut c = &b"56789"[..]; c.read_exact(&mut buf).unwrap(); assert_eq!(&buf, b"5678"); assert_eq!(c, b"9"); } #[test] fn take_eof() { struct R; impl Read for R { fn read(&mut self, _: &mut [u8]) -> io::Result<usize> { Err(io::Error::new(io::ErrorKind::Other, "")) } } impl BufRead for R { fn fill_buf(&mut self) -> io::Result<&[u8]> { Err(io::Error::new(io::ErrorKind::Other, "")) } fn consume(&mut self, _amt: usize) {} } let mut buf = [0; 1]; assert_eq!(0, R.take(0).read(&mut buf).unwrap()); assert_eq!(b"", R.take(0).fill_buf().unwrap()); } fn cmp_bufread<Br1: BufRead, Br2: BufRead>(mut br1: Br1, mut br2: Br2, exp: &[u8]) { let mut cat = Vec::new(); loop { let consume = { let buf1 = br1.fill_buf().unwrap(); let buf2 = br2.fill_buf().unwrap(); let minlen = if buf1.len() < buf2.len() { buf1.len() } else { buf2.len() }; assert_eq!(buf1[..minlen], buf2[..minlen]); cat.extend_from_slice(&buf1[..minlen]); minlen }; if consume == 0 { break; } br1.consume(consume); br2.consume(consume); } assert_eq!(br1.fill_buf().unwrap().len(), 0); assert_eq!(br2.fill_buf().unwrap().len(), 0); assert_eq!(&cat[..], &exp[..]) } #[test] fn chain_bufread() { let testdata = b"ABCDEFGHIJKL"; let chain1 = (&testdata[..3]).chain(&testdata[3..6]).chain(&testdata[6..9]).chain(&testdata[9..]); let chain2 = (&testdata[..4]).chain(&testdata[4..8]).chain(&testdata[8..]); cmp_bufread(chain1, chain2, &testdata[..]); } #[test] fn chain_zero_length_read_is_not_eof() { let a = b"A"; let b = b"B"; let mut s = String::new(); let mut chain = (&a[..]).chain(&b[..]); chain.read(&mut []).unwrap(); chain.read_to_string(&mut s).unwrap(); 
assert_eq!("AB", s); } #[bench] #[cfg_attr(target_os = "emscripten", ignore)] fn bench_read_to_end(b: &mut test::Bencher) { b.iter(|| { let mut lr = repeat(1).take(10000000); let mut vec = Vec::with_capacity(1024); super::read_to_end(&mut lr, &mut vec) }); } #[test] fn seek_len() -> io::Result<()> { let mut c = Cursor::new(vec![0; 15]); assert_eq!(c.stream_len()?, 15); c.seek(SeekFrom::End(0))?; let old_pos = c.stream_position()?; assert_eq!(c.stream_len()?, 15); assert_eq!(c.stream_position()?, old_pos); c.seek(SeekFrom::Start(7))?; c.seek(SeekFrom::Current(2))?; let old_pos = c.stream_position()?; assert_eq!(c.stream_len()?, 15); assert_eq!(c.stream_position()?, old_pos); Ok(()) } #[test] fn seek_position() -> io::Result<()> { // All `asserts` are duplicated here to make sure the method does not // change anything about the seek state. let mut c = Cursor::new(vec![0; 15]); assert_eq!(c.stream_position()?, 0); assert_eq!(c.stream_position()?, 0); c.seek(SeekFrom::End(0))?; assert_eq!(c.stream_position()?, 15); assert_eq!(c.stream_position()?, 15); c.seek(SeekFrom::Start(7))?; c.seek(SeekFrom::Current(2))?; assert_eq!(c.stream_position()?, 9); assert_eq!(c.stream_position()?, 9); c.seek(SeekFrom::End(-3))?; c.seek(SeekFrom::Current(1))?; c.seek(SeekFrom::Current(-5))?; assert_eq!(c.stream_position()?, 8); assert_eq!(c.stream_position()?, 8); Ok(()) } // A simple example reader which uses the default implementation of // read_to_end. struct ExampleSliceReader<'a> { slice: &'a [u8], } impl<'a> Read for ExampleSliceReader<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let len = cmp::min(self.slice.len(), buf.len()); buf[..len].copy_from_slice(&self.slice[..len]); self.slice = &self.slice[len..]; Ok(len) } } #[test] fn test_read_to_end_capacity() -> io::Result<()> { let input = &b"foo"[..]; // read_to_end() generally needs to over-allocate, both for efficiency // and so that it can distinguish EOF. 
Assert that this is the case // with this simple ExampleSliceReader struct, which uses the default // implementation of read_to_end. Even though vec1 is allocated with // exactly enough capacity for the read, read_to_end will allocate more // space here. let mut vec1 = Vec::with_capacity(input.len()); ExampleSliceReader { slice: input }.read_to_end(&mut vec1)?; assert_eq!(vec1.len(), input.len()); assert!(vec1.capacity() > input.len(), "allocated more"); // However, std::io::Take includes an implementation of read_to_end // that will not allocate when the limit has already been reached. In // this case, vec2 never grows. let mut vec2 = Vec::with_capacity(input.len()); ExampleSliceReader { slice: input }.take(input.len() as u64).read_to_end(&mut vec2)?; assert_eq!(vec2.len(), input.len()); assert_eq!(vec2.capacity(), input.len(), "did not allocate more"); Ok(()) } #[test] fn io_slice_mut_advance() { let mut buf1 = [1; 8]; let mut buf2 = [2; 16]; let mut buf3 = [3; 8]; let mut bufs = &mut [ IoSliceMut::new(&mut buf1), IoSliceMut::new(&mut buf2), IoSliceMut::new(&mut buf3), ][..]; // Only in a single buffer.. bufs = IoSliceMut::advance(bufs, 1); assert_eq!(bufs[0].deref(), [1; 7].as_ref()); assert_eq!(bufs[1].deref(), [2; 16].as_ref()); assert_eq!(bufs[2].deref(), [3; 8].as_ref()); // Removing a buffer, leaving others as is. bufs = IoSliceMut::advance(bufs, 7); assert_eq!(bufs[0].deref(), [2; 16].as_ref()); assert_eq!(bufs[1].deref(), [3; 8].as_ref()); // Removing a buffer and removing from the next buffer. bufs = IoSliceMut::advance(bufs, 18); assert_eq!(bufs[0].deref(), [3; 6].as_ref()); } #[test] fn io_slice_mut_advance_empty_slice() { let empty_bufs = &mut [][..]; // Shouldn't panic. IoSliceMut::advance(empty_bufs, 1); } #[test] fn io_slice_mut_advance_beyond_total_length() { let mut buf1 = [1; 8]; let mut bufs = &mut [IoSliceMut::new(&mut buf1)][..]; // Going beyond the total length should be ok. 
bufs = IoSliceMut::advance(bufs, 9); assert!(bufs.is_empty()); } #[test] fn io_slice_advance() { let buf1 = [1; 8]; let buf2 = [2; 16]; let buf3 = [3; 8]; let mut bufs = &mut [IoSlice::new(&buf1), IoSlice::new(&buf2), IoSlice::new(&buf3)][..]; // Only in a single buffer.. bufs = IoSlice::advance(bufs, 1); assert_eq!(bufs[0].deref(), [1; 7].as_ref()); assert_eq!(bufs[1].deref(), [2; 16].as_ref()); assert_eq!(bufs[2].deref(), [3; 8].as_ref()); // Removing a buffer, leaving others as is. bufs = IoSlice::advance(bufs, 7); assert_eq!(bufs[0].deref(), [2; 16].as_ref()); assert_eq!(bufs[1].deref(), [3; 8].as_ref()); // Removing a buffer and removing from the next buffer. bufs = IoSlice::advance(bufs, 18); assert_eq!(bufs[0].deref(), [3; 6].as_ref()); } #[test] fn io_slice_advance_empty_slice() { let empty_bufs = &mut [][..]; // Shouldn't panic. IoSlice::advance(empty_bufs, 1); } #[test] fn io_slice_advance_beyond_total_length() { let buf1 = [1; 8]; let mut bufs = &mut [IoSlice::new(&buf1)][..]; // Going beyond the total length should be ok. bufs = IoSlice::advance(bufs, 9); assert!(bufs.is_empty()); } }
32.871301
97
0.546365
1e15ddd65448d8cc722e6aa1c4af0e433bd3c312
392
use json::JsonValue; use crate::book; pub fn init(index:&String) -> Result<JsonValue,&'static str> { match book::check(index.to_string()){ Ok(bool)=>{ let mut build = JsonValue::new_object(); build.insert("exists",bool).unwrap(); return Ok(build); }, Err(_)=>{ return Err("failed-get_from_book"); } } }
24.5
62
0.530612
f869c6a0a6bc19cd094facf204bbb4325081cb66
11,004
//! This module implements the global `Symbol` object. //! //! The data type symbol is a primitive data type. //! The `Symbol()` function returns a value of type symbol, has static properties that expose //! several members of built-in objects, has static methods that expose the global symbol registry, //! and resembles a built-in object class, but is incomplete as a constructor because it does not //! support the syntax "`new Symbol()`". //! //! Every symbol value returned from `Symbol()` is unique. //! //! More information: //! - [MDN documentation][mdn] //! - [ECMAScript reference][spec] //! //! [spec]: https://tc39.es/ecma262/#sec-symbol-value //! [mdn]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol use crate::JsString; use boa_gc::{unsafe_empty_trace, Finalize, Trace}; use std::{ cell::Cell, fmt::{self, Display}, hash::{Hash, Hasher}, rc::Rc, }; /// A structure that contains the JavaScript well known symbols. /// /// # Examples /// ``` ///# use boa_engine::symbol::WellKnownSymbols; /// /// let iterator = WellKnownSymbols::iterator(); /// assert_eq!(iterator.description().as_deref(), Some("Symbol.iterator")); /// ``` /// This is equivalent to `let iterator = Symbol.iterator` in JavaScript. #[derive(Debug, Clone)] pub struct WellKnownSymbols { async_iterator: JsSymbol, has_instance: JsSymbol, is_concat_spreadable: JsSymbol, iterator: JsSymbol, r#match: JsSymbol, match_all: JsSymbol, replace: JsSymbol, search: JsSymbol, species: JsSymbol, split: JsSymbol, to_primitive: JsSymbol, to_string_tag: JsSymbol, unscopables: JsSymbol, } /// Reserved number of symbols. /// /// This is where the well known symbol live /// and internal engine symbols. const RESERVED_SYMBOL_HASHES: u64 = 128; thread_local! { /// Cached well known symbols static WELL_KNOW_SYMBOLS: WellKnownSymbols = WellKnownSymbols::new(); /// Symbol hash. /// /// For now this is an incremented u64 number. 
static SYMBOL_HASH_COUNT: Cell<u64> = Cell::new(RESERVED_SYMBOL_HASHES); } impl WellKnownSymbols { /// Create the well known symbols. fn new() -> Self { let mut count = 0; let async_iterator = JsSymbol::with_hash(count, Some("Symbol.asyncIterator".into())); count += 1; let has_instance = JsSymbol::with_hash(count, Some("Symbol.hasInstance".into())); count += 1; let is_concat_spreadable = JsSymbol::with_hash(count, Some("Symbol.isConcatSpreadable".into())); count += 1; let iterator = JsSymbol::with_hash(count, Some("Symbol.iterator".into())); count += 1; let match_ = JsSymbol::with_hash(count, Some("Symbol.match".into())); count += 1; let match_all = JsSymbol::with_hash(count, Some("Symbol.matchAll".into())); count += 1; let replace = JsSymbol::with_hash(count, Some("Symbol.replace".into())); count += 1; let search = JsSymbol::with_hash(count, Some("Symbol.search".into())); count += 1; let species = JsSymbol::with_hash(count, Some("Symbol.species".into())); count += 1; let split = JsSymbol::with_hash(count, Some("Symbol.split".into())); count += 1; let to_primitive = JsSymbol::with_hash(count, Some("Symbol.toPrimitive".into())); count += 1; let to_string_tag = JsSymbol::with_hash(count, Some("Symbol.toStringTag".into())); count += 1; let unscopables = JsSymbol::with_hash(count, Some("Symbol.unscopables".into())); Self { async_iterator, has_instance, is_concat_spreadable, iterator, r#match: match_, match_all, replace, search, species, split, to_primitive, to_string_tag, unscopables, } } /// The `Symbol.asyncIterator` well known symbol. /// /// A method that returns the default `AsyncIterator` for an object. /// Called by the semantics of the `for-await-of` statement. #[inline] pub fn async_iterator() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.async_iterator.clone()) } /// The `Symbol.hasInstance` well known symbol. /// /// A method that determines if a `constructor` object /// recognizes an object as one of the `constructor`'s instances. 
/// Called by the semantics of the instanceof operator. #[inline] pub fn has_instance() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.has_instance.clone()) } /// The `Symbol.isConcatSpreadable` well known symbol. /// /// A Boolean valued property that if `true` indicates that /// an object should be flattened to its array elements /// by `Array.prototype.concat`. #[inline] pub fn is_concat_spreadable() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.is_concat_spreadable.clone()) } /// The `Symbol.iterator` well known symbol. /// /// A method that returns the default Iterator for an object. /// Called by the semantics of the `for-of` statement. #[inline] pub fn iterator() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.iterator.clone()) } /// The `Symbol.match` well known symbol. /// /// A regular expression method that matches the regular expression /// against a string. Called by the `String.prototype.match` method. #[inline] pub fn r#match() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.r#match.clone()) } /// The `Symbol.matchAll` well known symbol. /// /// A regular expression method that returns an iterator, that yields /// matches of the regular expression against a string. /// Called by the `String.prototype.matchAll` method. #[inline] pub fn match_all() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.match_all.clone()) } /// The `Symbol.replace` well known symbol. /// /// A regular expression method that replaces matched substrings /// of a string. Called by the `String.prototype.replace` method. #[inline] pub fn replace() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.replace.clone()) } /// The `Symbol.search` well known symbol. /// /// A regular expression method that returns the index within a /// string that matches the regular expression. /// Called by the `String.prototype.search` method. 
#[inline] pub fn search() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.search.clone()) } /// The `Symbol.species` well known symbol. /// /// A function valued property that is the `constructor` function /// that is used to create derived objects. #[inline] pub fn species() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.species.clone()) } /// The `Symbol.split` well known symbol. /// /// A regular expression method that splits a string at the indices /// that match the regular expression. /// Called by the `String.prototype.split` method. #[inline] pub fn split() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.split.clone()) } /// The `Symbol.toPrimitive` well known symbol. /// /// A method that converts an object to a corresponding primitive value. /// Called by the `ToPrimitive` (`Value::to_primitve`) abstract operation. #[inline] pub fn to_primitive() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.to_primitive.clone()) } /// The `Symbol.toStringTag` well known symbol. /// /// A String valued property that is used in the creation of the default /// string description of an object. /// Accessed by the built-in method `Object.prototype.toString`. #[inline] pub fn to_string_tag() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.to_string_tag.clone()) } /// The `Symbol.unscopables` well known symbol. /// /// An object valued property whose own and inherited property names are property /// names that are excluded from the `with` environment bindings of the associated object. #[inline] pub fn unscopables() -> JsSymbol { WELL_KNOW_SYMBOLS.with(|symbols| symbols.unscopables.clone()) } } /// The inner representation of a JavaScript symbol. #[derive(Debug, Clone)] struct Inner { hash: u64, description: Option<JsString>, } /// This represents a JavaScript symbol primitive. #[derive(Debug, Clone)] pub struct JsSymbol { inner: Rc<Inner>, } impl JsSymbol { /// Create a new symbol. 
#[inline] pub fn new(description: Option<JsString>) -> Self { let hash = SYMBOL_HASH_COUNT.with(|count| { let hash = count.get(); count.set(hash + 1); hash }); Self { inner: Rc::new(Inner { hash, description }), } } /// Create a new symbol with a specified hash and description. #[inline] fn with_hash(hash: u64, description: Option<JsString>) -> Self { Self { inner: Rc::new(Inner { hash, description }), } } /// Returns the `Symbol`s description. #[inline] pub fn description(&self) -> Option<JsString> { self.inner.description.clone() } /// Returns the `Symbol`s hash. /// /// The hash is guaranteed to be unique. #[inline] pub fn hash(&self) -> u64 { self.inner.hash } /// Abstract operation `SymbolDescriptiveString ( sym )` /// /// More info: /// - [ECMAScript reference][spec] /// /// [spec]: https://tc39.es/ecma262/#sec-symboldescriptivestring pub fn descriptive_string(&self) -> JsString { self.to_string().into() } } impl Finalize for JsSymbol {} // Safety: `JsSymbol` does not contain any object that require trace, // so this is safe. unsafe impl Trace for JsSymbol { unsafe_empty_trace!(); } impl Display for JsSymbol { #[inline] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.inner.description { Some(desc) => write!(f, "Symbol({desc})"), None => write!(f, "Symbol()"), } } } impl Eq for JsSymbol {} impl PartialEq for JsSymbol { #[inline] fn eq(&self, other: &Self) -> bool { self.inner.hash == other.inner.hash } } impl PartialOrd for JsSymbol { #[inline] fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { self.inner.hash.partial_cmp(&other.inner.hash) } } impl Ord for JsSymbol { #[inline] fn cmp(&self, other: &Self) -> std::cmp::Ordering { self.inner.hash.cmp(&other.inner.hash) } } impl Hash for JsSymbol { #[inline] fn hash<H: Hasher>(&self, state: &mut H) { self.inner.hash.hash(state); } }
31.350427
99
0.627863
d5dacb9b9c6a9765429bbd8a5e89992809adc9f3
2,731
// Copyright 2020-2022 the Deno authors. All rights reserved. MIT license. use deno_ast::swc::common::Spanned; use deno_ast::ParsedSource; use crate::node::DeclarationKind; use crate::node::DocNode; use crate::parser::DocParser; use crate::swc_util::get_location; use crate::swc_util::js_doc_for_span; pub fn get_doc_node_for_export_decl( doc_parser: &DocParser, parsed_source: &ParsedSource, export_decl: &deno_ast::swc::ast::ExportDecl, ) -> DocNode { let export_span = export_decl.span(); use deno_ast::swc::ast::Decl; let js_doc = js_doc_for_span(parsed_source, &export_span); let location = get_location(parsed_source, export_span.lo()); match &export_decl.decl { Decl::Class(class_decl) => { let (name, class_def, decorator_js_doc) = super::class::get_doc_for_class_decl(parsed_source, class_decl); let js_doc = if js_doc.is_empty() { decorator_js_doc } else { js_doc }; DocNode::class(name, location, DeclarationKind::Export, js_doc, class_def) } Decl::Fn(fn_decl) => { let (name, fn_def) = super::function::get_doc_for_fn_decl(parsed_source, fn_decl); DocNode::function(name, location, DeclarationKind::Export, js_doc, fn_def) } Decl::Var(var_decl) => { let (name, var_def) = super::variable::get_doc_for_var_decl(var_decl); DocNode::variable( name, location, DeclarationKind::Export, js_doc, var_def, ) } Decl::TsInterface(ts_interface_decl) => { let (name, interface_def) = super::interface::get_doc_for_ts_interface_decl( parsed_source, ts_interface_decl, ); DocNode::interface( name, location, DeclarationKind::Export, js_doc, interface_def, ) } Decl::TsTypeAlias(ts_type_alias) => { let (name, type_alias_def) = super::type_alias::get_doc_for_ts_type_alias_decl( parsed_source, ts_type_alias, ); DocNode::type_alias( name, location, DeclarationKind::Export, js_doc, type_alias_def, ) } Decl::TsEnum(ts_enum) => { let (name, enum_def) = super::r#enum::get_doc_for_ts_enum_decl(parsed_source, ts_enum); DocNode::r#enum(name, location, DeclarationKind::Export, js_doc, enum_def) } 
Decl::TsModule(ts_module) => { let (name, namespace_def) = super::namespace::get_doc_for_ts_module( doc_parser, parsed_source, ts_module, ); DocNode::namespace( name, location, DeclarationKind::Export, js_doc, namespace_def, ) } } }
27.867347
80
0.626877
8ae729128978655b9268599607bc97a8960b800d
5,526
// Detecting lib features (i.e., features that are not lang features). // // These are declared using stability attributes (e.g., `#[stable (..)]` // and `#[unstable (..)]`), but are not declared in one single location // (unlike lang features), which means we need to collect them instead. use rustc::hir::map::Map; use rustc::middle::lib_features::LibFeatures; use rustc::ty::query::Providers; use rustc::ty::TyCtxt; use rustc_errors::struct_span_err; use rustc_hir::def_id::LOCAL_CRATE; use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc_span::symbol::Symbol; use rustc_span::{sym, Span}; use syntax::ast::{Attribute, MetaItem, MetaItemKind}; use rustc_error_codes::*; fn new_lib_features() -> LibFeatures { LibFeatures { stable: Default::default(), unstable: Default::default() } } pub struct LibFeatureCollector<'tcx> { tcx: TyCtxt<'tcx>, lib_features: LibFeatures, } impl LibFeatureCollector<'tcx> { fn new(tcx: TyCtxt<'tcx>) -> LibFeatureCollector<'tcx> { LibFeatureCollector { tcx, lib_features: new_lib_features() } } fn extract(&self, attr: &Attribute) -> Option<(Symbol, Option<Symbol>, Span)> { let stab_attrs = [sym::stable, sym::unstable, sym::rustc_const_unstable]; // Find a stability attribute (i.e., `#[stable (..)]`, `#[unstable (..)]`, // `#[rustc_const_unstable (..)]`). if let Some(stab_attr) = stab_attrs.iter().find(|stab_attr| attr.check_name(**stab_attr)) { let meta_item = attr.meta(); if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. }) = meta_item { let mut feature = None; let mut since = None; for meta in metas { if let Some(mi) = meta.meta_item() { // Find the `feature = ".."` meta-item. match (mi.name_or_empty(), mi.value_str()) { (sym::feature, val) => feature = val, (sym::since, val) => since = val, _ => {} } } } if let Some(feature) = feature { // This additional check for stability is to make sure we // don't emit additional, irrelevant errors for malformed // attributes. 
if *stab_attr != sym::stable || since.is_some() { return Some((feature, since, attr.span)); } } // We need to iterate over the other attributes, because // `rustc_const_unstable` is not mutually exclusive with // the other stability attributes, so we can't just `break` // here. } } None } fn collect_feature(&mut self, feature: Symbol, since: Option<Symbol>, span: Span) { let already_in_stable = self.lib_features.stable.contains_key(&feature); let already_in_unstable = self.lib_features.unstable.contains(&feature); match (since, already_in_stable, already_in_unstable) { (Some(since), _, false) => { if let Some(prev_since) = self.lib_features.stable.get(&feature) { if *prev_since != since { self.span_feature_error( span, &format!( "feature `{}` is declared stable since {}, \ but was previously declared stable since {}", feature, since, prev_since, ), ); return; } } self.lib_features.stable.insert(feature, since); } (None, false, _) => { self.lib_features.unstable.insert(feature); } (Some(_), _, true) | (None, true, _) => { self.span_feature_error( span, &format!( "feature `{}` is declared {}, but was previously declared {}", feature, if since.is_some() { "stable" } else { "unstable" }, if since.is_none() { "stable" } else { "unstable" }, ), ); } } } fn span_feature_error(&self, span: Span, msg: &str) { struct_span_err!(self.tcx.sess, span, E0711, "{}", &msg,).emit(); } } impl Visitor<'tcx> for LibFeatureCollector<'tcx> { type Map = Map<'tcx>; fn nested_visit_map(&mut self) -> NestedVisitorMap<'_, Self::Map> { NestedVisitorMap::All(&self.tcx.hir()) } fn visit_attribute(&mut self, attr: &'tcx Attribute) { if let Some((feature, stable, span)) = self.extract(attr) { self.collect_feature(feature, stable, span); } } } fn collect(tcx: TyCtxt<'_>) -> LibFeatures { let mut collector = LibFeatureCollector::new(tcx); let krate = tcx.hir().krate(); for attr in krate.non_exported_macro_attrs { collector.visit_attribute(attr); } intravisit::walk_crate(&mut collector, krate); 
collector.lib_features } pub fn provide(providers: &mut Providers<'_>) { providers.get_lib_features = |tcx, id| { assert_eq!(id, LOCAL_CRATE); tcx.arena.alloc(collect(tcx)) }; }
37.849315
99
0.526963
ffd290fb073db18c67a41b580e585cbe00c5016b
949
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Most traits cannot be derived for unions. #[derive( PartialEq, //~ ERROR this trait cannot be derived for unions PartialOrd, //~ ERROR this trait cannot be derived for unions Ord, //~ ERROR this trait cannot be derived for unions Hash, //~ ERROR this trait cannot be derived for unions Default, //~ ERROR this trait cannot be derived for unions Debug, //~ ERROR this trait cannot be derived for unions )] union U { a: u8, b: u16, } fn main() {}
35.148148
68
0.709168
ac8a17f1d3540d8751ff8bdf5adf539e13232053
2,681
#![cfg_attr(not(feature = "std"), no_std)] use ink_lang as ink; #[ink::contract] mod delegator { use accumulator::AccumulatorRef; use ink_storage::{ traits::{ PackedLayout, SpreadLayout, }, }; #[derive(Debug, scale::Encode, scale::Decode, Clone, SpreadLayout, PackedLayout)] #[cfg_attr( feature = "std", derive(scale_info::TypeInfo, ink_storage::traits::StorageLayout) )] pub struct AddrInstance { pub base: Option<AccumulatorRef>, } #[derive( Debug, Copy, Clone, PartialEq, Eq, scale::Encode, scale::Decode, SpreadLayout, PackedLayout, Default )] #[cfg_attr( feature = "std", derive(::scale_info::TypeInfo, ::ink_storage::traits::StorageLayout) )] pub struct Addr { pub base_addr: Option<AccountId>, } #[ink(storage)] pub struct Delegator { pub init: bool, pub components: AddrInstance, pub component_addrs: Addr, } impl Delegator { /// Instantiate a `delegator` contract with the given sub-contract codes. #[ink(constructor)] pub fn new() -> Self { Self { init: false, components: DAOComponents { base: None, }, component_addrs: DAOComponentAddrs { base_addr: None, }, } } /// Returns the `accumulator` value. #[ink(message)] pub fn get(&self) -> Option<AccountId> { self.component_addrs.base_addr } #[ink(message)] pub fn init_base(&mut self, base_code_hash: Hash, init_value: i32,version: u32) -> bool { let total_balance = Self::env().balance(); // instance base let salt = version.to_le_bytes(); let instance_params = AccumulatorRef::new(init_value) .endowment(total_balance / 2) .code_hash(base_code_hash) .salt_bytes(salt) .params(); let init_result = ink_env::instantiate_contract(&instance_params); let contract_addr = init_result.expect("failed at instantiating the `Base` contract"); let mut contract_instance: AccumulatorRef = ink_env::call::FromAccountId::from_account_id(contract_addr); self.components.base = Some(contract_instance); self.component_addrs.base_addr = Some(contract_addr); true } } }
30.465909
118
0.534875
e6f5b77b6b66541a2cc8d1b20c45a59cf257f5aa
2,458
use std::fmt; use ::GameBoy; use cpu::Register16; static SIZE_TABLE: [u8; 256] = [ // 0 1 2 3 4 5 6 7 8 9 a b c d e f 1, 3, 1, 1, 1, 1, 2, 1, 3, 1, 1, 1, 1, 1, 2, 1, // 0 2, 3, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, // 1 2, 3, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, // 2 2, 3, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 2, 1, // 3 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 4 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 5 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 6 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 7 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 8 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // 9 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // a 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // b 1, 1, 3, 3, 3, 1, 2, 1, 1, 1, 3, 1, 3, 3, 2, 1, // c 1, 1, 3, 1, 3, 1, 2, 1, 1, 1, 3, 1, 3, 1, 2, 1, // d 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, // e 2, 1, 2, 1, 1, 1, 2, 1, 2, 1, 3, 1, 1, 1, 2, 1, // f ]; pub struct Opcode { pub opcode: u8, pub length: u8, pub param: Option<u16>, pub x: u8, pub y: u8, pub z: u8, pub p: u8, pub q: u8 } impl Opcode { pub fn fetch_param(&mut self, gb: &GameBoy) -> Option<u16> { let param = match self.length { 1 => None, 2 => Some(gb.cartridge.read_byte(gb.cpu.get_16(Register16::PC) + 1) as u16), 3 => Some(gb.cartridge.read_word(gb.cpu.get_16(Register16::PC) + 1)), _ => unreachable!() }; self.param = param; param } pub fn flag_slice(&self) -> (u8, u8, u8, u8, u8) { ( self.x, self.y, self.z, self.p, self.q ) } } impl From<u8> for Opcode { fn from(op: u8) -> Self { let y = (op & 0b0011_1000) as u8 >> 3; Opcode { opcode: op, length: SIZE_TABLE[op as usize], param: None, x: (op & 0b1100_0000) as u8 >> 6, y, z: (op & 0b0000_0111) as u8, p: (y & 0b0000_0110) as u8 >> 1, q: (y & 0b0000_0001) as u8 } } } impl fmt::Debug for Opcode { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Opcode ( op: {:x}, len: {}, param: {:#x} )", self.opcode, self.length, self.param.unwrap_or(0) ) } 
}
29.614458
88
0.40236
21a5d94101e1c8f657402f48049480e6e17d37e6
5,727
//! # Getting Started //! ``` //! use siyo::clock::*; //! //! // Print current Local time and UTC time. //! let clock = Clock::new(); //! println!("Local: {}", clock); //! println!("UTC: {:?}", clock); //! //! // Print 'Hello, world #!' every 1/3 seconds //! /* let mut a = 0; //! loop { //! let now = Clock::new(); //! let b = now.since(&clock, SECOND / 3); //! if a != b { //! a = b; //! println!("Hello, world {}!", a); //! } //! } */ //! ``` use crate::math::Fr64; use chrono::{Datelike, TimeZone, Timelike}; use std::fmt::*; /// Fraction value for nanoseconds. pub const NANOSECOND: Fr64 = Fr64(1, 1_000_000_000); /// Fraction value for microseconds. pub const MICROSECOND: Fr64 = Fr64(1, 1_000_000); /// Fraction value for milliseconds. pub const MILLISECOND: Fr64 = Fr64(1, 1_000); /// Fraction value for seconds. pub const SECOND: Fr64 = Fr64(1, 1); /// Fraction value for minutes. pub const MINUTE: Fr64 = Fr64(60, 1); /// Fraction value for minutes. pub const HOUR: Fr64 = Fr64(60 * 60, 1); /// Fraction value for days. pub const DAY: Fr64 = Fr64(24 * 60 * 60, 1); /// Month of the year. #[repr(u8)] pub enum Month { /// January Jan = 1u8, /// Febuary Feb = 2, /// March Mar = 3, /// April Apr = 4, /// May May = 5, /// June Jun = 6, /// July Jul = 7, /// August Aug = 8, /// September Sep = 9, /// October Oct = 10, /// November Nov = 11, /// December Dec = 12, } /// Which day of the week. #[repr(u8)] pub enum DayOfWeek { /// Sunday Sunday = 0u8, /// Monday Monday = 1, /// Tuesday Tuesday = 2, /// Wednesday Wednesday = 3, /// Thursday Thursday = 4, /// Friday Friday = 5, /// Saturday Saturday = 6, } /// A calendar date and time. Stored as UTC. /// ``` /// use siyo::clock::*; /// let clock = Clock::new(); /// println!("{}", clock); // Print out in local time. /// println!("{:?}", clock); // Print out in UTC. /// ``` pub struct Clock(chrono::NaiveDateTime); impl Clock { /// Get the current time. 
/// /// ``` /// use siyo::clock::*; /// let clock = Clock::new(); /// ``` pub fn new() -> Self { Clock(chrono::offset::Utc::now().naive_utc()) } /// Define a utc time. pub fn utc(year: i32, month: u8, day: u8, hour: u8, min: u8, sec: u8) -> Option<Self> { let date = chrono::offset::Utc .ymd(year, month as u32, day as u32) .and_hms(hour as u32, min as u32, sec as u32); Some(Clock(date.naive_utc())) } /// Define a local time. /// /// ``` /// use siyo::clock::*; /// Clock::new(); /// ``` pub fn local(year: i32, month: u8, day: u8, hour: u8, min: u8, sec: u8) -> Option<Self> { let date = chrono::offset::Local .ymd(year, month as u32, day as u32) .and_hms(hour as u32, min as u32, sec as u32) .with_timezone(&chrono::Utc); Some(Clock(date.naive_utc())) } /// Get the year. pub fn year(&self) -> i32 { self.0.year() } /// Get the month. pub fn month(&self) -> Month { let month = self.0.month() as u8; unsafe { std::mem::transmute(month) } } /// Get the day of the month. pub fn day(&self) -> u8 { self.0.day() as u8 } /// Get the day of the week. pub fn dayofweek(&self) -> DayOfWeek { let dayofweek = self.0.weekday().num_days_from_sunday() as u8; unsafe { std::mem::transmute(dayofweek) } } /// Get the hour (0-23). pub fn hour(&self) -> u8 { self.0.hour() as u8 } /// Get the minute (0-59). pub fn minute(&self) -> u8 { self.0.minute() as u8 } /// Get the second (0-59). pub fn second(&self) -> u8 { self.0.second() as u8 } /// Get the nanosecond (0-1,999,999,999 b/c leap seconds). pub fn nanosecond(&self) -> u32 { self.0.nanosecond() } /// Get the amount of time since another clock in fractions of a second. 
/// /// ``` /// use siyo::clock::*; /// let start = Clock::new(); /// let nanos_since_start = Clock::new().since(&start, NANOSECOND); /// assert!(nanos_since_start >= 0); /// ``` pub fn since(&self, other: &Self, frac: Fr64) -> i64 { let duration = self.0 - other.0; let seconds: i64 = duration.num_seconds(); let nanos: i64 = (duration - chrono::Duration::seconds(duration.num_seconds())) .num_nanoseconds() .unwrap(); // Multiply time by reciprocal fraction (numerator). let frac_den = frac.0 as i128; let frac_num = frac.1 as i128; let seconds = seconds as i128 * frac_num; let nanos = nanos as i128 * frac_num; // Denominator let seconds_remaining = seconds % frac_den; // what couldn't be divided let nanos = nanos + (seconds_remaining * 1_000_000_000); let nanos = (nanos / frac_den) as i64; let seconds = (seconds / frac_den) as i64; // Add together seconds + (nanos / 1_000_000_000) } } impl Debug for Clock { fn fmt(&self, f: &mut Formatter) -> Result { write!(f, "{}", self.0) } } impl Display for Clock { fn fmt(&self, f: &mut Formatter) -> Result { write!( f, "{}", chrono::DateTime::<chrono::Local>::from_utc( self.0, chrono::offset::Local.offset_from_utc_datetime(&self.0) ) .naive_local() ) } } /*#[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4); } }*/
24.474359
93
0.521914
9cee2927fec51fed7b48c8fecb30accdf7ddfc5a
277
use crate::DeserializeFn; use serde::de::Expected; pub trait DeserializerRegistry<T: ?Sized> { fn get_deserializer<E>( &'static self, key: &str, expected: &dyn Expected, ) -> Result<DeserializeFn<T>, E> where E: serde::de::Error; }
21.307692
43
0.599278
d63ed43a432132de42807a834ba5b6ac864de064
20,959
// Copyright 2020 Parity Technologies (UK) Ltd. // This file is part of Tetcoin. // Tetcoin is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Tetcoin is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Tetcoin. If not, see <http://www.gnu.org/licenses/>. //! A validator discovery service for the Network Bridge. use core::marker::PhantomData; use std::borrow::Cow; use std::collections::{HashSet, HashMap, hash_map}; use std::sync::Arc; use async_trait::async_trait; use futures::channel::mpsc; use tc_network::multiaddr::{Multiaddr, Protocol}; use tc_authority_discovery::Service as AuthorityDiscoveryService; use tetcoin_node_network_protocol::PeerId; use tetcoin_primitives::v1::{AuthorityDiscoveryId, Block, Hash}; use tetcoin_node_network_protocol::peer_set::PeerSet; const LOG_TARGET: &str = "validator_discovery"; /// An abstraction over networking for the purposes of validator discovery service. #[async_trait] pub trait Network: Send + 'static { /// Ask the network to connect to these nodes and not disconnect from them until removed from the priority group. async fn add_peers_to_reserved_set(&mut self, protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String>; /// Remove the peers from the priority group. async fn remove_peers_from_reserved_set(&mut self, protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String>; } /// An abstraction over the authority discovery service. 
#[async_trait] pub trait AuthorityDiscovery: Send + 'static { /// Get the addresses for the given [`AuthorityId`] from the local address cache. async fn get_addresses_by_authority_id(&mut self, authority: AuthorityDiscoveryId) -> Option<Vec<Multiaddr>>; /// Get the [`AuthorityId`] for the given [`PeerId`] from the local address cache. async fn get_authority_id_by_peer_id(&mut self, peer_id: PeerId) -> Option<AuthorityDiscoveryId>; } #[async_trait] impl Network for Arc<tc_network::NetworkService<Block, Hash>> { async fn add_peers_to_reserved_set(&mut self, protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String> { tc_network::NetworkService::add_peers_to_reserved_set(&**self, protocol, multiaddresses) } async fn remove_peers_from_reserved_set(&mut self, protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String> { tc_network::NetworkService::remove_peers_from_reserved_set(&**self, protocol, multiaddresses) } } #[async_trait] impl AuthorityDiscovery for AuthorityDiscoveryService { async fn get_addresses_by_authority_id(&mut self, authority: AuthorityDiscoveryId) -> Option<Vec<Multiaddr>> { AuthorityDiscoveryService::get_addresses_by_authority_id(self, authority).await } async fn get_authority_id_by_peer_id(&mut self, peer_id: PeerId) -> Option<AuthorityDiscoveryId> { AuthorityDiscoveryService::get_authority_id_by_peer_id(self, peer_id).await } } /// This struct tracks the state for one `ConnectToValidators` request. struct NonRevokedConnectionRequestState { requested: Vec<AuthorityDiscoveryId>, pending: HashSet<AuthorityDiscoveryId>, sender: mpsc::Sender<(AuthorityDiscoveryId, PeerId)>, } impl NonRevokedConnectionRequestState { /// Create a new instance of `ConnectToValidatorsState`. 
pub fn new( requested: Vec<AuthorityDiscoveryId>, pending: HashSet<AuthorityDiscoveryId>, sender: mpsc::Sender<(AuthorityDiscoveryId, PeerId)>, ) -> Self { Self { requested, pending, sender, } } pub fn on_authority_connected(&mut self, authority: &AuthorityDiscoveryId, peer_id: &PeerId) { if self.pending.remove(authority) { // an error may happen if the request was revoked or // the channel's buffer is full, ignoring it is fine let _ = self.sender.try_send((authority.clone(), peer_id.clone())); } } /// Returns `true` if the request is revoked. pub fn is_revoked(&mut self) -> bool { self.sender.is_closed() } pub fn requested(&self) -> &[AuthorityDiscoveryId] { self.requested.as_ref() } } /// Will be called by [`Service::on_request`] when a request was revoked. /// /// Takes the `map` of requested validators and the `id` of the validator that should be revoked. /// /// Returns `Some(id)` iff the request counter is `0`. fn on_revoke(map: &mut HashMap<AuthorityDiscoveryId, u64>, id: AuthorityDiscoveryId) -> Option<AuthorityDiscoveryId> { if let hash_map::Entry::Occupied(mut entry) = map.entry(id) { if entry.get_mut().saturating_sub(1) == 0 { return Some(entry.remove_entry().0); } } None } fn peer_id_from_multiaddr(addr: &Multiaddr) -> Option<PeerId> { addr.iter().last().and_then(|protocol| if let Protocol::P2p(multihash) = protocol { PeerId::from_multihash(multihash).ok() } else { None }) } pub(super) struct Service<N, AD> { // Peers that are connected to us and authority ids associated to them. connected_peers: HashMap<PeerId, HashSet<AuthorityDiscoveryId>>, // The `u64` counts the number of pending non-revoked requests for this validator // note: the validators in this map are not necessarily present // in the `connected_validators` map. // Invariant: the value > 0 for non-revoked requests. 
requested_validators: HashMap<AuthorityDiscoveryId, u64>, non_revoked_discovery_requests: Vec<NonRevokedConnectionRequestState>, // PhantomData used to make the struct generic instead of having generic methods _phantom: PhantomData<(N, AD)>, } impl<N: Network, AD: AuthorityDiscovery> Service<N, AD> { pub fn new() -> Self { Self { connected_peers: HashMap::new(), requested_validators: HashMap::new(), non_revoked_discovery_requests: Vec::new(), _phantom: PhantomData, } } /// Find connected validators using the given `validator_ids`. /// /// Returns a [`HashMap`] that contains the found [`AuthorityDiscoveryId`]'s and their associated [`PeerId`]'s. #[tracing::instrument(level = "trace", skip(self, authority_discovery_service), fields(subsystem = LOG_TARGET))] async fn find_connected_validators( &mut self, validator_ids: &[AuthorityDiscoveryId], authority_discovery_service: &mut AD, ) -> HashMap<AuthorityDiscoveryId, PeerId> { let mut result = HashMap::new(); for id in validator_ids { // First check if we already cached the validator if let Some(pid) = self.connected_peers .iter() .find_map(|(pid, ids)| if ids.contains(&id) { Some(pid) } else { None }) { result.insert(id.clone(), pid.clone()); continue; } // If not ask the authority discovery if let Some(addresses) = authority_discovery_service.get_addresses_by_authority_id(id.clone()).await { for peer_id in addresses.iter().filter_map(peer_id_from_multiaddr) { if let Some(ids) = self.connected_peers.get_mut(&peer_id) { ids.insert(id.clone()); result.insert(id.clone(), peer_id.clone()); } } } } result } /// On a new connection request, a priority group update will be issued. /// It will ask the network to connect to the validators and not disconnect /// from them at least until all the pending requests containing them are revoked. /// /// This method will also clean up all previously revoked requests. 
/// it takes `network_service` and `authority_discovery_service` by value /// and returns them as a workaround for the Future: Send requirement imposed by async fn impl. #[tracing::instrument(level = "trace", skip(self, connected, network_service, authority_discovery_service), fields(subsystem = LOG_TARGET))] pub async fn on_request( &mut self, validator_ids: Vec<AuthorityDiscoveryId>, mut connected: mpsc::Sender<(AuthorityDiscoveryId, PeerId)>, mut network_service: N, mut authority_discovery_service: AD, ) -> (N, AD) { const MAX_ADDR_PER_PEER: usize = 3; // Increment the counter of how many times the validators were requested. validator_ids.iter().for_each(|id| *self.requested_validators.entry(id.clone()).or_default() += 1); let already_connected = self.find_connected_validators(&validator_ids, &mut authority_discovery_service).await; // try to send already connected peers for (id, peer) in already_connected.iter() { match connected.try_send((id.clone(), peer.clone())) { Err(e) if e.is_disconnected() => { // the request is already revoked for peer_id in validator_ids { let _ = on_revoke(&mut self.requested_validators, peer_id); } return (network_service, authority_discovery_service); } Err(_) => { // the channel's buffer is full // ignore the error, the receiver will miss out some peers // but that's fine break; } Ok(()) => continue, } } // collect multiaddress of validators let mut multiaddr_to_add = HashSet::new(); for authority in validator_ids.iter() { let result = authority_discovery_service.get_addresses_by_authority_id(authority.clone()).await; if let Some(addresses) = result { // We might have several `PeerId`s per `AuthorityId` // depending on the number of sentry nodes, // so we limit the max number of sentries per node to connect to. 
// They are going to be removed soon though: // https://github.com/tetcoin/tetcore/issues/6845 multiaddr_to_add.extend(addresses.into_iter().take(MAX_ADDR_PER_PEER)); } } // clean up revoked requests let mut revoked_indices = Vec::new(); let mut revoked_validators = Vec::new(); for (i, maybe_revoked) in self.non_revoked_discovery_requests.iter_mut().enumerate() { if maybe_revoked.is_revoked() { for id in maybe_revoked.requested() { if let Some(id) = on_revoke(&mut self.requested_validators, id.clone()) { revoked_validators.push(id); } } revoked_indices.push(i); } } // clean up revoked requests states for to_revoke in revoked_indices.into_iter().rev() { drop(self.non_revoked_discovery_requests.swap_remove(to_revoke)); } // multiaddresses to remove let mut multiaddr_to_remove = HashSet::new(); for id in revoked_validators.into_iter() { let result = authority_discovery_service.get_addresses_by_authority_id(id).await; if let Some(addresses) = result { multiaddr_to_remove.extend(addresses.into_iter()); } } // ask the network to connect to these nodes and not disconnect // from them until removed from the set if let Err(e) = network_service.add_peers_to_reserved_set( PeerSet::Collation.into_protocol_name(), multiaddr_to_add.clone(), ).await { tracing::warn!(target: LOG_TARGET, err = ?e, "AuthorityDiscoveryService returned an invalid multiaddress"); } if let Err(e) = network_service.add_peers_to_reserved_set( PeerSet::Validation.into_protocol_name(), multiaddr_to_add, ).await { tracing::warn!(target: LOG_TARGET, err = ?e, "AuthorityDiscoveryService returned an invalid multiaddress"); } // the addresses are known to be valid let _ = network_service.remove_peers_from_reserved_set( PeerSet::Collation.into_protocol_name(), multiaddr_to_remove.clone() ).await; let _ = network_service.remove_peers_from_reserved_set( PeerSet::Validation.into_protocol_name(), multiaddr_to_remove ).await; let pending = validator_ids.iter() .cloned() .filter(|id| 
!already_connected.contains_key(id)) .collect::<HashSet<_>>(); self.non_revoked_discovery_requests.push(NonRevokedConnectionRequestState::new( validator_ids, pending, connected, )); (network_service, authority_discovery_service) } /// Should be called when a peer connected. #[tracing::instrument(level = "trace", skip(self, authority_discovery_service), fields(subsystem = LOG_TARGET))] pub async fn on_peer_connected(&mut self, peer_id: &PeerId, authority_discovery_service: &mut AD) { // check if it's an authority we've been waiting for let maybe_authority = authority_discovery_service.get_authority_id_by_peer_id(peer_id.clone()).await; if let Some(authority) = maybe_authority { for request in self.non_revoked_discovery_requests.iter_mut() { let _ = request.on_authority_connected(&authority, peer_id); } self.connected_peers.entry(peer_id.clone()).or_default().insert(authority); } else { self.connected_peers.insert(peer_id.clone(), Default::default()); } } /// Should be called when a peer disconnected. 
pub fn on_peer_disconnected(&mut self, peer_id: &PeerId) { self.connected_peers.remove(peer_id); } } #[cfg(test)] mod tests { use super::*; use futures::stream::StreamExt as _; use tp_keyring::Sr25519Keyring; fn new_service() -> Service<TestNetwork, TestAuthorityDiscovery> { Service::new() } fn new_network() -> (TestNetwork, TestAuthorityDiscovery) { (TestNetwork::default(), TestAuthorityDiscovery::new()) } #[derive(Default)] struct TestNetwork { peers_set: HashSet<Multiaddr>, } #[derive(Default)] struct TestAuthorityDiscovery { by_authority_id: HashMap<AuthorityDiscoveryId, Multiaddr>, by_peer_id: HashMap<PeerId, AuthorityDiscoveryId>, } impl TestAuthorityDiscovery { fn new() -> Self { let peer_ids = known_peer_ids(); let authorities = known_authorities(); let multiaddr = known_multiaddr(); Self { by_authority_id: authorities.iter() .cloned() .zip(multiaddr.into_iter()) .collect(), by_peer_id: peer_ids.into_iter() .zip(authorities.into_iter()) .collect(), } } } #[async_trait] impl Network for TestNetwork { async fn add_peers_to_reserved_set(&mut self, _protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String> { self.peers_set.extend(multiaddresses.into_iter()); Ok(()) } async fn remove_peers_from_reserved_set(&mut self, _protocol: Cow<'static, str>, multiaddresses: HashSet<Multiaddr>) -> Result<(), String> { self.peers_set.retain(|elem| !multiaddresses.contains(elem)); Ok(()) } } #[async_trait] impl AuthorityDiscovery for TestAuthorityDiscovery { async fn get_addresses_by_authority_id(&mut self, authority: AuthorityDiscoveryId) -> Option<Vec<Multiaddr>> { self.by_authority_id.get(&authority).cloned().map(|addr| vec![addr]) } async fn get_authority_id_by_peer_id(&mut self, peer_id: PeerId) -> Option<AuthorityDiscoveryId> { self.by_peer_id.get(&peer_id).cloned() } } fn known_authorities() -> Vec<AuthorityDiscoveryId> { [ Sr25519Keyring::Alice, Sr25519Keyring::Bob, Sr25519Keyring::Charlie, ].iter().map(|k| k.public().into()).collect() } 
fn known_peer_ids() -> Vec<PeerId> { (0..3).map(|_| PeerId::random()).collect() } fn known_multiaddr() -> Vec<Multiaddr> { vec![ "/ip4/127.0.0.1/tcp/1234".parse().unwrap(), "/ip4/127.0.0.1/tcp/1235".parse().unwrap(), "/ip4/127.0.0.1/tcp/1236".parse().unwrap(), ] } #[test] fn request_is_revoked_when_the_receiver_is_dropped() { let (sender, receiver) = mpsc::channel(0); let mut request = NonRevokedConnectionRequestState::new( Vec::new(), HashSet::new(), sender, ); assert!(!request.is_revoked()); drop(receiver); assert!(request.is_revoked()); } #[test] fn requests_are_fulfilled_immediately_for_already_connected_peers() { let mut service = new_service(); let (ns, mut ads) = new_network(); let peer_ids: Vec<_> = ads.by_peer_id.keys().cloned().collect(); let authority_ids: Vec<_> = ads.by_peer_id.values().cloned().collect(); futures::executor::block_on(async move { let req1 = vec![authority_ids[0].clone(), authority_ids[1].clone()]; let (sender, mut receiver) = mpsc::channel(2); service.on_peer_connected(&peer_ids[0], &mut ads).await; let _ = service.on_request( req1, sender, ns, ads, ).await; // the results should be immediately available let reply1 = receiver.next().await.unwrap(); assert_eq!(reply1.0, authority_ids[0]); assert_eq!(reply1.1, peer_ids[0]); }); } #[test] fn requests_are_fulfilled_on_peer_connection() { let mut service = new_service(); let (ns, ads) = new_network(); let peer_ids: Vec<_> = ads.by_peer_id.keys().cloned().collect(); let authority_ids: Vec<_> = ads.by_peer_id.values().cloned().collect(); futures::executor::block_on(async move { let req1 = vec![authority_ids[0].clone(), authority_ids[1].clone()]; let (sender, mut receiver) = mpsc::channel(2); let (_, mut ads) = service.on_request( req1, sender, ns, ads, ).await; service.on_peer_connected(&peer_ids[0], &mut ads).await; let reply1 = receiver.next().await.unwrap(); assert_eq!(reply1.0, authority_ids[0]); assert_eq!(reply1.1, peer_ids[0]); service.on_peer_connected(&peer_ids[1], &mut ads).await; 
let reply2 = receiver.next().await.unwrap(); assert_eq!(reply2.0, authority_ids[1]); assert_eq!(reply2.1, peer_ids[1]); }); } // Test cleanup works. #[test] fn requests_are_removed_on_revoke() { let mut service = new_service(); let (ns, mut ads) = new_network(); let peer_ids: Vec<_> = ads.by_peer_id.keys().cloned().collect(); let authority_ids: Vec<_> = ads.by_peer_id.values().cloned().collect(); futures::executor::block_on(async move { let (sender, mut receiver) = mpsc::channel(1); service.on_peer_connected(&peer_ids[0], &mut ads).await; service.on_peer_connected(&peer_ids[1], &mut ads).await; let (ns, ads) = service.on_request( vec![authority_ids[0].clone()], sender, ns, ads, ).await; let _ = receiver.next().await.unwrap(); // revoke the request drop(receiver); let (sender, mut receiver) = mpsc::channel(1); let _ = service.on_request( vec![authority_ids[1].clone()], sender, ns, ads, ).await; let reply = receiver.next().await.unwrap(); assert_eq!(reply.0, authority_ids[1]); assert_eq!(reply.1, peer_ids[1]); assert_eq!(service.non_revoked_discovery_requests.len(), 1); }); } // More complex test with overlapping revoked requests #[test] fn revoking_requests_with_overlapping_validator_sets() { let mut service = new_service(); let (ns, mut ads) = new_network(); let peer_ids: Vec<_> = ads.by_peer_id.keys().cloned().collect(); let authority_ids: Vec<_> = ads.by_peer_id.values().cloned().collect(); futures::executor::block_on(async move { let (sender, mut receiver) = mpsc::channel(1); service.on_peer_connected(&peer_ids[0], &mut ads).await; service.on_peer_connected(&peer_ids[1], &mut ads).await; let (ns, ads) = service.on_request( vec![authority_ids[0].clone(), authority_ids[2].clone()], sender, ns, ads, ).await; let _ = receiver.next().await.unwrap(); // revoke the first request drop(receiver); let (sender, mut receiver) = mpsc::channel(1); let (ns, ads) = service.on_request( vec![authority_ids[0].clone(), authority_ids[1].clone()], sender, ns, ads, ).await; let _ = 
receiver.next().await.unwrap(); assert_eq!(service.non_revoked_discovery_requests.len(), 1); assert_eq!(ns.peers_set.len(), 2); // revoke the second request drop(receiver); let (sender, mut receiver) = mpsc::channel(1); let (ns, _) = service.on_request( vec![authority_ids[0].clone()], sender, ns, ads, ).await; let _ = receiver.next().await.unwrap(); assert_eq!(service.non_revoked_discovery_requests.len(), 1); assert_eq!(ns.peers_set.len(), 1); }); } /// A test for when a validator connects, but the authority discovery not yet knows that the connecting node /// is a validator. This can happen for example at startup of a node. #[test] fn handle_validator_connect_without_authority_discovery_knowing_it() { let mut service = new_service(); let ns = TestNetwork::default(); let mut ads = TestAuthorityDiscovery::default(); let validator_peer_id = PeerId::random(); let validator_id: AuthorityDiscoveryId = Sr25519Keyring::Alice.public().into(); futures::executor::block_on(async move { let (sender, mut receiver) = mpsc::channel(1); service.on_peer_connected(&validator_peer_id, &mut ads).await; let address = known_multiaddr()[0].clone().with(Protocol::P2p(validator_peer_id.clone().into())); ads.by_peer_id.insert(validator_peer_id.clone(), validator_id.clone()); ads.by_authority_id.insert(validator_id.clone(), address); let _ = service.on_request( vec![validator_id.clone()], sender, ns, ads, ).await; assert_eq!((validator_id.clone(), validator_peer_id.clone()), receiver.next().await.unwrap()); assert!(service.connected_peers.get(&validator_peer_id).unwrap().contains(&validator_id)); }); } }
32.748438
142
0.712105
710c7f3f9e14524114f932411e96952a3d591c76
1,180
use sc_cli::RunCmd; #[derive(Debug, clap::Parser)] pub struct Cli { #[clap(subcommand)] pub subcommand: Option<Subcommand>, #[clap(flatten)] pub run: RunCmd, } #[derive(Debug, clap::Subcommand)] pub enum Subcommand { /// Key management cli utilities #[clap(subcommand)] Key(sc_cli::KeySubcommand), /// Build a chain specification. BuildSpec(sc_cli::BuildSpecCmd), /// Validate blocks. CheckBlock(sc_cli::CheckBlockCmd), /// Export blocks. ExportBlocks(sc_cli::ExportBlocksCmd), /// Export the state of a given block into a chain spec. ExportState(sc_cli::ExportStateCmd), /// Import blocks. ImportBlocks(sc_cli::ImportBlocksCmd), /// Remove the whole chain. PurgeChain(sc_cli::PurgeChainCmd), /// Revert the chain to a previous state. Revert(sc_cli::RevertCmd), /// Sub-commands concerned with benchmarking. #[clap(subcommand)] Benchmark(frame_benchmarking_cli::BenchmarkCmd), /// Try some command against runtime state. #[cfg(feature = "try-runtime")] TryRuntime(try_runtime_cli::TryRuntimeCmd), /// Try some command against runtime state. Note: `try-runtime` feature must be enabled. #[cfg(not(feature = "try-runtime"))] TryRuntime, }
23.137255
89
0.725424
d68390c98df720be8169bfb3835bedf22118089b
6,076
use std::time::Instant;

/// A node in the synchronization tree: the root (`Origin`), a file, or a directory.
#[derive(Eq)]
pub enum SyncEntry {
    Origin(OriginInfo),
    File(FileInfo),
    Directory(DirInfo),
}

/// The root of a sync tree: a base path plus its direct children.
#[derive(Eq)]
pub struct OriginInfo {
    pub path: String,
    pub children: Vec<SyncEntry>,
}

impl PartialEq for OriginInfo {
    fn eq(&self, other: &Self) -> bool {
        self.path == other.path && vec_equals(&self.children, &other.children)
    }
}

/// Metadata for a single file entry.
#[derive(Eq)]
pub struct FileInfo {
    pub name: String,
    pub last_modified: Instant,
    pub created: Instant,
    pub size: u32,
}

impl PartialEq for FileInfo {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name
            && self.last_modified == other.last_modified
            && self.created == other.created
            && self.size == other.size
    }
}

/// A directory entry with a name and its children.
#[derive(Eq)]
pub struct DirInfo {
    pub name: String,
    pub children: Vec<SyncEntry>,
}

impl PartialEq for DirInfo {
    fn eq(&self, other: &Self) -> bool {
        self.name == other.name && vec_equals(&self.children, &other.children)
    }
}

impl PartialEq for SyncEntry {
    /// Entries are equal only when they are the same variant and the payloads match.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (SyncEntry::Origin(a), SyncEntry::Origin(b)) => a == b,
            (SyncEntry::File(a), SyncEntry::File(b)) => a == b,
            (SyncEntry::Directory(a), SyncEntry::Directory(b)) => a == b,
            _ => false,
        }
    }
}

/// Element-wise equality of two entry lists; lengths must match.
fn vec_equals(v1: &[SyncEntry], v2: &[SyncEntry]) -> bool {
    v1.len() == v2.len() && v1.iter().zip(v2).all(|(a, b)| a == b)
}

/// Returns the full path from the origin's base path down to `to_item`.
///
/// Returns `None` when `origin` is not a `SyncEntry::Origin` or when `to_item`
/// is not reachable from it. Matching is done by value (`PartialEq`).
pub fn get_path(origin: &SyncEntry, to_item: &SyncEntry) -> Option<String> {
    if let SyncEntry::Origin(origin_info) = origin {
        for child in &origin_info.children {
            let response = get_path_recursive(to_item, child, &origin_info.path);
            if response.is_some() {
                return response;
            }
        }
    }
    None
}

/// Depth-first search for `to_item` below `current_item`, accumulating the path
/// built from `parent_path`.
fn get_path_recursive(
    to_item: &SyncEntry,
    current_item: &SyncEntry,
    parent_path: &str,
) -> Option<String> {
    match current_item {
        // An origin can never appear below the root.
        SyncEntry::Origin(_) => None,
        SyncEntry::File(file_info) => {
            if to_item == current_item {
                Some(path_combine(parent_path, &file_info.name))
            } else {
                None
            }
        }
        SyncEntry::Directory(dir_info) => {
            let new_parent_path = path_combine(parent_path, &dir_info.name);
            if to_item == current_item {
                return Some(new_parent_path);
            }
            for child in &dir_info.children {
                let response = get_path_recursive(to_item, child, &new_parent_path);
                if response.is_some() {
                    return response;
                }
            }
            None
        }
    }
}

/// Joins two path fragments with exactly one `/` between them: leading slashes
/// of `path_two` are stripped, and a separator is appended to `path_one` only
/// when it does not already end with one.
fn path_combine(path_one: &str, path_two: &str) -> String {
    let right = path_two.trim_start_matches('/');
    if path_one.ends_with('/') {
        format!("{}{}", path_one, right)
    } else {
        format!("{}/{}", path_one, right)
    }
}

#[cfg(test)]
mod test {
    use super::{get_path, DirInfo, OriginInfo, SyncEntry};

    #[test]
    fn test_get_path() {
        let child3 = SyncEntry::Directory(DirInfo {
            name: String::from("childthree"),
            children: vec![],
        });
        let origin = SyncEntry::Origin(OriginInfo {
            path: String::from("c:/this/is/a/test"),
            children: vec![SyncEntry::Directory(DirInfo {
                name: String::from("childone"),
                children: vec![SyncEntry::Directory(DirInfo {
                    name: String::from("childtwo"),
                    children: vec![child3],
                })],
            })],
        });

        // Walk down to the innermost entry and ask for its full path.
        if let SyncEntry::Origin(origin_info) = &origin {
            if let Some(SyncEntry::Directory(dir_info1)) = origin_info.children.first() {
                if let Some(SyncEntry::Directory(dir_info2)) = dir_info1.children.first() {
                    if let Some(sync_entry) = dir_info2.children.first() {
                        let path = get_path(&origin, sync_entry);
                        assert_eq!(
                            path,
                            Some(String::from(
                                "c:/this/is/a/test/childone/childtwo/childthree"
                            ))
                        );
                        return;
                    }
                }
            }
        }
        panic!("expected to find the nested entry");
    }
}
28.796209
109
0.472844
b9bdac29b178a7f9b5c0b787eff9a6d2eac1de03
1,059
use engine_shared::{stored_value::StoredValue, transform::Transform};
use engine_test_support::{
    internal::{ExecuteRequestBuilder, InMemoryWasmTestBuilder, DEFAULT_RUN_GENESIS_REQUEST},
    DEFAULT_ACCOUNT_ADDR,
};

const CONTRACT_EE_584_REGRESSION: &str = "ee_584_regression.wasm";

/// Regression test for EE-584: when a session errors, none of its write
/// transforms (here, the contract's "Hello, World!" write) may survive.
#[ignore]
#[test]
fn should_run_ee_584_no_errored_session_transforms() {
    let exec_request =
        ExecuteRequestBuilder::standard(DEFAULT_ACCOUNT_ADDR, CONTRACT_EE_584_REGRESSION, ())
            .build();

    let mut builder = InMemoryWasmTestBuilder::default();

    builder
        .run_genesis(&DEFAULT_RUN_GENESIS_REQUEST)
        .exec(exec_request);

    // The session is expected to fail...
    assert!(builder.is_error());

    // ...and the errored execution must not have produced the contract's write.
    // `!any(..)` replaces the original `find(..).is_none()` (same semantics,
    // idiomatic form flagged by clippy's `search_is_some`).
    let transforms = builder.get_transforms();
    assert!(!transforms[0].iter().any(|(_, t)| {
        if let Transform::Write(StoredValue::CLValue(cl_value)) = t {
            cl_value.to_owned().into_t::<String>().unwrap_or_default() == "Hello, World!"
        } else {
            false
        }
    }));
}
28.621622
93
0.651558
2f8b32f88e1e8dc5e05cdc91867b074764b42325
5,637
//! An asynchronous `Mutex`-like type.
//!
//! This module provides [`Lock`], a type that acts similarly to an asynchronous `Mutex`, with one
//! major difference: the [`LockGuard`] returned by `poll_lock` is not tied to the lifetime of the
//! `Mutex`. This enables you to acquire a lock, and then pass that guard into a future, and then
//! release it at some later point in time.
//!
//! This allows you to do something along the lines of:
//!
//! ```rust,no_run
//! # #[macro_use]
//! # extern crate futures;
//! # extern crate tokio;
//! # use futures::{future, Poll, Async, Future, Stream};
//! use tokio::sync::lock::{Lock, LockGuard};
//! struct MyType<S> {
//!     lock: Lock<S>,
//! }
//!
//! impl<S> Future for MyType<S>
//! where S: Stream<Item = u32> + Send + 'static
//! {
//!     type Item = ();
//!     type Error = ();
//!
//!     fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
//!         match self.lock.poll_lock() {
//!             Async::Ready(mut guard) => {
//!                 tokio::spawn(future::poll_fn(move || {
//!                     let item = try_ready!(guard.poll().map_err(|_| ()));
//!                     println!("item = {:?}", item);
//!                     Ok(().into())
//!                 }));
//!                 Ok(().into())
//!             },
//!             Async::NotReady => Ok(Async::NotReady)
//!         }
//!     }
//! }
//! # fn main() {}
//! ```
//!
//! [`Lock`]: struct.Lock.html
//! [`LockGuard`]: struct.LockGuard.html

use futures::Async;
use semaphore;
use std::cell::UnsafeCell;
use std::fmt;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;

/// An asynchronous mutual exclusion primitive useful for protecting shared data
///
/// Each mutex has a type parameter (`T`) which represents the data that it is protecting. The data
/// can only be accessed through the RAII guards returned from `poll_lock`, which guarantees that
/// the data is only ever accessed when the mutex is locked.
#[derive(Debug)]
pub struct Lock<T> {
    // Shared state (value + semaphore); cloned handles share this Arc.
    inner: Arc<State<T>>,
    // This handle's semaphore permit; acquired means this handle holds the lock.
    permit: semaphore::Permit,
}

/// A handle to a held `Lock`.
///
/// As long as you have this guard, you have exclusive access to the underlying `T`. The guard
/// internally keeps a reference-counted pointer to the original `Lock`, so even if the lock goes
/// away, the guard remains valid.
///
/// The lock is automatically released whenever the guard is dropped, at which point `poll_lock`
/// will succeed yet again.
#[derive(Debug)]
pub struct LockGuard<T>(Lock<T>);

// As long as T: Send, it's fine to send and share Lock<T> between threads.
// If T was not Send, sending and sharing a Lock<T> would be bad, since you can access T through
// Lock<T>.
unsafe impl<T> Send for Lock<T> where T: Send {}
unsafe impl<T> Sync for Lock<T> where T: Send {}
unsafe impl<T> Sync for LockGuard<T> where T: Send + Sync {}

// Shared state: the protected value plus a single-permit semaphore that
// serializes access to it (see `Semaphore::new(1)` in `Lock::new`).
#[derive(Debug)]
struct State<T> {
    c: UnsafeCell<T>,
    s: semaphore::Semaphore,
}

// Compile-time check that the guard is `Send` when `T` is.
#[test]
fn bounds() {
    fn check<T: Send>() {}
    check::<LockGuard<u32>>();
}

impl<T> Lock<T> {
    /// Creates a new lock in an unlocked state ready for use.
    pub fn new(t: T) -> Self {
        Self {
            inner: Arc::new(State {
                c: UnsafeCell::new(t),
                s: semaphore::Semaphore::new(1),
            }),
            permit: semaphore::Permit::new(),
        }
    }

    /// Try to acquire the lock.
    ///
    /// If the lock is already held, the current task is notified when it is released.
    pub fn poll_lock(&mut self) -> Async<LockGuard<T>> {
        if let Async::NotReady = self.permit.poll_acquire(&self.inner.s).unwrap_or_else(|_| {
            // The semaphore was closed. but, we never explicitly close it, and we have a
            // handle to it through the Arc, which means that this can never happen.
            unreachable!()
        }) {
            return Async::NotReady;
        }

        // We want to move the acquired permit into the guard,
        // and leave an unacquired one in self.
        let acquired = Self {
            inner: self.inner.clone(),
            permit: ::std::mem::replace(&mut self.permit, semaphore::Permit::new()),
        };
        Async::Ready(LockGuard(acquired))
    }
}

impl<T> Drop for LockGuard<T> {
    fn drop(&mut self) {
        if self.0.permit.is_acquired() {
            self.0.permit.release(&self.0.inner.s);
        } else if ::std::thread::panicking() {
            // A guard _should_ always hold its permit, but if the thread is already panicking,
            // we don't want to generate a panic-while-panicking, since that's just unhelpful!
        } else {
            unreachable!("Permit not held when LockGuard was dropped")
        }
    }
}

impl<T> From<T> for Lock<T> {
    fn from(s: T) -> Self {
        Self::new(s)
    }
}

impl<T> Clone for Lock<T> {
    // Cloning shares the state but NOT the permit: the clone starts unacquired.
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
            permit: semaphore::Permit::new(),
        }
    }
}

impl<T> Default for Lock<T>
where
    T: Default,
{
    fn default() -> Self {
        Self::new(T::default())
    }
}

impl<T> Deref for LockGuard<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        assert!(self.0.permit.is_acquired());
        unsafe { &*self.0.inner.c.get() }
    }
}

impl<T> DerefMut for LockGuard<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        assert!(self.0.permit.is_acquired());
        unsafe { &mut *self.0.inner.c.get() }
    }
}

impl<T: fmt::Display> fmt::Display for LockGuard<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
30.47027
99
0.576725
4a03f94162916a615f22946b0c7fe6ec658c7c4f
768
#![no_std]
#![no_main]

// Panic handler: halt the MCU on panic (no unwinding on AVR).
extern crate panic_halt;

use arduino_uno::prelude::*;

/// Firmware entry point; never returns.
///
/// Blinks the onboard LED: the pin is toggled every 200 ms three times,
/// then the final state is held for 800 ms before the cycle repeats.
#[no_mangle]
pub extern fn main() -> ! {
    // Take exclusive ownership of the device peripherals.
    let dp = arduino_uno::Peripherals::take().unwrap();

    let mut delay = arduino_uno::Delay::new();

    let mut pins = arduino_uno::Pins::new(
        dp.PORTB,
        dp.PORTC,
        dp.PORTD,
    );

    // Digital pin 13 is also connected to an onboard LED marked "L"
    let mut led = pins.d13.into_output(&mut pins.ddr);

    // Start with the LED on.
    led.set_high().void_unwrap();

    loop {
        led.toggle().void_unwrap();
        delay.delay_ms(200);
        led.toggle().void_unwrap();
        delay.delay_ms(200);
        led.toggle().void_unwrap();
        delay.delay_ms(200);
        led.toggle().void_unwrap();
        delay.delay_ms(800);
    }
}
22.588235
68
0.585938
ac98f1c295b9c77db1fb3234da9de496a027cc87
4,723
use crate::format::problem::*; use crate::format::solution::*; use crate::format_time; use crate::helpers::*; parameterized_test! {can_use_vehicle_with_two_tours_and_two_jobs, (jobs, unassigned), { can_use_vehicle_with_two_tours_and_two_jobs_impl(jobs, unassigned); }} can_use_vehicle_with_two_tours_and_two_jobs! { case01: (vec![ create_delivery_job("job1", vec![1., 0.]), create_delivery_job("job2", vec![2., 0.])], None), case02: (vec![ create_delivery_job("job1", vec![1., 0.]), create_delivery_job("job2", vec![2., 0.]), create_delivery_job("job3", vec![3., 0.]) ], Some(vec![ UnassignedJob { job_id: "job3".to_string(), reasons: vec![UnassignedJobReason { code: "CAPACITY_CONSTRAINT".to_string(), description: "does not fit into any vehicle due to capacity".to_string() }] } ])), } fn can_use_vehicle_with_two_tours_and_two_jobs_impl(jobs: Vec<Job>, unassigned: Option<Vec<UnassignedJob>>) { let problem = Problem { plan: Plan { jobs, relations: Option::None }, fleet: Fleet { vehicles: vec![VehicleType { shifts: vec![VehicleShift { start: ShiftStart { earliest: format_time(0.), latest: None, location: vec![0., 0.].to_loc() }, end: Some(ShiftEnd { earliest: None, latest: format_time(100.).to_string(), location: vec![0., 0.].to_loc(), }), dispatch: None, breaks: None, reloads: Some(vec![VehicleReload { times: None, location: vec![0., 0.].to_loc(), duration: 2.0, tag: None, }]), }], capacity: vec![1], ..create_default_vehicle_type() }], profiles: create_default_matrix_profiles(), }, ..create_empty_problem() }; let matrix = create_matrix_from_problem(&problem); let solution = solve_with_metaheuristic(problem, Some(vec![matrix])); assert_eq!( solution, Solution { statistic: Statistic { cost: 26., distance: 6, duration: 10, times: Timing { driving: 6, serving: 4, waiting: 0, break_time: 0 }, }, tours: vec![Tour { vehicle_id: "my_vehicle_1".to_string(), type_id: "my_vehicle".to_string(), shift_index: 0, stops: vec![ create_stop_with_activity( "departure", "departure", (0., 0.), 
1, ("1970-01-01T00:00:00Z", "1970-01-01T00:00:00Z"), 0 ), create_stop_with_activity( "job1", "delivery", (1., 0.), 0, ("1970-01-01T00:00:01Z", "1970-01-01T00:00:02Z"), 1 ), create_stop_with_activity( "reload", "reload", (0., 0.), 1, ("1970-01-01T00:00:03Z", "1970-01-01T00:00:05Z"), 2 ), create_stop_with_activity( "job2", "delivery", (2., 0.), 0, ("1970-01-01T00:00:07Z", "1970-01-01T00:00:08Z"), 4 ), create_stop_with_activity( "arrival", "arrival", (0., 0.), 0, ("1970-01-01T00:00:10Z", "1970-01-01T00:00:10Z"), 6 ), ], statistic: Statistic { cost: 26., distance: 6, duration: 10, times: Timing { driving: 6, serving: 4, waiting: 0, break_time: 0 }, }, }], unassigned, ..create_empty_solution() } ); }
36.330769
115
0.400593
e502ef73da27a8fb985a0c5e840fe63e09019bd9
2,594
/*
Copyright 2016 Martin Buck
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

//! IsNormalized3D trait used for types which are positioned within the 3D space and normalized

use crate::*;

//------------------------------------------------------------------------------

/// IsNormalized3D is a trait used for types which are positioned within the 3D space and normalized
pub trait IsNormalized3D: Sized + Is3D {
    /// Should construct a new object and only fail if it can't be normalized
    fn new<P>(p: P) -> Result<Self>
    where
        P: Is3D;

    // Note for all `norm_*` constructors below: the argument is a unit axis
    // vector, so by `new`'s contract ("only fail if it can't be normalized")
    // the `unwrap` is expected to be infallible.

    /// Returns a new normalized object which only points in the positive x-Direction
    fn norm_x() -> Self {
        Self::new(Point3D::new(1.0, 0.0, 0.0)).unwrap()
    }
    /// Returns a new normalized object which only points in the positive y-Direction
    fn norm_y() -> Self {
        Self::new(Point3D::new(0.0, 1.0, 0.0)).unwrap()
    }
    /// Returns a new normalized object which only points in the positive z-Direction
    fn norm_z() -> Self {
        Self::new(Point3D::new(0.0, 0.0, 1.0)).unwrap()
    }
    /// Returns a new normalized object which only points in the negative x-Direction
    fn norm_x_neg() -> Self {
        Self::new(Point3D::new(-1.0, 0.0, 0.0)).unwrap()
    }
    /// Returns a new normalized object which only points in the negative y-Direction
    fn norm_y_neg() -> Self {
        Self::new(Point3D::new(0.0, -1.0, 0.0)).unwrap()
    }
    /// Returns a new normalized object which only points in the negative z-Direction
    fn norm_z_neg() -> Self {
        Self::new(Point3D::new(0.0, 0.0, -1.0)).unwrap()
    }
}
42.52459
100
0.685042
0ee135e6f5d391b3723012b91938ebc496465f47
19,047
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::{ function_data_builder::{FunctionDataBuilder, FunctionDataBuilderOptions}, function_target::{FunctionData, FunctionTarget}, function_target_pipeline::{FunctionTargetProcessor, FunctionTargetsHolder}, graph::{Graph, NaturalLoop}, options::ProverOptions, stackless_bytecode::{AttrId, Bytecode, HavocKind, Label, Operation, PropKind}, stackless_control_flow_graph::{BlockContent, BlockId, StacklessControlFlowGraph}, }; use move_binary_format::file_format::CodeOffset; use move_model::{ ast::{self, TempIndex}, exp_generator::ExpGenerator, model::FunctionEnv, }; use std::collections::{BTreeMap, BTreeSet}; const LOOP_INVARIANT_BASE_FAILED: &str = "base case of the loop invariant does not hold"; const LOOP_INVARIANT_INDUCTION_FAILED: &str = "induction case of the loop invariant does not hold"; /// A fat-loop captures the information of one or more natural loops that share the same loop /// header. This shared header is called the header of the fat-loop. /// /// Conceptually, every back edge defines a unique natural loop and different back edges may points /// to the same loop header (e.g., when there are two "continue" statements in the loop body). /// /// However, since these natural loops share the same loop header, they share the same loop /// invariants too and the fat-loop targets (i.e., variables that may be changed in any sub-loop) /// is the union of loop targets per each natural loop that share the header. 
#[derive(Debug, Clone)] pub struct FatLoop { pub invariants: BTreeMap<CodeOffset, (AttrId, ast::Exp)>, pub val_targets: BTreeSet<TempIndex>, pub mut_targets: BTreeMap<TempIndex, bool>, pub back_edges: BTreeSet<CodeOffset>, } #[derive(Debug, Clone)] pub struct LoopAnnotation { pub fat_loops: BTreeMap<Label, FatLoop>, } impl LoopAnnotation { fn back_edges_locations(&self) -> BTreeSet<CodeOffset> { self.fat_loops .values() .map(|l| l.back_edges.iter()) .flatten() .copied() .collect() } fn invariants_locations(&self) -> BTreeSet<CodeOffset> { self.fat_loops .values() .map(|l| l.invariants.keys()) .flatten() .copied() .collect() } } pub struct LoopAnalysisProcessor {} impl LoopAnalysisProcessor { pub fn new() -> Box<Self> { Box::new(LoopAnalysisProcessor {}) } } impl FunctionTargetProcessor for LoopAnalysisProcessor { fn process( &self, _targets: &mut FunctionTargetsHolder, func_env: &FunctionEnv<'_>, data: FunctionData, ) -> FunctionData { if func_env.is_native() { return data; } let loop_annotation = Self::build_loop_annotation(func_env, &data); Self::transform(func_env, data, &loop_annotation) } fn name(&self) -> String { "loop_analysis".to_string() } } impl LoopAnalysisProcessor { /// Perform a loop transformation that eliminate back-edges in a loop and flatten the function /// CFG into a directed acyclic graph (DAG). /// /// The general procedure works as following (assuming the loop invariant expression is L): /// /// - At the beginning of the loop header (identified by the label bytecode), insert the /// following statements: /// - assert L; /// - havoc T; /// - assume L; /// - Create a new dummy block (say, block X) with only the following statements /// - assert L; /// - stop; /// - For each backedge in this loop: /// - In the source block of the back edge, replace the last statement (must be a jump or /// branch) with the new label of X. 
fn transform( func_env: &FunctionEnv<'_>, data: FunctionData, loop_annotation: &LoopAnnotation, ) -> FunctionData { let options = ProverOptions::get(func_env.module_env.env); let back_edge_locs = loop_annotation.back_edges_locations(); let invariant_locs = loop_annotation.invariants_locations(); let mut builder = FunctionDataBuilder::new_with_options( func_env, data, FunctionDataBuilderOptions { no_fallthrough_jump_removal: true, }, ); let mut goto_fixes = vec![]; let code = std::mem::take(&mut builder.data.code); for (offset, bytecode) in code.into_iter().enumerate() { match bytecode { Bytecode::Label(attr_id, label) => { builder.emit(bytecode); builder.set_loc_from_attr(attr_id); if let Some(loop_info) = loop_annotation.fat_loops.get(&label) { // assert loop invariants -> this is the base case for (attr_id, exp) in loop_info.invariants.values() { builder.set_loc_and_vc_info( builder.get_loc(*attr_id), LOOP_INVARIANT_BASE_FAILED, ); builder.emit_with(|attr_id| { Bytecode::Prop(attr_id, PropKind::Assert, exp.clone()) }); } // havoc all loop targets for idx in &loop_info.val_targets { builder.emit_with(|attr_id| { Bytecode::Call( attr_id, vec![], Operation::Havoc(HavocKind::Value), vec![*idx], None, ) }); } for (idx, havoc_all) in &loop_info.mut_targets { let havoc_kind = if *havoc_all { HavocKind::MutationAll } else { HavocKind::MutationValue }; builder.emit_with(|attr_id| { Bytecode::Call( attr_id, vec![], Operation::Havoc(havoc_kind), vec![*idx], None, ) }); } // add an additional assumption that the loop did not abort let exp = builder.mk_not(builder.mk_bool_call(ast::Operation::AbortFlag, vec![])); builder.emit_with(|attr_id| Bytecode::Prop(attr_id, PropKind::Assume, exp)); // re-assume loop invariants for (attr_id, exp) in loop_info.invariants.values() { builder.emit(Bytecode::Prop(*attr_id, PropKind::Assume, exp.clone())); } } } Bytecode::Prop(_, PropKind::Assert, _) if invariant_locs.contains(&(offset as CodeOffset)) => { // skip it, as the invariant should 
have been added as an assert after the label } _ => { builder.emit(bytecode); } } // mark that the goto labels in this bytecode needs to be updated to a new label // representing the invariant-checking block for the loop. if back_edge_locs.contains(&(offset as CodeOffset)) { goto_fixes.push(builder.data.code.len() - 1); } } // create one invariant-checking block for each fat loop let invariant_checker_labels: BTreeMap<_, _> = loop_annotation .fat_loops .keys() .map(|label| (*label, builder.new_label())) .collect(); for (label, loop_info) in &loop_annotation.fat_loops { let checker_label = invariant_checker_labels.get(label).unwrap(); builder.set_next_debug_comment(format!( "Loop invariant checking block for the loop started with header: L{}", label.as_usize() )); builder.emit_with(|attr_id| Bytecode::Label(attr_id, *checker_label)); builder.clear_next_debug_comment(); // add instrumentations to assert loop invariants -> this is the induction case for (attr_id, exp) in loop_info.invariants.values() { builder.set_loc_and_vc_info( builder.get_loc(*attr_id), LOOP_INVARIANT_INDUCTION_FAILED, ); builder.emit_with(|attr_id| Bytecode::Prop(attr_id, PropKind::Assert, exp.clone())); } // stop the checking in proving mode (branch back to loop header for interpretation mode) builder.emit_with(|attr_id| { if options.for_interpretation { Bytecode::Jump(attr_id, *label) } else { Bytecode::Call(attr_id, vec![], Operation::Stop, vec![], None) } }); } // fix the goto statements in the loop latch blocks for code_offset in goto_fixes { let updated_goto = match &builder.data.code[code_offset] { Bytecode::Jump(attr_id, old_label) => { Bytecode::Jump(*attr_id, *invariant_checker_labels.get(old_label).unwrap()) } Bytecode::Branch(attr_id, if_label, else_label, idx) => { let new_if_label = *invariant_checker_labels.get(if_label).unwrap_or(if_label); let new_else_label = *invariant_checker_labels .get(else_label) .unwrap_or(else_label); Bytecode::Branch(*attr_id, new_if_label, 
new_else_label, *idx) } _ => panic!("Expect a branch statement"), }; builder.data.code[code_offset] = updated_goto; } // we have unrolled the loop into a DAG, and there will be no loop invariants left builder.data.loop_invariants.clear(); builder.data } /// Collect invariants in the given loop header block /// /// Loop invariants are defined as /// 1) the longest sequence of consecutive /// 2) `PropKind::Assert` propositions /// 3) in the loop header block, immediately after the `Label` statement, /// 4) which are also marked in the `loop_invariants` field in the `FunctionData`. /// All above conditions must be met to be qualified as a loop invariant. /// /// The reason we piggyback on `PropKind::Assert` instead of introducing a new /// `PropKind::Invariant` is that we don't want to introduce a`PropKind::Invariant` type which /// only exists to be eliminated. The same logic applies for other invariants in the system /// (e.g., data invariants, global invariants, etc). /// /// In other words, for the loop header block: /// - the first statement must be a `label`, /// - followed by N `assert` statements, N >= 0 /// - all these N `assert` statements are marked as loop invariants, /// - statement N + 1 is either not an `assert` or is not marked in `loop_invariants`. 
fn collect_loop_invariants( cfg: &StacklessControlFlowGraph, func_target: &FunctionTarget<'_>, loop_header: BlockId, ) -> BTreeMap<CodeOffset, (AttrId, ast::Exp)> { let code = func_target.get_bytecode(); let asserts_as_invariants = &func_target.data.loop_invariants; let mut invariants = BTreeMap::new(); for (index, code_offset) in cfg.instr_indexes(loop_header).unwrap().enumerate() { let bytecode = &code[code_offset as usize]; if index == 0 { assert!(matches!(bytecode, Bytecode::Label(_, _))); } else { match bytecode { Bytecode::Prop(attr_id, PropKind::Assert, exp) if asserts_as_invariants.contains(attr_id) => { invariants.insert(code_offset, (*attr_id, exp.clone())); } _ => break, } } } invariants } /// Collect variables that may be changed during the loop execution. /// /// The input to this function should include all the sub loops that constitute a fat-loop. /// This function will return two sets of variables that represents, respectively, /// - the set of values to be havoc-ed, and /// - the set of mutations to he havoc-ed and how they should be havoc-ed. 
fn collect_loop_targets( cfg: &StacklessControlFlowGraph, func_target: &FunctionTarget<'_>, sub_loops: &[NaturalLoop<BlockId>], ) -> (BTreeSet<TempIndex>, BTreeMap<TempIndex, bool>) { let code = func_target.get_bytecode(); let mut val_targets = BTreeSet::new(); let mut mut_targets = BTreeMap::new(); let fat_loop_body: BTreeSet<_> = sub_loops .iter() .map(|l| l.loop_body.iter()) .flatten() .copied() .collect(); for block_id in fat_loop_body { for code_offset in cfg .instr_indexes(block_id) .expect("A loop body should never contain a dummy block") { let bytecode = &code[code_offset as usize]; let (bc_val_targets, bc_mut_targets) = bytecode.modifies(func_target); val_targets.extend(bc_val_targets); for (idx, is_full_havoc) in bc_mut_targets { mut_targets .entry(idx) .and_modify(|v| { *v = *v || is_full_havoc; }) .or_insert(is_full_havoc); } } } (val_targets, mut_targets) } /// Collect code offsets that are branch instructions forming loop back-edges /// /// The input to this function should include all the sub loops that constitute a fat-loop. /// This function will return one back-edge location for each sub loop. fn collect_loop_back_edges( code: &[Bytecode], cfg: &StacklessControlFlowGraph, header_label: Label, sub_loops: &[NaturalLoop<BlockId>], ) -> BTreeSet<CodeOffset> { sub_loops .iter() .map(|l| { let code_offset = match cfg.content(l.loop_latch) { BlockContent::Dummy => { panic!("A loop body should never contain a dummy block") } BlockContent::Basic { upper, .. } => *upper, }; match &code[code_offset as usize] { Bytecode::Jump(_, goto_label) if *goto_label == header_label => {} Bytecode::Branch(_, if_label, else_label, _) if *if_label == header_label || *else_label == header_label => {} _ => panic!("The latch bytecode of a loop does not branch into the header"), }; code_offset }) .collect() } /// Find all loops in the function and collect information needed for invariant instrumentation /// and loop-to-DAG transformation. 
fn build_loop_annotation(func_env: &FunctionEnv<'_>, data: &FunctionData) -> LoopAnnotation { // build for natural loops let func_target = FunctionTarget::new(func_env, data); let code = func_target.get_bytecode(); let cfg = StacklessControlFlowGraph::new_forward(code); let entry = cfg.entry_block(); let nodes = cfg.blocks(); let edges: Vec<(BlockId, BlockId)> = nodes .iter() .map(|x| { cfg.successors(*x) .iter() .map(|y| (*x, *y)) .collect::<Vec<(BlockId, BlockId)>>() }) .flatten() .collect(); let graph = Graph::new(entry, nodes, edges); let natural_loops = graph.compute_reducible().expect( "A well-formed Move function is expected to have a reducible control-flow graph", ); // collect shared headers from loops let mut fat_headers = BTreeMap::new(); for single_loop in natural_loops { fat_headers .entry(single_loop.loop_header) .or_insert_with(Vec::new) .push(single_loop); } // build fat loops by label let mut fat_loops = BTreeMap::new(); for (fat_root, sub_loops) in fat_headers { // get the label of the scc root let label = match cfg.content(fat_root) { BlockContent::Dummy => panic!("A loop header should never be a dummy block"), BlockContent::Basic { lower, upper: _ } => match code[*lower as usize] { Bytecode::Label(_, label) => label, _ => panic!("A loop header block is expected to start with a Label bytecode"), }, }; let invariants = Self::collect_loop_invariants(&cfg, &func_target, fat_root); let (val_targets, mut_targets) = Self::collect_loop_targets(&cfg, &func_target, &sub_loops); let back_edges = Self::collect_loop_back_edges(code, &cfg, label, &sub_loops); // done with all information collection fat_loops.insert( label, FatLoop { invariants, val_targets, mut_targets, back_edges, }, ); } // check for redundant loop invariant declarations in the spec let all_invariants: BTreeSet<_> = fat_loops .values() .map(|l| l.invariants.values().map(|(attr_id, _)| *attr_id)) .flatten() .collect(); let env = func_target.global_env(); for attr_id in 
data.loop_invariants.difference(&all_invariants) { env.error( &func_target.get_bytecode_loc(*attr_id), "Loop invariants must be declared at the beginning of the loop header in a \ consecutive sequence", ); } LoopAnnotation { fat_loops } } }
41.678337
101
0.545125
e5345ef125189df73e0ebd95c04e256bda8234f5
8,066
use codec::Encode; use parking_lot::RwLock; use sp_state_machine::StorageKey; use sp_storage::ChildInfo; use std::{collections::HashMap, sync::Arc, time::Instant}; #[derive(PartialEq, Eq)] enum AccessType { None, Redundant, Important, Whitelisted, } impl Default for AccessType { fn default() -> Self { AccessType::None } } impl AccessType { fn is_important(&self) -> bool { *self == AccessType::Important } fn mark_important(&mut self) { if *self != AccessType::Whitelisted { *self = AccessType::Important; } } } #[derive(Default)] struct AccessInfo { pub read: AccessType, pub written: AccessType, } impl AccessInfo { fn read(redundant: bool) -> Self { let read = if redundant { AccessType::Redundant } else { AccessType::Important }; Self { read, written: AccessType::None, } } fn written(redundant: bool) -> Self { let written = if redundant { AccessType::Redundant } else { AccessType::Important }; Self { read: AccessType::Redundant, written, } } fn whitelisted(read: bool, write: bool) -> Self { Self { read: if read { AccessType::Whitelisted } else { AccessType::None }, written: if write { AccessType::Whitelisted } else { AccessType::None }, } } } #[derive(Default, Debug)] struct AccessReport { pub read: u32, pub written: u32, } pub struct BenchTracker { instant: RwLock<Instant>, depth: RwLock<u32>, redundant: RwLock<Instant>, results: RwLock<Vec<u128>>, main_keys: RwLock<HashMap<StorageKey, AccessInfo>>, child_keys: RwLock<HashMap<StorageKey, HashMap<StorageKey, AccessInfo>>>, warn_child_prefix_remove: RwLock<bool>, whitelisted_keys: RwLock<HashMap<StorageKey, (bool, bool)>>, } impl BenchTracker { pub fn new() -> Self { BenchTracker { instant: RwLock::new(Instant::now()), depth: RwLock::new(0), redundant: RwLock::new(Instant::now()), results: RwLock::new(Vec::new()), main_keys: RwLock::new(HashMap::new()), child_keys: RwLock::new(HashMap::new()), warn_child_prefix_remove: RwLock::new(false), whitelisted_keys: RwLock::new(HashMap::new()), } } pub fn 
has_warn_child_prefix_removal(&self) -> bool { *self.warn_child_prefix_remove.read() } pub fn instant(&self) { *self.instant.write() = Instant::now(); } pub fn elapsed(&self) -> u128 { self.instant.read().elapsed().as_nanos() } pub fn is_redundant(&self) -> bool { *self.depth.read() > 1 } pub fn reading_key(&self, key: StorageKey) { let redundant = self.is_redundant(); let main_keys = &mut *self.main_keys.write(); match main_keys.get_mut(&key) { Some(info) => { if redundant { return; } if info.written.is_important() { return; } info.read.mark_important(); } None => { main_keys.insert(key, AccessInfo::read(redundant)); } }; } pub fn reading_child_key(&self, child_info: &ChildInfo, key: StorageKey) { let redundant = self.is_redundant(); let child_keys = &mut *self.child_keys.write(); let storage_key = child_info.storage_key().to_vec(); match child_keys.get_mut(&storage_key) { Some(reads) => { match reads.get_mut(&key) { Some(info) => { if redundant { return; } if info.written.is_important() { return; } info.read.mark_important(); } None => { reads.insert(key, AccessInfo::read(redundant)); } }; } None => { let mut reads = HashMap::<StorageKey, AccessInfo>::new(); reads.insert(key, AccessInfo::read(redundant)); child_keys.insert(storage_key, reads); } }; } pub fn changing_key(&self, key: StorageKey) { let redundant = self.is_redundant(); let main_keys = &mut *self.main_keys.write(); match main_keys.get_mut(&key) { Some(info) => { if redundant { return; } info.written.mark_important(); } None => { main_keys.insert(key, AccessInfo::written(redundant)); } }; } pub fn changing_child_key(&self, child_info: &ChildInfo, key: StorageKey) { let redundant = self.is_redundant(); let child_keys = &mut *self.child_keys.write(); let storage_key = child_info.storage_key().to_vec(); match child_keys.get_mut(&storage_key) { Some(changes) => { match changes.get_mut(&key) { Some(info) => { if redundant { return; } info.written.mark_important(); } None => { changes.insert(key, 
AccessInfo::written(redundant)); } }; } None => { let mut changes = HashMap::<StorageKey, AccessInfo>::new(); changes.insert(key, AccessInfo::written(redundant)); child_keys.insert(storage_key, changes); } }; } pub fn read_written_keys(&self) -> Vec<u8> { let mut summary = HashMap::<StorageKey, AccessReport>::new(); self.main_keys.read().iter().for_each(|(key, info)| { let prefix_end = core::cmp::min(32, key.len()); let prefix = key[0..prefix_end].to_vec(); if let Some(report) = summary.get_mut(&prefix) { if info.read.is_important() { report.read += 1; } if info.written.is_important() { report.written += 1; } } else { let mut report = AccessReport::default(); if info.read.is_important() { report.read += 1; } if info.written.is_important() { report.written += 1; } if report.read + report.written > 0 { summary.insert(prefix, report); } } }); self.child_keys.read().iter().for_each(|(prefix, keys)| { keys.iter().for_each(|(key, info)| { let prefix_end = core::cmp::min(32, prefix.len() + key.len()); let prefix = [prefix.clone(), key.clone()].concat()[0..prefix_end].to_vec(); if let Some(report) = summary.get_mut(&prefix) { if info.read.is_important() { report.read += 1; } if info.written.is_important() { report.written += 1; } } else { let mut report = AccessReport::default(); if info.read.is_important() { report.read += 1; } if info.written.is_important() { report.written += 1; } if report.read + report.written > 0 { summary.insert(prefix, report); } } }); }); summary .into_iter() .map(|(prefix, report)| (prefix, report.read, report.written)) .collect::<Vec<(StorageKey, u32, u32)>>() .encode() } pub fn before_block(&self) { let timestamp = Instant::now(); let mut depth = self.depth.write(); if *depth == 0 { *depth = 1; return; } if *depth == 1 { *self.redundant.write() = timestamp; } *depth += 1; } pub fn after_block(&self) { let mut depth = self.depth.write(); if *depth == 2 { let redundant = self.redundant.read(); let elapsed = redundant.elapsed().as_nanos(); 
self.results.write().push(elapsed); } *depth -= 1; } pub fn warn_child_prefix_removal(&self) { *self.warn_child_prefix_remove.write() = true; } pub fn redundant_time(&self) -> u128 { assert_eq!(*self.depth.read(), 0, "benchmark in progress"); let mut elapsed = 0u128; self.results.read().iter().for_each(|x| { elapsed = elapsed.saturating_add(*x); }); elapsed } pub fn prepare(&self) { *self.depth.write() = 0; self.results.write().clear(); self.child_keys.write().clear(); *self.warn_child_prefix_remove.write() = false; let main_keys = &mut self.main_keys.write(); main_keys.clear(); let keys = self.whitelisted_keys.read(); for (key, (read, write)) in keys.iter() { main_keys.insert(key.clone(), AccessInfo::whitelisted(*read, *write)); } } pub fn whitelist(&self, key: Vec<u8>, read: bool, write: bool) { let whitelisted = &mut self.whitelisted_keys.write(); whitelisted.insert(key, (read, write)); } pub fn reset(&self) { *self.depth.write() = 0; *self.redundant.write() = Instant::now(); self.results.write().clear(); self.main_keys.write().clear(); self.child_keys.write().clear(); *self.warn_child_prefix_remove.write() = false; self.whitelisted_keys.write().clear(); } } sp_externalities::decl_extension! { pub struct BenchTrackerExt(Arc<BenchTracker>); }
22.785311
80
0.631168
505994f66e7e553c339ccfc49e7a59b7b3064b01
693
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // aux-build:call-site.rs // ignore-stage1 #![feature(proc_macro_non_items, use_extern_macros)] extern crate call_site; use call_site::*; fn main() { let x1 = 10; call_site::check!(let x2 = x1;); let x6 = x5; }
28.875
68
0.714286
75ba81a38a2b0bdde76e6b073c5e53a328d48766
2,431
#[cfg(test)] mod tests { use crate::helpers::CwTemplateContract; use crate::msg::InstantiateMsg; use cosmwasm_std::{Addr, Coin, Empty, Uint128}; use cw_multi_test::{App, AppBuilder, Contract, ContractWrapper, Executor}; pub fn contract_template() -> Box<dyn Contract<Empty>> { let contract = ContractWrapper::new( crate::contract::execute, crate::contract::instantiate, crate::contract::query, ); Box::new(contract) } const USER: &str = "USER"; const ADMIN: &str = "ADMIN"; const NATIVE_DENOM: &str = "denom"; fn mock_app() -> App { AppBuilder::new().build(|router, _, storage| { router .bank .init_balance( storage, &Addr::unchecked(USER), vec![Coin { denom: NATIVE_DENOM.to_string(), amount: Uint128::new(1), }], ) .unwrap(); }) } fn proper_instantiate() -> (App, CwTemplateContract) { let mut app = mock_app(); let cw_template_id = app.store_code(contract_template()); let msg = InstantiateMsg { blocks_per_year: 5048093, meta_url: "example.com".to_string(), denom: "ujuno".to_string(), cost_for_6: Some(1), cost_for_5: Some(2), cost_for_4: Some(4), cost_for_3: Some(8), cost_for_2: Some(16), cost_for_1: Some(32), }; let cw_template_contract_addr = app .instantiate_contract( cw_template_id, Addr::unchecked(ADMIN), &msg, &[], "test", None, ) .unwrap(); let cw_template_contract = CwTemplateContract(cw_template_contract_addr); (app, cw_template_contract) } mod blocks { use super::*; use crate::msg::ExecuteMsg; #[test] fn blocks() { let (mut app, cw_template_contract) = proper_instantiate(); let msg = ExecuteMsg::SetBlocksPerYear { blocks_per_year: 5048093 }; let cosmos_msg = cw_template_contract.call(msg).unwrap(); app.execute(Addr::unchecked(ADMIN), cosmos_msg).unwrap(); } } }
29.646341
81
0.508021
628d67ca959979710558d3e4a3a1c65664da600d
82,073
//! An implementation of the VP8 Video Codec //! //! This module contains a partial implementation of the //! VP8 video format as defined in RFC-6386. //! //! It decodes Keyframes only sans Loop Filtering. //! VP8 is the underpinning of the WebP image format //! //! # Related Links //! * [rfc-6386](http://tools.ietf.org/html/rfc6386) - The VP8 Data Format and Decoding Guide //! * [VP8.pdf](http://static.googleusercontent.com/media/research.google.com/en//pubs/archive/37073.pdf) - An overview of //! of the VP8 format //! use byteorder::{LittleEndian, ReadBytesExt}; use std::convert::TryInto; use std::default::Default; use std::{cmp, error, fmt}; use std::io::Read; use super::transform; use crate::error::{ DecodingError, ImageError, ImageResult, UnsupportedError, UnsupportedErrorKind, }; use crate::image::ImageFormat; use crate::utils::clamp; const MAX_SEGMENTS: usize = 4; const NUM_DCT_TOKENS: usize = 12; // Prediction modes const DC_PRED: i8 = 0; const V_PRED: i8 = 1; const H_PRED: i8 = 2; const TM_PRED: i8 = 3; const B_PRED: i8 = 4; const B_DC_PRED: i8 = 0; const B_TM_PRED: i8 = 1; const B_VE_PRED: i8 = 2; const B_HE_PRED: i8 = 3; const B_LD_PRED: i8 = 4; const B_RD_PRED: i8 = 5; const B_VR_PRED: i8 = 6; const B_VL_PRED: i8 = 7; const B_HD_PRED: i8 = 8; const B_HU_PRED: i8 = 9; // Prediction mode enum #[repr(i8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum LumaMode { /// Predict DC using row above and column to the left. DC = DC_PRED, /// Predict rows using row above. V = V_PRED, /// Predict columns using column to the left. H = H_PRED, /// Propagate second differences. TM = TM_PRED, /// Each Y subblock is independently predicted. B = B_PRED, } #[repr(i8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum ChromaMode { /// Predict DC using row above and column to the left. DC = DC_PRED, /// Predict rows using row above. V = V_PRED, /// Predict columns using column to the left. H = H_PRED, /// Propagate second differences. 
TM = TM_PRED, } #[repr(i8)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] enum IntraMode { DC = B_DC_PRED, TM = B_TM_PRED, VE = B_VE_PRED, HE = B_HE_PRED, LD = B_LD_PRED, RD = B_RD_PRED, VR = B_VR_PRED, VL = B_VL_PRED, HD = B_HD_PRED, HU = B_HU_PRED, } type Prob = u8; static SEGMENT_ID_TREE: [i8; 6] = [2, 4, -0, -1, -2, -3]; // Section 11.2 // Tree for determining the keyframe luma intra prediction modes: static KEYFRAME_YMODE_TREE: [i8; 8] = [-B_PRED, 2, 4, 6, -DC_PRED, -V_PRED, -H_PRED, -TM_PRED]; // Default probabilities for decoding the keyframe luma modes static KEYFRAME_YMODE_PROBS: [Prob; 4] = [145, 156, 163, 128]; // Tree for determining the keyframe B_PRED mode: static KEYFRAME_BPRED_MODE_TREE: [i8; 18] = [ -B_DC_PRED, 2, -B_TM_PRED, 4, -B_VE_PRED, 6, 8, 12, -B_HE_PRED, 10, -B_RD_PRED, -B_VR_PRED, -B_LD_PRED, 14, -B_VL_PRED, 16, -B_HD_PRED, -B_HU_PRED, ]; // Probabilities for the BPRED_MODE_TREE static KEYFRAME_BPRED_MODE_PROBS: [[[u8; 9]; 10]; 10] = [ [ [231, 120, 48, 89, 115, 113, 120, 152, 112], [152, 179, 64, 126, 170, 118, 46, 70, 95], [175, 69, 143, 80, 85, 82, 72, 155, 103], [56, 58, 10, 171, 218, 189, 17, 13, 152], [144, 71, 10, 38, 171, 213, 144, 34, 26], [114, 26, 17, 163, 44, 195, 21, 10, 173], [121, 24, 80, 195, 26, 62, 44, 64, 85], [170, 46, 55, 19, 136, 160, 33, 206, 71], [63, 20, 8, 114, 114, 208, 12, 9, 226], [81, 40, 11, 96, 182, 84, 29, 16, 36], ], [ [134, 183, 89, 137, 98, 101, 106, 165, 148], [72, 187, 100, 130, 157, 111, 32, 75, 80], [66, 102, 167, 99, 74, 62, 40, 234, 128], [41, 53, 9, 178, 241, 141, 26, 8, 107], [104, 79, 12, 27, 217, 255, 87, 17, 7], [74, 43, 26, 146, 73, 166, 49, 23, 157], [65, 38, 105, 160, 51, 52, 31, 115, 128], [87, 68, 71, 44, 114, 51, 15, 186, 23], [47, 41, 14, 110, 182, 183, 21, 17, 194], [66, 45, 25, 102, 197, 189, 23, 18, 22], ], [ [88, 88, 147, 150, 42, 46, 45, 196, 205], [43, 97, 183, 117, 85, 38, 35, 179, 61], [39, 53, 200, 87, 26, 21, 43, 232, 171], [56, 34, 51, 104, 114, 102, 29, 93, 77], [107, 54, 32, 
26, 51, 1, 81, 43, 31], [39, 28, 85, 171, 58, 165, 90, 98, 64], [34, 22, 116, 206, 23, 34, 43, 166, 73], [68, 25, 106, 22, 64, 171, 36, 225, 114], [34, 19, 21, 102, 132, 188, 16, 76, 124], [62, 18, 78, 95, 85, 57, 50, 48, 51], ], [ [193, 101, 35, 159, 215, 111, 89, 46, 111], [60, 148, 31, 172, 219, 228, 21, 18, 111], [112, 113, 77, 85, 179, 255, 38, 120, 114], [40, 42, 1, 196, 245, 209, 10, 25, 109], [100, 80, 8, 43, 154, 1, 51, 26, 71], [88, 43, 29, 140, 166, 213, 37, 43, 154], [61, 63, 30, 155, 67, 45, 68, 1, 209], [142, 78, 78, 16, 255, 128, 34, 197, 171], [41, 40, 5, 102, 211, 183, 4, 1, 221], [51, 50, 17, 168, 209, 192, 23, 25, 82], ], [ [125, 98, 42, 88, 104, 85, 117, 175, 82], [95, 84, 53, 89, 128, 100, 113, 101, 45], [75, 79, 123, 47, 51, 128, 81, 171, 1], [57, 17, 5, 71, 102, 57, 53, 41, 49], [115, 21, 2, 10, 102, 255, 166, 23, 6], [38, 33, 13, 121, 57, 73, 26, 1, 85], [41, 10, 67, 138, 77, 110, 90, 47, 114], [101, 29, 16, 10, 85, 128, 101, 196, 26], [57, 18, 10, 102, 102, 213, 34, 20, 43], [117, 20, 15, 36, 163, 128, 68, 1, 26], ], [ [138, 31, 36, 171, 27, 166, 38, 44, 229], [67, 87, 58, 169, 82, 115, 26, 59, 179], [63, 59, 90, 180, 59, 166, 93, 73, 154], [40, 40, 21, 116, 143, 209, 34, 39, 175], [57, 46, 22, 24, 128, 1, 54, 17, 37], [47, 15, 16, 183, 34, 223, 49, 45, 183], [46, 17, 33, 183, 6, 98, 15, 32, 183], [65, 32, 73, 115, 28, 128, 23, 128, 205], [40, 3, 9, 115, 51, 192, 18, 6, 223], [87, 37, 9, 115, 59, 77, 64, 21, 47], ], [ [104, 55, 44, 218, 9, 54, 53, 130, 226], [64, 90, 70, 205, 40, 41, 23, 26, 57], [54, 57, 112, 184, 5, 41, 38, 166, 213], [30, 34, 26, 133, 152, 116, 10, 32, 134], [75, 32, 12, 51, 192, 255, 160, 43, 51], [39, 19, 53, 221, 26, 114, 32, 73, 255], [31, 9, 65, 234, 2, 15, 1, 118, 73], [88, 31, 35, 67, 102, 85, 55, 186, 85], [56, 21, 23, 111, 59, 205, 45, 37, 192], [55, 38, 70, 124, 73, 102, 1, 34, 98], ], [ [102, 61, 71, 37, 34, 53, 31, 243, 192], [69, 60, 71, 38, 73, 119, 28, 222, 37], [68, 45, 128, 34, 1, 47, 11, 245, 171], [62, 
17, 19, 70, 146, 85, 55, 62, 70], [75, 15, 9, 9, 64, 255, 184, 119, 16], [37, 43, 37, 154, 100, 163, 85, 160, 1], [63, 9, 92, 136, 28, 64, 32, 201, 85], [86, 6, 28, 5, 64, 255, 25, 248, 1], [56, 8, 17, 132, 137, 255, 55, 116, 128], [58, 15, 20, 82, 135, 57, 26, 121, 40], ], [ [164, 50, 31, 137, 154, 133, 25, 35, 218], [51, 103, 44, 131, 131, 123, 31, 6, 158], [86, 40, 64, 135, 148, 224, 45, 183, 128], [22, 26, 17, 131, 240, 154, 14, 1, 209], [83, 12, 13, 54, 192, 255, 68, 47, 28], [45, 16, 21, 91, 64, 222, 7, 1, 197], [56, 21, 39, 155, 60, 138, 23, 102, 213], [85, 26, 85, 85, 128, 128, 32, 146, 171], [18, 11, 7, 63, 144, 171, 4, 4, 246], [35, 27, 10, 146, 174, 171, 12, 26, 128], ], [ [190, 80, 35, 99, 180, 80, 126, 54, 45], [85, 126, 47, 87, 176, 51, 41, 20, 32], [101, 75, 128, 139, 118, 146, 116, 128, 85], [56, 41, 15, 176, 236, 85, 37, 9, 62], [146, 36, 19, 30, 171, 255, 97, 27, 20], [71, 30, 17, 119, 118, 255, 17, 18, 138], [101, 38, 60, 138, 55, 70, 43, 26, 142], [138, 45, 61, 62, 219, 1, 81, 188, 64], [32, 41, 20, 117, 151, 142, 20, 21, 163], [112, 19, 12, 61, 195, 128, 48, 4, 24], ], ]; // Section 11.4 Tree for determining macroblock the chroma mode static KEYFRAME_UV_MODE_TREE: [i8; 6] = [-DC_PRED, 2, -V_PRED, 4, -H_PRED, -TM_PRED]; // Probabilities for determining macroblock mode static KEYFRAME_UV_MODE_PROBS: [Prob; 3] = [142, 114, 183]; // Section 13.4 type TokenProbTables = [[[[Prob; NUM_DCT_TOKENS - 1]; 3]; 8]; 4]; // Probabilities that a token's probability will be updated static COEFF_UPDATE_PROBS: TokenProbTables = [ [ [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [176, 246, 255, 255, 255, 255, 255, 255, 255, 255, 255], [223, 241, 252, 255, 255, 255, 255, 255, 255, 255, 255], [249, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 244, 252, 255, 255, 255, 255, 255, 255, 255, 255], [234, 254, 254, 255, 255, 
255, 255, 255, 255, 255, 255], [253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 246, 254, 255, 255, 255, 255, 255, 255, 255, 255], [239, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 248, 254, 255, 255, 255, 255, 255, 255, 255, 255], [251, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [251, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 254, 253, 255, 254, 255, 255, 255, 255, 255, 255], [250, 255, 254, 255, 254, 255, 255, 255, 255, 255, 255], [254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], ], [ [ [217, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [225, 252, 241, 253, 255, 255, 254, 255, 255, 255, 255], [234, 250, 241, 250, 253, 255, 253, 254, 255, 255, 255], ], [ [255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [223, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255], [238, 253, 254, 254, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 248, 254, 255, 255, 255, 255, 255, 255, 255, 255], [249, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255], [247, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255], [253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 
255, 255, 255, 255, 255, 255], ], [ [255, 254, 253, 255, 255, 255, 255, 255, 255, 255, 255], [250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], ], [ [ [186, 251, 250, 255, 255, 255, 255, 255, 255, 255, 255], [234, 251, 244, 254, 255, 255, 255, 255, 255, 255, 255], [251, 251, 243, 253, 254, 255, 254, 255, 255, 255, 255], ], [ [255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [236, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [251, 253, 253, 254, 254, 255, 255, 255, 255, 255, 255], ], [ [255, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255], [254, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [254, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], ], [ [ [248, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [250, 254, 252, 254, 255, 255, 255, 255, 255, 255, 255], [248, 254, 249, 253, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 253, 253, 
255, 255, 255, 255, 255, 255, 255, 255], [246, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255], [252, 254, 251, 254, 254, 255, 255, 255, 255, 255, 255], ], [ [255, 254, 252, 255, 255, 255, 255, 255, 255, 255, 255], [248, 254, 253, 255, 255, 255, 255, 255, 255, 255, 255], [253, 255, 254, 254, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 251, 254, 255, 255, 255, 255, 255, 255, 255, 255], [245, 251, 254, 255, 255, 255, 255, 255, 255, 255, 255], [253, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 251, 253, 255, 255, 255, 255, 255, 255, 255, 255], [252, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255], [255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255], [249, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255], [250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], [ [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], [255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255], ], ], ]; // Section 13.5 // Default Probabilities for tokens static COEFF_PROBS: TokenProbTables = [ [ [ [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], ], [ [253, 136, 254, 255, 228, 219, 128, 128, 128, 128, 128], [189, 129, 242, 255, 227, 213, 255, 219, 128, 128, 128], [106, 126, 227, 252, 214, 209, 255, 255, 128, 128, 128], ], [ [1, 98, 248, 255, 236, 226, 255, 255, 128, 128, 128], [181, 133, 238, 254, 221, 234, 255, 154, 128, 128, 128], [78, 134, 202, 247, 198, 180, 255, 219, 128, 128, 128], ], [ [1, 185, 249, 255, 243, 255, 128, 128, 128, 128, 128], [184, 150, 247, 255, 236, 224, 128, 128, 128, 128, 128], [77, 110, 216, 255, 236, 230, 128, 128, 128, 
128, 128], ], [ [1, 101, 251, 255, 241, 255, 128, 128, 128, 128, 128], [170, 139, 241, 252, 236, 209, 255, 255, 128, 128, 128], [37, 116, 196, 243, 228, 255, 255, 255, 128, 128, 128], ], [ [1, 204, 254, 255, 245, 255, 128, 128, 128, 128, 128], [207, 160, 250, 255, 238, 128, 128, 128, 128, 128, 128], [102, 103, 231, 255, 211, 171, 128, 128, 128, 128, 128], ], [ [1, 152, 252, 255, 240, 255, 128, 128, 128, 128, 128], [177, 135, 243, 255, 234, 225, 128, 128, 128, 128, 128], [80, 129, 211, 255, 194, 224, 128, 128, 128, 128, 128], ], [ [1, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], [246, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], [255, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], ], ], [ [ [198, 35, 237, 223, 193, 187, 162, 160, 145, 155, 62], [131, 45, 198, 221, 172, 176, 220, 157, 252, 221, 1], [68, 47, 146, 208, 149, 167, 221, 162, 255, 223, 128], ], [ [1, 149, 241, 255, 221, 224, 255, 255, 128, 128, 128], [184, 141, 234, 253, 222, 220, 255, 199, 128, 128, 128], [81, 99, 181, 242, 176, 190, 249, 202, 255, 255, 128], ], [ [1, 129, 232, 253, 214, 197, 242, 196, 255, 255, 128], [99, 121, 210, 250, 201, 198, 255, 202, 128, 128, 128], [23, 91, 163, 242, 170, 187, 247, 210, 255, 255, 128], ], [ [1, 200, 246, 255, 234, 255, 128, 128, 128, 128, 128], [109, 178, 241, 255, 231, 245, 255, 255, 128, 128, 128], [44, 130, 201, 253, 205, 192, 255, 255, 128, 128, 128], ], [ [1, 132, 239, 251, 219, 209, 255, 165, 128, 128, 128], [94, 136, 225, 251, 218, 190, 255, 255, 128, 128, 128], [22, 100, 174, 245, 186, 161, 255, 199, 128, 128, 128], ], [ [1, 182, 249, 255, 232, 235, 128, 128, 128, 128, 128], [124, 143, 241, 255, 227, 234, 128, 128, 128, 128, 128], [35, 77, 181, 251, 193, 211, 255, 205, 128, 128, 128], ], [ [1, 157, 247, 255, 236, 231, 255, 255, 128, 128, 128], [121, 141, 235, 255, 225, 227, 255, 255, 128, 128, 128], [45, 99, 188, 251, 195, 217, 255, 224, 128, 128, 128], ], [ [1, 1, 251, 255, 213, 255, 128, 128, 128, 128, 128], [203, 1, 248, 255, 255, 128, 128, 
128, 128, 128, 128], [137, 1, 177, 255, 224, 255, 128, 128, 128, 128, 128], ], ], [ [ [253, 9, 248, 251, 207, 208, 255, 192, 128, 128, 128], [175, 13, 224, 243, 193, 185, 249, 198, 255, 255, 128], [73, 17, 171, 221, 161, 179, 236, 167, 255, 234, 128], ], [ [1, 95, 247, 253, 212, 183, 255, 255, 128, 128, 128], [239, 90, 244, 250, 211, 209, 255, 255, 128, 128, 128], [155, 77, 195, 248, 188, 195, 255, 255, 128, 128, 128], ], [ [1, 24, 239, 251, 218, 219, 255, 205, 128, 128, 128], [201, 51, 219, 255, 196, 186, 128, 128, 128, 128, 128], [69, 46, 190, 239, 201, 218, 255, 228, 128, 128, 128], ], [ [1, 191, 251, 255, 255, 128, 128, 128, 128, 128, 128], [223, 165, 249, 255, 213, 255, 128, 128, 128, 128, 128], [141, 124, 248, 255, 255, 128, 128, 128, 128, 128, 128], ], [ [1, 16, 248, 255, 255, 128, 128, 128, 128, 128, 128], [190, 36, 230, 255, 236, 255, 128, 128, 128, 128, 128], [149, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], ], [ [1, 226, 255, 128, 128, 128, 128, 128, 128, 128, 128], [247, 192, 255, 128, 128, 128, 128, 128, 128, 128, 128], [240, 128, 255, 128, 128, 128, 128, 128, 128, 128, 128], ], [ [1, 134, 252, 255, 255, 128, 128, 128, 128, 128, 128], [213, 62, 250, 255, 255, 128, 128, 128, 128, 128, 128], [55, 93, 255, 128, 128, 128, 128, 128, 128, 128, 128], ], [ [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], [128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128], ], ], [ [ [202, 24, 213, 235, 186, 191, 220, 160, 240, 175, 255], [126, 38, 182, 232, 169, 184, 228, 174, 255, 187, 128], [61, 46, 138, 219, 151, 178, 240, 170, 255, 216, 128], ], [ [1, 112, 230, 250, 199, 191, 247, 159, 255, 255, 128], [166, 109, 228, 252, 211, 215, 255, 174, 128, 128, 128], [39, 77, 162, 232, 172, 180, 245, 178, 255, 255, 128], ], [ [1, 52, 220, 246, 198, 199, 249, 220, 255, 255, 128], [124, 74, 191, 243, 183, 193, 250, 221, 255, 255, 128], [24, 71, 130, 219, 154, 170, 243, 182, 255, 255, 128], ], [ [1, 182, 225, 
249, 219, 240, 255, 224, 128, 128, 128], [149, 150, 226, 252, 216, 205, 255, 171, 128, 128, 128], [28, 108, 170, 242, 183, 194, 254, 223, 255, 255, 128], ], [ [1, 81, 230, 252, 204, 203, 255, 192, 128, 128, 128], [123, 102, 209, 247, 188, 196, 255, 233, 128, 128, 128], [20, 95, 153, 243, 164, 173, 255, 203, 128, 128, 128], ], [ [1, 222, 248, 255, 216, 213, 128, 128, 128, 128, 128], [168, 175, 246, 252, 235, 205, 255, 255, 128, 128, 128], [47, 116, 215, 255, 211, 212, 255, 255, 128, 128, 128], ], [ [1, 121, 236, 253, 212, 214, 255, 255, 128, 128, 128], [141, 84, 213, 252, 201, 202, 255, 219, 128, 128, 128], [42, 80, 160, 240, 162, 185, 255, 205, 128, 128, 128], ], [ [1, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], [244, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], [238, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128], ], ], ]; // DCT Tokens const DCT_0: i8 = 0; const DCT_1: i8 = 1; const DCT_2: i8 = 2; const DCT_3: i8 = 3; const DCT_4: i8 = 4; const DCT_CAT1: i8 = 5; const DCT_CAT2: i8 = 6; const DCT_CAT3: i8 = 7; const DCT_CAT4: i8 = 8; const DCT_CAT5: i8 = 9; const DCT_CAT6: i8 = 10; const DCT_EOB: i8 = 11; static DCT_TOKEN_TREE: [i8; 22] = [ -DCT_EOB, 2, -DCT_0, 4, -DCT_1, 6, 8, 12, -DCT_2, 10, -DCT_3, -DCT_4, 14, 16, -DCT_CAT1, -DCT_CAT2, 18, 20, -DCT_CAT3, -DCT_CAT4, -DCT_CAT5, -DCT_CAT6, ]; static PROB_DCT_CAT: [[Prob; 12]; 6] = [ [159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [165, 145, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [173, 148, 140, 0, 0, 0, 0, 0, 0, 0, 0, 0], [176, 155, 140, 135, 0, 0, 0, 0, 0, 0, 0, 0], [180, 157, 141, 134, 130, 0, 0, 0, 0, 0, 0, 0], [254, 254, 243, 230, 196, 177, 153, 140, 133, 130, 129, 0], ]; static DCT_CAT_BASE: [u8; 6] = [5, 7, 11, 19, 35, 67]; static COEFF_BANDS: [u8; 16] = [0, 1, 2, 3, 6, 4, 5, 6, 6, 6, 6, 6, 6, 6, 6, 7]; #[rustfmt::skip] static DC_QUANT: [i16; 128] = [ 4, 5, 6, 7, 8, 9, 10, 10, 11, 12, 13, 14, 15, 16, 17, 17, 18, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 25, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 37, 38, 
39, 40, 41, 42, 43, 44, 45, 46, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 93, 95, 96, 98, 100, 101, 102, 104, 106, 108, 110, 112, 114, 116, 118, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 143, 145, 148, 151, 154, 157, ]; #[rustfmt::skip] static AC_QUANT: [i16; 128] = [ 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 119, 122, 125, 128, 131, 134, 137, 140, 143, 146, 149, 152, 155, 158, 161, 164, 167, 170, 173, 177, 181, 185, 189, 193, 197, 201, 205, 209, 213, 217, 221, 225, 229, 234, 239, 245, 249, 254, 259, 264, 269, 274, 279, 284, ]; static ZIGZAG: [u8; 16] = [0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15]; /// All errors that can occur when attempting to parse a VP8 codec inside WebP #[derive(Debug, Clone, Copy)] enum DecoderError { /// VP8's `[0x9D, 0x01, 0x2A]` magic not found or invalid Vp8MagicInvalid([u8; 3]), /// Decoder initialisation wasn't provided with enough data NotEnoughInitData, /// At time of writing, only the YUV colour-space encoded as `0` is specified ColorSpaceInvalid(u8), /// LUMA prediction mode was not recognised LumaPredictionModeInvalid(i8), /// Intra-prediction mode was not recognised IntraPredictionModeInvalid(i8), /// Chroma prediction mode was not recognised ChromaPredictionModeInvalid(i8), } impl fmt::Display for DecoderError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { DecoderError::Vp8MagicInvalid(tag) => f.write_fmt(format_args!("Invalid VP8 magic: [{:#04X?}, {:#04X?}, {:#04X?}]", tag[0], tag[1], tag[2])), DecoderError::NotEnoughInitData => f.write_str("Expected at 
least 2 bytes of VP8 decoder initialization data"), DecoderError::ColorSpaceInvalid(cs) => f.write_fmt(format_args!("Invalid non-YUV VP8 color space {}", cs)), DecoderError::LumaPredictionModeInvalid(pm) => f.write_fmt(format_args!("Invalid VP8 LUMA prediction mode {}", pm)), DecoderError::IntraPredictionModeInvalid(i) => f.write_fmt(format_args!("Invalid VP8 intra-prediction mode {}", i)), DecoderError::ChromaPredictionModeInvalid(c) => f.write_fmt(format_args!("Invalid VP8 chroma prediction mode {}", c)), } } } impl From<DecoderError> for ImageError { fn from(e: DecoderError) -> ImageError { ImageError::Decoding(DecodingError::new(ImageFormat::WebP.into(), e)) } } impl error::Error for DecoderError {} struct BoolReader { buf: Vec<u8>, index: usize, range: u32, value: u32, bit_count: u8, } impl BoolReader { pub(crate) fn new() -> BoolReader { BoolReader { buf: Vec::new(), range: 0, value: 0, bit_count: 0, index: 0, } } pub(crate) fn init(&mut self, buf: Vec<u8>) -> ImageResult<()> { if buf.len() < 2 { return Err(DecoderError::NotEnoughInitData.into()); } self.buf = buf; // Direct access safe, since length has just been validated. self.value = (u32::from(self.buf[0]) << 8) | u32::from(self.buf[1]); self.index = 2; self.range = 255; self.bit_count = 0; Ok(()) } pub(crate) fn read_bool(&mut self, probability: u8) -> bool { let split = 1 + (((self.range - 1) * u32::from(probability)) >> 8); let bigsplit = split << 8; let retval = if self.value >= bigsplit { self.range -= split; self.value -= bigsplit; true } else { self.range = split; false }; while self.range < 128 { self.value <<= 1; self.range <<= 1; self.bit_count += 1; if self.bit_count == 8 { self.bit_count = 0; // If no more bits are available, just don't do anything. 
// This strategy is suggested in the reference implementation of RFC6386 (p.135) if self.index < self.buf.len() { self.value |= u32::from(self.buf[self.index]); self.index += 1; } } } retval } pub(crate) fn read_literal(&mut self, n: u8) -> u8 { let mut v = 0u8; let mut n = n; while n != 0 { v = (v << 1) + self.read_bool(128u8) as u8; n -= 1; } v } pub(crate) fn read_magnitude_and_sign(&mut self, n: u8) -> i32 { let magnitude = self.read_literal(n); let sign = self.read_literal(1); if sign == 1 { -i32::from(magnitude) } else { i32::from(magnitude) } } pub(crate) fn read_with_tree(&mut self, tree: &[i8], probs: &[Prob], start: isize) -> i8 { let mut index = start; loop { let a = self.read_bool(probs[index as usize >> 1]); let b = index + a as isize; index = tree[b as usize] as isize; if index <= 0 { break; } } -index as i8 } pub(crate) fn read_flag(&mut self) -> bool { 0 != self.read_literal(1) } } #[derive(Default, Clone, Copy)] struct MacroBlock { bpred: [IntraMode; 16], complexity: [u8; 9], luma_mode: LumaMode, chroma_mode: ChromaMode, segmentid: u8, } /// A Representation of the last decoded video frame #[derive(Default, Debug, Clone)] pub struct Frame { /// The width of the luma plane pub width: u16, /// The height of the luma plane pub height: u16, /// The luma plane of the frame pub ybuf: Vec<u8>, /// The blue plane of the frame pub ubuf: Vec<u8>, /// The red plane of the frame pub vbuf: Vec<u8>, /// Indicates whether this frame is a keyframe pub keyframe: bool, version: u8, /// Indicates whether this frame is intended for display pub for_display: bool, // Section 9.2 /// The pixel type of the frame as defined by Section 9.2 /// of the VP8 Specification pub pixel_type: u8, // Section 9.4 and 15 filter: u8, filter_level: u8, sharpness_level: u8, } impl Frame { /// Chroma plane is half the size of the Luma plane fn chroma_width(&self) -> u16 { (self.width + 1) / 2 } fn chroma_height(&self) -> u16 { (self.height + 1) / 2 } /// Fills an rgb buffer with the 
converted values from the 4:2:0 YUV planes /// Conversion values from https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#converting-8-bit-yuv-to-rgb888 pub fn fill_rgb(&self, buf: &mut [u8]) { for index in 0..self.ybuf.len() { let y = index / self.width as usize; let x = index % self.width as usize; let chroma_index = self.chroma_width() as usize * (y / 2) + x / 2; let rgb_index = index * 3; let c = self.ybuf[index] as i32 - 16; let d = self.ubuf[chroma_index] as i32 - 128; let e = self.vbuf[chroma_index] as i32 - 128; let r = clamp((298 * c + 409 * e + 128) >> 8, 0, 255) as u8; let g = clamp((298 * c - 100 * d - 208 * e + 128) >> 8, 0, 255) as u8; let b = clamp((298 * c + 516 * d + 128) >> 8, 0, 255) as u8; buf[rgb_index] = r; buf[rgb_index+1] = g; buf[rgb_index+2] = b; } } } #[derive(Clone, Copy, Default)] struct Segment { ydc: i16, yac: i16, y2dc: i16, y2ac: i16, uvdc: i16, uvac: i16, delta_values: bool, quantizer_level: i8, loopfilter_level: i8, } /// VP8 Decoder /// /// Only decodes keyframes pub struct Vp8Decoder<R> { r: R, b: BoolReader, mbwidth: u16, mbheight: u16, frame: Frame, segments_enabled: bool, segments_update_map: bool, segment: [Segment; MAX_SEGMENTS], partitions: [BoolReader; 8], num_partitions: u8, segment_tree_probs: [Prob; 3], token_probs: Box<TokenProbTables>, // Section 9.10 prob_intra: Prob, // Section 9.11 prob_skip_false: Option<Prob>, top: Vec<MacroBlock>, left: MacroBlock, top_border: Vec<u8>, left_border: Vec<u8>, } impl<R: Read> Vp8Decoder<R> { /// Create a new decoder. 
/// The reader must present a raw vp8 bitstream to the decoder pub fn new(r: R) -> Vp8Decoder<R> { let f = Frame::default(); let s = Segment::default(); let m = MacroBlock::default(); Vp8Decoder { r, b: BoolReader::new(), mbwidth: 0, mbheight: 0, frame: f, segments_enabled: false, segments_update_map: false, segment: [s; MAX_SEGMENTS], partitions: [ BoolReader::new(), BoolReader::new(), BoolReader::new(), BoolReader::new(), BoolReader::new(), BoolReader::new(), BoolReader::new(), BoolReader::new(), ], num_partitions: 1, segment_tree_probs: [255u8; 3], token_probs: Box::new(COEFF_PROBS), // Section 9.10 prob_intra: 0u8, // Section 9.11 prob_skip_false: None, top: Vec::new(), left: m, top_border: Vec::new(), left_border: Vec::new(), } } fn update_token_probabilities(&mut self) { for (i, is) in COEFF_UPDATE_PROBS.iter().enumerate() { for (j, js) in is.iter().enumerate() { for (k, ks) in js.iter().enumerate() { for (t, prob) in ks.iter().enumerate().take(NUM_DCT_TOKENS - 1) { if self.b.read_bool(*prob) { let v = self.b.read_literal(8); self.token_probs[i][j][k][t] = v; } } } } } } fn init_partitions(&mut self, n: usize) -> ImageResult<()> { if n > 1 { let mut sizes = vec![0; 3 * n - 3]; self.r.read_exact(sizes.as_mut_slice())?; for (i, s) in sizes.chunks(3).enumerate() { let size = {s}.read_u24::<LittleEndian>() .expect("Reading from &[u8] can't fail and the chunk is complete"); let mut buf = vec![0; size as usize]; self.r.read_exact(buf.as_mut_slice())?; self.partitions[i].init(buf)?; } } let mut buf = Vec::new(); self.r.read_to_end(&mut buf)?; self.partitions[n - 1].init(buf)?; Ok(()) } fn read_quantization_indices(&mut self) { fn dc_quant(index: i32) -> i16 { DC_QUANT[clamp(index, 0, 127) as usize] } fn ac_quant(index: i32) -> i16 { AC_QUANT[clamp(index, 0, 127) as usize] } let yac_abs = self.b.read_literal(7); let ydc_delta = if self.b.read_flag() { self.b.read_magnitude_and_sign(4) } else { 0 }; let y2dc_delta = if self.b.read_flag() { 
self.b.read_magnitude_and_sign(4) } else { 0 }; let y2ac_delta = if self.b.read_flag() { self.b.read_magnitude_and_sign(4) } else { 0 }; let uvdc_delta = if self.b.read_flag() { self.b.read_magnitude_and_sign(4) } else { 0 }; let uvac_delta = if self.b.read_flag() { self.b.read_magnitude_and_sign(4) } else { 0 }; let n = if self.segments_enabled { MAX_SEGMENTS } else { 1 }; for i in 0usize..n { let base = i32::from(if !self.segment[i].delta_values { i16::from(self.segment[i].quantizer_level) } else { i16::from(self.segment[i].quantizer_level) + i16::from(yac_abs) }); self.segment[i].ydc = dc_quant(base + ydc_delta); self.segment[i].yac = ac_quant(base); self.segment[i].y2dc = dc_quant(base + y2dc_delta) * 2; // The intermediate result (max`284*155`) can be larger than the `i16` range. self.segment[i].y2ac = (i32::from(ac_quant(base + y2ac_delta)) * 155 / 100) as i16; self.segment[i].uvdc = dc_quant(base + uvdc_delta); self.segment[i].uvac = ac_quant(base + uvac_delta); if self.segment[i].y2ac < 8 { self.segment[i].y2ac = 8; } if self.segment[i].uvdc > 132 { self.segment[i].uvdc = 132; } } } fn read_loop_filter_adjustments(&mut self) { if self.b.read_flag() { for _i in 0usize..4 { let ref_frame_delta_update_flag = self.b.read_flag(); let _delta = if ref_frame_delta_update_flag { self.b.read_magnitude_and_sign(6) } else { 0i32 }; } for _i in 0usize..4 { let mb_mode_delta_update_flag = self.b.read_flag(); let _delta = if mb_mode_delta_update_flag { self.b.read_magnitude_and_sign(6) } else { 0i32 }; } } } fn read_segment_updates(&mut self) { // Section 9.3 self.segments_update_map = self.b.read_flag(); let update_segment_feature_data = self.b.read_flag(); if update_segment_feature_data { let segment_feature_mode = self.b.read_flag(); for i in 0usize..MAX_SEGMENTS { self.segment[i].delta_values = !segment_feature_mode; } for i in 0usize..MAX_SEGMENTS { let update = self.b.read_flag(); self.segment[i].quantizer_level = if update { self.b.read_magnitude_and_sign(7) } else 
{ 0i32 } as i8; } for i in 0usize..MAX_SEGMENTS { let update = self.b.read_flag(); self.segment[i].loopfilter_level = if update { self.b.read_magnitude_and_sign(6) } else { 0i32 } as i8; } } if self.segments_update_map { for i in 0usize..3 { let update = self.b.read_flag(); self.segment_tree_probs[i] = if update { self.b.read_literal(8) } else { 255 }; } } } fn read_frame_header(&mut self) -> ImageResult<()> { let tag = self.r.read_u24::<LittleEndian>()?; self.frame.keyframe = tag & 1 == 0; self.frame.version = ((tag >> 1) & 7) as u8; self.frame.for_display = (tag >> 4) & 1 != 0; let first_partition_size = tag >> 5; if self.frame.keyframe { let mut tag = [0u8; 3]; self.r.read_exact(&mut tag)?; if tag != [0x9d, 0x01, 0x2a] { return Err(DecoderError::Vp8MagicInvalid(tag).into()); } let w = self.r.read_u16::<LittleEndian>()?; let h = self.r.read_u16::<LittleEndian>()?; self.frame.width = w & 0x3FFF; self.frame.height = h & 0x3FFF; self.top = init_top_macroblocks(self.frame.width as usize); // Almost always the first macro block, except when non exists (i.e. 
`width == 0`) self.left = self.top.get(0).cloned() .unwrap_or_else(MacroBlock::default); self.mbwidth = (self.frame.width + 15) / 16; self.mbheight = (self.frame.height + 15) / 16; self.frame.ybuf = vec![0u8; self.frame.width as usize * self.frame.height as usize]; self.frame.ubuf = vec![0u8; self.frame.chroma_width() as usize * self.frame.chroma_height() as usize]; self.frame.vbuf = vec![0u8; self.frame.chroma_width() as usize * self.frame.chroma_height() as usize]; self.top_border = vec![127u8; self.frame.width as usize + 4 + 16]; self.left_border = vec![129u8; 1 + 16]; } let mut buf = vec![0; first_partition_size as usize]; self.r.read_exact(&mut buf)?; // initialise binary decoder self.b.init(buf)?; if self.frame.keyframe { let color_space = self.b.read_literal(1); self.frame.pixel_type = self.b.read_literal(1); if color_space != 0 { return Err(DecoderError::ColorSpaceInvalid(color_space).into()); } } self.segments_enabled = self.b.read_flag(); if self.segments_enabled { self.read_segment_updates(); } self.frame.filter = self.b.read_literal(1); self.frame.filter_level = self.b.read_literal(6); self.frame.sharpness_level = self.b.read_literal(3); let lf_adjust_enable = self.b.read_flag(); if lf_adjust_enable { self.read_loop_filter_adjustments(); } self.num_partitions = (1usize << self.b.read_literal(2) as usize) as u8; let num_partitions = self.num_partitions as usize; self.init_partitions(num_partitions)?; self.read_quantization_indices(); if !self.frame.keyframe { // 9.7 refresh golden frame and altref frame // FIXME: support this? return Err(ImageError::Unsupported( UnsupportedError::from_format_and_kind( ImageFormat::WebP.into(), UnsupportedErrorKind::GenericFeature("Non-keyframe frames".to_owned()), ), )); } else { // Refresh entropy probs ????? 
let _ = self.b.read_literal(1); } self.update_token_probabilities(); let mb_no_skip_coeff = self.b.read_literal(1); self.prob_skip_false = if mb_no_skip_coeff == 1 { Some(self.b.read_literal(8)) } else { None }; if !self.frame.keyframe { // 9.10 remaining frame data self.prob_intra = 0; // FIXME: support this? return Err(ImageError::Unsupported( UnsupportedError::from_format_and_kind( ImageFormat::WebP.into(), UnsupportedErrorKind::GenericFeature("Non-keyframe frames".to_owned()), ), )); } else { // Reset motion vectors } Ok(()) } fn read_macroblock_header(&mut self, mbx: usize) -> ImageResult<(bool, MacroBlock)> { let mut mb = MacroBlock::default(); if self.segments_enabled && self.segments_update_map { mb.segmentid = self.b .read_with_tree(&SEGMENT_ID_TREE, &self.segment_tree_probs, 0) as u8; }; let skip_coeff = if self.prob_skip_false.is_some() { self.b.read_bool(*self.prob_skip_false.as_ref().unwrap()) } else { false }; let inter_predicted = if !self.frame.keyframe { self.b.read_bool(self.prob_intra) } else { false }; if inter_predicted { return Err(ImageError::Unsupported( UnsupportedError::from_format_and_kind( ImageFormat::WebP.into(), UnsupportedErrorKind::GenericFeature("VP8 inter-prediction".to_owned()), ), )); } if self.frame.keyframe { // intra prediction let luma = self.b .read_with_tree(&KEYFRAME_YMODE_TREE, &KEYFRAME_YMODE_PROBS, 0); mb.luma_mode = LumaMode::from_i8(luma) .ok_or(DecoderError::LumaPredictionModeInvalid(luma))?; match mb.luma_mode.into_intra() { // `LumaMode::B` - This is predicted individually None => { for y in 0usize..4 { for x in 0usize..4 { let top = self.top[mbx].bpred[12 + x]; let left = self.left.bpred[y]; let intra = self.b.read_with_tree( &KEYFRAME_BPRED_MODE_TREE, &KEYFRAME_BPRED_MODE_PROBS[top as usize][left as usize], 0, ); let bmode = IntraMode::from_i8(intra) .ok_or(DecoderError::IntraPredictionModeInvalid(intra))?; mb.bpred[x + y * 4] = bmode; self.top[mbx].bpred[12 + x] = bmode; self.left.bpred[y] = bmode; } } }, 
Some(mode) => { for i in 0usize..4 { mb.bpred[12 + i] = mode; self.left.bpred[i] = mode; } } } let chroma = self.b .read_with_tree(&KEYFRAME_UV_MODE_TREE, &KEYFRAME_UV_MODE_PROBS, 0); mb.chroma_mode = ChromaMode::from_i8(chroma) .ok_or(DecoderError::ChromaPredictionModeInvalid(chroma))?; } self.top[mbx].chroma_mode = mb.chroma_mode; self.top[mbx].luma_mode = mb.luma_mode; self.top[mbx].bpred = mb.bpred; Ok((skip_coeff, mb)) } fn intra_predict_luma(&mut self, mbx: usize, mby: usize, mb: &MacroBlock, resdata: &[i32]) { let stride = 1usize + 16 + 4; let w = self.frame.width as usize; let mw = self.mbwidth as usize; let mut ws = create_border_luma(mbx, mby, mw, &self.top_border, &self.left_border); match mb.luma_mode { LumaMode::V => predict_vpred(&mut ws, 16, 1, 1, stride), LumaMode::H => predict_hpred(&mut ws, 16, 1, 1, stride), LumaMode::TM => predict_tmpred(&mut ws, 16, 1, 1, stride), LumaMode::DC => predict_dcpred(&mut ws, 16, stride, mby != 0, mbx != 0), LumaMode::B => predict_4x4(&mut ws, stride, &mb.bpred, resdata), } if mb.luma_mode != LumaMode::B { for y in 0usize..4 { for x in 0usize..4 { let i = x + y * 4; // Create a reference to a [i32; 16] array for add_residue (slices of size 16 do not work). let rb: &[i32; 16] = resdata[i*16..][..16].try_into().unwrap(); let y0 = 1 + y * 4; let x0 = 1 + x * 4; add_residue(&mut ws, rb, y0, x0, stride); } } } self.left_border[0] = ws[16]; for i in 0usize..16 { self.top_border[mbx * 16 + i] = ws[16 * stride + 1 + i]; self.left_border[i + 1] = ws[(i + 1) * stride + 16]; } // Length is the remainder to the border, but maximally the current chunk. 
let ylength = cmp::min(self.frame.height as usize - mby*16, 16); let xlength = cmp::min(self.frame.width as usize - mbx*16, 16); for y in 0usize..ylength { for x in 0usize..xlength { self.frame.ybuf[(mby * 16 + y) * w + mbx * 16 + x] = ws[(1 + y) * stride + 1 + x]; } } } fn intra_predict_chroma(&mut self, mbx: usize, mby: usize, mb: &MacroBlock, resdata: &[i32]) { let stride = 1usize + 8; let w = self.frame.chroma_width() as usize; //8x8 with left top border of 1 let mut uws = [0u8; (8 + 1) * (8 + 1)]; let mut vws = [0u8; (8 + 1) * (8 + 1)]; let ylength = cmp::min(self.frame.chroma_height() as usize - mby*8, 8); let xlength = cmp::min(self.frame.chroma_width() as usize - mbx*8, 8); //left border for y in 0usize..8 { let (uy, vy) = if mbx == 0 || y >= ylength { (127, 127) } else { let index = (mby * 8 + y) * w + ((mbx - 1) * 8 + 7); ( self.frame.ubuf[index], self.frame.vbuf[index] ) }; uws[(y + 1) * stride] = uy; vws[(y + 1) * stride] = vy; } //top border for x in 0usize..8 { let (ux, vx) = if mby == 0 || x >= xlength { (129, 129) } else { let index = ((mby - 1) * 8 + 7) * w + (mbx * 8 + x); ( self.frame.ubuf[index], self.frame.vbuf[index] ) }; uws[x + 1] = ux; vws[x + 1] = vx; } //top left point let (u1, v1) = if mby == 0 { (127, 127) } else if mbx == 0 { (129, 129) } else { let index = ((mby - 1) * 8 + 7) * w + (mbx - 1) * 8 + 7; if index >= self.frame.ubuf.len() { (127, 127) } else { ( self.frame.ubuf[index], self.frame.vbuf[index] ) } }; uws[0] = u1; vws[0] = v1; match mb.chroma_mode { ChromaMode::DC => { predict_dcpred(&mut uws, 8, stride, mby != 0, mbx != 0); predict_dcpred(&mut vws, 8, stride, mby != 0, mbx != 0); }, ChromaMode::V => { predict_vpred(&mut uws, 8, 1, 1, stride); predict_vpred(&mut vws, 8, 1, 1, stride); }, ChromaMode::H => { predict_hpred(&mut uws, 8, 1, 1, stride); predict_hpred(&mut vws, 8, 1, 1, stride); }, ChromaMode::TM => { predict_tmpred(&mut uws, 8, 1, 1, stride); predict_tmpred(&mut vws, 8, 1, 1, stride); }, } for y in 0usize..2 { for 
x in 0usize..2 { let i = x + y * 2; let urb: &[i32; 16] = resdata[16 * 16 + i * 16..][..16].try_into().unwrap(); let y0 = 1 + y * 4; let x0 = 1 + x * 4; add_residue(&mut uws, urb, y0, x0, stride); let vrb: &[i32; 16] = resdata[20 * 16 + i * 16..][..16].try_into().unwrap(); add_residue(&mut vws, vrb, y0, x0, stride); } } for y in 0usize..ylength { for x in 0usize..xlength { self.frame.ubuf[(mby * 8 + y) * w + mbx * 8 + x] = uws[(1 + y) * stride + 1 + x]; self.frame.vbuf[(mby * 8 + y) * w + mbx * 8 + x] = vws[(1 + y) * stride + 1 + x]; } } } fn read_coefficients( &mut self, block: &mut [i32], p: usize, plane: usize, complexity: usize, dcq: i16, acq: i16, ) -> bool { let first = if plane == 0 { 1usize } else { 0usize }; let probs = &self.token_probs[plane]; let tree = &DCT_TOKEN_TREE; let mut complexity = complexity; let mut has_coefficients = false; let mut skip = false; for i in first..16usize { let table = &probs[COEFF_BANDS[i] as usize][complexity]; let token = if !skip { self.partitions[p].read_with_tree(tree, table, 0) } else { self.partitions[p].read_with_tree(tree, table, 2) }; let mut abs_value = i32::from(match token { DCT_EOB => break, DCT_0 => { skip = true; has_coefficients = true; complexity = 0; continue; } literal @ DCT_1..=DCT_4 => i16::from(literal), category @ DCT_CAT1..=DCT_CAT6 => { let t = PROB_DCT_CAT[(category - DCT_CAT1) as usize]; let mut extra = 0i16; let mut j = 0; while t[j] > 0 { extra = extra + extra + self.partitions[p].read_bool(t[j]) as i16; j += 1; } i16::from(DCT_CAT_BASE[(category - DCT_CAT1) as usize]) + extra } c => panic!("unknown token: {}", c), }); skip = false; complexity = if abs_value == 0 { 0 } else if abs_value == 1 { 1 } else { 2 }; if self.partitions[p].read_bool(128) { abs_value = -abs_value; } block[ZIGZAG[i] as usize] = abs_value * i32::from(if ZIGZAG[i] > 0 { acq } else { dcq }); has_coefficients = true; } has_coefficients } fn read_residual_data(&mut self, mb: &MacroBlock, mbx: usize, p: usize) -> [i32; 384] { let 
sindex = mb.segmentid as usize; let mut blocks = [0i32; 384]; let mut plane = if mb.luma_mode == LumaMode::B { 3 } else { 1 }; if plane == 1 { let complexity = self.top[mbx].complexity[0] + self.left.complexity[0]; let mut block = [0i32; 16]; let dcq = self.segment[sindex].y2dc; let acq = self.segment[sindex].y2ac; let n = self.read_coefficients(&mut block, p, plane, complexity as usize, dcq, acq); self.left.complexity[0] = if n { 1 } else { 0 }; self.top[mbx].complexity[0] = if n { 1 } else { 0 }; transform::iwht4x4(&mut block); for k in 0usize..16 { blocks[16 * k] = block[k]; } plane = 0; } for y in 0usize..4 { let mut left = self.left.complexity[y + 1]; for x in 0usize..4 { let i = x + y * 4; let block = &mut blocks[i * 16..i * 16 + 16]; let complexity = self.top[mbx].complexity[x + 1] + left; let dcq = self.segment[sindex].ydc; let acq = self.segment[sindex].yac; let n = self.read_coefficients(block, p, plane, complexity as usize, dcq, acq); if block[0] != 0 || n { transform::idct4x4(block); } left = if n { 1 } else { 0 }; self.top[mbx].complexity[x + 1] = if n { 1 } else { 0 }; } self.left.complexity[y + 1] = left; } plane = 2; for &j in &[5usize, 7usize] { for y in 0usize..2 { let mut left = self.left.complexity[y + j]; for x in 0usize..2 { let i = x + y * 2 + if j == 5 { 16 } else { 20 }; let block = &mut blocks[i * 16..i * 16 + 16]; let complexity = self.top[mbx].complexity[x + j] + left; let dcq = self.segment[sindex].uvdc; let acq = self.segment[sindex].uvac; let n = self.read_coefficients(block, p, plane, complexity as usize, dcq, acq); if block[0] != 0 || n { transform::idct4x4(block); } left = if n { 1 } else { 0 }; self.top[mbx].complexity[x + j] = if n { 1 } else { 0 }; } self.left.complexity[y + j] = left; } } blocks } /// Decodes the current frame and returns a reference to it pub fn decode_frame(&mut self) -> ImageResult<&Frame> { self.read_frame_header()?; for mby in 0..self.mbheight as usize { let p = mby % self.num_partitions as usize; 
self.left = MacroBlock::default(); for mbx in 0..self.mbwidth as usize { let (skip, mb) = self.read_macroblock_header(mbx)?; let blocks = if !skip { self.read_residual_data(&mb, mbx, p) } else { if mb.luma_mode != LumaMode::B { self.left.complexity[0] = 0; self.top[mbx].complexity[0] = 0; } for i in 1usize..9 { self.left.complexity[i] = 0; self.top[mbx].complexity[i] = 0; } [0i32; 384] }; self.intra_predict_luma(mbx, mby, &mb, &blocks); self.intra_predict_chroma(mbx, mby, &mb, &blocks); } self.left_border = vec![129u8; 1 + 16]; } Ok(&self.frame) } } impl LumaMode { fn from_i8(val: i8) -> Option<Self> { Some(match val { DC_PRED => LumaMode::DC, V_PRED => LumaMode::V, H_PRED => LumaMode::H, TM_PRED => LumaMode::TM, B_PRED => LumaMode::B, _ => return None, }) } fn into_intra(self) -> Option<IntraMode> { Some(match self { LumaMode::DC => IntraMode::DC, LumaMode::V => IntraMode::VE, LumaMode::H => IntraMode::HE, LumaMode::TM => IntraMode::TM, LumaMode::B => return None, }) } } impl Default for LumaMode { fn default() -> Self { LumaMode::DC } } impl ChromaMode { fn from_i8(val: i8) -> Option<Self> { Some(match val { DC_PRED => ChromaMode::DC, V_PRED => ChromaMode::V, H_PRED => ChromaMode::H, TM_PRED => ChromaMode::TM, _ => return None, }) } } impl Default for ChromaMode { fn default() -> Self { ChromaMode::DC } } impl IntraMode { fn from_i8(val: i8) -> Option<Self> { Some(match val { B_DC_PRED => IntraMode::DC, B_TM_PRED => IntraMode::TM, B_VE_PRED => IntraMode::VE, B_HE_PRED => IntraMode::HE, B_LD_PRED => IntraMode::LD, B_RD_PRED => IntraMode::RD, B_VR_PRED => IntraMode::VR, B_VL_PRED => IntraMode::VL, B_HD_PRED => IntraMode::HD, B_HU_PRED => IntraMode::HU, _ => return None, }) } } impl Default for IntraMode { fn default() -> Self { IntraMode::DC } } fn init_top_macroblocks(width: usize) -> Vec<MacroBlock> { let mb_width = (width + 15) / 16; let mb = MacroBlock { // Section 11.3 #3 bpred: [IntraMode::DC; 16], luma_mode: LumaMode::DC, .. 
/// Builds the 21x17 bordered luma workspace for one macroblock: row 0 holds
/// the "above" pixels (plus 4 above-right), column 0 the "left" pixels, with
/// the spec's 127/129 defaults at frame edges.
fn create_border_luma(mbx: usize, mby: usize, mbw: usize, top: &[u8], left: &[u8]) -> [u8; 357] {
    let stride = 1usize + 16 + 4;
    let mut ws = [0u8; (1 + 16) * (1 + 16 + 4)];

    // A — above row (16 pixels + 4 above-right)
    {
        let above = &mut ws[1..stride];
        if mby == 0 {
            // Top row of the frame: the spec mandates 127.
            for above in above.iter_mut() {
                *above = 127;
            }
        } else {
            for i in 0usize..16 {
                above[i] = top[mbx * 16 + i];
            }

            if mbx == mbw - 1 {
                // Rightmost macroblock: replicate the last above pixel.
                for above in above.iter_mut().skip(16) {
                    *above = top[mbx * 16 + 15];
                }
            } else {
                for i in 16usize..above.len() {
                    above[i] = top[mbx * 16 + i];
                }
            }
        }
    }

    // Replicate the above-right pixels for the lower sub-block rows.
    for i in 17usize..stride {
        ws[4 * stride + i] = ws[i];
        ws[8 * stride + i] = ws[i];
        ws[12 * stride + i] = ws[i];
    }

    // L — left column (129 at the frame's left edge)
    if mbx == 0 {
        for i in 0usize..16 {
            ws[(i + 1) * stride] = 129;
        }
    } else {
        for i in 0usize..16 {
            ws[(i + 1) * stride] = left[i + 1];
        }
    }

    // P — top-left corner pixel
    ws[0] = if mby == 0 {
        127
    } else if mbx == 0 {
        129
    } else {
        left[0]
    };

    ws
}

/// Rounded average of `this` weighted twice against its neighbours.
fn avg3(left: u8, this: u8, right: u8) -> u8 {
    let avg = (u16::from(left) + 2 * u16::from(this) + u16::from(right) + 2) >> 2;
    avg as u8
}

/// Rounded average of two pixels.
fn avg2(this: u8, right: u8) -> u8 {
    let avg = (u16::from(this) + u16::from(right) + 1) >> 1;
    avg as u8
}
fn add_residue(pblock: &mut [u8], rblock: &[i32; 16], y0: usize, x0: usize, stride: usize) { let mut pos = y0 * stride + x0; for row in rblock.chunks(4) { for (p, &a) in pblock[pos..pos+4].iter_mut().zip(row.iter()) { *p = clamp(a + i32::from(*p), 0, 255) as u8; } pos += stride; } } fn predict_4x4(ws: &mut [u8], stride: usize, modes: &[IntraMode], resdata: &[i32]) { for sby in 0usize..4 { for sbx in 0usize..4 { let i = sbx + sby * 4; let y0 = sby * 4 + 1; let x0 = sbx * 4 + 1; match modes[i] { IntraMode::TM => predict_tmpred(ws, 4, x0, y0, stride), IntraMode::VE => predict_bvepred(ws, x0, y0, stride), IntraMode::HE => predict_bhepred(ws, x0, y0, stride), IntraMode::DC => predict_bdcpred(ws, x0, y0, stride), IntraMode::LD => predict_bldpred(ws, x0, y0, stride), IntraMode::RD => predict_brdpred(ws, x0, y0, stride), IntraMode::VR => predict_bvrpred(ws, x0, y0, stride), IntraMode::VL => predict_bvlpred(ws, x0, y0, stride), IntraMode::HD => predict_bhdpred(ws, x0, y0, stride), IntraMode::HU => predict_bhupred(ws, x0, y0, stride), } let rb: &[i32; 16] = resdata[i * 16..][..16].try_into().unwrap(); add_residue(ws, rb, y0, x0, stride); } } } fn predict_vpred(a: &mut [u8], size: usize, x0: usize, y0: usize, stride: usize) { for y in 0usize..size { for x in 0usize..size { a[(x + x0) + stride * (y + y0)] = a[(x + x0) + stride * (y0 + y - 1)]; } } } fn predict_hpred(a: &mut [u8], size: usize, x0: usize, y0: usize, stride: usize) { for y in 0usize..size { for x in 0usize..size { a[(x + x0) + stride * (y + y0)] = a[(x + x0 - 1) + stride * (y0 + y)]; } } } fn predict_dcpred(a: &mut [u8], size: usize, stride: usize, above: bool, left: bool) { let mut sum = 0; let mut shf = if size == 8 { 2 } else { 3 }; if left { for y in 0usize..size { sum += u32::from(a[(y + 1) * stride]); } shf += 1; } if above { for x in 0usize..size { sum += u32::from(a[x + 1]); } shf += 1; } let dcval = if !left && !above { 128 } else { (sum + (1 << (shf - 1))) >> shf }; for y in 0usize..size { for x in 
0usize..size { a[(x + 1) + stride * (y + 1)] = dcval as u8; } } } fn predict_tmpred(a: &mut [u8], size: usize, x0: usize, y0: usize, stride: usize) { for y in 0usize..size { for x in 0usize..size { let pred = i32::from(a[(y0 + y) * stride + x0 - 1]) + i32::from(a[(y0 - 1) * stride + x0 + x]) - i32::from(a[(y0 - 1) * stride + x0 - 1]); a[(x + x0) + stride * (y + y0)] = clamp(pred, 0, 255) as u8; } } } fn predict_bdcpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let mut v = 4; for i in 0usize..4 { v += u32::from(a[(y0 + i) * stride + x0 - 1]) + u32::from(a[(y0 - 1) * stride + x0 + i]); } v >>= 3; for y in 0usize..4 { for x in 0usize..4 { a[x + x0 + stride * (y + y0)] = v as u8; } } } fn topleft_pixel(a: &[u8], x0: usize, y0: usize, stride: usize) -> u8 { a[(y0 - 1) * stride + x0 - 1] } fn top_pixels(a: &[u8], x0: usize, y0: usize, stride: usize) -> (u8, u8, u8, u8, u8, u8, u8, u8) { let pos = (y0 - 1) * stride + x0; let a_slice = &a[pos..pos+8]; let a0 = a_slice[0]; let a1 = a_slice[1]; let a2 = a_slice[2]; let a3 = a_slice[3]; let a4 = a_slice[4]; let a5 = a_slice[5]; let a6 = a_slice[6]; let a7 = a_slice[7]; (a0, a1, a2, a3, a4, a5, a6, a7) } fn left_pixels(a: &[u8], x0: usize, y0: usize, stride: usize) -> (u8, u8, u8, u8) { let l0 = a[y0 * stride + x0 - 1]; let l1 = a[(y0 + 1) * stride + x0 - 1]; let l2 = a[(y0 + 2) * stride + x0 - 1]; let l3 = a[(y0 + 3) * stride + x0 - 1]; (l0, l1, l2, l3) } fn edge_pixels( a: &[u8], x0: usize, y0: usize, stride: usize, ) -> (u8, u8, u8, u8, u8, u8, u8, u8, u8) { let pos = (y0 - 1) * stride + x0 - 1; let a_slice = &a[pos..=pos+4]; let e0 = a[pos + 4 * stride]; let e1 = a[pos + 3 * stride]; let e2 = a[pos + 2 * stride]; let e3 = a[pos + stride]; let e4 = a_slice[0]; let e5 = a_slice[1]; let e6 = a_slice[2]; let e7 = a_slice[3]; let e8 = a_slice[4]; (e0, e1, e2, e3, e4, e5, e6, e7, e8) } fn predict_bvepred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let p = topleft_pixel(a, x0, y0, stride); let (a0, a1, a2, a3, 
a4, _, _, _) = top_pixels(a, x0, y0, stride); let avg_1 = avg3(p, a0, a1); let avg_2 = avg3(a0, a1, a2); let avg_3 = avg3(a1, a2, a3); let avg_4 = avg3(a2, a3, a4); let avg = [avg_1, avg_2, avg_3, avg_4]; let mut pos = y0 * stride + x0; for _ in 0..4 { a[pos..=pos + 3].copy_from_slice(&avg); pos += stride; } } fn predict_bhepred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let p = topleft_pixel(a, x0, y0, stride); let (l0, l1, l2, l3) = left_pixels(a, x0, y0, stride); let avgs = [avg3(p, l0, l1), avg3(l0, l1, l2), avg3(l1, l2, l3), avg3(l2, l3, l3)]; let mut pos = y0 * stride + x0; for &avg in avgs.iter() { for a_p in a[pos..=pos+3].iter_mut() { *a_p = avg; } pos += stride; } } fn predict_bldpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (a0, a1, a2, a3, a4, a5, a6, a7) = top_pixels(a, x0, y0, stride); let avgs = [avg3(a0, a1, a2), avg3(a1, a2, a3), avg3(a2, a3, a4), avg3(a3, a4, a5), avg3(a4, a5, a6), avg3(a5, a6, a7), avg3(a6, a7, a7)]; let mut pos = y0 * stride + x0; for i in 0..4 { a[pos..=pos + 3].copy_from_slice(&avgs[i..=i+3]); pos += stride; } } fn predict_brdpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (e0, e1, e2, e3, e4, e5, e6, e7, e8) = edge_pixels(a, x0, y0, stride); let avgs = [avg3(e0, e1, e2), avg3(e1, e2, e3), avg3(e2, e3, e4), avg3(e3, e4, e5), avg3(e4, e5, e6), avg3(e5, e6, e7), avg3(e6, e7, e8)]; let mut pos = y0 * stride + x0; for i in 0..4 { a[pos..=pos + 3].copy_from_slice(&avgs[3-i..7-i]); pos += stride; } } fn predict_bvrpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (_, e1, e2, e3, e4, e5, e6, e7, e8) = edge_pixels(a, x0, y0, stride); a[(y0 + 3) * stride + x0] = avg3(e1, e2, e3); a[(y0 + 2) * stride + x0] = avg3(e2, e3, e4); a[(y0 + 3) * stride + x0 + 1] = avg3(e3, e4, e5); a[(y0 + 1) * stride + x0] = avg3(e3, e4, e5); a[(y0 + 2) * stride + x0 + 1] = avg2(e4, e5); a[y0 * stride + x0] = avg2(e4, e5); a[(y0 + 3) * stride + x0 + 2] = avg3(e4, e5, e6); a[(y0 + 1) * stride + x0 + 1] = 
avg3(e4, e5, e6); a[(y0 + 2) * stride + x0 + 2] = avg2(e5, e6); a[y0 * stride + x0 + 1] = avg2(e5, e6); a[(y0 + 3) * stride + x0 + 3] = avg3(e5, e6, e7); a[(y0 + 1) * stride + x0 + 2] = avg3(e5, e6, e7); a[(y0 + 2) * stride + x0 + 3] = avg2(e6, e7); a[y0 * stride + x0 + 2] = avg2(e6, e7); a[(y0 + 1) * stride + x0 + 3] = avg3(e6, e7, e8); a[y0 * stride + x0 + 3] = avg2(e7, e8); } fn predict_bvlpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (a0, a1, a2, a3, a4, a5, a6, a7) = top_pixels(a, x0, y0, stride); a[y0 * stride + x0] = avg2(a0, a1); a[(y0 + 1) * stride + x0] = avg3(a0, a1, a2); a[(y0 + 2) * stride + x0] = avg2(a1, a2); a[y0 * stride + x0 + 1] = avg2(a1, a2); a[(y0 + 1) * stride + x0 + 1] = avg3(a1, a2, a3); a[(y0 + 3) * stride + x0] = avg3(a1, a2, a3); a[(y0 + 2) * stride + x0 + 1] = avg2(a2, a3); a[y0 * stride + x0 + 2] = avg2(a2, a3); a[(y0 + 3) * stride + x0 + 1] = avg3(a2, a3, a4); a[(y0 + 1) * stride + x0 + 2] = avg3(a2, a3, a4); a[(y0 + 2) * stride + x0 + 2] = avg2(a3, a4); a[y0 * stride + x0 + 3] = avg2(a3, a4); a[(y0 + 3) * stride + x0 + 2] = avg3(a3, a4, a5); a[(y0 + 1) * stride + x0 + 3] = avg3(a3, a4, a5); a[(y0 + 2) * stride + x0 + 3] = avg3(a4, a5, a6); a[(y0 + 3) * stride + x0 + 3] = avg3(a5, a6, a7); } fn predict_bhdpred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (e0, e1, e2, e3, e4, e5, e6, e7, _) = edge_pixels(a, x0, y0, stride); a[(y0 + 3) * stride + x0] = avg2(e0, e1); a[(y0 + 3) * stride + x0 + 1] = avg3(e0, e1, e2); a[(y0 + 2) * stride + x0] = avg2(e1, e2); a[(y0 + 3) * stride + x0 + 2] = avg2(e1, e2); a[(y0 + 2) * stride + x0 + 1] = avg3(e1, e2, e3); a[(y0 + 3) * stride + x0 + 3] = avg3(e1, e2, e3); a[(y0 + 2) * stride + x0 + 2] = avg2(e2, e3); a[(y0 + 1) * stride + x0] = avg2(e2, e3); a[(y0 + 2) * stride + x0 + 3] = avg3(e2, e3, e4); a[(y0 + 1) * stride + x0 + 1] = avg3(e2, e3, e4); a[(y0 + 1) * stride + x0 + 2] = avg2(e3, e4); a[y0 * stride + x0] = avg2(e3, e4); a[(y0 + 1) * stride + x0 + 3] = avg3(e3, e4, 
e5); a[y0 * stride + x0 + 1] = avg3(e3, e4, e5); a[y0 * stride + x0 + 2] = avg3(e4, e5, e6); a[y0 * stride + x0 + 3] = avg3(e5, e6, e7); } fn predict_bhupred(a: &mut [u8], x0: usize, y0: usize, stride: usize) { let (l0, l1, l2, l3) = left_pixels(a, x0, y0, stride); a[y0 * stride + x0] = avg2(l0, l1); a[y0 * stride + x0 + 1] = avg3(l0, l1, l2); a[y0 * stride + x0 + 2] = avg2(l1, l2); a[(y0 + 1) * stride + x0] = avg2(l1, l2); a[y0 * stride + x0 + 3] = avg3(l1, l2, l3); a[(y0 + 1) * stride + x0 + 1] = avg3(l1, l2, l3); a[(y0 + 1) * stride + x0 + 2] = avg2(l2, l3); a[(y0 + 2) * stride + x0] = avg2(l2, l3); a[(y0 + 1) * stride + x0 + 3] = avg3(l2, l3, l3); a[(y0 + 2) * stride + x0 + 1] = avg3(l2, l3, l3); a[(y0 + 2) * stride + x0 + 2] = l3; a[(y0 + 2) * stride + x0 + 3] = l3; a[(y0 + 3) * stride + x0] = l3; a[(y0 + 3) * stride + x0 + 1] = l3; a[(y0 + 3) * stride + x0 + 2] = l3; a[(y0 + 3) * stride + x0 + 3] = l3; } #[cfg(test)] mod test { #[cfg(feature = "benchmarks")] extern crate test; use super::{top_pixels, edge_pixels, avg2, avg3, predict_bvepred, predict_brdpred, predict_bldpred, predict_bhepred, add_residue}; #[cfg(feature = "benchmarks")] use super::{IntraMode, predict_4x4}; #[cfg(feature = "benchmarks")] use test::{Bencher, black_box}; #[cfg(feature = "benchmarks")] const W: usize = 256; #[cfg(feature = "benchmarks")] const H: usize = 256; #[cfg(feature = "benchmarks")] fn make_sample_image() -> Vec<u8> { let mut v = Vec::with_capacity((W * H * 4) as usize); for c in 0u8..=255 { for k in 0u8..=255 { v.push(c); v.push(0); v.push(0); v.push(k); } } v } #[cfg(feature = "benchmarks")] #[bench] fn bench_predict_4x4(b: &mut Bencher) { let mut v = black_box(make_sample_image()); let res_data = vec![1i32; W * H * 4]; let modes = [ IntraMode::TM, IntraMode::VE, IntraMode::HE, IntraMode::DC, IntraMode::LD, IntraMode::RD, IntraMode::VR, IntraMode::VL, IntraMode::HD, IntraMode::HU, IntraMode::TM, IntraMode::VE, IntraMode::HE, IntraMode::DC, IntraMode::LD, IntraMode::RD ]; 
b.iter(|| { black_box(predict_4x4(& mut v, W * 2, &modes, &res_data)); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_predict_bvepred(b: &mut Bencher) { let mut v = make_sample_image(); b.iter(|| { predict_bvepred(black_box(&mut v), 5, 5, W * 2); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_predict_bldpred(b: &mut Bencher) { let mut v = black_box(make_sample_image()); b.iter(|| { black_box(predict_bldpred(black_box(&mut v), 5, 5, W * 2)); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_predict_brdpred(b: &mut Bencher) { let mut v = black_box(make_sample_image()); b.iter(|| { black_box(predict_brdpred(black_box(&mut v), 5, 5, W * 2)); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_predict_bhepred(b: &mut Bencher) { let mut v = black_box(make_sample_image()); b.iter(|| { black_box(predict_bhepred(black_box(&mut v), 5, 5, W * 2)); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_top_pixels(b: &mut Bencher) { let v = black_box(make_sample_image()); b.iter(|| { black_box(top_pixels(black_box(&v), 5, 5, W * 2)); }); } #[cfg(feature = "benchmarks")] #[bench] fn bench_edge_pixels(b: &mut Bencher) { let v = black_box(make_sample_image()); b.iter(|| { black_box(edge_pixels(black_box(&v), 5, 5, W * 2)); }); } #[test] fn test_avg2() { for i in 0u8..=255 { for j in 0u8..=255 { let ceil_avg = ((i as f32) + (j as f32)) / 2.0; let ceil_avg = ceil_avg.ceil() as u8; assert_eq!(ceil_avg, avg2(i, j), "avg2({}, {}), expected {}, got {}.", i, j, ceil_avg, avg2(i, j)); } } } #[test] fn test_avg2_specific() { assert_eq!(255, avg2(255, 255), "avg2(255, 255), expected 255, got {}.", avg2(255, 255)); assert_eq!(1, avg2(1, 1), "avg2(1, 1), expected 1, got {}.", avg2(1, 1)); assert_eq!(2, avg2(2, 1), "avg2(2, 1), expected 2, got {}.", avg2(2, 1)); } #[test] fn test_avg3() { for i in 0u8..=255 { for j in 0u8..=255 { for k in 0u8..=255 { let floor_avg = ((i as f32) + 2.0 * (j as f32) + { k as f32 } + 2.0) / 4.0; let floor_avg = floor_avg.floor() as u8; 
assert_eq!(floor_avg, avg3(i, j, k), "avg3({}, {}, {}), expected {}, got {}.", i, j, k, floor_avg, avg3(i, j, k)); } } } } #[test] fn test_edge_pixels() { let im = vec![5, 6, 7, 8, 9, 4, 0, 0, 0, 0, 3, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0]; let (e0, e1, e2, e3, e4, e5, e6, e7, e8) = edge_pixels(&im, 1, 1, 5); assert_eq!(e0, 1); assert_eq!(e1, 2); assert_eq!(e2, 3); assert_eq!(e3, 4); assert_eq!(e4, 5); assert_eq!(e5, 6); assert_eq!(e6, 7); assert_eq!(e7, 8); assert_eq!(e8, 9); } #[test] fn test_top_pixels() { let im = vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let (e0, e1, e2, e3, e4, e5, e6, e7) = top_pixels(&im, 0, 1, 8); assert_eq!(e0, 1); assert_eq!(e1, 2); assert_eq!(e2, 3); assert_eq!(e3, 4); assert_eq!(e4, 5); assert_eq!(e5, 6); assert_eq!(e6, 7); assert_eq!(e7, 8); } #[test] fn test_add_residue() { let mut pblock = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; let rblock = [-1, -2, -3, -4, 250, 249, 248, 250, -10, -18, -192, -17, -3, 15, 18, 9]; let expected:[u8; 16] = [0, 0, 0, 0, 255, 255, 255, 255, 0, 0, 0, 0, 10, 29, 33, 25]; add_residue(& mut pblock, &rblock, 0, 0, 4); for (&e, &i) in expected.iter().zip(&pblock) { assert_eq!(e, i); } } #[test] fn test_predict_bhepred() { let expected: Vec<u8> = vec![5, 0, 0, 0, 0, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1]; let mut im = vec![5, 0, 0, 0, 0, 4, 0, 0, 0, 0, 3, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0]; predict_bhepred(& mut im, 1, 1, 5); for (&e, i) in expected.iter().zip(im) { assert_eq!(e, i); } } #[test] fn test_predict_brdpred() { let expected: Vec<u8> = vec![5, 6, 7, 8, 9, 4, 5, 6, 7, 8, 3, 4, 5, 6, 7, 2, 3, 4, 5, 6, 1, 2, 3, 4, 5]; let mut im = vec![5, 6, 7, 8, 9, 4, 0, 0, 0, 0, 3, 0, 0, 0, 0, 2, 0, 0, 0, 0, 1, 0, 0, 0, 0]; predict_brdpred(& mut im, 1, 1, 5); for (&e, i) in expected.iter().zip(im) { assert_eq!(e, 
i); } } #[test] fn test_predict_bldpred() { let mut im: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let avg_1 = 2u8; let avg_2 = 3u8; let avg_3 = 4u8; let avg_4 = 5u8; let avg_5 = 6u8; let avg_6 = 7u8; let avg_7 = 8u8; predict_bldpred(&mut im, 0, 1, 8); assert_eq!(im[8], avg_1); assert_eq!(im[9], avg_2); assert_eq!(im[10], avg_3); assert_eq!(im[11], avg_4); assert_eq!(im[16], avg_2); assert_eq!(im[17], avg_3); assert_eq!(im[18], avg_4); assert_eq!(im[19], avg_5); assert_eq!(im[24], avg_3); assert_eq!(im[25], avg_4); assert_eq!(im[26], avg_5); assert_eq!(im[27], avg_6); assert_eq!(im[32], avg_4); assert_eq!(im[33], avg_5); assert_eq!(im[34], avg_6); assert_eq!(im[35], avg_7); } #[test] fn test_predict_bvepred() { let mut im: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let avg_1 = 2u8; let avg_2 = 3u8; let avg_3 = 4u8; let avg_4 = 5u8; predict_bvepred(&mut im, 1, 1, 9); assert_eq!(im[10], avg_1); assert_eq!(im[11], avg_2); assert_eq!(im[12], avg_3); assert_eq!(im[13], avg_4); assert_eq!(im[19], avg_1); assert_eq!(im[20], avg_2); assert_eq!(im[21], avg_3); assert_eq!(im[22], avg_4); assert_eq!(im[28], avg_1); assert_eq!(im[29], avg_2); assert_eq!(im[30], avg_3); assert_eq!(im[31], avg_4); assert_eq!(im[37], avg_1); assert_eq!(im[38], avg_2); assert_eq!(im[39], avg_3); assert_eq!(im[40], avg_4); } }
33.000804
167
0.466426
56abec208aaf9c41daca232b8d085dacbddd10b8
215
use std::time::{SystemTime, UNIX_EPOCH};

/// Current wall-clock time expressed as milliseconds since the Unix epoch.
///
/// # Panics
/// Panics if the system clock reports a time earlier than the Unix epoch
/// (i.e. the clock "went backwards").
pub fn now_millis() -> u128 {
    SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("Time went backwards")
        .as_millis()
}
21.5
40
0.609302
dd08e4ee56d0e49706569e469705a852ed7a3dc7
7,587
// Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// A note on encoding efficiency: 0.75 for Base64, 0.744 for Base62, 0.732 for Base58
// slatepack uses a modified Base58Check encoding to create armored slate payloads:
// 1. Take first four bytes of SHA256(SHA256(slate.as_bytes()))
// 2. Concatenate result of step 1 and slate.as_bytes()
// 3. Base58 encode bytes from step 2
// Finally add armor framing and space/newline formatting as desired

use crate::{Error, ErrorKind};
use regex::Regex;
use sha2::{Digest, Sha256};
use std::str;

// Framing and formatting for slate armor. Headers and footers better to be the same size, otherwise formatting makes it ugly
pub static HEADER_ENC: &str = "BEGINSLATEPACK.";
static FOOTER_ENC: &str = ". ENDSLATEPACK.";
pub static HEADER_BIN: &str = "BEGINSLATE_BIN.";
static FOOTER_BIN: &str = ". ENDSLATE_BIN.";
// Payload is broken into WORD_LENGTH-character "words" separated by spaces,
// with a newline after every WORDS_PER_LINE words (see `format_slatepack`).
const WORD_LENGTH: usize = 15;
const WORDS_PER_LINE: usize = 200;

lazy_static! {
    // The framing regexes tolerate quoted-reply markers ('>') and surrounding
    // whitespace, so armored slates survive e-mail/chat round trips.
    static ref HEADER_REGEX_ENC: Regex =
        Regex::new(concat!(r"^[>\n\r\t ]*BEGINSLATEPACK[>\n\r\t ]*$")).unwrap();
    static ref FOOTER_REGEX_ENC: Regex =
        Regex::new(concat!(r"^[>\n\r\t ]*ENDSLATEPACK[>\n\r\t ]*$")).unwrap();
    static ref HEADER_REGEX_BIN: Regex =
        Regex::new(concat!(r"^[>\n\r\t ]*BEGINSLATE_BIN[>\n\r\t ]*$")).unwrap();
    static ref FOOTER_REGEX_BIN: Regex =
        Regex::new(concat!(r"^[>\n\r\t ]*ENDSLATE_BIN[>\n\r\t ]*$")).unwrap();
    // Bytes that are stripped out of the armored payload before decoding.
    static ref WHITESPACE_LIST: [u8; 5] = [b'>', b'\n', b'\r', b'\t', b' '];
}

/// Wrapper for associated functions
pub struct SlatepackArmor;

impl SlatepackArmor {
    /// Decode an armored Slatepack
    ///
    /// On success returns the decoded slatepack bytes plus a flag that is
    /// `true` when the SLATEPACK (encrypted) framing was used and `false`
    /// for the SLATE_BIN framing.
    pub fn decode(armor_bytes: &[u8]) -> Result<(Vec<u8>, bool), Error> {
        // Collect the bytes up to the first period, this is the header
        let header_bytes = armor_bytes
            .iter()
            .take_while(|byte| **byte != b'.')
            .cloned()
            .collect::<Vec<u8>>();
        // Verify the header...
        let henc = check_header(&header_bytes)?;
        // Get the length of the header (+1 accounts for the '.' terminator)
        let header_len = header_bytes.len() + 1;
        if armor_bytes.len() <= header_len {
            return Err(ErrorKind::SlatepackDecodeError("Bad armor header".to_string()).into());
        }
        // Skip the length of the header to read for the payload until the next period
        let payload_bytes = armor_bytes[header_len as usize..]
            .iter()
            .take_while(|byte| **byte != b'.')
            .cloned()
            .collect::<Vec<u8>>();
        // Get length of the payload to check the footer framing
        let payload_len = payload_bytes.len();
        // Get footer bytes and verify them
        let consumed_bytes = header_len + payload_len + 1;
        if armor_bytes.len() <= consumed_bytes {
            return Err(ErrorKind::SlatepackDecodeError("Bad armor content".to_string()).into());
        }
        let footer_bytes = armor_bytes[consumed_bytes as usize..]
            .iter()
            .take_while(|byte| **byte != b'.')
            .cloned()
            .collect::<Vec<u8>>();
        let fenc = check_footer(&footer_bytes)?;
        // Header and footer must agree on the encrypted/binary variant
        if henc != fenc {
            return Err(ErrorKind::SlatepackDecodeError(
                "Non matched armor header and footer".to_string(),
            )
            .into());
        }
        // Clean up the payload bytes to be deserialized
        let clean_payload = payload_bytes
            .iter()
            .filter(|byte| !WHITESPACE_LIST.contains(byte))
            .cloned()
            .collect::<Vec<u8>>();
        // Decode payload from base58
        let base_decode = bs58::decode(&clean_payload)
            .into_vec()
            .map_err(|_| ErrorKind::SlatepackDecodeError("Invalid armored data".into()))?;
        // NOTE(review): this slicing panics if the decoded payload is shorter
        // than four bytes — confirm whether such input can reach this point.
        let error_code = &base_decode[0..4];
        let slatepack_bytes = &base_decode[4..];
        // Make sure the error check code is valid for the slate data
        error_check(error_code, slatepack_bytes)?;
        // Return slate as binary or string
        Ok((slatepack_bytes.to_vec(), henc))
    }

    /// Encode an armored slatepack
    ///
    /// `encrypted` selects the framing: `true` -> SLATEPACK, `false` -> SLATE_BIN.
    pub fn encode(slatepack_bytes: &Vec<u8>, encrypted: bool) -> Result<String, Error> {
        let encoded_slatepack = base58check(&slatepack_bytes)?;
        let header = if encrypted { HEADER_ENC } else { HEADER_BIN };
        let footer = if encrypted { FOOTER_ENC } else { FOOTER_BIN };
        let formatted_slatepack = format_slatepack(&format!("{}{}", header, encoded_slatepack))?;
        Ok(format!("{}{}", formatted_slatepack, footer))
    }
}

// Takes an error check code and a slate binary and verifies that the code was generated from slate
fn error_check(error_code: &[u8], slate_bytes: &[u8]) -> Result<(), Error> {
    let new_check = generate_check(slate_bytes)?;
    if error_code.iter().eq(new_check.iter()) {
        Ok(())
    } else {
        Err(ErrorKind::SlatepackDecodeError(
            "Invalid armored data, some data was corrupted".to_string(),
        )
        .into())
    }
}

// Checks header framing bytes and returns an error if they are invalid.
// Returns `true` for the encrypted (SLATEPACK) header, `false` for the binary one.
fn check_header(header: &[u8]) -> Result<bool, Error> {
    let framing = str::from_utf8(header)
        .map_err(|_| ErrorKind::SlatepackDecodeError("Bad bytes at armored data".into()))?;
    if HEADER_REGEX_ENC.is_match(framing) {
        Ok(true)
    } else if HEADER_REGEX_BIN.is_match(framing) {
        Ok(false)
    } else {
        Err(ErrorKind::SlatepackDecodeError("Bad armor header".to_string()).into())
    }
}

// Checks footer framing bytes and returns an error if they are invalid.
// Returns `true` for the encrypted (SLATEPACK) footer, `false` for the binary one.
fn check_footer(footer: &[u8]) -> Result<bool, Error> {
    let framing = str::from_utf8(footer)
        .map_err(|_| ErrorKind::SlatepackDecodeError("Bad bytes at armored data".into()))?;
    if FOOTER_REGEX_ENC.is_match(framing) {
        Ok(true)
    } else if FOOTER_REGEX_BIN.is_match(framing) {
        Ok(false)
    } else {
        Err(ErrorKind::SlatepackDecodeError("Bad armor footer".to_string()).into())
    }
}

// MODIFIED Base58Check encoding for slate bytes
fn base58check(slate: &[u8]) -> Result<String, Error> {
    // Serialize the slate json string to a vector of bytes
    let mut slate_bytes: Vec<u8> = slate.to_vec();
    // Get the four byte checksum for the slate binary
    let mut check_bytes: Vec<u8> = generate_check(&slate_bytes)?;
    // Make a new buffer and concatenate checksum bytes and slate bytes
    let mut slate_buf = Vec::new();
    slate_buf.append(&mut check_bytes);
    slate_buf.append(&mut slate_bytes);
    // Encode the slate buffer containing the slate binary and checksum bytes as Base58
    let b58_slate = bs58::encode(slate_buf).into_string();
    Ok(b58_slate)
}

// Adds human readable formatting to the slate payload for armoring:
// a space after every WORD_LENGTH characters, upgraded to a newline after
// every WORD_LENGTH * WORDS_PER_LINE characters.
fn format_slatepack(slatepack: &str) -> Result<String, Error> {
    let formatter = slatepack
        .chars()
        .enumerate()
        .flat_map(|(i, c)| {
            // Emit an optional separator *before* character i, then i itself.
            if i != 0 && i % WORD_LENGTH == 0 {
                if WORDS_PER_LINE != 0 && i % (WORD_LENGTH * WORDS_PER_LINE) == 0 {
                    Some('\n')
                } else {
                    Some(' ')
                }
            } else {
                None
            }
            .into_iter()
            .chain(std::iter::once(c))
        })
        .collect::<String>();
    Ok(formatter)
}

/// Returns the first four bytes of a double sha256 hash of some bytes.
/// This method is reused for swaps for BSV, just don't want to duplicate it.
pub fn generate_check(payload: &[u8]) -> Result<Vec<u8>, Error> {
    let mut first_hash = Sha256::new();
    first_hash.input(payload);
    let mut second_hash = Sha256::new();
    second_hash.input(&first_hash.result());
    let checksum = second_hash.result();
    let check_bytes: Vec<u8> = checksum[0..4].to_vec();
    Ok(check_bytes)
}
36.301435
125
0.695005
ab9e7e7492dd7e81925aecbac0be71a7d376e014
1,448
//! # 738. Monotone Increasing Digits
//! https://leetcode-cn.com/problems/monotone-increasing-digits/
//! Given a non-negative integer N, find the largest integer less than or
//! equal to N whose digits are monotonically increasing.
//! (An integer is monotonically increasing when every pair of adjacent
//! digits x and y satisfies x <= y.)
//!
//! # Approach
//! Greedy, right-to-left: whenever a digit is smaller than the digit to its
//! left, "borrow" by decrementing the left digit and remember the position;
//! every digit at or after the leftmost borrow point is then set to 9.
pub struct Solution;

impl Solution {
    pub fn monotone_increasing_digits(n: i32) -> i32 {
        // Work on the decimal digits as ASCII bytes.
        let mut digits = n.to_string().into_bytes();
        let len = digits.len();
        // First index (from the left) whose digit must become '9'.
        let mut nine_from = len;
        // Right-to-left pass: fix each inversion by borrowing from the left.
        for i in (1..len).rev() {
            if digits[i - 1] > digits[i] {
                digits[i - 1] -= 1;
                nine_from = i;
            }
        }
        // Maximise everything at or after the leftmost borrow point.
        for d in &mut digits[nine_from..] {
            *d = b'9';
        }
        // The buffer still holds valid ASCII digits, so both steps succeed.
        String::from_utf8(digits).unwrap().parse().unwrap()
    }
}

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {
        assert_eq!(super::Solution::monotone_increasing_digits(10), 9);
        assert_eq!(
            super::Solution::monotone_increasing_digits(15324868),
            14999999
        );
        assert_eq!(super::Solution::monotone_increasing_digits(33332), 29999);
        assert_eq!(super::Solution::monotone_increasing_digits(0), 0);
        assert_eq!(super::Solution::monotone_increasing_digits(9), 9);
    }
}
32.909091
79
0.562845
ef931ecb1205b3538ff35f00a26ef3277740c895
44,587
use crate::QueryExecutionType; use ic_canister_sandbox_replica_controller::sandboxed_execution_controller::SandboxedExecutionController; use ic_config::feature_status::FeatureStatus; use ic_config::{embedders::Config as EmbeddersConfig, execution_environment::Config}; use ic_cow_state::{error::CowError, CowMemoryManager}; use ic_cycles_account_manager::CyclesAccountManager; use ic_embedders::{ wasm_executor::WasmExecutor, WasmExecutionInput, WasmExecutionOutput, WasmtimeEmbedder, }; use ic_interfaces::execution_environment::{ ExecutionParameters, HypervisorError, HypervisorResult, }; use ic_interfaces::messages::RequestOrIngress; use ic_logger::{debug, fatal, ReplicaLogger}; use ic_metrics::{buckets::exponential_buckets, MetricsRegistry}; use ic_registry_routing_table::RoutingTable; use ic_registry_subnet_type::SubnetType; use ic_replicated_state::{ page_map::allocated_pages_count, CallContextAction, CallOrigin, CanisterState, ExecutionState, SchedulerState, SystemState, }; use ic_sys::PAGE_SIZE; use ic_system_api::{ ApiType, NonReplicatedQueryKind, StaticSystemState, SystemStateAccessorDirect, }; use ic_types::{ canonical_error::{internal_error, not_found_error, permission_denied_error, CanonicalError}, ingress::WasmResult, messages::Payload, methods::{Callback, FuncRef, SystemMethod, WasmMethod}, CanisterId, CanisterStatusType, Cycles, NumBytes, NumInstructions, PrincipalId, SubnetId, Time, }; use prometheus::{Histogram, IntCounterVec, IntGauge}; use std::{collections::BTreeMap, path::PathBuf, sync::Arc}; #[doc(hidden)] // pub for usage in tests pub struct HypervisorMetrics { accessed_pages: Histogram, dirty_pages: Histogram, allocated_pages: IntGauge, executed_messages: IntCounterVec, } impl HypervisorMetrics { #[doc(hidden)] // pub for usage in tests pub fn new(metrics_registry: &MetricsRegistry) -> Self { Self { accessed_pages: metrics_registry.histogram( "hypervisor_accessed_pages", "Number of pages accessed per execution round.", // 1 page, 2 pages, …, 2^21 
(8GiB worth of) pages exponential_buckets(1.0, 2.0, 22), ), dirty_pages: metrics_registry.histogram( "hypervisor_dirty_pages", "Number of pages modified (dirtied) per execution round.", exponential_buckets(1.0, 2.0, 22), ), allocated_pages: metrics_registry.int_gauge( "hypervisor_allocated_pages", "Total number of currently allocated pages.", ), executed_messages: metrics_registry.int_counter_vec( "hypervisor_executed_messages_total", "Number of messages executed, by type and status.", &["api_type", "status"], ), } } fn observe(&self, api_type: &str, result: &WasmExecutionOutput) { self.accessed_pages .observe(result.instance_stats.accessed_pages as f64); self.dirty_pages .observe(result.instance_stats.dirty_pages as f64); self.allocated_pages.set(allocated_pages_count() as i64); let status = match &result.wasm_result { Ok(Some(WasmResult::Reply(_))) => "success", Ok(Some(WasmResult::Reject(_))) => "Reject", Ok(None) => "NoResponse", Err(e) => e.as_str(), }; self.executed_messages .with_label_values(&[api_type, status]) .inc(); } } #[doc(hidden)] pub struct Hypervisor { wasm_executor: Arc<WasmExecutor>, sandbox_executor: Option<Arc<SandboxedExecutionController>>, metrics: Arc<HypervisorMetrics>, own_subnet_id: SubnetId, own_subnet_type: SubnetType, log: ReplicaLogger, cycles_account_manager: Arc<CyclesAccountManager>, } impl Hypervisor { /// Execute an update call. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - An enum describing the different actions that should be taken based /// the current state of the call context associated with the request that /// was executed. /// /// - The size of the heap delta change that the canister produced during /// execution. If execution failed, then the value is 0. 
#[allow(clippy::too_many_arguments)] pub fn execute_update( &self, canister: CanisterState, mut request: RequestOrIngress, time: Time, routing_table: Arc<RoutingTable>, subnet_records: Arc<BTreeMap<SubnetId, SubnetType>>, execution_parameters: ExecutionParameters, nns_subnet_id: SubnetId, ) -> (CanisterState, NumInstructions, CallContextAction, NumBytes) { debug!(self.log, "execute_update: method {}", request.method_name()); let incoming_cycles = request.take_cycles(); // Validate that the canister is running. if CanisterStatusType::Running != canister.status() { return ( canister, execution_parameters.instruction_limit, CallContextAction::Fail { error: HypervisorError::CanisterStopped, refund: incoming_cycles, }, NumBytes::from(0), ); } let method = WasmMethod::Update(request.method_name().to_string()); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, mut system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, system_state, scheduler_state), execution_parameters.instruction_limit, CallContextAction::Fail { error: HypervisorError::WasmModuleNotFound, refund: incoming_cycles, }, NumBytes::from(0), ); } Some(es) => es, }; // Validate that the Wasm module exports the method. 
if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), system_state, scheduler_state), execution_parameters.instruction_limit, CallContextAction::Fail { error: HypervisorError::MethodNotFound(method), refund: incoming_cycles, }, NumBytes::from(0), ); } let call_context_id = system_state .call_context_manager_mut() .unwrap() .new_call_context(CallOrigin::from(&request), incoming_cycles); let api_type = ApiType::update( time, request.method_payload().to_vec(), incoming_cycles, *request.sender(), call_context_id, self.own_subnet_id, self.own_subnet_type, nns_subnet_id, routing_table, subnet_records, ); let (output, output_execution_state, output_system_state) = self.execute( api_type, system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); let (mut system_state, heap_delta) = if output.wasm_result.is_ok() { ( output_system_state, NumBytes::from((output.instance_stats.dirty_pages * PAGE_SIZE) as u64), ) } else { // In contrast to other methods, an update methods ignores the // Wasm execution error and returns 0 as the heap delta. (system_state, NumBytes::from(0)) }; let action = system_state .call_context_manager_mut() .unwrap() .on_canister_result(call_context_id, output.wasm_result); let canister = CanisterState::from_parts(Some(output_execution_state), system_state, scheduler_state); (canister, output.num_instructions_left, action, heap_delta) } /// Execute a query call. /// /// Query calls are different from update calls as follows: /// - A different set of system APIs can be used. /// - Any modifications to the canister's state (like Wasm heap, etc.) will /// be rolled back. 
#[allow(clippy::too_many_arguments)] pub fn execute_query( &self, query_execution_type: QueryExecutionType, method: &str, payload: &[u8], caller: PrincipalId, canister: CanisterState, data_certificate: Option<Vec<u8>>, time: Time, execution_parameters: ExecutionParameters, ) -> ( CanisterState, NumInstructions, HypervisorResult<Option<WasmResult>>, ) { // Validate that the canister is running. if CanisterStatusType::Running != canister.status() { return ( canister, execution_parameters.instruction_limit, Err(HypervisorError::CanisterStopped), ); } let method = WasmMethod::Query(method.to_string()); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let mut execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(state) => state, }; // Validate that the Wasm module exports the method. if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::MethodNotFound(method)), ); } match query_execution_type { QueryExecutionType::Replicated => { if execution_state.cow_mem_mgr.is_valid() { // Replicated queries are similar to update executions and they operate // against the "current" canister state execution_state.mapped_state = Some(Arc::new(execution_state.cow_mem_mgr.get_map())); } let api_type = ApiType::replicated_query(time, payload.to_vec(), caller, data_certificate); // As we are executing the query in the replicated mode, we do // not want to commit updates, i.e. we must return the // unmodified version of the canister. Hence, execute on clones // of system and execution states so that we have the original // versions. 
let (output, _output_execution_state, _system_state_accessor) = self.execute( api_type, system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state.clone(), ); let canister = CanisterState::from_parts(Some(execution_state), system_state, scheduler_state); (canister, output.num_instructions_left, output.wasm_result) } QueryExecutionType::NonReplicated { call_context_id, routing_table, query_kind, } => { if execution_state.cow_mem_mgr.is_valid() { // Non replicated queries execute against // older snapshotted state. execution_state.mapped_state = match execution_state .cow_mem_mgr .get_map_for_snapshot(execution_state.last_executed_round.get()) { Ok(state) => Some(Arc::new(state)), Err(err @ CowError::SlotDbError { .. }) => { fatal!(self.log, "Failure due to {}", err) } }; } let api_type = ApiType::non_replicated_query( time, payload.to_vec(), caller, call_context_id, self.own_subnet_id, routing_table, data_certificate, query_kind.clone(), ); // As we are executing the query in non-replicated mode, we can // modify the canister as the caller is not going to be able to // commit modifications to the canister anyway. let (output, output_execution_state, output_system_state) = self.execute( api_type, system_state, memory_usage, execution_parameters, FuncRef::Method(method), execution_state.clone(), ); let new_execution_state = match query_kind { NonReplicatedQueryKind::Pure => execution_state, NonReplicatedQueryKind::Stateful => output_execution_state, }; let canister = CanisterState::from_parts( Some(new_execution_state), output_system_state, scheduler_state, ); (canister, output.num_instructions_left, output.wasm_result) } } } /// Execute a callback. /// /// Callbacks are executed when a canister receives a response to an /// outbound request it had made. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. 
This should be <= `instructions_limit`. /// /// - The size of the heap delta change that the execution produced. /// /// - A HypervisorResult that on success contains an optional wasm execution /// result or the relevant error if execution failed. #[allow(clippy::type_complexity, clippy::too_many_arguments)] pub fn execute_callback( &self, mut canister: CanisterState, call_origin: &CallOrigin, callback: Callback, payload: Payload, incoming_cycles: Cycles, time: Time, routing_table: Arc<RoutingTable>, subnet_records: Arc<BTreeMap<SubnetId, SubnetType>>, execution_parameters: ExecutionParameters, nns_subnet_id: SubnetId, ) -> ( CanisterState, NumInstructions, NumBytes, HypervisorResult<Option<WasmResult>>, ) { // Validate that the canister is not stopped. if canister.status() == CanisterStatusType::Stopped { return ( canister, execution_parameters.instruction_limit, NumBytes::from(0), Err(HypervisorError::CanisterStopped), ); } // Validate that the canister has an `ExecutionState`. if canister.execution_state.is_none() { return ( canister, execution_parameters.instruction_limit, NumBytes::from(0), Err(HypervisorError::WasmModuleNotFound), ); } let call_responded = canister .system_state .call_context_manager_mut() .unwrap() .call_responded(callback.call_context_id) // NOTE: Since we retrieved the `call_origin` earlier, we are now // sure that a call context exists and that this unwrap is safe. 
.unwrap(); let closure = match payload { Payload::Data(_) => callback.on_reply.clone(), Payload::Reject(_) => callback.on_reject.clone(), }; let api_type = match payload { Payload::Data(payload) => ApiType::reply_callback( time, payload.to_vec(), incoming_cycles, callback.call_context_id, call_responded, self.own_subnet_id, self.own_subnet_type, nns_subnet_id, routing_table, subnet_records, ), Payload::Reject(context) => ApiType::reject_callback( time, context, incoming_cycles, callback.call_context_id, call_responded, self.own_subnet_id, self.own_subnet_type, nns_subnet_id, routing_table, subnet_records, ), }; let func_ref = match call_origin { CallOrigin::Ingress(_, _) | CallOrigin::CanisterUpdate(_, _) | CallOrigin::Heartbeat => FuncRef::UpdateClosure(closure), CallOrigin::CanisterQuery(_, _) | CallOrigin::Query(_) => { FuncRef::QueryClosure(closure) } }; let (output, output_execution_state, output_system_state) = self.execute( api_type, canister.system_state.clone(), canister.memory_usage(self.own_subnet_type), execution_parameters.clone(), func_ref, canister.execution_state.take().unwrap(), ); let canister_current_memory_usage = canister.memory_usage(self.own_subnet_type); let call_origin = call_origin.clone(); canister.execution_state = Some(output_execution_state); match output.wasm_result { result @ Ok(_) => { // Executing the reply/reject closure succeeded. canister.system_state = output_system_state; let heap_delta = NumBytes::from((output.instance_stats.dirty_pages * PAGE_SIZE) as u64); (canister, output.num_instructions_left, heap_delta, result) } Err(callback_err) => { // A trap has occurred when executing the reply/reject closure. // Execute the cleanup if it exists. match callback.on_cleanup { None => { // No cleanup closure present. Return the callback error as-is. 
( canister, output.num_instructions_left, NumBytes::from(0), Err(callback_err), ) } Some(cleanup_closure) => { let func_ref = match call_origin { CallOrigin::Ingress(_, _) | CallOrigin::CanisterUpdate(_, _) | CallOrigin::Heartbeat => FuncRef::UpdateClosure(cleanup_closure), CallOrigin::CanisterQuery(_, _) | CallOrigin::Query(_) => { FuncRef::QueryClosure(cleanup_closure) } }; let (cleanup_output, output_execution_state, output_system_state) = self .execute( ApiType::Cleanup { time }, canister.system_state.clone(), canister_current_memory_usage, ExecutionParameters { instruction_limit: output.num_instructions_left, ..execution_parameters }, func_ref, canister.execution_state.take().unwrap(), ); canister.execution_state = Some(output_execution_state); match cleanup_output.wasm_result { Ok(_) => { // Executing the cleanup callback has succeeded. canister.system_state = output_system_state; let heap_delta = NumBytes::from( (cleanup_output.instance_stats.dirty_pages * PAGE_SIZE) as u64, ); // Note that, even though the callback has succeeded, // the original callback error is returned. ( canister, cleanup_output.num_instructions_left, heap_delta, Err(callback_err), ) } Err(cleanup_err) => { // Executing the cleanup call back failed. ( canister, cleanup_output.num_instructions_left, NumBytes::from(0), Err(HypervisorError::Cleanup { callback_err: Box::new(callback_err), cleanup_err: Box::new(cleanup_err), }), ) } } } } } } } /// Executes the system method `canister_start`. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - A HypervisorResult containing the size of the heap delta change if /// execution was successful or the relevant error if execution failed. 
#[allow(clippy::type_complexity)] pub fn execute_canister_start( &self, canister: CanisterState, execution_parameters: ExecutionParameters, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let method = WasmMethod::System(SystemMethod::CanisterStart); let memory_usage = canister.memory_usage(self.own_subnet_type); let canister_id = canister.canister_id(); let (execution_state, system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(es) => es, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), system_state, scheduler_state), execution_parameters.instruction_limit, Ok(NumBytes::from(0)), ); } let (output, output_execution_state, _system_state_accessor) = self.execute( ApiType::start(), SystemState::new_for_start(canister_id), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); self.system_execution_result_with_old_system_state( output, output_execution_state, system_state, scheduler_state, ) } /// Executes the system method `canister_pre_upgrade`. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - A HypervisorResult containing the size of the heap delta change if /// execution was successful or the relevant error if execution failed. 
#[allow(clippy::type_complexity)] pub fn execute_canister_pre_upgrade( &self, canister: CanisterState, caller: PrincipalId, time: Time, execution_parameters: ExecutionParameters, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let method = WasmMethod::System(SystemMethod::CanisterPreUpgrade); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, old_system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, old_system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(es) => es, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), old_system_state, scheduler_state), execution_parameters.instruction_limit, Ok(NumBytes::from(0)), ); } let (output, output_execution_state, output_system_state) = self.execute( ApiType::pre_upgrade(time, caller), old_system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); self.system_execution_result( output, output_execution_state, old_system_state, scheduler_state, output_system_state, ) } /// Executes the system method `canister_init`. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - A HypervisorResult containing the size of the heap delta change if /// execution was successful or the relevant error if execution failed. 
#[allow(clippy::type_complexity)] pub fn execute_canister_init( &self, canister: CanisterState, caller: PrincipalId, payload: &[u8], time: Time, execution_parameters: ExecutionParameters, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let method = WasmMethod::System(SystemMethod::CanisterInit); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, old_system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, old_system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(es) => es, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), old_system_state, scheduler_state), execution_parameters.instruction_limit, Ok(NumBytes::from(0)), ); } let (output, output_execution_state, output_system_state) = self.execute( ApiType::init(time, payload.to_vec(), caller), old_system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); self.system_execution_result( output, output_execution_state, old_system_state, scheduler_state, output_system_state, ) } /// Executes the system method `canister_post_upgrade`. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. /// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - A HypervisorResult containing the size of the heap delta change if /// execution was successful or the relevant error if execution failed. 
#[allow(clippy::type_complexity)] pub fn execute_canister_post_upgrade( &self, canister: CanisterState, caller: PrincipalId, payload: &[u8], time: Time, execution_parameters: ExecutionParameters, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let method = WasmMethod::System(SystemMethod::CanisterPostUpgrade); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, old_system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, old_system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(es) => es, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), old_system_state, scheduler_state), execution_parameters.instruction_limit, Ok(NumBytes::from(0)), ); } let (output, output_execution_state, output_system_state) = self.execute( ApiType::init(time, payload.to_vec(), caller), old_system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); self.system_execution_result( output, output_execution_state, old_system_state, scheduler_state, output_system_state, ) } /// Executes the system method `canister_inspect_message`. /// /// This method is called pre-consensus to let the canister decide if it /// wants to accept the message or not. 
pub fn execute_inspect_message( &self, canister: CanisterState, sender: PrincipalId, method_name: String, method_payload: Vec<u8>, time: Time, execution_parameters: ExecutionParameters, ) -> Result<(), CanonicalError> { let method = WasmMethod::System(SystemMethod::CanisterInspectMessage); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, system_state, _) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => return Err(not_found_error("Requested canister has no wasm module")), Some(execution_state) => execution_state, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. if !execution_state.exports_method(&method) { return Ok(()); } let system_api = ApiType::inspect_message(sender, method_name, method_payload, time); let log = self.log.clone(); let (output, _output_execution_state, _system_state_accessor) = self.execute( system_api, system_state, memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); match output.wasm_result { Ok(maybe_wasm_result) => match maybe_wasm_result { None => Ok(()), Some(_result) => fatal!( log, "SystemApi should guarantee that the canister does not reply" ), }, Err(err) => match err { HypervisorError::MessageRejected => Err(permission_denied_error( "Requested canister rejected the message", )), err => { let canonical_error = match err { HypervisorError::MethodNotFound(_) => not_found_error( "Attempt to execute non-existent method on the canister", ), HypervisorError::CalledTrap(_) => { permission_denied_error("Requested canister rejected the message") } _ => internal_error( "Requested canister failed to process the message acceptance request", ), }; Err(canonical_error) } }, } } /// Executes the `canister_heartbeat` system method. /// /// Returns: /// /// - The updated `CanisterState` if the execution succeeded, otherwise /// the old `CanisterState`. 
/// /// - Number of instructions left. This should be <= `instructions_limit`. /// /// - A HypervisorResult containing the size of the heap delta change if /// execution was successful or the relevant error if execution failed. #[allow(clippy::type_complexity)] pub fn execute_canister_heartbeat( &self, canister: CanisterState, routing_table: Arc<RoutingTable>, subnet_records: Arc<BTreeMap<SubnetId, SubnetType>>, time: Time, execution_parameters: ExecutionParameters, nns_subnet_id: SubnetId, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let method = WasmMethod::System(SystemMethod::CanisterHeartbeat); let memory_usage = canister.memory_usage(self.own_subnet_type); let (execution_state, mut old_system_state, scheduler_state) = canister.into_parts(); // Validate that the Wasm module is present. let execution_state = match execution_state { None => { return ( CanisterState::from_parts(None, old_system_state, scheduler_state), execution_parameters.instruction_limit, Err(HypervisorError::WasmModuleNotFound), ); } Some(es) => es, }; // If the Wasm module does not export the method, then this execution // succeeds as a no-op. 
if !execution_state.exports_method(&method) { return ( CanisterState::from_parts(Some(execution_state), old_system_state, scheduler_state), execution_parameters.instruction_limit, Ok(NumBytes::from(0)), ); } let call_context_id = old_system_state .call_context_manager_mut() .unwrap() .new_call_context(CallOrigin::Heartbeat, Cycles::from(0)); let api_type = ApiType::heartbeat( time, call_context_id, self.own_subnet_id, self.own_subnet_type, nns_subnet_id, routing_table, subnet_records, ); let (output, output_execution_state, output_system_state) = self.execute( api_type, old_system_state.clone(), memory_usage, execution_parameters, FuncRef::Method(method), execution_state, ); { let wasm_result = output.wasm_result.clone(); let (mut canister, num_instructions_left, heap_delta) = self.system_execution_result( output, output_execution_state, old_system_state, scheduler_state, output_system_state, ); let _action = canister .system_state .call_context_manager_mut() .unwrap() .on_canister_result(call_context_id, wasm_result); (canister, num_instructions_left, heap_delta) } } // A helper that converts a Wasm execution output to an execution // result of `execution_canister_*` functions. // // The components of the resulting `CanisterState` are computed // as follows: // - `execution_state` is taken from the Wasm output. // - `scheduler_state` is taken from the corresponding argument. // - `system_state` is taken from the system_state_accessor if the execution // succeeded; otherwise, it is taken from the corresponding argument. 
fn system_execution_result( &self, output: WasmExecutionOutput, execution_state: ExecutionState, old_system_state: SystemState, scheduler_state: SchedulerState, output_system_state: SystemState, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let (system_state, heap_delta) = match output.wasm_result { Ok(opt_result) => { if opt_result.is_some() { fatal!(self.log, "[EXC-BUG] System methods cannot use msg_reply."); } let bytes = NumBytes::from((output.instance_stats.dirty_pages * PAGE_SIZE) as u64); (output_system_state, Ok(bytes)) } Err(err) => (old_system_state, Err(err)), }; let canister = CanisterState::from_parts(Some(execution_state), system_state, scheduler_state); (canister, output.num_instructions_left, heap_delta) } // Similar to `system_execution_result` but unconditionally uses // the given `old_system_state` for the resulting canister state. fn system_execution_result_with_old_system_state( &self, output: WasmExecutionOutput, execution_state: ExecutionState, old_system_state: SystemState, scheduler_state: SchedulerState, ) -> (CanisterState, NumInstructions, HypervisorResult<NumBytes>) { let heap_delta = match output.wasm_result { Ok(opt_result) => { if opt_result.is_some() { fatal!(self.log, "[EXC-BUG] System methods cannot use msg_reply."); } Ok(NumBytes::from( (output.instance_stats.dirty_pages * PAGE_SIZE) as u64, )) } Err(err) => Err(err), }; let canister = CanisterState::from_parts(Some(execution_state), old_system_state, scheduler_state); (canister, output.num_instructions_left, heap_delta) } pub fn create_execution_state( &self, wasm_binary: Vec<u8>, canister_root: PathBuf, canister_id: CanisterId, ) -> HypervisorResult<ExecutionState> { if let Some(sandbox_executor) = self.sandbox_executor.as_ref() { sandbox_executor.create_execution_state(wasm_binary, canister_root, canister_id) } else { self.wasm_executor .create_execution_state(wasm_binary, canister_root, canister_id) } } #[allow(clippy::too_many_arguments)] pub fn new( 
config: Config, num_runtime_threads: usize, metrics_registry: &MetricsRegistry, own_subnet_id: SubnetId, own_subnet_type: SubnetType, log: ReplicaLogger, cycles_account_manager: Arc<CyclesAccountManager>, ) -> Self { let mut embedder_config = EmbeddersConfig::new(); embedder_config.persistence_type = config.persistence_type; embedder_config.num_runtime_generic_threads = num_runtime_threads; embedder_config.num_runtime_query_threads = std::cmp::min(num_runtime_threads, 4); let wasm_embedder = WasmtimeEmbedder::new(embedder_config.clone(), log.clone()); let wasm_executor = WasmExecutor::new( wasm_embedder, metrics_registry, embedder_config, log.clone(), ); let sandbox_executor = match config.canister_sandboxing_flag { FeatureStatus::Enabled => { Some(Arc::new(SandboxedExecutionController::new(log.clone()))) } FeatureStatus::Disabled => None, }; Self { wasm_executor: Arc::new(wasm_executor), sandbox_executor, metrics: Arc::new(HypervisorMetrics::new(metrics_registry)), own_subnet_id, own_subnet_type, log, cycles_account_manager, } } #[cfg(test)] pub fn compile_count(&self) -> u64 { if let Some(sandbox_executor) = &self.sandbox_executor { sandbox_executor.compile_count_for_testing() } else { self.wasm_executor.compile_count_for_testing() } } /// Wrapper around the standalone `execute`. /// NOTE: this is public to enable integration testing. 
#[doc(hidden)] pub fn execute( &self, api_type: ApiType, system_state: SystemState, canister_current_memory_usage: NumBytes, execution_parameters: ExecutionParameters, func_ref: FuncRef, execution_state: ExecutionState, ) -> (WasmExecutionOutput, ExecutionState, SystemState) { let api_type_str = api_type.as_str(); let static_system_state = StaticSystemState::new(&system_state, self.cycles_account_manager.subnet_type()); let system_state_accessor = SystemStateAccessorDirect::new(system_state, Arc::clone(&self.cycles_account_manager)); let (output, execution_state, system_state_accessor) = if let Some(sandbox_executor) = self.sandbox_executor.as_ref() { sandbox_executor.process(WasmExecutionInput { api_type: api_type.clone(), static_system_state, canister_current_memory_usage, execution_parameters, func_ref, execution_state, system_state_accessor, }) } else { self.wasm_executor.process(WasmExecutionInput { api_type: api_type.clone(), static_system_state, canister_current_memory_usage, execution_parameters, func_ref, execution_state, system_state_accessor, }) }; self.metrics.observe(api_type_str, &output); ( output, execution_state, system_state_accessor.release_system_state(), ) } }
39.111404
105
0.567318
d9e369cb00b3f5544e52e5d9f935ba8d37eab562
3,263
#![allow(unused_imports)] use super::*; use wasm_bindgen::prelude::*; #[wasm_bindgen] extern "C" { # [wasm_bindgen (extends = :: js_sys :: Object , js_name = PublicKeyCredentialUserEntity)] #[derive(Debug, Clone, PartialEq, Eq)] #[doc = "The `PublicKeyCredentialUserEntity` dictionary."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] pub type PublicKeyCredentialUserEntity; } impl PublicKeyCredentialUserEntity { #[doc = "Construct a new `PublicKeyCredentialUserEntity`."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] pub fn new(name: &str, display_name: &str, id: &::js_sys::Object) -> Self { #[allow(unused_mut)] let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new()); ret.name(name); ret.display_name(display_name); ret.id(id); ret } #[doc = "Change the `icon` field of this object."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] pub fn icon(&mut self, val: &str) -> &mut Self { use wasm_bindgen::JsValue; let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("icon"), &JsValue::from(val)); debug_assert!( r.is_ok(), "setting properties should never fail on our dictionary objects" ); let _ = r; self } #[doc = "Change the `name` field of this object."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] pub fn name(&mut self, val: &str) -> &mut Self { use wasm_bindgen::JsValue; let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("name"), &JsValue::from(val)); debug_assert!( r.is_ok(), "setting properties should never fail on our dictionary objects" ); let _ = r; self } #[doc = "Change the `displayName` field of this object."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] 
pub fn display_name(&mut self, val: &str) -> &mut Self { use wasm_bindgen::JsValue; let r = ::js_sys::Reflect::set( self.as_ref(), &JsValue::from("displayName"), &JsValue::from(val), ); debug_assert!( r.is_ok(), "setting properties should never fail on our dictionary objects" ); let _ = r; self } #[doc = "Change the `id` field of this object."] #[doc = ""] #[doc = "*This API requires the following crate features to be activated: `PublicKeyCredentialUserEntity`*"] pub fn id(&mut self, val: &::js_sys::Object) -> &mut Self { use wasm_bindgen::JsValue; let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("id"), &JsValue::from(val)); debug_assert!( r.is_ok(), "setting properties should never fail on our dictionary objects" ); let _ = r; self } }
39.792683
112
0.594851
e5082f1a46360cf47e7a4011dd52e15fc18800ef
424
pub fn main() { let mut builder = env_logger::Builder::new(); builder.parse_filters("libmdns=trace"); builder.init(); let responder = libmdns::Responder::new().unwrap(); let _svc = responder.register( "_http._tcp".to_owned(), "libmdns Web Server".to_owned(), 80, &["path=/"], ); loop { ::std::thread::sleep(::std::time::Duration::from_secs(10)); } }
23.555556
67
0.558962
87472f252e6fa20f3796e4dda064b7479651d910
678
#[macro_use] extern crate serde_derive; use async_trait::async_trait; use error::SignerError; use zksync_types::tx::TxEthSignature; use zksync_types::Address; pub use json_rpc_signer::JsonRpcSigner; pub use pk_signer::PrivateKeySigner; pub use raw_ethereum_tx::RawTransaction; pub mod error; pub mod json_rpc_signer; pub mod pk_signer; pub mod raw_ethereum_tx; #[async_trait] pub trait EthereumSigner: Send + Sync + Clone { async fn sign_message(&self, message: &[u8]) -> Result<TxEthSignature, SignerError>; async fn sign_transaction(&self, raw_tx: RawTransaction) -> Result<Vec<u8>, SignerError>; async fn get_address(&self) -> Result<Address, SignerError>; }
28.25
93
0.768437
623b491d141ef39a1ab0e41c5e729b8027e4e707
1,768
use proptest::{arbitrary::any, array, prelude::*}; use crate::{ amount::{Amount, NonNegative}, primitives::ZkSnarkProof, }; use super::{commitment, note, tree, JoinSplit}; impl<P: ZkSnarkProof + Arbitrary + 'static> Arbitrary for JoinSplit<P> { type Parameters = (); fn arbitrary_with(_args: Self::Parameters) -> Self::Strategy { ( any::<Amount<NonNegative>>(), any::<Amount<NonNegative>>(), any::<tree::Root>(), array::uniform2(any::<note::Nullifier>()), array::uniform2(any::<commitment::NoteCommitment>()), array::uniform32(any::<u8>()), array::uniform32(any::<u8>()), array::uniform2(any::<note::MAC>()), any::<P>(), array::uniform2(any::<note::EncryptedNote>()), ) .prop_map( |( vpub_old, vpub_new, anchor, nullifiers, commitments, ephemeral_key_bytes, random_seed, vmacs, zkproof, enc_ciphertexts, )| { Self { vpub_old, vpub_new, anchor, nullifiers, commitments, ephemeral_key: x25519_dalek::PublicKey::from(ephemeral_key_bytes), random_seed, vmacs, zkproof, enc_ciphertexts, } }, ) .boxed() } type Strategy = BoxedStrategy<Self>; }
30.482759
90
0.418552
e4c3d79eec9116b68a10a718b72cbf00a6bd21eb
4,070
use std::io; use bytes::BytesMut; /// Decoding of frames via buffers. /// /// This trait is used when constructing an instance of `Framed` or /// `FramedRead`. An implementation of `Decoder` takes a byte stream that has /// already been buffered in `src` and decodes the data into a stream of /// `Self::Item` frames. /// /// Implementations are able to track state on `self`, which enables /// implementing stateful streaming parsers. In many cases, though, this type /// will simply be a unit struct (e.g. `struct HttpDecoder`). pub trait Decoder { /// The type of decoded frames. type Item; /// The type of unrecoverable frame decoding errors. /// /// If an individual message is ill-formed but can be ignored without /// interfering with the processing of future messages, it may be more /// useful to report the failure as an `Item`. /// /// `From<io::Error>` is required in the interest of making `Error` suitable /// for returning directly from a `FramedRead`, and to enable the default /// implementation of `decode_eof` to yield an `io::Error` when the decoder /// fails to consume all available data. /// /// Note that implementors of this trait can simply indicate `type Error = /// io::Error` to use I/O errors as this type. type Error: From<io::Error>; /// Attempts to decode a frame from the provided buffer of bytes. /// /// This method is called by `FramedRead` whenever bytes are ready to be /// parsed. The provided buffer of bytes is what's been read so far, and /// this instance of `Decode` can determine whether an entire frame is in /// the buffer and is ready to be returned. /// /// If an entire frame is available, then this instance will remove those /// bytes from the buffer provided and return them as a decoded /// frame. Note that removing bytes from the provided buffer doesn't always /// necessarily copy the bytes, so this should be an efficient operation in /// most circumstances. 
/// /// If the bytes look valid, but a frame isn't fully available yet, then /// `Ok(None)` is returned. This indicates to the `Framed` instance that /// it needs to read some more bytes before calling this method again. /// /// Note that the bytes provided may be empty. If a previous call to /// `decode` consumed all the bytes in the buffer then `decode` will be /// called again until it returns `None`, indicating that more bytes need to /// be read. /// /// Finally, if the bytes in the buffer are malformed then an error is /// returned indicating why. This informs `Framed` that the stream is now /// corrupt and should be terminated. fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error>; /// A default method available to be called when there are no more bytes /// available to be read from the underlying I/O. /// /// This method defaults to calling `decode` and returns an error if /// `Ok(None)` is returned while there is unconsumed data in `buf`. /// Typically this doesn't need to be implemented unless the framing /// protocol differs near the end of the stream. /// /// Note that the `buf` argument may be empty. If a previous call to /// `decode_eof` consumed all the bytes in the buffer, `decode_eof` will be /// called again until it returns `None`, indicating that there are no more /// frames to yield. This behavior enables returning finalization frames /// that may not be based on inbound data. fn decode_eof(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { match try!(self.decode(buf)) { Some(frame) => Ok(Some(frame)), None => { if buf.is_empty() { Ok(None) } else { Err(io::Error::new(io::ErrorKind::Other, "bytes remaining on stream").into()) } } } } }
46.781609
93
0.647912
db1fbfdf2bf6a4538a06e10d7a9f36a358a21f0f
807
use crate::{entities::Document, errors::GatewayError};

mod all_documents;
mod get_document_by_id;

use get_document_by_id::{get_loader, DocumentLoader};

/// Data-access facade for documents, backed by a shared gRPC channel.
///
/// Holds both the raw channel (for ad-hoc queries) and a loader built on a
/// clone of that channel (for keyed lookups).
#[derive(Clone)]
pub struct DocumentData {
    channel: tonic::transport::Channel,
    documents_by_id: DocumentLoader,
}

impl DocumentData {
    /// Builds the facade: the loader is wired to a clone of the channel
    /// before the original is moved into the struct.
    pub fn new(channel: tonic::transport::Channel) -> Self {
        let documents_by_id = get_loader(channel.clone());
        Self {
            channel,
            documents_by_id,
        }
    }

    /// Fetches a single document by id through the loader.
    pub async fn documents_by_id(&self, id: i32) -> Document {
        self.documents_by_id.load(id).await
    }

    /// Fetches a page of documents directly over the channel.
    pub async fn all_documents(
        &self,
        limit: i32,
        offset: i32,
    ) -> Result<Vec<Document>, GatewayError> {
        all_documents::all_documents(self.channel.clone(), limit, offset).await
    }
}
23.735294
79
0.644362
ef88072bd881c8c590ed222d54e39b0c2ba11387
7,576
use super::ffi; /// EGL errors #[derive(thiserror::Error, Debug)] pub enum Error { /// The requested OpenGL version is not supported #[error("The requested OpenGL version {0:?} is not supported")] OpenGlVersionNotSupported((u8, u8)), /// The EGL implementation does not support creating OpenGL ES contexts #[error("The EGL implementation does not support creating OpenGL ES contexts. Err: {0:?}")] OpenGlesNotSupported(#[source] Option<EGLError>), /// No available pixel format matched the criteria #[error("No available pixel format matched the criteria")] NoAvailablePixelFormat, /// Backend does not match the context type #[error("The expected backend '{0:?}' does not match the runtime")] NonMatchingBackend(&'static str), /// Display creation failed #[error("Display creation failed with error: {0:}")] DisplayCreationError(#[source] EGLError), /// Unable to obtain a valid EGL Display #[error("Unable to obtain a valid EGL Display.")] DisplayNotSupported, /// `eglInitialize` returned an error #[error("Failed to initialize EGL. Err: {0:}")] InitFailed(#[source] EGLError), /// Failed to configure the EGL context #[error("Failed to configure the EGL context")] ConfigFailed(#[source] EGLError), /// Context creation failed as one or more requirements could not be met. Try removing some gl attributes or pixel format requirements #[error("Context creation failed as one or more requirements could not be met. Try removing some gl attributes or pixel format requirements. 
Err: {0:}")] CreationFailed(#[source] EGLError), /// The required EGL extension is not supported by the underlying EGL implementation #[error("None of the following EGL extensions is supported by the underlying EGL implementation, at least one is required: {0:?}")] EglExtensionNotSupported(&'static [&'static str]), /// Only one EGLDisplay may be bound to a given `WlDisplay` at any time #[error("Only one EGLDisplay may be bound to a given `WlDisplay` at any time")] OtherEGLDisplayAlreadyBound(#[source] EGLError), /// No EGLDisplay is currently bound to this `WlDisplay` #[error("No EGLDisplay is currently bound to this `WlDisplay`")] NoEGLDisplayBound, /// Index of plane is out of bounds for `EGLImages` #[error("Index of plane is out of bounds for `EGLImages`")] PlaneIndexOutOfBounds, /// Failed to create `EGLImages` from the buffer #[error("Failed to create `EGLImages` from the buffer")] EGLImageCreationFailed, } /// Raw EGL error #[derive(thiserror::Error, Debug)] pub enum EGLError { /// EGL is not initialized, or could not be initialized, for the specified EGL display connection. #[error( "EGL is not initialized, or could not be initialized, for the specified EGL display connection." )] NotInitialized, /// EGL cannot access a requested resource (for example a context is bound in another thread). #[error("EGL cannot access a requested resource (for example a context is bound in another thread).")] BadAccess, /// EGL failed to allocate resources for the requested operation. #[error("EGL failed to allocate resources for the requested operation.")] BadAlloc, /// An unrecognized attribute or attribute value was passed in the attribute list. #[error("An unrecognized attribute or attribute value was passed in the attribute list.")] BadAttribute, /// An EGLContext argument does not name a valid EGL rendering context. 
#[error("An EGLContext argument does not name a valid EGL rendering context.")] BadContext, /// An EGLConfig argument does not name a valid EGL frame buffer configuration. #[error("An EGLConfig argument does not name a valid EGL frame buffer configuration.")] BadConfig, /// The current surface of the calling thread is a window, pixel buffer or pixmap that is no longer valid. #[error("The current surface of the calling thread is a window, pixel buffer or pixmap that is no longer valid.")] BadCurrentSurface, /// An EGLDisplay argument does not name a valid EGL display connection. #[error("An EGLDisplay argument does not name a valid EGL display connection.")] BadDisplay, /// An EGLSurface argument does not name a valid surface (window, pixel buffer or pixmap) configured for GL rendering. #[error("An EGLSurface argument does not name a valid surface (window, pixel buffer or pixmap) configured for GL rendering.")] BadSurface, /// Arguments are inconsistent (for example, a valid context requires buffers not supplied by a valid surface). #[error("Arguments are inconsistent (for example, a valid context requires buffers not supplied by a valid surface).")] BadMatch, /// One or more argument values are invalid. #[error("One or more argument values are invalid.")] BadParameter, /// A NativePixmapType argument does not refer to a valid native pixmap. #[error("A NativePixmapType argument does not refer to a valid native pixmap.")] BadNativePixmap, /// A NativeWindowType argument does not refer to a valid native window. #[error("A NativeWindowType argument does not refer to a valid native window.")] BadNativeWindow, #[cfg(feature = "backend_drm_eglstream")] /// The EGL operation failed due to temporary unavailability of a requested resource, but the arguments were otherwise valid, and a subsequent attempt may succeed. 
#[error("The EGL operation failed due to temporary unavailability of a requested resource, but the arguments were otherwise valid, and a subsequent attempt may succeed.")] ResourceBusy, /// A power management event has occurred. The application must destroy all contexts and reinitialise OpenGL ES state and objects to continue rendering. #[error("A power management event has occurred. The application must destroy all contexts and reinitialise OpenGL ES state and objects to continue rendering.")] ContextLost, /// An unknown error #[error("An unknown error ({0:x})")] Unknown(u32), } impl From<u32> for EGLError { fn from(value: u32) -> Self { match value { ffi::egl::NOT_INITIALIZED => EGLError::NotInitialized, ffi::egl::BAD_ACCESS => EGLError::BadAccess, ffi::egl::BAD_ALLOC => EGLError::BadAlloc, ffi::egl::BAD_ATTRIBUTE => EGLError::BadAttribute, ffi::egl::BAD_CONTEXT => EGLError::BadContext, ffi::egl::BAD_CURRENT_SURFACE => EGLError::BadCurrentSurface, ffi::egl::BAD_DISPLAY => EGLError::BadDisplay, ffi::egl::BAD_SURFACE => EGLError::BadSurface, ffi::egl::BAD_MATCH => EGLError::BadMatch, ffi::egl::BAD_PARAMETER => EGLError::BadParameter, ffi::egl::BAD_NATIVE_PIXMAP => EGLError::BadNativePixmap, ffi::egl::BAD_NATIVE_WINDOW => EGLError::BadNativeWindow, #[cfg(feature = "backend_drm_eglstream")] ffi::egl::RESOURCE_BUSY_EXT => EGLError::ResourceBusy, ffi::egl::CONTEXT_LOST => EGLError::ContextLost, x => EGLError::Unknown(x), } } } impl EGLError { fn from_last_call() -> Result<(), EGLError> { match unsafe { ffi::egl::GetError() as u32 } { ffi::egl::SUCCESS => Ok(()), x => Err(EGLError::from(x)), } } } pub(crate) fn wrap_egl_call<R, F: FnOnce() -> R>(call: F) -> Result<R, EGLError> { let res = call(); EGLError::from_last_call().map(|()| res) }
53.352113
175
0.69311
8a5c24480bef0a60e9204db15a2b8a5a186dff14
3,298
use crate::tls_stub::{
    node_id_from_cert_subject_common_name, tls_cert_from_registry, TlsCertFromRegistryError,
};
use ic_crypto_internal_csp::api::CspTlsClientHandshake;
use ic_crypto_tls_interfaces::TlsPublicKeyCert;
use ic_crypto_tls_interfaces::{
    MalformedPeerCertificateError, PeerNotAllowedError, TlsClientHandshakeError, TlsStream,
};
use ic_interfaces::registry::RegistryClient;
use ic_types::{NodeId, RegistryVersion};
use std::sync::Arc;
use tokio::net::TcpStream;

/// Performs the client side of a TLS handshake over `tcp_stream`.
///
/// Both our own certificate and the trusted certificate of the target
/// `server` node are fetched from the registry at `registry_version`; after
/// the CSP completes the handshake, the certificate actually presented by
/// the peer is validated against the registry entry.
#[allow(unused)]
pub async fn perform_tls_client_handshake<C: CspTlsClientHandshake>(
    csp: &C,
    self_node_id: NodeId,
    registry_client: &Arc<dyn RegistryClient>,
    tcp_stream: TcpStream,
    server: NodeId,
    registry_version: RegistryVersion,
) -> Result<TlsStream, TlsClientHandshakeError> {
    let own_cert = tls_cert_from_registry(registry_client, self_node_id, registry_version)
        .map_err(|e| map_cert_from_registry_error(e, CertFromRegistryOwner::Myself))?;
    let registry_server_cert = tls_cert_from_registry(registry_client, server, registry_version)
        .map_err(|e| map_cert_from_registry_error(e, CertFromRegistryOwner::Server))?;

    let (stream, presented_cert) = csp
        .perform_tls_client_handshake(tcp_stream, own_cert, registry_server_cert.clone())
        .await?;

    check_cert(server, &registry_server_cert, &presented_cert)?;
    Ok(stream)
}

/// Verifies that the certificate presented during the handshake belongs to
/// the expected server node and matches the registry copy exactly.
fn check_cert(
    trusted_server_node_id: NodeId,
    trusted_server_cert_from_registry: &TlsPublicKeyCert,
    server_cert_from_handshake: &TlsPublicKeyCert,
) -> Result<(), TlsClientHandshakeError> {
    let claimed_node_id = node_id_from_cert_subject_common_name(server_cert_from_handshake)?;
    if claimed_node_id != trusted_server_node_id {
        Err(TlsClientHandshakeError::ServerNotAllowed(
            PeerNotAllowedError::HandshakeCertificateNodeIdNotAllowed,
        ))
    } else if server_cert_from_handshake != trusted_server_cert_from_registry {
        Err(TlsClientHandshakeError::ServerNotAllowed(
            PeerNotAllowedError::CertificatesDiffer,
        ))
    } else {
        Ok(())
    }
}

/// Identifies whose certificate a registry lookup was for, so that
/// malformed-certificate errors can be attributed correctly.
enum CertFromRegistryOwner {
    Server,
    Myself,
}

/// Translates a registry-lookup error into the handshake error type,
/// attributing malformed certificates to the correct party.
fn map_cert_from_registry_error(
    registry_error: TlsCertFromRegistryError,
    peer: CertFromRegistryOwner,
) -> TlsClientHandshakeError {
    match registry_error {
        TlsCertFromRegistryError::RegistryError(e) => TlsClientHandshakeError::RegistryError(e),
        TlsCertFromRegistryError::CertificateNotInRegistry {
            node_id,
            registry_version,
        } => TlsClientHandshakeError::CertificateNotInRegistry {
            node_id,
            registry_version,
        },
        TlsCertFromRegistryError::CertificateMalformed { internal_error } => match peer {
            CertFromRegistryOwner::Server => TlsClientHandshakeError::MalformedServerCertificate(
                MalformedPeerCertificateError { internal_error },
            ),
            CertFromRegistryOwner::Myself => {
                TlsClientHandshakeError::MalformedSelfCertificate { internal_error }
            }
        },
    }
}
35.847826
96
0.717404