hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
89765ca9d2a6ab11b2a8385a162bcdb3dcc5b9fe
150
#[cfg(target_os = "redox")] pub const NULL_PATH: &str = "null:"; #[cfg(all(unix, not(target_os = "redox")))] pub const NULL_PATH: &str = "/dev/null";
30
43
0.633333
766d71b8e05426a3cdf3b1d6e590fe4e2f5dfb6c
1,712
use std::vec::Vec; use crate::models::structs::Submodule; use crate::search::perform_search; use tui::style::Color; pub struct App { pub submodules: Vec<Submodule>, pub filtered_submodules: Vec<Submodule>, pub last_query: Option<String>, pub current_query: String, pub skin_color: Color, } impl Default for App { fn default() -> Self { App { filtered_submodules: vec![], submodules: vec![], current_query: String::new(), last_query: None, skin_color: Color::Rgb(244, 71, 2) } } } impl App { pub fn new(submodules: Vec<Submodule>, first_query: &str) -> Self { App { submodules, current_query: first_query.to_string(), ..App::default() } } // Performs search on submodules and updates app's internal // cached search. Only performs the search if it's needed. pub fn search(&mut self) { // TODO: Process query to extract tags and name searches if self.last_query.is_none() || self.current_query != self.last_query.as_deref().unwrap() { let mut final_query = vec![]; let mut tags = vec![]; for token in self.current_query.split(' ') { if let Some(tag_name) = token.strip_prefix("tag:") { tags.push(tag_name); } else { final_query.push(token); } } self.filtered_submodules = perform_search(&self.submodules, &final_query.join(" "), &tags).expect("can search through submodules"); self.last_query = Some(self.current_query.clone()); } } }
28.533333
144
0.564836
1ee7da3101e495434b01a673fa92d07c8463864c
1,414
use indy_api_types::errors::IndyResult; use super::super::super::proto::cheqdid::cheqdnode::cheqd::MsgUpdateNym as ProtoMsgUpdateNym; use super::super::super::CheqdProto; #[derive(Eq, PartialEq, Debug)] pub struct MsgUpdateNym { pub creator: String, pub id: u64, pub alias: String, pub verkey: String, pub did: String, pub role: String, } impl MsgUpdateNym { pub fn new( creator: String, id: u64, alias: String, verkey: String, did: String, role: String, ) -> Self { MsgUpdateNym { creator, id, alias, verkey, did, role, } } } impl CheqdProto for MsgUpdateNym { type Proto = ProtoMsgUpdateNym; fn to_proto(&self) -> Self::Proto { Self::Proto { creator: self.creator.clone(), id: self.id.clone(), alias: self.alias.clone(), verkey: self.verkey.clone(), did: self.did.clone(), role: self.role.clone(), } } fn from_proto(proto: &Self::Proto) -> IndyResult<Self> { Ok(Self { creator: proto.creator.clone(), id: proto.id.clone(), alias: proto.alias.clone(), verkey: proto.verkey.clone(), did: proto.did.clone(), role: proto.role.clone(), }) } }
23.180328
93
0.522631
75570dc729f80cabaa2c9587df5afbaa0d0ea9f9
6,673
use super::*; use crate::checks::*; use std::io::BufReader; #[test] fn test_xorsafe() -> Result<()> { let mut dst = vec![0; 8]; let a = vec![1, 2, 3, 4, 5, 6, 7, 8]; let b = vec![8, 7, 7, 6, 6, 3, 4, 1]; safe_xorbytes(&mut dst, &a, &b); let c = dst.clone(); safe_xorbytes(&mut dst, &c, &a); for i in 0..dst.len() { assert_eq!(b[i], dst[i], "{} != {}", b[i], dst[i]); } Ok(()) } #[test] fn test_xorsafe_bsmaller() -> Result<()> { let mut dst = vec![0; 5]; let a = vec![1, 2, 3, 4, 5, 6, 7, 8]; let b = vec![8, 7, 7, 6, 6]; safe_xorbytes(&mut dst, &a, &b); let c = dst.clone(); safe_xorbytes(&mut dst, &c, &a); for i in 0..dst.len() { assert_eq!(b[i], dst[i], "{} != {}", b[i], dst[i]); } Ok(()) } #[test] fn test_xormapped_address_get_from() -> Result<()> { let mut m = Message::new(); let transaction_id = base64::decode("jxhBARZwX+rsC6er").unwrap(); m.transaction_id.0.copy_from_slice(&transaction_id); let addr_value = vec![0x00, 0x01, 0x9c, 0xd5, 0xf4, 0x9f, 0x38, 0xae]; m.add(ATTR_XORMAPPED_ADDRESS, &addr_value); let mut addr = XorMappedAddress { ip: "0.0.0.0".parse().unwrap(), port: 0, }; addr.get_from(&m)?; assert_eq!( addr.ip.to_string(), "213.141.156.236", "bad IP {} != 213.141.156.236", addr.ip ); assert_eq!(addr.port, 48583, "bad Port {} != 48583", addr.port); //"UnexpectedEOF" { let mut m = Message::new(); // {0, 1} is correct addr family. m.add(ATTR_XORMAPPED_ADDRESS, &[0, 1, 3, 4]); let mut addr = XorMappedAddress { ip: "0.0.0.0".parse().unwrap(), port: 0, }; let result = addr.get_from(&m); if let Err(err) = result { assert!( Error::ErrUnexpectedEof.equal(&err), "len(v) = 4 should render <{}> error, got <{}>", Error::ErrUnexpectedEof, err ); } else { assert!(false, "expected error, got ok"); } } //"AttrOverflowErr" { let mut m = Message::new(); // {0, 1} is correct addr family. 
m.add( ATTR_XORMAPPED_ADDRESS, &[0, 1, 3, 4, 5, 6, 7, 8, 9, 1, 1, 1, 1, 1, 2, 3, 4], ); let mut addr = XorMappedAddress { ip: "0.0.0.0".parse().unwrap(), port: 0, }; let result = addr.get_from(&m); if let Err(err) = result { assert!( is_attr_size_overflow(&err), "AddTo should return AttrOverflowErr, got: {}", err ); } else { assert!(false, "expected error, got ok"); } } Ok(()) } #[test] fn test_xormapped_address_get_from_invalid() -> Result<()> { let mut m = Message::new(); let transaction_id = base64::decode("jxhBARZwX+rsC6er").unwrap(); m.transaction_id.0.copy_from_slice(&transaction_id); let expected_ip: IpAddr = "213.141.156.236".parse().unwrap(); let expected_port = 21254u16; let mut addr = XorMappedAddress { ip: "0.0.0.0".parse().unwrap(), port: 0, }; let result = addr.get_from(&m); assert!(result.is_err(), "should be error"); addr.ip = expected_ip; addr.port = expected_port; addr.add_to(&mut m)?; m.write_header(); let mut m_res = Message::new(); m.raw[20 + 4 + 1] = 0x21; m.decode()?; let mut reader = BufReader::new(m.raw.as_slice()); m_res.read_from(&mut reader)?; let result = addr.get_from(&m); assert!(result.is_err(), "should be error"); Ok(()) } #[test] fn test_xormapped_address_add_to() -> Result<()> { let mut m = Message::new(); let transaction_id = base64::decode("jxhBARZwX+rsC6er").unwrap(); m.transaction_id.0.copy_from_slice(&transaction_id); let expected_ip: IpAddr = "213.141.156.236".parse().unwrap(); let expected_port = 21254u16; let mut addr = XorMappedAddress { ip: "213.141.156.236".parse().unwrap(), port: expected_port, }; addr.add_to(&mut m)?; m.write_header(); let mut m_res = Message::new(); m_res.write(&m.raw)?; addr.get_from(&m_res)?; assert_eq!( addr.ip, expected_ip, "{} (got) != {} (expected)", addr.ip, expected_ip ); assert_eq!( addr.port, expected_port, "bad Port {} != {}", addr.port, expected_port ); Ok(()) } #[test] fn test_xormapped_address_add_to_ipv6() -> Result<()> { let mut m = Message::new(); let transaction_id = 
base64::decode("jxhBARZwX+rsC6er").unwrap(); m.transaction_id.0.copy_from_slice(&transaction_id); let expected_ip: IpAddr = "fe80::dc2b:44ff:fe20:6009".parse().unwrap(); let expected_port = 21254u16; let addr = XorMappedAddress { ip: "fe80::dc2b:44ff:fe20:6009".parse().unwrap(), port: 21254, }; addr.add_to(&mut m)?; m.write_header(); let mut m_res = Message::new(); let mut reader = BufReader::new(m.raw.as_slice()); m_res.read_from(&mut reader)?; let mut got_addr = XorMappedAddress { ip: "0.0.0.0".parse().unwrap(), port: 0, }; got_addr.get_from(&m)?; assert_eq!( got_addr.ip, expected_ip, "bad IP {} != {}", got_addr.ip, expected_ip ); assert_eq!( got_addr.port, expected_port, "bad Port {} != {}", got_addr.port, expected_port ); Ok(()) } /* #[test] fn TestXORMappedAddress_AddTo_Invalid() -> Result<()> { let mut m = Message::new(); let mut addr = XORMappedAddress{ ip: 1, 2, 3, 4, 5, 6, 7, 8}, port: 21254, } if err := addr.AddTo(m); !errors.Is(err, ErrBadIPLength) { t.Errorf("AddTo should return %q, got: %v", ErrBadIPLength, err) } }*/ #[test] fn test_xormapped_address_string() -> Result<()> { let tests = vec![ ( // 0 XorMappedAddress { ip: "fe80::dc2b:44ff:fe20:6009".parse().unwrap(), port: 124, }, "[fe80::dc2b:44ff:fe20:6009]:124", ), ( // 1 XorMappedAddress { ip: "213.141.156.236".parse().unwrap(), port: 8147, }, "213.141.156.236:8147", ), ]; for (addr, ip) in tests { assert_eq!( addr.to_string(), ip, " XORMappesAddres.String() {} (got) != {} (expected)", addr.to_string(), ip, ); } Ok(()) }
26.692
75
0.51596
0aec4e614d93d49cd1001c8213a182185f61d513
13,744
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. //! Contains `ArrayData`, a generic representation of Arrow array data which encapsulates //! common attributes and operations for Arrow array. use std::mem; use std::sync::Arc; use crate::buffer::Buffer; use crate::datatypes::DataType; use crate::util::bit_util; use crate::{bitmap::Bitmap, datatypes::ArrowNativeType}; use super::equal::equal; #[inline] fn count_nulls(null_bit_buffer: Option<&Buffer>, offset: usize, len: usize) -> usize { if let Some(ref buf) = null_bit_buffer { len.checked_sub(bit_util::count_set_bits_offset(buf.data(), offset, len)) .unwrap() } else { 0 } } /// An generic representation of Arrow array data which encapsulates common attributes and /// operations for Arrow array. Specific operations for different arrays types (e.g., /// primitive, list, struct) are implemented in `Array`. #[derive(Debug, Clone)] pub struct ArrayData { /// The data type for this array data data_type: DataType, /// The number of elements in this array data len: usize, /// The number of null elements in this array data null_count: usize, /// The offset into this array data, in number of items offset: usize, /// The buffers for this array data. 
Note that depending on the array types, this /// could hold different kinds of buffers (e.g., value buffer, value offset buffer) /// at different positions. buffers: Vec<Buffer>, /// The child(ren) of this array. Only non-empty for nested types, currently /// `ListArray` and `StructArray`. child_data: Vec<ArrayDataRef>, /// The null bitmap. A `None` value for this indicates all values are non-null in /// this array. null_bitmap: Option<Bitmap>, } pub type ArrayDataRef = Arc<ArrayData>; impl ArrayData { pub fn new( data_type: DataType, len: usize, null_count: Option<usize>, null_bit_buffer: Option<Buffer>, offset: usize, buffers: Vec<Buffer>, child_data: Vec<ArrayDataRef>, ) -> Self { let null_count = match null_count { None => count_nulls(null_bit_buffer.as_ref(), offset, len), Some(null_count) => null_count, }; let null_bitmap = null_bit_buffer.map(Bitmap::from); Self { data_type, len, null_count, offset, buffers, child_data, null_bitmap, } } /// Returns a builder to construct a `ArrayData` instance. #[inline] pub const fn builder(data_type: DataType) -> ArrayDataBuilder { ArrayDataBuilder::new(data_type) } /// Returns a reference to the data type of this array data #[inline] pub const fn data_type(&self) -> &DataType { &self.data_type } /// Returns a slice of buffers for this array data pub fn buffers(&self) -> &[Buffer] { &self.buffers[..] } /// Returns a slice of children data arrays pub fn child_data(&self) -> &[ArrayDataRef] { &self.child_data[..] } /// Returns whether the element at index `i` is null pub fn is_null(&self, i: usize) -> bool { if let Some(ref b) = self.null_bitmap { return !b.is_set(self.offset + i); } false } /// Returns a reference to the null bitmap of this array data #[inline] pub const fn null_bitmap(&self) -> &Option<Bitmap> { &self.null_bitmap } /// Returns a reference to the null buffer of this array data. 
pub fn null_buffer(&self) -> Option<&Buffer> { self.null_bitmap().as_ref().map(|b| b.buffer_ref()) } /// Returns whether the element at index `i` is not null pub fn is_valid(&self, i: usize) -> bool { if let Some(ref b) = self.null_bitmap { return b.is_set(self.offset + i); } true } /// Returns the length (i.e., number of elements) of this array #[inline] pub const fn len(&self) -> usize { self.len } // Returns whether array data is empty #[inline] pub const fn is_empty(&self) -> bool { self.len == 0 } /// Returns the offset of this array #[inline] pub const fn offset(&self) -> usize { self.offset } /// Returns the total number of nulls in this array #[inline] pub const fn null_count(&self) -> usize { self.null_count } /// Returns the total number of bytes of memory occupied by the buffers owned by this [ArrayData]. pub fn get_buffer_memory_size(&self) -> usize { let mut size = 0; for buffer in &self.buffers { size += buffer.capacity(); } if let Some(bitmap) = &self.null_bitmap { size += bitmap.get_buffer_memory_size() } for child in &self.child_data { size += child.get_buffer_memory_size(); } size } /// Returns the total number of bytes of memory occupied physically by this [ArrayData]. pub fn get_array_memory_size(&self) -> usize { let mut size = 0; // Calculate size of the fields that don't have [get_array_memory_size] method internally. size += mem::size_of_val(self) - mem::size_of_val(&self.buffers) - mem::size_of_val(&self.null_bitmap) - mem::size_of_val(&self.child_data); // Calculate rest of the fields top down which contain actual data for buffer in &self.buffers { size += mem::size_of_val(&buffer); size += buffer.capacity(); } if let Some(bitmap) = &self.null_bitmap { size += bitmap.get_array_memory_size() } for child in &self.child_data { size += child.get_array_memory_size(); } size } /// Creates a zero-copy slice of itself. This creates a new [ArrayData] /// with a different offset, len and a shifted null bitmap. 
/// /// # Panics /// /// Panics if `offset + length > self.len()`. pub fn slice(&self, offset: usize, length: usize) -> ArrayData { assert!((offset + length) <= self.len()); let mut new_data = self.clone(); new_data.len = length; new_data.offset = offset + self.offset; new_data.null_count = count_nulls(new_data.null_buffer(), new_data.offset, new_data.len); new_data } /// Returns the `buffer` as a slice of type `T` starting at self.offset /// # Panics /// This function panics if: /// * the buffer is not byte-aligned with type T, or /// * the datatype is `Boolean` (it corresponds to a bit-packed buffer where the offset is not applicable) #[inline] pub(super) fn buffer<T: ArrowNativeType>(&self, buffer: usize) -> &[T] { let values = unsafe { self.buffers[buffer].data().align_to::<T>() }; if values.0.len() != 0 || values.2.len() != 0 { panic!("The buffer is not byte-aligned with its interpretation") }; assert_ne!(self.data_type, DataType::Boolean); &values.1[self.offset..] } } impl PartialEq for ArrayData { fn eq(&self, other: &Self) -> bool { equal(self, other) } } /// Builder for `ArrayData` type #[derive(Debug)] pub struct ArrayDataBuilder { data_type: DataType, len: usize, null_count: Option<usize>, null_bit_buffer: Option<Buffer>, offset: usize, buffers: Vec<Buffer>, child_data: Vec<ArrayDataRef>, } impl ArrayDataBuilder { #[inline] pub const fn new(data_type: DataType) -> Self { Self { data_type, len: 0, null_count: None, null_bit_buffer: None, offset: 0, buffers: vec![], child_data: vec![], } } #[inline] pub const fn len(mut self, n: usize) -> Self { self.len = n; self } #[inline] pub const fn null_count(mut self, n: usize) -> Self { self.null_count = Some(n); self } pub fn null_bit_buffer(mut self, buf: Buffer) -> Self { self.null_bit_buffer = Some(buf); self } #[inline] pub const fn offset(mut self, n: usize) -> Self { self.offset = n; self } pub fn buffers(mut self, v: Vec<Buffer>) -> Self { self.buffers = v; self } pub fn add_buffer(mut self, b: Buffer) -> 
Self { self.buffers.push(b); self } pub fn child_data(mut self, v: Vec<ArrayDataRef>) -> Self { self.child_data = v; self } pub fn add_child_data(mut self, r: ArrayDataRef) -> Self { self.child_data.push(r); self } pub fn build(self) -> ArrayDataRef { let data = ArrayData::new( self.data_type, self.len, self.null_count, self.null_bit_buffer, self.offset, self.buffers, self.child_data, ); Arc::new(data) } } #[cfg(test)] mod tests { use super::*; use std::sync::Arc; use crate::buffer::Buffer; use crate::datatypes::ToByteSlice; use crate::util::bit_util; #[test] fn test_new() { let arr_data = ArrayData::new(DataType::Boolean, 10, Some(1), None, 2, vec![], vec![]); assert_eq!(10, arr_data.len()); assert_eq!(1, arr_data.null_count()); assert_eq!(2, arr_data.offset()); assert_eq!(0, arr_data.buffers().len()); assert_eq!(0, arr_data.child_data().len()); } #[test] fn test_builder() { let child_arr_data = Arc::new(ArrayData::new( DataType::Int32, 5, Some(0), None, 0, vec![Buffer::from([1i32, 2, 3, 4, 5].to_byte_slice())], vec![], )); let v = vec![0, 1, 2, 3]; let b1 = Buffer::from(&v[..]); let arr_data = ArrayData::builder(DataType::Int32) .len(20) .null_count(10) .offset(5) .add_buffer(b1) .add_child_data(child_arr_data.clone()) .build(); assert_eq!(20, arr_data.len()); assert_eq!(10, arr_data.null_count()); assert_eq!(5, arr_data.offset()); assert_eq!(1, arr_data.buffers().len()); assert_eq!(&[0, 1, 2, 3], arr_data.buffers()[0].data()); assert_eq!(1, arr_data.child_data().len()); assert_eq!(child_arr_data, arr_data.child_data()[0]); } #[test] fn test_null_count() { let mut bit_v: [u8; 2] = [0; 2]; bit_util::set_bit(&mut bit_v, 0); bit_util::set_bit(&mut bit_v, 3); bit_util::set_bit(&mut bit_v, 10); let arr_data = ArrayData::builder(DataType::Int32) .len(16) .null_bit_buffer(Buffer::from(bit_v)) .build(); assert_eq!(13, arr_data.null_count()); // Test with offset let mut bit_v: [u8; 2] = [0; 2]; bit_util::set_bit(&mut bit_v, 0); bit_util::set_bit(&mut bit_v, 3); 
bit_util::set_bit(&mut bit_v, 10); let arr_data = ArrayData::builder(DataType::Int32) .len(12) .offset(2) .null_bit_buffer(Buffer::from(bit_v)) .build(); assert_eq!(10, arr_data.null_count()); } #[test] fn test_null_buffer_ref() { let mut bit_v: [u8; 2] = [0; 2]; bit_util::set_bit(&mut bit_v, 0); bit_util::set_bit(&mut bit_v, 3); bit_util::set_bit(&mut bit_v, 10); let arr_data = ArrayData::builder(DataType::Int32) .len(16) .null_bit_buffer(Buffer::from(bit_v)) .build(); assert!(arr_data.null_buffer().is_some()); assert_eq!(&bit_v, arr_data.null_buffer().unwrap().data()); } #[test] fn test_slice() { let mut bit_v: [u8; 2] = [0; 2]; bit_util::set_bit(&mut bit_v, 0); bit_util::set_bit(&mut bit_v, 3); bit_util::set_bit(&mut bit_v, 10); let data = ArrayData::builder(DataType::Int32) .len(16) .null_bit_buffer(Buffer::from(bit_v)) .build(); let data = data.as_ref(); let new_data = data.slice(1, 15); assert_eq!(data.len() - 1, new_data.len()); assert_eq!(1, new_data.offset()); assert_eq!(data.null_count(), new_data.null_count()); // slice of a slice (removes one null) let new_data = new_data.slice(1, 14); assert_eq!(data.len() - 2, new_data.len()); assert_eq!(2, new_data.offset()); assert_eq!(data.null_count() - 1, new_data.null_count()); } #[test] fn test_equality() { let int_data = ArrayData::builder(DataType::Int32).build(); let float_data = ArrayData::builder(DataType::Float32).build(); assert_ne!(int_data, float_data); } }
30.074398
110
0.580981
1caa73c873efb0ec374a36f5fc461c9e71520118
2,868
use cfg_if::cfg_if; pub fn rand_int(n: i32) -> i32 { (js_sys::Math::random() * n as f64) as i32 } pub fn once_in(n: i32) -> bool { rand_int(n) == 0 } pub fn rand_dir() -> i32 { let i = rand_int(1000); (i % 3) - 1 } pub fn rand_dir_2() -> i32 { let i = rand_int(1000); if (i % 2) == 0 { -1 } else { 1 } } pub fn rand_vec() -> (i32, i32) { let i = rand_int(2000); match i % 9 { 0 => (1, 1), 1 => (1, 0), 2 => (1, -1), 3 => (0, -1), 4 => (-1, -1), 5 => (-1, 0), 6 => (-1, 1), 7 => (0, 1), _ => (0, 0), } } pub fn rand_vec_8() -> (i32, i32) { let i = rand_int(2000); match i % 8 { 0 => (1, 1), 1 => (1, 0), 2 => (1, -1), 3 => (0, -1), 4 => (-1, -1), 5 => (-1, 0), 6 => (-1, 1), _ => (0, 1), } } pub fn adjacency_right(dir: (i32, i32)) -> (i32, i32) { match dir { (0, 1) => (1, 1), (1, 1) => (1, 0), (1, 0) => (1, -1), (1, -1) => (0, -1), (0, -1) => (-1, -1), (-1, -1) => (-1, 0), (-1, 0) => (-1, 1), (-1, 1) => (0, 1), _ => (0, 0), } } pub fn adjacency_left(dir: (i32, i32)) -> (i32, i32) { match dir { (0, 1) => (-1, 1), (1, 1) => (0, 1), (1, 0) => (1, 1), (1, -1) => (1, 0), (0, -1) => (1, -1), (-1, -1) => (0, -1), (-1, 0) => (-1, -1), (-1, 1) => (-1, 0), _ => (0, 0), } } pub fn join_dy_dx(dx: i32, dy: i32) -> u8 { (((dx + 1) * 3) + (dy + 1)) as u8 } pub fn split_dy_dx(s: u8) -> (i32, i32) { let s: i32 = s as i32; let dx: i32 = (s / 3) - 1; let dy: i32 = (s % 3) - 1; (dx, dy) } /// Returns a vector with coordinates for all surrounding cells pub fn get_neighbor_cell_coords() -> Vec<(i32, i32)> { vec![ (1, 1), (0, 1), (-1, 1), (-1, 0), (1, 0), (0, -1), (-1, -1), (1, -1), ] } /// Returns a vector with coordinates for top and side cells pub fn get_top_side_neighbor_cell_coords() -> Vec<(i32, i32)> { vec![ (1, -1), (0, -1), (-1, -1), (-1, 0), (1, 0), ] } cfg_if! { // When the `console_error_panic_hook` feature is enabled, we can call the // `set_panic_hook` function at least once during initialization, and then // we will get better error messages if our code ever panics. 
// // For more details see // https://github.com/rustwasm/console_error_panic_hook#readme if #[cfg(feature = "console_error_panic_hook")] { extern crate console_error_panic_hook; pub use self::console_error_panic_hook::set_once as set_panic_hook; } else { #[inline] pub fn set_panic_hook() {} } }
21.89313
78
0.411437
c13f83877c752481fc31530ed24cedb6a0c3dcda
14,020
use super::debug_with::DebugWithContext; use std::{ cmp::Ordering, fmt::{self, Debug, Formatter}, hash::{Hash, Hasher}, marker::PhantomData, mem::size_of, num::NonZeroU32, ops::{Index, IndexMut, Range}, }; // ----------------------------------------------- // ID (NonZeroU32) // ----------------------------------------------- #[allow(unused)] const ID_MIN: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(1) }; const ID_MAX: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(u32::MAX) }; const fn id_from_index(index: usize) -> NonZeroU32 { let value: u32 = (index + 1) as u32; unsafe { NonZeroU32::new_unchecked(value) } } const fn id_to_index(id: NonZeroU32) -> usize { (id.get() - 1) as usize } #[allow(unused)] const fn id_add_offset(id: NonZeroU32, offset: usize) -> NonZeroU32 { let value: u32 = id.get() + offset as u32; unsafe { NonZeroU32::new_unchecked(value) } } fn id_iter(range: Range<NonZeroU32>) -> impl Iterator<Item = NonZeroU32> { (range.start.get()..range.end.get()).map(|i| unsafe { NonZeroU32::new_unchecked(i) }) } // ----------------------------------------------- // VecArenaId // ----------------------------------------------- /// `VecArena` の型つきのインデックスを表す。 /// /// `Tag` は値を区別するための幽霊型。 /// /// ## 意図 /// /// 型安全性: インデックスの不正な使い方を型検査により防ぐ。 /// /// サイズ: 64ビット環境で usize は8バイトだが、要素数が 2^32 (約40億) に至らないのであれば、インデックスは4バイトで十分。 /// `VecArenaId` および `Option<VecArenaId>` は4バイトに収まって、メモリの節約になる。 pub(crate) struct VecArenaId<Tag> { inner: NonZeroU32, _phantom: PhantomData<Tag>, } impl<Tag> VecArenaId<Tag> { #[allow(unused)] const MIN: Self = Self::from_inner(ID_MIN); #[allow(unused)] pub(crate) const MAX: Self = Self::from_inner(ID_MAX); #[allow(unused)] pub(crate) const TODO: Self = Self::from_inner(ID_MAX); const fn from_inner(inner: NonZeroU32) -> Self { Self { inner, _phantom: PhantomData, } } const unsafe fn new_unchecked(value: u32) -> Self { Self::from_inner(NonZeroU32::new_unchecked(value)) } pub(crate) const fn from_index(index: usize) -> Self { 
Self::from_inner(id_from_index(index)) } pub(crate) const fn to_index(self) -> usize { id_to_index(self.inner) } #[allow(unused)] pub(crate) const fn add_offset(self, offset: usize) -> VecArenaId<Tag> { Self::from_inner(id_add_offset(self.inner, offset)) } pub(crate) fn of<T>(self, arena: &VecArena<Tag, T>) -> &T { &arena.inner[self.to_index()] } pub(crate) fn of_mut<T>(self, arena: &mut VecArena<Tag, T>) -> &mut T { &mut arena.inner[self.to_index()] } } // VecArenaId <--> NonZeroU32 impl<T> From<NonZeroU32> for VecArenaId<T> { fn from(inner: NonZeroU32) -> Self { Self { inner, _phantom: PhantomData, } } } impl<T> From<VecArenaId<T>> for NonZeroU32 { fn from(id: VecArenaId<T>) -> Self { id.inner } } // Copy + Clone // derive(Clone) だと Tag: Clone のときしか実装されない。 impl<Tag> Clone for VecArenaId<Tag> { fn clone(&self) -> Self { Self { inner: self.inner, _phantom: PhantomData, } } } impl<Tag> Copy for VecArenaId<Tag> {} impl<Tag> PartialEq for VecArenaId<Tag> { fn eq(&self, other: &Self) -> bool { self.inner == other.inner } } impl<Tag> Eq for VecArenaId<Tag> {} impl<Tag> PartialOrd for VecArenaId<Tag> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { self.inner.partial_cmp(&other.inner) } } impl<Tag> Ord for VecArenaId<Tag> { fn cmp(&self, other: &Self) -> Ordering { self.inner.cmp(&other.inner) } } impl<Tag> Hash for VecArenaId<Tag> { fn hash<H: Hasher>(&self, state: &mut H) { Hash::hash(&self.inner, state) } } impl<Tag> Debug for VecArenaId<Tag> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Debug::fmt(&self.to_index(), f) } } #[allow(dead_code)] const SIZE_OF_ID_OPTION_IS_4_BYTE: [(); 0] = [(); 4 - size_of::<Option<VecArenaId<()>>>()]; // ----------------------------------------------- // VecArena // ----------------------------------------------- /// 型つき ID によりインデックスアクセス可能な `Vec` pub(crate) struct VecArena<Tag, T> { inner: Vec<T>, _phantom: PhantomData<*mut Tag>, } impl<Tag, T> VecArena<Tag, T> { pub(crate) const fn from_vec(inner: Vec<T>) -> Self { 
Self { inner, _phantom: PhantomData, } } pub(crate) fn from_iter(iter: impl IntoIterator<Item = T>) -> Self { Self::from_vec(iter.into_iter().collect()) } pub(crate) const fn new() -> Self { Self::from_vec(vec![]) } pub(crate) fn is_empty(&self) -> bool { self.inner.is_empty() } pub(crate) fn len(&self) -> usize { self.inner.len() } fn next_id(&self) -> VecArenaId<Tag> { id_from_index(self.len()).into() } pub(crate) fn reserve(&mut self, additional: usize) { self.inner.reserve(additional); } #[allow(unused)] pub(crate) fn resize_with(&mut self, new_len: usize, default_fn: impl Fn() -> T) { let additional = new_len.saturating_sub(self.inner.len()); self.inner.reserve_exact(additional); self.inner.resize_with(new_len, default_fn); } #[allow(unused)] pub(crate) fn slice(&self) -> VecArenaSlice<Tag> { let start = VecArenaId::MIN; let end = self.next_id(); VecArenaSlice(Range { start, end }) } pub(crate) fn iter(&self) -> impl Iterator<Item = &T> { self.inner.iter() } pub(crate) fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> { self.inner.iter_mut() } } impl<Tag, T> VecArena<Tag, T> { #[allow(unused)] pub(crate) fn has(&self, id: VecArenaId<Tag>) -> bool { id.to_index() < self.inner.len() } /// 要素への参照から ID を逆算する。 #[allow(unused)] pub(crate) fn id_from_ref<'a>(&'a self, value: &'a T) -> Option<VecArenaId<Tag>> { assert_ne!(std::mem::size_of::<T>(), 0); // ptr < start のとき (ptr - start) は巨大な数になるので、結果として None を返す。 let i = { let start = self.inner.as_ptr(); let ptr = value as *const T; (ptr as usize).wrapping_sub(start as usize) / std::mem::size_of::<T>() }; if i < self.len() { Some(VecArenaId::from_index(i)) } else { None } } pub(crate) fn alloc(&mut self, value: T) -> VecArenaId<Tag> { let id = self.next_id(); self.inner.push(value); id } pub(crate) fn alloc_slice(&mut self, items: impl IntoIterator<Item = T>) -> VecArenaSlice<Tag> { let start = self.next_id(); self.inner.extend(items.into_iter()); let end = self.next_id(); VecArenaSlice(Range { start, end }) } 
pub(crate) fn extend_with(&mut self, len: usize, default_fn: impl Fn() -> T) { self.resize_with(len.max(self.len()), default_fn); } #[allow(unused)] pub(crate) fn get(&self, id: VecArenaId<Tag>) -> Option<&T> { self.inner.get(id.to_index()) } #[allow(unused)] pub(crate) fn get_mut(&mut self, id: VecArenaId<Tag>) -> Option<&mut T> { self.inner.get_mut(id.to_index()) } pub(crate) fn into_vec(self) -> Vec<T> { self.inner } pub(crate) fn keys(&self) -> impl Iterator<Item = VecArenaId<Tag>> { (1..=self.inner.len() as u32).map(|id| unsafe { VecArenaId::new_unchecked(id) }) } pub(crate) fn enumerate(&self) -> impl Iterator<Item = (VecArenaId<Tag>, &T)> { self.keys().zip(&self.inner) } pub(crate) fn enumerate_mut(&mut self) -> impl Iterator<Item = (VecArenaId<Tag>, &mut T)> { self.keys().zip(&mut self.inner) } } impl<Tag: 'static, T: 'static> VecArena<Tag, T> { pub(crate) const EMPTY: &'static Self = &Self { inner: vec![], _phantom: PhantomData, }; } impl<Tag, T: Debug> Debug for VecArena<Tag, T> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.debug_map().entries(self.enumerate()).finish() } } // アリーナへの参照があれば ID の内容をデバッグ表示できる。 impl<'a, Tag, T> DebugWithContext<VecArena<Tag, T>> for VecArenaId<Tag> where T: DebugWithContext<VecArena<Tag, T>>, { fn fmt(&self, arena: &VecArena<Tag, T>, f: &mut Formatter<'_>) -> fmt::Result { DebugWithContext::fmt(self.of(arena), arena, f) } } impl<Tag, T> Default for VecArena<Tag, T> { fn default() -> Self { VecArena::new() } } impl<Tag, T: Clone> Clone for VecArena<Tag, T> { fn clone(&self) -> Self { Self { inner: self.inner.clone(), _phantom: PhantomData, } } } impl<Tag, T> Index<VecArenaId<Tag>> for VecArena<Tag, T> { type Output = T; fn index(&self, id: VecArenaId<Tag>) -> &T { let index = id.to_index(); debug_assert!(index < self.len()); unsafe { self.inner.get_unchecked(index) } } } impl<Tag, T> IndexMut<VecArenaId<Tag>> for VecArena<Tag, T> { fn index_mut(&mut self, id: VecArenaId<Tag>) -> &mut T { let index = id.to_index(); 
debug_assert!(index < self.len()); unsafe { self.inner.get_unchecked_mut(index) } } } pub(crate) type RawId = VecArenaId<()>; // ----------------------------------------------- // スライス // ----------------------------------------------- pub(crate) struct VecArenaSlice<Tag>(Range<VecArenaId<Tag>>); impl<Tag> VecArenaSlice<Tag> { pub(crate) const EMPTY: Self = Self(Range { start: VecArenaId::MAX, end: VecArenaId::MAX, }); pub(crate) fn len(&self) -> usize { let start = self.0.start; let end = self.0.end; end.to_index().saturating_sub(start.to_index()) } pub(crate) fn is_last(&self, id: VecArenaId<Tag>) -> bool { let start = self.0.start; let end = self.0.end; start != end && id.add_offset(1) == end } pub(crate) fn iter(&self) -> impl Iterator<Item = VecArenaId<Tag>> { let start = self.0.start.inner; let end = self.0.end.inner; id_iter(start..end).map(VecArenaId::from_inner) } pub(crate) fn enumerate<'a, T>( &self, arena: &'a VecArena<Tag, T>, ) -> impl Iterator<Item = (VecArenaId<Tag>, &'a T)> { self.iter().zip(self.of(arena)) } #[allow(unused)] pub(crate) fn map_with<T>(&self, f: impl Fn() -> T) -> VecArena<Tag, T> { let mut inner = Vec::with_capacity(self.len()); inner.resize_with(self.len(), f); VecArena::from_vec(inner) } #[allow(unused)] pub(crate) fn map_with_value<T: Clone>(&self, value: T) -> VecArena<Tag, T> { let mut inner = Vec::with_capacity(self.len()); inner.resize(self.len(), value); VecArena::from_vec(inner) } // 結果はスライスじゃないかもしれないが、ランダムアクセスは可能 pub(crate) fn of<'a, T>(&self, arena: &'a VecArena<Tag, T>) -> &'a [T] { let (start, end) = (self.0.start, self.0.end); if start >= end { return &[]; } &arena.inner[start.to_index()..end.to_index()] } } impl<Tag> Clone for VecArenaSlice<Tag> { fn clone(&self) -> Self { VecArenaSlice(self.0.clone()) } } impl<Tag> Default for VecArenaSlice<Tag> { fn default() -> Self { Self::EMPTY } } // ----------------------------------------------- // テスト // ----------------------------------------------- #[cfg(test)] mod tests 
{ use super::*; use std::fmt::Display; struct UserData { name: &'static str, } impl Debug for UserData { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Display::fmt(self.name, f) } } impl From<&'static str> for UserData { fn from(name: &'static str) -> Self { Self { name } } } struct UserTag; type User = VecArenaId<UserTag>; type UserArena = VecArena<UserTag, UserData>; #[test] fn test_typed_keys() { let mut users: UserArena = UserArena::new(); // 生成 let alice: User = users.alloc("Alice".into()); // 参照 assert_eq!(alice.of(&users).name, "Alice"); // 更新 let bob: User = users.alloc("Bob".into()); assert_eq!(bob.of(&users).name, "Bob"); bob.of_mut(&mut users).name = "Brown"; let brown = bob; assert_eq!(brown.of(&users).name, "Brown"); // 列挙 assert_eq!( users .enumerate() .map(|(user, data)| format!("id={:?}, name={}", user, data.name)) .collect::<Vec<_>>() .join("; "), r#"id=0, name=Alice; id=1, name=Brown"# ); // 範囲検査 assert!(users.has(alice)); let eve = User::from_index(10000); assert!(!users.has(eve)); } type StrArena = VecArena<(), &'static str>; #[test] fn test_debug() { let mut arena = StrArena::new(); arena.alloc("Alice"); arena.alloc("Bob"); arena.alloc("Catherine"); arena.alloc("Dave"); assert_eq!( format!("{:?}", arena), r#"{0: "Alice", 1: "Bob", 2: "Catherine", 3: "Dave"}"# ); } #[test] fn test_id_from_ref() { let mut arena = StrArena::new(); let alice = arena.alloc("Alice"); let bob = arena.alloc("Bob"); assert_eq!(arena.id_from_ref(alice.of(&arena)), Some(alice)); assert_eq!(arena.id_from_ref(bob.of(&arena)), Some(bob)); let cloned_alice = alice.of(&arena).clone(); assert_eq!(arena.id_from_ref(&cloned_alice), None); } #[test] #[should_panic] fn test_id_from_ref_does_not_work_for_zero_sided_types() { let mut arena: VecArena<(), ()> = VecArena::new(); let id = arena.alloc(()); arena.id_from_ref(id.of(&arena)); } }
26.156716
100
0.544793
f76a6a219f6118e773adca41bbdcd72ed57cd20e
37,358
// Copyright 2020 Datafuse Labs. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use std::time::SystemTime; use std::time::UNIX_EPOCH; use common_base::tokio; use common_datablocks::DataBlock; use common_datavalues::prelude::*; use common_dfs_api::StorageApi; use common_exception::ErrorCode; use common_kv_api::KVApi; use common_meta_api::MetaApi; use common_meta_api_vo::*; use common_metatypes::KVMeta; use common_metatypes::KVValue; use common_metatypes::MatchSeq; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; use common_planners::DropDatabasePlan; use common_planners::DropTablePlan; use common_store_api_sdk::StoreClient; use common_tracing::tracing; use pretty_assertions::assert_eq; #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_restart() -> anyhow::Result<()> { // Issue 1134 https://github.com/datafuselabs/databend/issues/1134 // - Start a dfs server. // - create db and create table // - restart // - Test read the db and read the table. 
let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); let (mut tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let db_name = "db1"; let table_name = "table1"; tracing::info!("--- create db"); { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; tracing::debug!("create database res: {:?}", res); let res = res?; assert_eq!(1, res.database_id, "first database id is 1"); } tracing::info!("--- get db"); { let res = client.get_database(db_name).await; tracing::debug!("get present database res: {:?}", res); let res = res?; assert_eq!(1, res.database_id, "db1 id is 1"); assert_eq!(db_name, res.db, "db1.db is db1"); } tracing::info!("--- create table {}.{}", db_name, table_name); let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); { let options = maplit::hashmap! 
{"opt‐1".into() => "val-1".into()}; let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: table_name.to_string(), schema: schema.clone(), options: options.clone(), engine: "JSON".to_string(), }; { let res = client.create_table(plan.clone()).await?; assert_eq!(1, res.table_id, "table id is 1"); let got = client.get_table(db_name, table_name).await?; let want = TableInfo { table_id: 1, db: db_name.into(), name: table_name.into(), schema: schema.clone(), engine: "JSON".to_owned(), options: options.clone(), }; assert_eq!(want, got, "get created table"); } } tracing::info!("--- stop StoreServer"); { let (stop_tx, fin_rx) = tc.channels.take().unwrap(); stop_tx .send(()) .map_err(|_| anyhow::anyhow!("fail to send"))?; fin_rx.await?; drop(client); tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await; // restart by opening existent meta db tc.config.meta_config.boot = false; crate::tests::start_store_server_with_context(&mut tc).await?; } tokio::time::sleep(tokio::time::Duration::from_millis(10_000)).await; // try to reconnect the restarted server. let mut _client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; // TODO(xp): db and table are still in pure memory store. the following test will no pass. 
// tracing::info!("--- get db"); // { // let res = client.get_database(db_name).await; // tracing::debug!("get present database res: {:?}", res); // let res = res?; // assert_eq!(1, res.database_id, "db1 id is 1"); // assert_eq!(db_name, res.db, "db1.db is db1"); // } // // tracing::info!("--- get table"); // { // let got = client // .get_table(db_name.into(), table_name.into()) // .await // .unwrap(); // let want = GetTableActionResult { // table_id: 1, // db: db_name.into(), // name: table_name.into(), // schema: schema.clone(), // }; // assert_eq!(want, got, "get created table"); // } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_create_database() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); // 1. Service starts. let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; // 2. Create database. // TODO: test arg if_not_exists: It should respond an ErrorCode { // create first db let plan = CreateDatabasePlan { // TODO test if_not_exists if_not_exists: false, db: "db1".to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; tracing::info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(1, res.database_id, "first database id is 1"); } { // create second db let plan = CreateDatabasePlan { if_not_exists: false, db: "db2".to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; tracing::info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(2, res.database_id, "second database id is 2"); } // 3. Get database. 
{ // get present db let res = client.get_database("db1").await; tracing::debug!("get present database res: {:?}", res); let res = res?; assert_eq!(1, res.database_id, "db1 id is 1"); assert_eq!("db1".to_string(), res.db, "db1.db is db1"); } { // get absent db let res = client.get_database("ghost").await; tracing::debug!("=== get absent database res: {:?}", res); assert!(res.is_err()); let res = res.unwrap_err(); assert_eq!(3, res.code()); assert_eq!("ghost".to_string(), res.message()); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_create_get_table() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); use std::sync::Arc; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; tracing::info!("init logging"); // 1. Service starts. let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let db_name = "db1"; let tbl_name = "tb2"; { // prepare db let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; tracing::info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(1, res.database_id, "first database id is 1"); } { // create table and fetch it // Table schema with metadata(due to serde issue). let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); let options = maplit::hashmap! {"opt‐1".into() => "val-1".into()}; // Create table plan. 
let mut plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: options.clone(), engine: "JSON".to_string(), }; { // create table OK let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "table id is 1"); let got = client.get_table(db_name, tbl_name).await.unwrap(); let want = TableInfo { table_id: 1, db: db_name.into(), name: tbl_name.into(), schema: schema.clone(), engine: "JSON".to_owned(), options: options.clone(), }; assert_eq!(want, got, "get created table"); } { // create table again with if_not_exists = true plan.if_not_exists = true; let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "new table id"); let got = client.get_table(db_name, tbl_name).await.unwrap(); let want = TableInfo { table_id: 1, db: db_name.into(), name: tbl_name.into(), schema: schema.clone(), engine: "JSON".to_owned(), options: options.clone(), }; assert_eq!(want, got, "get created table"); } { // create table again with if_not_exists=false plan.if_not_exists = false; let res = client.create_table(plan.clone()).await; tracing::info!("create table res: {:?}", res); let status = res.err().unwrap(); assert_eq!( format!("Code: 4003, displayText = table exists: {}.", tbl_name), status.to_string() ); // get_table returns the old table let got = client.get_table("db1", "tb2").await.unwrap(); let want = TableInfo { table_id: 1, db: db_name.into(), name: tbl_name.into(), schema: schema.clone(), engine: "JSON".to_owned(), options: options.clone(), }; assert_eq!(want, got, "get old table"); } } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_drop_table() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); use std::sync::Arc; use common_datavalues::DataField; use common_datavalues::DataSchema; use common_planners::CreateDatabasePlan; use 
common_planners::CreateTablePlan; tracing::info!("init logging"); // 1. Service starts. let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let db_name = "db1"; let tbl_name = "tb2"; { // prepare db let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; tracing::info!("create database res: {:?}", res); let res = res.unwrap(); assert_eq!(1, res.database_id, "first database id is 1"); } { // create table and fetch it // Table schema with metadata(due to serde issue). let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); let options = maplit::hashmap! {"opt‐1".into() => "val-1".into()}; // Create table plan. let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: options.clone(), engine: "JSON".to_string(), }; { // create table OK let res = client.create_table(plan.clone()).await.unwrap(); assert_eq!(1, res.table_id, "table id is 1"); let got = client.get_table(db_name, tbl_name).await.unwrap(); let want = TableInfo { table_id: 1, db: db_name.into(), name: tbl_name.into(), schema: schema.clone(), engine: "JSON".to_owned(), options: options.clone(), }; assert_eq!(want, got, "get created table"); } { // drop table let plan = DropTablePlan { if_exists: true, db: db_name.to_string(), table: tbl_name.to_string(), }; let res = client.drop_table(plan.clone()).await.unwrap(); assert_eq!((), res, "drop table {}", tbl_name) } { let res = client.get_table(db_name, tbl_name).await; let status = res.err().unwrap(); assert_eq!( format!("Code: 25, displayText = table not found: {}.", tbl_name), status.to_string(), "get dropped table {}", tbl_name ); } } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn 
test_do_append() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); use std::sync::Arc; use common_datavalues::prelude::*; use common_planners::CreateDatabasePlan; use common_planners::CreateTablePlan; let (_tc, addr) = crate::tests::start_store_server().await?; let schema = Arc::new(DataSchema::new(vec![ DataField::new("col_i", DataType::Int64, false), DataField::new("col_s", DataType::String, false), ])); let db_name = "test_db"; let tbl_name = "test_tbl"; let series0 = Series::new(vec![0i64, 1, 2]); let series1 = Series::new(vec!["str1", "str2", "str3"]); let expected_rows = series0.len() * 2; let expected_cols = 2; let block = DataBlock::create_by_array(schema.clone(), vec![series0, series1]); let batches = vec![block.clone(), block]; let num_batch = batches.len(); let stream = futures::stream::iter(batches); let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { let plan = CreateDatabasePlan { if_not_exists: false, db: db_name.to_string(), engine: "Local".to_string(), options: Default::default(), }; let res = client.create_database(plan.clone()).await; let res = res.unwrap(); assert_eq!(res.database_id, 1, "db created"); let plan = CreateTablePlan { if_not_exists: false, db: db_name.to_string(), table: tbl_name.to_string(), schema: schema.clone(), options: maplit::hashmap! 
{"opt‐1".into() => "val-1".into()}, engine: "PARQUET".to_string(), }; client.create_table(plan.clone()).await.unwrap(); } let res = client .append_data( db_name.to_string(), tbl_name.to_string(), schema, Box::pin(stream), ) .await .unwrap(); tracing::info!("append res is {:?}", res); let summary = res.summary; assert_eq!(summary.rows, expected_rows, "rows eq"); assert_eq!(res.parts.len(), num_batch, "batch eq"); res.parts.iter().for_each(|p| { assert_eq!(p.rows, expected_rows / num_batch); assert_eq!(p.cols, expected_cols); }); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv_mget() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_list"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; client .upsert_kv("k1", MatchSeq::Any, Some(b"v1".to_vec()), None) .await?; client .upsert_kv("k2", MatchSeq::Any, Some(b"v2".to_vec()), None) .await?; let res = client .mget_kv(&["k1".to_string(), "k2".to_string()]) .await?; assert_eq!(res.result, vec![ Some((1, KVValue { meta: None, value: b"v1".to_vec() })), // NOTE, the sequence number is increased globally (inside the namespace of generic kv) Some((2, KVValue { meta: None, value: b"v2".to_vec() })), ]); let res = client .mget_kv(&["k1".to_string(), "key_no exist".to_string()]) .await?; assert_eq!(res.result, vec![ Some((1, KVValue { meta: None, value: b"v1".to_vec() })), None ]); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv_list() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_list"); let _ent = span.enter(); let (_tc, addr) = 
crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let mut values = vec![]; { client .upsert_kv("t", MatchSeq::Any, Some("".as_bytes().to_vec()), None) .await?; for i in 0..9 { let key = format!("__users/{}", i); let val = format!("val_{}", i); values.push(val.clone()); client .upsert_kv(&key, MatchSeq::Any, Some(val.as_bytes().to_vec()), None) .await?; } client .upsert_kv("v", MatchSeq::Any, Some(b"".to_vec()), None) .await?; } let res = client.prefix_list_kv("__users/").await?; assert_eq!( res.iter() .map(|(_key, (_s, val))| val.clone()) .collect::<Vec<_>>(), values .iter() .map(|v| KVValue { meta: None, value: v.as_bytes().to_vec() }) .collect::<Vec<_>>() ); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv_delete() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_list"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let test_key = "test_key"; client .upsert_kv(test_key, MatchSeq::Any, Some(b"v1".to_vec()), None) .await?; let current = client.get_kv(test_key).await?; if let Some((seq, _val)) = current.result { // seq mismatch let wrong_seq = Some(seq + 1); let res = client .upsert_kv(test_key, wrong_seq.into(), None, None) .await?; assert_eq!(res.prev, res.result); // seq match let res = client .upsert_kv(test_key, MatchSeq::Exact(seq), None, None) .await?; assert!(res.result.is_none()); // read nothing let r = client.get_kv(test_key).await?; assert!(r.result.is_none()); } else { panic!("expecting a value, but got nothing"); } // key not exist let res = client .upsert_kv("not exists", MatchSeq::Any, None, None) .await?; assert_eq!(None, res.prev); assert_eq!(None, res.result); // do not care seq client 
.upsert_kv(test_key, MatchSeq::Any, Some(b"v2".to_vec()), None) .await?; let res = client .upsert_kv(test_key, MatchSeq::Any, None, None) .await?; assert_eq!( ( Some((2, KVValue { meta: None, value: b"v2".to_vec() })), None ), (res.prev, res.result) ); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv_update() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_list"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let test_key = "test_key_for_update"; let r = client .upsert_kv(test_key, MatchSeq::GE(1), Some(b"v1".to_vec()), None) .await?; assert_eq!((None, None), (r.prev, r.result), "not changed"); let r = client .upsert_kv(test_key, MatchSeq::Any, Some(b"v1".to_vec()), None) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.result ); let seq = r.result.unwrap().0; // unmatched seq let r = client .upsert_kv( test_key, MatchSeq::Exact(seq + 1), Some(b"v2".to_vec()), None, ) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.prev ); assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.result ); // matched seq let r = client .upsert_kv(test_key, MatchSeq::Exact(seq), Some(b"v2".to_vec()), None) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.prev ); assert_eq!( Some((2, KVValue { meta: None, value: b"v2".to_vec() })), r.result ); // blind update let r = client .upsert_kv(test_key, MatchSeq::GE(1), Some(b"v3".to_vec()), None) .await?; assert_eq!( Some((2, KVValue { meta: None, value: b"v2".to_vec() })), r.prev ); assert_eq!( Some((3, KVValue { meta: None, value: b"v3".to_vec() })), r.result ); // value updated let kv = client.get_kv(test_key).await?; 
assert!(kv.result.is_some()); assert_eq!(kv.result.unwrap().1, KVValue { meta: None, value: b"v3".to_vec() }); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv_update_meta() -> anyhow::Result<()> { // Only update meta, do not touch the value part. let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_update_meta"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let test_key = "test_key_for_update_meta"; let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); let r = client .upsert_kv(test_key, MatchSeq::Any, Some(b"v1".to_vec()), None) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.result ); let seq = r.result.unwrap().0; tracing::info!("--- mismatching seq does nothing"); let r = client .update_kv_meta( test_key, MatchSeq::Exact(seq + 1), Some(KVMeta { expire_at: Some(now + 20), }), ) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.prev ); assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.result ); tracing::info!("--- matching seq only update meta"); let r = client .update_kv_meta( test_key, MatchSeq::Exact(seq), Some(KVMeta { expire_at: Some(now + 20), }), ) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"v1".to_vec() })), r.prev ); assert_eq!( Some((2, KVValue { meta: Some(KVMeta { expire_at: Some(now + 20) }), value: b"v1".to_vec() })), r.result ); tracing::info!("--- get returns the value with meta and seq updated"); let kv = client.get_kv(test_key).await?; assert!(kv.result.is_some()); assert_eq!( (seq + 1, KVValue { meta: Some(KVMeta { expire_at: Some(now + 20) }), value: b"v1".to_vec() }), kv.result.unwrap(), ); } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 
1)] async fn test_flight_generic_kv_timeout() -> anyhow::Result<()> { // - Test get expired and non-expired. // - Test mget expired and non-expired. // - Test list expired and non-expired. // - Test update with a new expire value. let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv_timeout"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); client .upsert_kv( "k1", MatchSeq::Any, Some(b"v1".to_vec()), Some(KVMeta { expire_at: Some(now + 1), }), ) .await?; tracing::info!("---get unexpired"); { let res = client.get_kv(&"k1".to_string()).await?; assert!(res.result.is_some(), "got unexpired"); } tracing::info!("---get expired"); { tokio::time::sleep(tokio::time::Duration::from_millis(2000)).await; let res = client.get_kv(&"k1".to_string()).await?; tracing::debug!("got k1:{:?}", res); assert!(res.result.is_none(), "got expired"); } let now = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_secs(); tracing::info!("--- expired entry act as if it does not exist, an ADD op should apply"); { client .upsert_kv( "k1", MatchSeq::Exact(0), Some(b"v1".to_vec()), Some(KVMeta { expire_at: Some(now - 1), }), ) .await?; client .upsert_kv( "k2", MatchSeq::Exact(0), Some(b"v2".to_vec()), Some(KVMeta { expire_at: Some(now + 2), }), ) .await?; tracing::info!("--- mget should not return expired"); let res = client .mget_kv(&["k1".to_string(), "k2".to_string()]) .await?; assert_eq!(res.result, vec![ None, Some((3, KVValue { meta: Some(KVMeta { expire_at: Some(now + 2) }), value: b"v2".to_vec() })), ]); } tracing::info!("--- list should not return expired"); { let res = client.prefix_list_kv("k").await?; let res_vec = res.iter().map(|(key, _)| key.clone()).collect::<Vec<_>>(); assert_eq!(res_vec, 
vec!["k2".to_string(),]); } tracing::info!("--- update expire"); { client .upsert_kv( "k2", MatchSeq::Exact(3), Some(b"v2".to_vec()), Some(KVMeta { expire_at: Some(now - 1), }), ) .await?; let res = client.get_kv(&"k2".to_string()).await?; assert!(res.result.is_none(), "k2 expired"); } } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_generic_kv() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); { let span = tracing::span!(tracing::Level::INFO, "test_flight_generic_kv"); let _ent = span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; { // write let res = client .upsert_kv("foo", MatchSeq::Any, Some(b"bar".to_vec()), None) .await?; assert_eq!(None, res.prev); assert_eq!( Some((1, KVValue { meta: None, value: b"bar".to_vec() })), res.result ); } { // write fails with unmatched seq let res = client .upsert_kv("foo", MatchSeq::Exact(2), Some(b"bar".to_vec()), None) .await?; assert_eq!( ( Some((1, KVValue { meta: None, value: b"bar".to_vec() })), Some((1, KVValue { meta: None, value: b"bar".to_vec(), })), ), (res.prev, res.result), "nothing changed" ); } { // write done with matching seq let res = client .upsert_kv("foo", MatchSeq::Exact(1), Some(b"wow".to_vec()), None) .await?; assert_eq!( Some((1, KVValue { meta: None, value: b"bar".to_vec() })), res.prev, "old value" ); assert_eq!( Some((2, KVValue { meta: None, value: b"wow".to_vec() })), res.result, "new value" ); } } Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_drop_database() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), 
engine: "Local".to_string(), options: Default::default(), }; client.create_database(plan).await?; let res = client.get_databases().await?; assert_eq!(1, res.len()); // drop db let plan = DropDatabasePlan { if_exists: true, db: "db1".to_string(), }; let res = client.drop_database(plan).await; assert!(res.is_ok()); let res = client.get_databases().await?; assert!(res.is_empty()); let plan = DropDatabasePlan { if_exists: true, db: "db1".to_string(), }; let res = client.drop_database(plan).await; assert!(res.is_ok()); let plan = DropDatabasePlan { if_exists: false, db: "db1".to_string(), }; let res = client.drop_database(plan).await; assert!(res.is_err()); let res = client.get_databases().await?; assert!(res.is_empty()); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_get_databases() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; // empty db meta let res = client.get_databases().await?; assert!(res.is_empty()); // create-db operation will increases meta_version let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: "Local".to_string(), options: Default::default(), }; client.create_database(plan).await?; let res = client.get_databases().await?; assert_eq!(1, res.len()); assert_eq!(res[0], DatabaseInfo { database_id: 1, engine: "Local".to_string(), db: "db1".to_string() }); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 1)] async fn test_flight_get_tables() -> anyhow::Result<()> { let (_log_guards, ut_span) = init_store_ut!(); let _ent = ut_span.enter(); let (_tc, addr) = crate::tests::start_store_server().await?; let client = StoreClient::try_create(addr.as_str(), "root", "xxx").await?; // not such db let res = client.get_tables("none").await; assert!(res.is_err()); assert_eq!( 
res.unwrap_err().code(), ErrorCode::UnknownDatabase("").code() ); // empty db let plan = CreateDatabasePlan { if_not_exists: false, db: "db1".to_string(), engine: "Local".to_string(), options: Default::default(), }; client.create_database(plan).await?; let res = client.get_tables("db1").await?; assert!(res.is_empty()); // with tables let schema = Arc::new(DataSchema::new(vec![DataField::new( "number", DataType::UInt64, false, )])); let options = maplit::hashmap! {"opt‐1".into() => "val-1".into()}; let plan = CreateTablePlan { if_not_exists: false, db: "db1".to_string(), table: "t1".to_string(), schema: schema.clone(), options: options.clone(), engine: "JSON".to_string(), }; client.create_table(plan.clone()).await?; let tbls = client.get_tables("db1").await; assert!(tbls.is_ok()); let tbls = tbls?; assert_eq!(1, tbls.len()); Ok(()) }
30.421824
99
0.504631
711493da8958d669e931a3e8b29a44ad7ba934dc
5,753
use super::*; struct GraphQLHelper { repo_path: std::path::PathBuf, ref_prefix: String, headref: String, } impl GraphQLHelper { fn josh_helper( &self, hash: &std::collections::BTreeMap<&str, handlebars::PathAndJson>, template_name: &str, ) -> JoshResult<serde_json::Value> { let path = if let Some(f) = hash.get("file") { f.render() } else { return Err(josh_error("missing pattern")); }; let path = std::path::PathBuf::from(template_name) .join("..") .join(path); let path = normalize_path(&path); let transaction = cache::Transaction::open(&self.repo_path, Some(&self.ref_prefix))?; let reference = transaction.repo().find_reference(&self.headref)?; let tree = reference.peel_to_tree()?; let blob = tree .get_path(&path)? .to_object(&transaction.repo())? .peel_to_blob() .map(|x| x.content().to_vec()) .unwrap_or(vec![]); let query = String::from_utf8(blob)?; let mut variables = juniper::Variables::new(); for (k, v) in hash.iter() { variables.insert(k.to_string(), juniper::InputValue::scalar(v.render())); } let transaction = cache::Transaction::open(&self.repo_path, None)?; let (res, _errors) = juniper::execute_sync( &query, None, &graphql::commit_schema(reference.target().ok_or(josh_error("missing target"))?), &variables, &graphql::context(transaction), )?; let j = serde_json::to_string(&res)?; let j: serde_json::Value = serde_json::from_str(&j)?; let j = if let Some(at) = hash.get("at") { j.pointer(&at.render()).unwrap_or(&json!({})).to_owned() } else { j }; return Ok(j); } } impl handlebars::HelperDef for GraphQLHelper { fn call_inner<'reg: 'rc, 'rc>( &self, h: &handlebars::Helper, _: &handlebars::Handlebars, _: &handlebars::Context, rc: &mut handlebars::RenderContext, ) -> Result<handlebars::ScopedJson<'reg, 'rc>, handlebars::RenderError> { return Ok(handlebars::ScopedJson::Derived( self.josh_helper( h.hash(), &rc.get_current_template_name().unwrap_or(&"/".to_owned()), ) .map_err(|_| handlebars::RenderError::new("josh"))?, )); } } mod helpers { 
handlebars_helper!(concat_helper: |x: str, y: str| format!("{}{}", x, y) ); } pub fn render( repo: &git2::Repository, ref_prefix: &str, headref: &str, query_and_params: &str, ) -> JoshResult<Option<String>> { let mut parameters = query_and_params.split("&"); let query = parameters .next() .ok_or(josh_error(&format!("invalid query {:?}", query_and_params)))?; let mut split = query.splitn(2, "="); let cmd = split .next() .ok_or(josh_error(&format!("invalid query {:?}", query_and_params)))?; let path = split .next() .ok_or(josh_error(&format!("invalid query {:?}", query_and_params)))?; let reference = repo.find_reference(&headref)?; let tree = reference.peel_to_tree()?; let obj = ok_or!( tree.get_path(&std::path::PathBuf::from(path))? .to_object(&repo), { return Ok(None); } ); let mut params = std::collections::BTreeMap::new(); for p in parameters { let mut split = p.splitn(2, "="); let name = split .next() .ok_or(josh_error(&format!("invalid query {:?}", query_and_params)))?; let value = split .next() .ok_or(josh_error(&format!("invalid query {:?}", query_and_params)))?; params.insert(name.to_string(), value.to_string()); } let template = if let Ok(blob) = obj.peel_to_blob() { let template = std::str::from_utf8(blob.content())?; if cmd == "get" { return Ok(Some(template.to_string())); } if cmd == "graphql" { let mut variables = juniper::Variables::new(); for (k, v) in params { variables.insert(k.to_string(), juniper::InputValue::scalar(v)); } let transaction = cache::Transaction::open(&repo.path(), None)?; let (res, _errors) = juniper::execute_sync( &template.to_string(), None, &graphql::commit_schema(reference.target().ok_or(josh_error("missing target"))?), &variables, &graphql::context(transaction), )?; let j = serde_json::to_string_pretty(&res)?; return Ok(Some(j)); } if cmd == "render" { template.to_string() } else { return Err(josh_error("no such cmd")); } } else { return Ok(Some("".to_string())); }; std::mem::drop(obj); std::mem::drop(tree); let mut handlebars 
= handlebars::Handlebars::new(); handlebars.register_template_string(&path, template)?; handlebars.register_helper("concat", Box::new(helpers::concat_helper)); handlebars.register_helper( "graphql", Box::new(GraphQLHelper { repo_path: repo.path().to_owned(), ref_prefix: ref_prefix.to_owned(), headref: headref.to_string(), }), ); handlebars.set_strict_mode(true); match handlebars.render(&path, &json!(params)) { Ok(res) => return Ok(Some(format!("{}", res))), Err(res) => return Err(josh_error(&format!("{}", res))), } }
31.60989
97
0.54389
086ed9a6cc17527b6ad1af9c3031624d4bcdce79
4,118
// This file is part of Substrate. // Copyright (C) 2017-2021 Parity Technologies (UK) Ltd. // SPDX-License-Identifier: Apache-2.0 // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Some instance placeholder to be used in [`frame_support::pallet`] attribute macro. //! //! [`frame_support::pallet`] attribute macro does only requires the instance generic `I` to be //! static (contrary to `decl_*` macro which requires instance generic to implement //! [`frame_support::traits::Instance`]). //! //! Thus support provides some instance types to be used, This allow some instantiable pallet to //! depend on specific instance of another: //! ``` //! # mod another_pallet { pub trait Config<I: 'static = ()> {} } //! pub trait Config<I: 'static = ()>: another_pallet::Config<I> {} //! ``` //! //! NOTE: [`frame_support::pallet`] will reexport them inside the module, in order to make them //! accessible to [`frame_support::construct_runtime`]. /// Instance0 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance0; /// Instance1 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance1; /// Instance2 to be used for instantiable pallet define with `pallet` macro. 
#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance2; /// Instance3 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance3; /// Instance4 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance4; /// Instance5 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance5; /// Instance6 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance6; /// Instance7 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance7; /// Instance8 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance8; /// Instance9 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance9; /// Instance10 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance10; /// Instance11 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance11; /// Instance12 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance12; /// Instance13 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance13; /// Instance14 to be used for instantiable pallet define with `pallet` macro. 
#[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance14; /// Instance15 to be used for instantiable pallet define with `pallet` macro. #[derive(Clone, Copy, PartialEq, Eq, crate::RuntimeDebugNoBound)] pub struct Instance15;
42.453608
96
0.749879
33dbdc1d785f6267733d735bed2803c5d7db81a0
6,482
use crate::Mutator; /** Wrap a mutator and prioritise the generation of a few given values. ``` use fuzzcheck::DefaultMutator; use fuzzcheck::mutators::dictionary::DictionaryMutator; let m = usize::default_mutator(); let m = DictionaryMutator::new(m, [256, 65_536, 1_000_000]); // m will first generate the values given to the DictionaryMutator constructor // and will then use usize’s default mutator ``` */ pub struct DictionaryMutator<T: Clone, M: Mutator<T>> { m: M, dictionary: Vec<(T, f64)>, rng: fastrand::Rng, } impl<T: Clone, M: Mutator<T>> DictionaryMutator<T, M> { #[no_coverage] pub fn new(value_mutator: M, dictionary: impl IntoIterator<Item = T>) -> Self { let dictionary = dictionary .into_iter() .filter_map(|v| { if let Some((cache, _)) = value_mutator.validate_value(&v) { let complexity = value_mutator.complexity(&v, &cache); Some((v, complexity)) } else { None } }) .collect(); Self { m: value_mutator, dictionary, rng: fastrand::Rng::new(), } } } #[derive(Clone)] pub struct MutationStep<T> { idx: usize, wrapped: T, } impl<T> MutationStep<T> { #[no_coverage] fn new(wrapped: T) -> Self { Self { idx: 0, wrapped } } } pub enum UnmutateToken<T: Clone, M: Mutator<T>> { Replace(T), Unmutate(M::UnmutateToken), } #[derive(Clone)] pub enum ArbitraryStep<T> { Dictionary(usize), Wrapped(T), } impl<T> Default for ArbitraryStep<T> { #[no_coverage] fn default() -> Self { Self::Dictionary(0) } } impl<T: Clone + 'static, M: Mutator<T>> Mutator<T> for DictionaryMutator<T, M> { #[doc(hidden)] type Cache = M::Cache; #[doc(hidden)] type MutationStep = self::MutationStep<M::MutationStep>; #[doc(hidden)] type ArbitraryStep = self::ArbitraryStep<M::ArbitraryStep>; #[doc(hidden)] type UnmutateToken = UnmutateToken<T, M>; #[doc(hidden)] #[no_coverage] fn default_arbitrary_step(&self) -> Self::ArbitraryStep { <_>::default() } #[doc(hidden)] #[no_coverage] fn validate_value(&self, value: &T) -> Option<(Self::Cache, Self::MutationStep)> { if let Some((cache, step)) = 
self.m.validate_value(value) { Some((cache, Self::MutationStep::new(step))) } else { None } } #[doc(hidden)] #[no_coverage] fn ordered_arbitrary(&self, step: &mut Self::ArbitraryStep, max_cplx: f64) -> Option<(T, f64)> { match step { ArbitraryStep::Dictionary(inner_step) => { if *inner_step < self.dictionary.len() { let (v, c) = self.dictionary[*inner_step].clone(); *inner_step += 1; Some((v, c)) } else { let inner_step = self.m.default_arbitrary_step(); *step = self::ArbitraryStep::Wrapped(inner_step); self.ordered_arbitrary(step, max_cplx) } } ArbitraryStep::Wrapped(inner_step) => self.m.ordered_arbitrary(inner_step, max_cplx).map( #[no_coverage] |(v, c)| (v, c), ), } } #[doc(hidden)] #[no_coverage] fn random_arbitrary(&self, max_cplx: f64) -> (T, f64) { let (v, c) = if !self.dictionary.is_empty() && self.rng.usize(..20) == 0 { let idx = self.rng.usize(..self.dictionary.len()); self.dictionary[idx].clone() } else { self.m.random_arbitrary(max_cplx) }; (v, c) } #[doc(hidden)] #[no_coverage] fn max_complexity(&self) -> f64 { self.m.max_complexity() } #[doc(hidden)] #[no_coverage] fn min_complexity(&self) -> f64 { self.m.min_complexity() } #[doc(hidden)] #[no_coverage] fn complexity(&self, value: &T, cache: &Self::Cache) -> f64 { self.m.complexity(value, cache) } #[doc(hidden)] #[no_coverage] fn ordered_mutate( &self, value: &mut T, cache: &mut Self::Cache, step: &mut Self::MutationStep, max_cplx: f64, ) -> Option<(Self::UnmutateToken, f64)> { if step.idx < self.dictionary.len() { let (new_value, new_value_cplx) = self.dictionary[step.idx].clone(); step.idx += 1; let old_value = std::mem::replace(value, new_value); Some((UnmutateToken::Replace(old_value), new_value_cplx)) } else { self.m.ordered_mutate(value, cache, &mut step.wrapped, max_cplx).map( #[no_coverage] |(t, c)| (self::UnmutateToken::Unmutate(t), c), ) } } #[doc(hidden)] #[no_coverage] fn random_mutate(&self, value: &mut T, cache: &mut Self::Cache, max_cplx: f64) -> (Self::UnmutateToken, f64) { if 
!self.dictionary.is_empty() && self.rng.usize(..20) == 0 { let idx = self.rng.usize(..self.dictionary.len()); let (new_value, new_value_cplx) = self.dictionary[idx].clone(); let old_value = std::mem::replace(value, new_value); (UnmutateToken::Replace(old_value), new_value_cplx) } else { let (t, cplx) = self.m.random_mutate(value, cache, max_cplx); (self::UnmutateToken::Unmutate(t), cplx) } } #[doc(hidden)] #[no_coverage] fn unmutate(&self, value: &mut T, cache: &mut Self::Cache, t: Self::UnmutateToken) { match t { UnmutateToken::Replace(new_value) => { let _ = std::mem::replace(value, new_value); } UnmutateToken::Unmutate(t) => self.m.unmutate(value, cache, t), } } #[doc(hidden)] type RecursingPartIndex = M::RecursingPartIndex; #[doc(hidden)] #[no_coverage] fn default_recursing_part_index(&self, value: &T, cache: &Self::Cache) -> Self::RecursingPartIndex { self.m.default_recursing_part_index(value, cache) } #[doc(hidden)] #[no_coverage] fn recursing_part<'a, V, N>(&self, parent: &N, value: &'a T, index: &mut Self::RecursingPartIndex) -> Option<&'a V> where V: Clone + 'static, N: Mutator<V>, { self.m.recursing_part::<V, N>(parent, value, index) } }
30.28972
119
0.55199
bb3649b68f71a198d8c36ac5abce18a62d05311d
3,528
use super::verify_signature; use crate::error::{LoftyError, Result}; use crate::ogg::constants::{OPUSTAGS, VORBIS_COMMENT_HEAD}; use crate::ogg::tag::VorbisCommentsRef; use crate::types::picture::PictureInformation; use crate::types::tag::{Tag, TagType}; use std::convert::TryFrom; use std::fs::File; use std::io::{Cursor, Read, Seek, SeekFrom, Write}; use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; use ogg_pager::Page; pub(in crate) fn write_to(data: &mut File, tag: &Tag, sig: &[u8]) -> Result<()> { match tag.tag_type() { #[cfg(feature = "vorbis_comments")] TagType::VorbisComments => write(data, &mut Into::<VorbisCommentsRef>::into(tag), sig), _ => Err(LoftyError::UnsupportedTag), } } #[cfg(feature = "vorbis_comments")] pub(crate) fn create_comments( packet: &mut impl Write, count: &mut u32, items: &mut dyn Iterator<Item = (&str, &String)>, ) -> Result<()> { for (k, v) in items { if !v.is_empty() { let comment = format!("{}={}", k, v); let comment_b = comment.as_bytes(); let bytes_len = comment_b.len(); if u32::try_from(bytes_len as u64).is_ok() { *count += 1; packet.write_all(&(bytes_len as u32).to_le_bytes())?; packet.write_all(comment_b)?; } } } Ok(()) } #[cfg(feature = "vorbis_comments")] pub(super) fn create_pages( tag: &mut VorbisCommentsRef, writer: &mut Cursor<Vec<u8>>, ) -> Result<Vec<Page>> { const PICTURE_KEY: &str = "METADATA_BLOCK_PICTURE="; let item_count_pos = writer.seek(SeekFrom::Current(0))?; writer.write_u32::<LittleEndian>(0)?; let mut count = 0; create_comments(writer, &mut count, &mut tag.items)?; for (pic, _) in &mut tag.pictures { let picture = pic.as_flac_bytes(PictureInformation::from_picture(pic)?, true); let bytes_len = picture.len() + PICTURE_KEY.len(); if u32::try_from(bytes_len as u64).is_ok() { count += 1; writer.write_u32::<LittleEndian>(bytes_len as u32)?; writer.write_all(PICTURE_KEY.as_bytes())?; writer.write_all(&*picture)?; } } let packet_end = writer.seek(SeekFrom::Current(0))?; 
writer.seek(SeekFrom::Start(item_count_pos))?; writer.write_u32::<LittleEndian>(count)?; writer.seek(SeekFrom::Start(packet_end))?; // Stream serial is retrieved later // Checksum is calculated later Ok(ogg_pager::paginate(writer.get_ref(), 0, 0, 0)) } #[cfg(feature = "vorbis_comments")] pub(super) fn write(data: &mut File, tag: &mut VorbisCommentsRef, sig: &[u8]) -> Result<()> { let first_page = Page::read(data, false)?; let ser = first_page.serial; let mut writer = Vec::new(); writer.write_all(&*first_page.as_bytes())?; let first_md_page = Page::read(data, false)?; verify_signature(&first_md_page, sig)?; // Retain the file's vendor string let md_reader = &mut &first_md_page.content()[sig.len()..]; let vendor_len = md_reader.read_u32::<LittleEndian>()?; let mut vendor = vec![0; vendor_len as usize]; md_reader.read_exact(&mut vendor)?; let mut packet = Cursor::new(Vec::new()); packet.write_all(sig)?; packet.write_u32::<LittleEndian>(vendor_len)?; packet.write_all(&vendor)?; let mut pages = create_pages(tag, &mut packet)?; match sig { VORBIS_COMMENT_HEAD => { super::vorbis::write::write_to( data, &mut writer, first_md_page.take_content(), ser, &mut pages, )?; }, OPUSTAGS => { super::opus::write::write_to(data, &mut writer, ser, &mut pages)?; }, _ => unreachable!(), } data.seek(SeekFrom::Start(0))?; data.set_len(first_page.end as u64)?; data.write_all(&*writer)?; Ok(()) }
25.941176
93
0.672619
b9ec063a9877c7ffd73d01638a4e6136d0a9d794
11,313
// Copyright 2019 The Exonum Team // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use exonum_merkledb::{Database, DatabaseExt, Patch, Result as StorageResult, Snapshot}; use std::sync::{Arc, RwLock}; /// Implementation of a `Database`, which allows to rollback its state /// to the last made checkpoint. /// /// **Note:** Intended for testing purposes only. Probably inefficient. pub struct CheckpointDb<T> { inner: Arc<RwLock<CheckpointDbInner<T>>>, } impl<T: Database> CheckpointDb<T> { /// Creates a new checkpointed database that uses the specified `db` as the underlying /// data storage. pub fn new(db: T) -> Self { CheckpointDb { inner: Arc::new(RwLock::new(CheckpointDbInner::new(db))), } } /// Returns a handler to the database. The handler could be used to roll the database back /// without having the ownership to it. 
pub fn handler(&self) -> CheckpointDbHandler<T> { CheckpointDbHandler { handle: self.clone(), } } } impl<T> Clone for CheckpointDb<T> { fn clone(&self) -> Self { Self { inner: Arc::clone(&self.inner), } } } impl<T> std::fmt::Debug for CheckpointDb<T> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("CheckpointDb") .field("refs", &Arc::strong_count(&self.inner)) .finish() } } impl<T: Database> Database for CheckpointDb<T> { fn snapshot(&self) -> Box<dyn Snapshot> { self.inner .read() .expect("Cannot lock CheckpointDb for snapshot") .snapshot() } fn merge(&self, patch: Patch) -> StorageResult<()> { self.inner .write() .expect("Cannot lock CheckpointDb for merge") .merge(patch) } fn merge_sync(&self, patch: Patch) -> StorageResult<()> { self.merge(patch) } } impl<T: Database> From<CheckpointDb<T>> for Arc<dyn Database> { fn from(db: CheckpointDb<T>) -> Arc<dyn Database> { Arc::new(db) } } impl<T: Database> From<T> for CheckpointDb<T> { fn from(db: T) -> Self { CheckpointDb::new(db) } } /// Handler to a checkpointed database, which /// allows rollback of transactions. #[derive(Debug, Clone)] pub struct CheckpointDbHandler<T> { handle: CheckpointDb<T>, } impl<T: Database> CheckpointDbHandler<T> { /// Sets a checkpoint for a future [`rollback`](#method.rollback). pub fn checkpoint(&self) { self.handle .inner .write() .expect("Cannot lock checkpointDb for checkpoint") .checkpoint(); } /// Rolls back this database to the latest checkpoint /// set with [`checkpoint`](#method.checkpoint). /// /// # Panics /// /// - Panics if there are no available checkpoints. pub fn rollback(&self) { self.handle .inner .write() .expect("Cannot lock CheckpointDb for rollback") .rollback(); } /// Tries to unwrap this handler. 
pub fn try_unwrap(self) -> Result<T, Self> { let lock = Arc::try_unwrap(self.handle.inner).map_err(|inner| { eprintln!("strong: {}", Arc::strong_count(&inner)); Self { handle: CheckpointDb { inner }, } })?; let inner = lock.into_inner().expect("cannot unwrap `RwLock`"); Ok(inner.db) } /// Gets the underlying checkpoint database. pub fn into_inner(self) -> CheckpointDb<T> { self.handle } } #[derive(Debug)] struct CheckpointDbInner<T> { db: T, backup_stack: Vec<Vec<Patch>>, } impl<T: Database> CheckpointDbInner<T> { fn new(db: T) -> Self { CheckpointDbInner { db, backup_stack: Vec::new(), } } fn snapshot(&self) -> Box<dyn Snapshot> { self.db.snapshot() } fn merge(&mut self, patch: Patch) -> StorageResult<()> { if self.backup_stack.is_empty() { self.db.merge(patch) } else { self.merge_with_logging(patch) } } fn merge_with_logging(&mut self, patch: Patch) -> StorageResult<()> { // NB: make sure that **both** the db and the journal // are updated atomically. let backup_patch = self.db.merge_with_backup(patch)?; self.backup_stack .last_mut() .expect("`merge_with_logging` called before checkpoint has been set") .push(backup_patch); Ok(()) } fn checkpoint(&mut self) { self.backup_stack.push(Vec::new()) } fn rollback(&mut self) { assert!( !self.backup_stack.is_empty(), "Checkpoint has not been set yet" ); let changelog = self.backup_stack.pop().unwrap(); for patch in changelog.into_iter().rev() { self.db.merge(patch).expect("Cannot merge roll-back patch"); } } } #[cfg(test)] mod tests { use super::*; use exonum_merkledb::{access::AccessExt, TemporaryDB}; fn stack_len<T>(db: &CheckpointDb<T>) -> usize { let inner = db.inner.read().unwrap(); inner.backup_stack.len() } #[test] fn backup_stack_length() { let db = CheckpointDb::new(TemporaryDB::new()); let handler = db.handler(); assert_eq!(stack_len(&db), 0); handler.checkpoint(); assert_eq!(stack_len(&db), 1); handler.rollback(); assert_eq!(stack_len(&db), 0); handler.checkpoint(); handler.checkpoint(); 
assert_eq!(stack_len(&db), 2); handler.rollback(); assert_eq!(stack_len(&db), 1); } #[test] #[allow(clippy::cognitive_complexity)] fn interleaved_rollbacks() { let db = CheckpointDb::new(TemporaryDB::new()); let handler = db.handler(); let fork = db.fork(); fork.get_list("foo").push(1_u32); fork.get_list("bar").push("...".to_owned()); db.merge(fork.into_patch()).unwrap(); // Both checkpoints are on purpose. handler.checkpoint(); handler.checkpoint(); let fork = db.fork(); fork.get_list("foo").push(2_u32); fork.get_list("bar").set(0, "!".to_owned()); db.merge(fork.into_patch()).unwrap(); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 2); assert_eq!(stack[1].len(), 1); assert_eq!(stack[0].len(), 0); } let snapshot = db.snapshot(); assert_eq!(snapshot.get_list::<_, u32>("foo").len(), 2); assert_eq!( snapshot.get_list("foo").iter().collect::<Vec<u32>>(), vec![1, 2] ); assert_eq!(snapshot.get_list::<_, String>("bar").len(), 1); assert_eq!( snapshot.get_list::<_, String>("bar").get(0), Some("!".to_owned()) ); handler.rollback(); let snapshot = db.snapshot(); assert_eq!(snapshot.get_list::<_, u32>("foo").len(), 1); assert_eq!( snapshot.get_list("foo").iter().collect::<Vec<u32>>(), vec![1] ); assert_eq!(snapshot.get_list::<_, String>("bar").len(), 1); assert_eq!( snapshot.get_list::<_, String>("bar").get(0), Some("...".to_owned()) ); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 1); assert_eq!(stack[0].len(), 0); } // Check that DB continues working as usual after a rollback. 
handler.checkpoint(); let fork = db.fork(); fork.get_list("foo").push(3_u32); fork.get_list("bar") .extend(vec!["?".to_owned(), ".".to_owned()]); db.merge(fork.into_patch()).unwrap(); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 2); assert_eq!(stack[1].len(), 1); assert_eq!(stack[0].len(), 0); } let snapshot = db.snapshot(); assert_eq!(snapshot.get_list::<_, u32>("foo").len(), 2); assert_eq!(snapshot.get_list::<_, u32>("bar").len(), 3); let fork = db.fork(); fork.get_list("foo").push(4_u32); fork.get_list::<_, String>("bar").clear(); db.merge(fork.into_patch()).unwrap(); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 2); assert_eq!(stack[1].len(), 2); assert_eq!(stack[0].len(), 0); } let snapshot = db.snapshot(); assert_eq!(snapshot.get_list::<_, u32>("foo").len(), 3); assert_eq!( snapshot.get_list("foo").iter().collect::<Vec<u32>>(), vec![1, 3, 4] ); assert!(snapshot.get_list::<_, String>("bar").is_empty()); handler.rollback(); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 1); assert_eq!(stack[0].len(), 0); } let snapshot = db.snapshot(); assert_eq!(snapshot.get_list::<_, u32>("foo").len(), 1); assert_eq!( snapshot.get_list("foo").iter().collect::<Vec<u32>>(), vec![1] ); assert_eq!(snapshot.get_list::<_, String>("bar").len(), 1); assert_eq!( snapshot.get_list("bar").iter().collect::<Vec<String>>(), vec!["...".to_owned()] ); handler.rollback(); { let inner = db.inner.read().unwrap(); let stack = &inner.backup_stack; assert_eq!(stack.len(), 0); } } #[test] fn rollback_via_handler() { let db = CheckpointDb::new(TemporaryDB::new()); let handler = db.handler(); handler.checkpoint(); let fork = db.fork(); fork.get_entry("foo").set(42_u32); db.merge(fork.into_patch()).unwrap(); let snapshot = db.snapshot(); assert_eq!(snapshot.get_entry::<_, u32>("foo").get(), Some(42)); handler.rollback(); let snapshot = db.snapshot(); 
assert!(!snapshot.get_entry::<_, u32>("foo").exists()); } #[test] #[should_panic(expected = "Checkpoint has not been set yet")] fn extra_rollback() { let db = CheckpointDb::new(TemporaryDB::new()); let handler = db.handler(); handler.checkpoint(); handler.checkpoint(); handler.rollback(); handler.rollback(); handler.rollback(); } }
30.248663
94
0.54928
e95bfb294afa01d22e7fa30725db03fc64750cc7
6,009
use std::{ env, ffi::OsStr, fs, io::{self, BufReader, BufWriter, ErrorKind, Read, Write}, path::{Path, PathBuf}, str, }; use async_trait::async_trait; use tokio::task::spawn_blocking; use uuid::Uuid; use crate::{pairing::Pairing, storage::Storage, Config, Error, Result}; /// `FileStorage` is an implementor of the `Storage` trait that stores data to the file system. #[derive(Debug)] pub struct FileStorage { dir_path: PathBuf, } impl FileStorage { /// Creates a new `FileStorage`. pub async fn new<D: AsRef<OsStr> + ?Sized>(dir: &D) -> Result<Self> { let dir_path = Path::new(dir).to_path_buf(); let dir_path = spawn_blocking(move || -> Result<PathBuf> { fs::create_dir_all(&dir_path)?; Ok(dir_path) }) .await??; Ok(FileStorage { dir_path }) } /// Creates a new `FileStorage` with the current directory as storage path. pub async fn current_dir() -> Result<Self> { let current_dir = spawn_blocking(move || -> Result<PathBuf> { env::current_dir().map_err(Error::from) }).await??; let current_dir = current_dir.to_str().expect("couldn't stringify current_dir"); let data_path = format!("{}/data", current_dir); Self::new(&data_path).await } fn path_to_file(&self, file: &str) -> PathBuf { let mut file_path = self.dir_path.clone(); file_path.push(file); file_path } async fn get_reader(&self, file: &str) -> Result<BufReader<fs::File>> { let file_path = self.path_to_file(file); let reader = spawn_blocking(move || -> Result<BufReader<fs::File>> { let file = fs::OpenOptions::new().read(true).open(file_path)?; let reader = BufReader::new(file); Ok(reader) }) .await??; Ok(reader) } async fn get_writer(&self, file: &str) -> Result<BufWriter<fs::File>> { let file_path = self.path_to_file(file); let writer = spawn_blocking(move || -> Result<BufWriter<fs::File>> { let file = fs::OpenOptions::new().write(true).create(true).open(file_path)?; let writer = BufWriter::new(file); Ok(writer) }) .await??; Ok(writer) } async fn read_bytes(&self, key: &str) -> Result<Vec<u8>> { let mut reader = 
self.get_reader(key).await?; let value = spawn_blocking(move || -> Result<Vec<u8>> { let mut value = Vec::new(); reader.read_to_end(&mut value)?; Ok(value) }) .await??; Ok(value) } async fn write_bytes(&self, key: &str, value: Vec<u8>) -> Result<()> { let mut writer = self.get_writer(key).await?; spawn_blocking(move || -> Result<()> { writer.write_all(&value)?; Ok(()) }) .await??; Ok(()) } async fn remove_file(&self, key: &str) -> Result<()> { let file_path = self.path_to_file(key); spawn_blocking(move || -> Result<()> { fs::remove_file(file_path)?; Ok(()) }) .await??; Ok(()) } async fn keys_with_suffix(&self, suffix: &'static str) -> Result<Vec<String>> { let dir_path = self.dir_path.clone(); let extension = Some(OsStr::new(suffix)); let keys = spawn_blocking(move || -> Result<Vec<String>> { let mut keys = Vec::new(); for entry in fs::read_dir(&dir_path)? { let entry = entry?; let path = entry.path(); if path.extension() == extension { let key = path .file_stem() .ok_or(Error::from(io::Error::from(ErrorKind::NotFound)))? 
.to_os_string() .into_string() .or(Err(Error::from(io::Error::from(ErrorKind::NotFound))))?; keys.push(key); } } Ok(keys) }) .await??; Ok(keys) } } #[async_trait] impl Storage for FileStorage { async fn load_config(&self) -> Result<Config> { let config_bytes = self.read_bytes("config.json").await?; let config = serde_json::from_slice(&config_bytes)?; Ok(config) } async fn save_config(&mut self, config: &Config) -> Result<()> { let config_bytes = serde_json::to_vec(&config)?; self.write_bytes("config.json", config_bytes).await } async fn delete_config(&mut self) -> Result<()> { let key = format!("config.json"); self.remove_file(&key).await } async fn load_pairing(&self, id: &Uuid) -> Result<Pairing> { let key = format!("{}.json", id.to_string()); let pairing_bytes = self.read_bytes(&key).await?; Pairing::from_bytes(&pairing_bytes) } async fn save_pairing(&mut self, pairing: &Pairing) -> Result<()> { let key = format!("{}.json", pairing.id.to_string()); let pairing_bytes = pairing.as_bytes()?; self.write_bytes(&key, pairing_bytes).await } async fn delete_pairing(&mut self, id: &Uuid) -> Result<()> { let key = format!("{}.json", id.to_string()); self.remove_file(&key).await } async fn list_pairings(&self) -> Result<Vec<Pairing>> { let mut pairings = Vec::new(); for key in self.keys_with_suffix("json").await? { if &key != "config" { let pairing_bytes = self.read_bytes(&key).await?; let pairing = Pairing::from_bytes(&pairing_bytes)?; pairings.push(pairing); } } Ok(pairings) } async fn count_pairings(&self) -> Result<usize> { let mut count = 0; for key in self.keys_with_suffix("json").await? { if &key != "device" { count += 1; } } Ok(count) } }
29.747525
107
0.539191
2339dda38ca8cf7a956032fe2609e02938fdf339
20,856
// Copyright 2018 MaidSafe.net limited. // // This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT // http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD // https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied, // modified, or distributed except according to those terms. Please review the Licences for the // specific language governing permissions and limitations relating to use of the SAFE Network // Software. //! # Least Recently Used (LRU) Cache //! //! Implementation of a Least Recently Used //! [caching algorithm](http://en.wikipedia.org/wiki/Cache_algorithms) in a container which may be //! limited by size or time, ordered by most recently seen. //! //! # Examples //! //! ``` //! extern crate lru_time_cache; //! use lru_time_cache::LruCache; //! //! # fn main() { //! // Construct an `LruCache` of `<u8, String>`s, limited by key count //! let max_count = 10; //! let _lru_cache = LruCache::<u8, String>::with_capacity(max_count); //! //! // Construct an `LruCache` of `<String, i64>`s, limited by expiry time //! let time_to_live = ::std::time::Duration::from_millis(100); //! let _lru_cache = LruCache::<String, i64>::with_expiry_duration(time_to_live); //! //! // Construct an `LruCache` of `<u64, Vec<u8>>`s, limited by key count and expiry time //! let _lru_cache = LruCache::<u64, Vec<u8>>::with_expiry_duration_and_capacity(time_to_live, //! max_count); //! # } //! 
``` #![doc( html_logo_url = "https://raw.githubusercontent.com/maidsafe/QA/master/Images/maidsafe_logo.png", html_favicon_url = "https://maidsafe.net/img/favicon.ico", html_root_url = "https://docs.rs/lru_time_cache" )] // For explanation of lint checks, run `rustc -W help` or see // https://github.com/maidsafe/QA/blob/master/Documentation/Rust%20Lint%20Checks.md #![forbid( bad_style, exceeding_bitshifts, mutable_transmutes, no_mangle_const_items, unknown_crate_types, warnings )] #![deny( deprecated, improper_ctypes, missing_docs, non_shorthand_field_patterns, overflowing_literals, plugin_as_library, private_no_mangle_fns, private_no_mangle_statics, stable_features, unconditional_recursion, unknown_lints, unsafe_code, unused, unused_allocation, unused_attributes, unused_comparisons, unused_features, unused_parens, while_true )] #![warn( trivial_casts, trivial_numeric_casts, unused_extern_crates, unused_import_braces, unused_qualifications, unused_results )] #![allow( box_pointers, missing_copy_implementations, missing_debug_implementations, variant_size_differences )] #[cfg(feature = "fake_clock")] extern crate fake_clock; #[cfg(test)] extern crate rand; #[cfg(feature = "fake_clock")] use fake_clock::FakeClock as Instant; use std::borrow::Borrow; use std::collections::{btree_map, BTreeMap, VecDeque}; use std::time::Duration; #[cfg(not(feature = "fake_clock"))] use std::time::Instant; use std::usize; /// A view into a single entry in an LRU cache, which may either be vacant or occupied. pub enum Entry<'a, Key: 'a, Value: 'a> { /// A vacant Entry Vacant(VacantEntry<'a, Key, Value>), /// An occupied Entry Occupied(OccupiedEntry<'a, Value>), } /// A vacant Entry. pub struct VacantEntry<'a, Key: 'a, Value: 'a> { key: Key, cache: &'a mut LruCache<Key, Value>, } /// An occupied Entry. pub struct OccupiedEntry<'a, Value: 'a> { value: &'a mut Value, } /// An iterator over an `LruCache`'s entries that updates the timestamps as values are traversed. 
pub struct Iter<'a, Key: 'a, Value: 'a> { map_iter_mut: btree_map::IterMut<'a, Key, (Value, Instant)>, list: &'a mut VecDeque<Key>, has_expiry: bool, lru_cache_ttl: Duration, } impl<'a, Key, Value> Iterator for Iter<'a, Key, Value> where Key: Ord + Clone, { type Item = (&'a Key, &'a Value); #[cfg_attr(feature = "cargo-clippy", allow(while_let_on_iterator))] fn next(&mut self) -> Option<(&'a Key, &'a Value)> { let now = Instant::now(); while let Some((key, &mut (ref value, ref mut instant))) = self.map_iter_mut.next() { if !self.has_expiry || *instant + self.lru_cache_ttl > now { LruCache::<Key, Value>::update_key(self.list, key); *instant = now; return Some((key, value)); } } None } } /// An iterator over an `LruCache`'s entries that does not modify the timestamp. pub struct PeekIter<'a, Key: 'a, Value: 'a> { map_iter: btree_map::Iter<'a, Key, (Value, Instant)>, lru_cache: &'a LruCache<Key, Value>, } impl<'a, Key, Value> Iterator for PeekIter<'a, Key, Value> where Key: Ord + Clone, { type Item = (&'a Key, &'a Value); #[cfg_attr(feature = "cargo-clippy", allow(while_let_on_iterator))] fn next(&mut self) -> Option<(&'a Key, &'a Value)> { while let Some((key, &(ref value, _))) = self.map_iter.next() { if !self.lru_cache.expired(key) { return Some((key, value)); } } None } } /// Implementation of [LRU cache](index.html#least-recently-used-lru-cache). pub struct LruCache<Key, Value> { map: BTreeMap<Key, (Value, Instant)>, list: VecDeque<Key>, capacity: usize, time_to_live: Duration, } impl<Key, Value> LruCache<Key, Value> where Key: Ord + Clone, { /// Constructor for capacity based `LruCache`. pub fn with_capacity(capacity: usize) -> LruCache<Key, Value> { LruCache { map: BTreeMap::new(), list: VecDeque::new(), capacity, time_to_live: Duration::new(std::u64::MAX, 999_999_999), } } /// Constructor for time based `LruCache`. 
pub fn with_expiry_duration(time_to_live: Duration) -> LruCache<Key, Value> { LruCache { map: BTreeMap::new(), list: VecDeque::new(), capacity: usize::MAX, time_to_live, } } /// Constructor for dual-feature capacity and time based `LruCache`. pub fn with_expiry_duration_and_capacity( time_to_live: Duration, capacity: usize, ) -> LruCache<Key, Value> { LruCache { map: BTreeMap::new(), list: VecDeque::new(), capacity, time_to_live, } } /// Inserts a key-value pair into the cache. /// /// If the key already existed in the cache, the existing value is returned and overwritten in /// the cache. Otherwise, the key-value pair is inserted and `None` is returned. pub fn insert(&mut self, key: Key, value: Value) -> Option<Value> { if self.map.contains_key(&key) { Self::update_key(&mut self.list, &key); } else { while self.check_time_expired() || self.map.len() == self.capacity { self.remove_oldest_element(); } self.list.push_back(key.clone()); } self.map .insert(key, (value, Instant::now())) .map(|pair| pair.0) } /// Removes a key-value pair from the cache. pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<Value> where Key: Borrow<Q>, Q: Ord, { self.list .retain(|k| *k.borrow() < *key || *k.borrow() > *key); self.map.remove(key).map(|(value, _)| value) } /// Clears the `LruCache`, removing all values. pub fn clear(&mut self) { self.map.clear(); self.list.clear(); } /// Retrieves a reference to the value stored under `key`, or `None` if the key doesn't exist. /// Also removes expired elements and updates the time. pub fn get<Q: ?Sized>(&mut self, key: &Q) -> Option<&Value> where Key: Borrow<Q>, Q: Ord, { self.remove_expired(); let list = &mut self.list; self.map.get_mut(key).map(|result| { Self::update_key(list, key); result.1 = Instant::now(); &result.0 }) } /// Returns a reference to the value with the given `key`, if present and not expired, without /// updating the timestamp. 
pub fn peek<Q: ?Sized>(&self, key: &Q) -> Option<&Value> where Key: Borrow<Q>, Q: Ord, { if self.expired(key) { return None; } self.map.get(key).map(|&(ref value, _)| value) } /// Retrieves a mutable reference to the value stored under `key`, or `None` if the key doesn't /// exist. Also removes expired elements and updates the time. pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut Value> where Key: Borrow<Q>, Q: Ord, { self.remove_expired(); let list = &mut self.list; self.map.get_mut(key).map(|result| { Self::update_key(list, key); result.1 = Instant::now(); &mut result.0 }) } /// Returns whether `key` exists in the cache or not. pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool where Key: Borrow<Q>, Q: Ord, { self.map.contains_key(key) && !self.expired(key) } /// Returns the size of the cache, i.e. the number of cached non-expired key-value pairs. pub fn len(&self) -> usize { self.map.len() - self.list.iter().take_while(|key| self.expired(key)).count() } /// Returns `true` if there are no non-expired entries in the cache. pub fn is_empty(&self) -> bool { self.list.iter().all(|key| self.expired(key)) } /// Gets the given key's corresponding entry in the map for in-place manipulation. pub fn entry(&mut self, key: Key) -> Entry<Key, Value> { // We need to do it the ugly way below due to this issue: // https://github.com/rust-lang/rfcs/issues/811 // match self.get_mut(&key) { // Some(value) => Entry::Occupied(OccupiedEntry{value: value}), // None => Entry::Vacant(VacantEntry{key: key, cache: self}), // } if self.contains_key(&key) { Entry::Occupied(OccupiedEntry { value: self.get_mut(&key).expect("key not found"), }) } else { Entry::Vacant(VacantEntry { key, cache: self }) } } /// Returns an iterator over all entries that updates the timestamps as values are /// traversed. Also removes expired elements before creating the iterator. 
pub fn iter(&mut self) -> Iter<Key, Value> { self.remove_expired(); let has_expiry = self.has_expiry(); Iter { map_iter_mut: self.map.iter_mut(), list: &mut self.list, has_expiry, lru_cache_ttl: self.time_to_live, } } /// Returns an iterator over all entries that does not modify the timestamps. pub fn peek_iter(&self) -> PeekIter<Key, Value> { PeekIter { map_iter: self.map.iter(), lru_cache: self, } } fn has_expiry(&self) -> bool { self.time_to_live != Duration::new(std::u64::MAX, 999_999_999) } fn expired<Q: ?Sized>(&self, key: &Q) -> bool where Key: Borrow<Q>, Q: Ord, { let now = Instant::now(); self.has_expiry() && self .map .get(key) .map_or(false, |v| v.1 + self.time_to_live < now) } fn remove_oldest_element(&mut self) { let _ = self .list .pop_front() .map(|key| assert!(self.map.remove(&key).is_some())); } fn check_time_expired(&self) -> bool { self.has_expiry() && self.list.front().map_or(false, |key| self.expired(key)) } // Move `key` in the ordered list to the last fn update_key<Q: ?Sized>(list: &mut VecDeque<Key>, key: &Q) where Key: Borrow<Q>, Q: Ord, { if let Some(pos) = list.iter().position(|k| k.borrow() == key) { let k = list.remove(pos).unwrap(); list.push_back(k); } } fn remove_expired(&mut self) { while self.check_time_expired() { self.remove_oldest_element(); } } } impl<Key, Value> Clone for LruCache<Key, Value> where Key: Clone, Value: Clone, { fn clone(&self) -> LruCache<Key, Value> { LruCache { map: self.map.clone(), list: self.list.clone(), capacity: self.capacity, time_to_live: self.time_to_live, } } } impl<'a, Key: Ord + Clone, Value> VacantEntry<'a, Key, Value> { /// Inserts a value pub fn insert(self, value: Value) -> &'a mut Value { let _ = self.cache.insert(self.key.clone(), value); self.cache.get_mut(&self.key).expect("key not found") } } impl<'a, Value> OccupiedEntry<'a, Value> { /// Converts the entry into a mutable reference to its value. 
pub fn into_mut(self) -> &'a mut Value { self.value } } impl<'a, Key: Ord + Clone, Value> Entry<'a, Key, Value> { /// Ensures a value is in the entry by inserting the default if empty, and returns /// a mutable reference to the value in the entry. pub fn or_insert(self, default: Value) -> &'a mut Value { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default), } } /// Ensures a value is in the entry by inserting the result of the default function if empty, /// and returns a mutable reference to the value in the entry. pub fn or_insert_with<F: FnOnce() -> Value>(self, default: F) -> &'a mut Value { match self { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(default()), } } } #[cfg(test)] mod test { use rand; use std::time::Duration; #[cfg(feature = "fake_clock")] fn sleep(time: u64) { use fake_clock::FakeClock; FakeClock::advance_time(time); } #[cfg(not(feature = "fake_clock"))] fn sleep(time: u64) { use std::thread; thread::sleep(Duration::from_millis(time)); } fn generate_random_vec<T>(len: usize) -> Vec<T> where T: rand::Rand, { let mut vec = Vec::<T>::with_capacity(len); for _ in 0..len { vec.push(rand::random()); } vec } #[test] fn size_only() { let size = 10usize; let mut lru_cache = super::LruCache::<usize, usize>::with_capacity(size); for i in 0..10 { assert_eq!(lru_cache.len(), i); let _ = lru_cache.insert(i, i); assert_eq!(lru_cache.len(), i + 1); } for i in 10..1000 { let _ = lru_cache.insert(i, i); assert_eq!(lru_cache.len(), size); } for _ in (0..1000).rev() { assert!(lru_cache.contains_key(&(1000 - 1))); assert!(lru_cache.get(&(1000 - 1)).is_some()); assert_eq!(*lru_cache.get(&(1000 - 1)).unwrap(), 1000 - 1); } } #[test] fn time_only() { let time_to_live = Duration::from_millis(100); let mut lru_cache = super::LruCache::<usize, usize>::with_expiry_duration(time_to_live); for i in 0..10 { assert_eq!(lru_cache.len(), i); let _ = lru_cache.insert(i, i); 
assert_eq!(lru_cache.len(), i + 1); } sleep(101); let _ = lru_cache.insert(11, 11); assert_eq!(lru_cache.len(), 1); for i in 0..10 { assert!(!lru_cache.is_empty()); assert_eq!(lru_cache.len(), i + 1); let _ = lru_cache.insert(i, i); assert_eq!(lru_cache.len(), i + 2); } sleep(101); assert_eq!(0, lru_cache.len()); assert!(lru_cache.is_empty()); } #[test] fn time_only_check() { let time_to_live = Duration::from_millis(50); let mut lru_cache = super::LruCache::<usize, usize>::with_expiry_duration(time_to_live); assert_eq!(lru_cache.len(), 0); let _ = lru_cache.insert(0, 0); assert_eq!(lru_cache.len(), 1); sleep(101); assert!(!lru_cache.contains_key(&0)); assert_eq!(lru_cache.len(), 0); } #[test] fn time_and_size() { let size = 10usize; let time_to_live = Duration::from_millis(100); let mut lru_cache = super::LruCache::<usize, usize>::with_expiry_duration_and_capacity(time_to_live, size); for i in 0..1000 { if i < size { assert_eq!(lru_cache.len(), i); } let _ = lru_cache.insert(i, i); if i < size { assert_eq!(lru_cache.len(), i + 1); } else { assert_eq!(lru_cache.len(), size); } } sleep(101); let _ = lru_cache.insert(1, 1); assert_eq!(lru_cache.len(), 1); } #[derive(PartialEq, PartialOrd, Ord, Clone, Eq)] struct Temp { id: Vec<u8>, } #[test] fn time_size_struct_value() { let size = 100usize; let time_to_live = Duration::from_millis(100); let mut lru_cache = super::LruCache::<Temp, usize>::with_expiry_duration_and_capacity(time_to_live, size); for i in 0..1000 { if i < size { assert_eq!(lru_cache.len(), i); } let _ = lru_cache.insert( Temp { id: generate_random_vec::<u8>(64), }, i, ); if i < size { assert_eq!(lru_cache.len(), i + 1); } else { assert_eq!(lru_cache.len(), size); } } sleep(101); let _ = lru_cache.insert( Temp { id: generate_random_vec::<u8>(64), }, 1, ); assert_eq!(lru_cache.len(), 1); } #[test] fn iter() { let mut lru_cache = super::LruCache::<usize, usize>::with_capacity(3); let _ = lru_cache.insert(0, 0); sleep(1); let _ = lru_cache.insert(1, 1); 
sleep(1); let _ = lru_cache.insert(2, 2); sleep(1); assert_eq!( vec![(&0, &0), (&1, &1), (&2, &2)], lru_cache.iter().collect::<Vec<_>>() ); let initial_instant0 = lru_cache.map[&0].1; let initial_instant2 = lru_cache.map[&2].1; sleep(1); // only the first two entries should have their timestamp updated (and position in list) let _ = lru_cache.iter().take(2).all(|_| true); assert_ne!(lru_cache.map[&0].1, initial_instant0); assert_eq!(lru_cache.map[&2].1, initial_instant2); assert_eq!(*lru_cache.list.front().unwrap(), 2); assert_eq!(*lru_cache.list.back().unwrap(), 1); } #[test] fn peek_iter() { let time_to_live = Duration::from_millis(500); let mut lru_cache = super::LruCache::<usize, usize>::with_expiry_duration(time_to_live); let _ = lru_cache.insert(0, 0); let _ = lru_cache.insert(2, 2); let _ = lru_cache.insert(3, 3); sleep(300); assert_eq!( vec![(&0, &0), (&2, &2), (&3, &3)], lru_cache.peek_iter().collect::<Vec<_>>() ); assert_eq!(Some(&2), lru_cache.get(&2)); let _ = lru_cache.insert(1, 1); let _ = lru_cache.insert(4, 4); sleep(300); assert_eq!( vec![(&1, &1), (&2, &2), (&4, &4)], lru_cache.peek_iter().collect::<Vec<_>>() ); sleep(300); assert!(lru_cache.is_empty()); } #[test] fn update_time_check() { let time_to_live = Duration::from_millis(500); let mut lru_cache = super::LruCache::<usize, usize>::with_expiry_duration(time_to_live); assert_eq!(lru_cache.len(), 0); let _ = lru_cache.insert(0, 0); assert_eq!(lru_cache.len(), 1); sleep(300); assert_eq!(Some(&0), lru_cache.get(&0)); sleep(300); assert_eq!(Some(&0), lru_cache.peek(&0)); sleep(300); assert_eq!(None, lru_cache.peek(&0)); } #[test] fn deref_coercions() { let mut lru_cache = super::LruCache::<String, usize>::with_capacity(1); let _ = lru_cache.insert("foo".to_string(), 0); assert_eq!(true, lru_cache.contains_key("foo")); assert_eq!(Some(&0), lru_cache.get("foo")); assert_eq!(Some(&mut 0), lru_cache.get_mut("foo")); assert_eq!(Some(&0), lru_cache.peek("foo")); assert_eq!(Some(0), 
lru_cache.remove("foo")); } }
30.625551
100
0.566216
d6d82d6300279ba48eb57762dde90bf49b0bd17b
51
extern crate cursive; pub mod matrix; pub mod ui;
10.2
21
0.745098
2fc439c5ac6e00c906020ba872cb9e5517585cf7
4,185
use crate::pattern::PatternId; use siko_constants::BuiltinOperator; use siko_location_info::location_id::LocationId; use siko_util::format_list; use std::fmt; #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd)] pub struct ExprId { pub id: usize, } impl fmt::Display for ExprId { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "#{}", self.id) } } impl From<usize> for ExprId { fn from(id: usize) -> ExprId { ExprId { id: id } } } #[derive(Debug, Clone)] pub struct Case { pub pattern_id: PatternId, pub body: ExprId, } impl fmt::Display for Case { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} -> {}", self.pattern_id, self.body) } } #[derive(Debug, Clone)] pub struct RecordConstructionItem { pub field_name: String, pub body: ExprId, pub location_id: LocationId, } impl fmt::Display for RecordConstructionItem { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} -> {}", self.field_name, self.body) } } #[derive(Debug, Clone)] pub enum Expr { Lambda(Vec<(String, LocationId)>, ExprId), FunctionCall(ExprId, Vec<ExprId>), Builtin(BuiltinOperator), If(ExprId, ExprId, ExprId), Tuple(Vec<ExprId>), List(Vec<ExprId>), Path(String), IntegerLiteral(i64), FloatLiteral(f64), StringLiteral(String), CharLiteral(char), Do(Vec<ExprId>), Bind(PatternId, ExprId), FieldAccess(String, ExprId), TupleFieldAccess(usize, ExprId), Formatter(String, Vec<ExprId>), CaseOf(ExprId, Vec<Case>), RecordInitialization(String, Vec<RecordConstructionItem>), RecordUpdate(String, Vec<RecordConstructionItem>), Return(ExprId), Loop(PatternId, ExprId, Vec<ExprId>), Continue(ExprId), Break(ExprId), } impl fmt::Display for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { Expr::Lambda(args, body) => { let args: Vec<_> = args.iter().map(|arg| &arg.0).collect(); write!(f, "Lambda({}, {})", format_list(&args[..]), body) } Expr::FunctionCall(expr, args) => { write!(f, "FunctionCall({}, {})", expr, format_list(args)) } 
Expr::Builtin(op) => write!(f, "Op({:?})", op), Expr::If(cond, true_branch, false_branch) => { write!(f, "If({}, {}, {})", cond, true_branch, false_branch) } Expr::Tuple(items) => write!(f, "Tuple({})", format_list(items)), Expr::List(items) => write!(f, "[{}]", format_list(items)), Expr::Path(path) => write!(f, "Path({})", path), Expr::IntegerLiteral(v) => write!(f, "Integer({})", v), Expr::FloatLiteral(v) => write!(f, "Float({})", v), Expr::StringLiteral(v) => write!(f, "String({})", v), Expr::CharLiteral(v) => write!(f, "Char({})", v), Expr::Do(items) => write!(f, "Do({})", format_list(items)), Expr::Bind(t, expr) => write!(f, "Bind({}, {})", t, expr), Expr::FieldAccess(name, expr) => write!(f, "FieldAccess({}, {})", name, expr), Expr::TupleFieldAccess(index, expr) => { write!(f, "TupleFieldAccess({}, {})", index, expr) } Expr::Formatter(fmt, items) => write!(f, "Formatter({}, {})", fmt, format_list(items)), Expr::CaseOf(body, cases) => write!(f, "CaseOf({}, {})", body, format_list(cases)), Expr::RecordInitialization(name, items) => { write!(f, "RecordInitialization({}, {})", name, format_list(items)) } Expr::RecordUpdate(name, items) => { write!(f, "RecordUpdate({}, {})", name, format_list(items)) } Expr::Return(expr) => write!(f, "Return({})", expr), Expr::Loop(pattern, start, block) => { write!(f, "Loop({}, {}, {})", pattern, start, format_list(block)) } Expr::Continue(expr) => write!(f, "Continue({})", expr), Expr::Break(expr) => write!(f, "Break({})", expr), } } }
34.586777
99
0.544564
9b4782cd49ea299acecf2bd3ea12e5f7b0654105
23,359
// Copyright (c) 2016-2017 Chef Software Inc. and/or applicable contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Error handling for the Supervisor. //! //! Errors in the Supervisor are of the type `SupError`, which contains an `Error` along with //! information about where the error was created in the code base, in the same way that the //! `output` module does. To simplify the creation of these annotated errors, we provide the //! `sup_error!` macro, which takes only an `Error` as its argument. //! //! To match on `Error`, do something like this: //! //! ```ignore //! let error = sup_error!(Error::CommandNotImplemented); //! let result = match error { //! SupError{err: Error::CommandNotImplemented, ..} => true, //! _ => false //! }; //! assert_eq!(result, true); //! ``` //! //! When printing errors, we automatically create a `StructuredOutput` with the `verbose` flag set, //! ensuring that you can see the file, line number, and column it was created from. //! //! Also included in this module is `Result<T>`, a type alias for `Result<T, SupError>`. Use //! it instead of the longer `Result` form. 
use std::io; use std::env; use std::error; use std::ffi; use std::fmt; use std::net; use std::path::PathBuf; use std::result; use std::str; use std::string; use std::sync::mpsc; use butterfly; use common; use depot_client; use glob; use handlebars; use hcore; use hcore::os::process::Pid; use hcore::output::StructuredOutput; use hcore::package::{self, Identifiable, PackageInstall}; use launcher_client; use notify; use serde_json; use toml; use PROGRAM_NAME; static LOGKEY: &'static str = "ER"; /// Our result type alias, for easy coding. pub type Result<T> = result::Result<T, SupError>; #[derive(Debug)] /// All errors in the Supervisor are kept in this struct. We store `Error`, an enum with a variant /// for every type of error we produce. It also stores the location the error was created. pub struct SupError { pub err: Error, logkey: &'static str, file: &'static str, line: u32, column: u32, } impl SupError { /// Create a new `SupError`. Usually accessed through the `sup_error!` macro, rather than /// called directly. pub fn new( err: Error, logkey: &'static str, file: &'static str, line: u32, column: u32, ) -> SupError { SupError { err: err, logkey: logkey, file: file, line: line, column: column, } } } /// All the kinds of errors we produce. 
#[derive(Debug)] pub enum Error { Departed, BadCompositesPath(PathBuf, io::Error), BadDataFile(PathBuf, io::Error), BadDataPath(PathBuf, io::Error), BadDesiredState(String), BadElectionStatus(String), BadPackage(PackageInstall, hcore::error::Error), BadSpecsPath(PathBuf, io::Error), BadStartStyle(String), BadEnvConfig(String), ButterflyError(butterfly::error::Error), DepotClient(depot_client::Error), EnvJoinPathsError(env::JoinPathsError), ExecCommandNotFound(String), FileNotFound(String), FileWatcherFileIsRoot, GroupNotFound(String), HabitatCommon(common::Error), HabitatCore(hcore::Error), TemplateFileError(handlebars::TemplateFileError), TemplateRenderError(handlebars::RenderError), InvalidBinding(String), InvalidBinds(Vec<String>), InvalidCompositeBinding(String), InvalidKeyParameter(String), InvalidPidFile, InvalidTopology(String), InvalidUpdateStrategy(String), Io(io::Error), IPFailed, Launcher(launcher_client::Error), MissingRequiredBind(Vec<String>), MissingRequiredIdent, NameLookup(io::Error), NetParseError(net::AddrParseError), NoLauncher, NotifyCreateError(notify::Error), NotifyError(notify::Error), NulError(ffi::NulError), PackageNotFound(package::PackageIdent), Permissions(String), PidFileCorrupt(PathBuf), PidFileIO(PathBuf, io::Error), ProcessLockCorrupt, ProcessLocked(Pid), ProcessLockIO(PathBuf, io::Error), RecvError(mpsc::RecvError), RenderContextSerialization(serde_json::Error), ServiceDeserializationError(serde_json::Error), ServiceLoaded(package::PackageIdent), ServiceNotLoaded(package::PackageIdent), ServiceSerializationError(serde_json::Error), ServiceSpecFileIO(PathBuf, io::Error), ServiceSpecParse(toml::de::Error), ServiceSpecRender(toml::ser::Error), SignalFailed, SpecWatcherDirNotFound(String), SpecWatcherGlob(glob::PatternError), StrFromUtf8Error(str::Utf8Error), StringFromUtf8Error(string::FromUtf8Error), TomlEncode(toml::ser::Error), TomlMergeError(String), TomlParser(toml::de::Error), TryRecvError(mpsc::TryRecvError), UnpackFailed, 
UserNotFound(String), } impl fmt::Display for SupError { // We create a string for each type of error, then create a `StructuredOutput` for it, flip // verbose on, and print it. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let content = match self.err { Error::BadCompositesPath(ref path, ref err) => { format!( "Unable to create the composites directory '{}' ({})", path.display(), err ) } Error::Departed => { format!( "This Supervisor has been manually departed.\n\nFor the safety of the system, this Supervisor cannot be started (if we did, we would risk the services on this machine behaving badly without our knowledge.) If you know that the services on this system are safe, and want them to rejoin the habitat ring, you need to:\n\n rm -rf /hab/sup/default/MEMBER_ID /hab/sup/default/data\n\nThis will cause the Supervisor to join the ring as a new member.\n\nIf you are in doubt, it is better to consider the services managed by this Supervisor as unsafe to run." ) } Error::BadDataFile(ref path, ref err) => { format!( "Unable to read or write to data file, {}, {}", path.display(), err ) } Error::BadDataPath(ref path, ref err) => { format!( "Unable to read or write to data directory, {}, {}", path.display(), err ) } Error::BadDesiredState(ref state) => { format!("Unknown service desired state style '{}'", state) } Error::BadElectionStatus(ref status) => format!("Unknown election status '{}'", status), Error::BadPackage(ref pkg, ref err) => format!("Bad package, {}, {}", pkg, err), Error::BadSpecsPath(ref path, ref err) => { format!( "Unable to create the specs directory '{}' ({})", path.display(), err ) } Error::BadStartStyle(ref style) => format!("Unknown service start style '{}'", style), Error::BadEnvConfig(ref varname) => { format!("Unable to find valid TOML or JSON in {} ENVVAR", varname) } Error::ButterflyError(ref err) => format!("Butterfly error: {}", err), Error::ExecCommandNotFound(ref c) => { format!("`{}' was not found on the filesystem or in PATH", 
c) } Error::Permissions(ref err) => format!("{}", err), Error::HabitatCommon(ref err) => format!("{}", err), Error::HabitatCore(ref err) => format!("{}", err), Error::TemplateFileError(ref err) => format!("{:?}", err), Error::TemplateRenderError(ref err) => format!("{}", err), Error::DepotClient(ref err) => format!("{}", err), Error::EnvJoinPathsError(ref err) => format!("{}", err), Error::FileNotFound(ref e) => format!("File not found at: {}", e), Error::FileWatcherFileIsRoot => format!("Watched file is root"), Error::GroupNotFound(ref e) => format!("No GID for group '{}' could be found", e), Error::InvalidBinding(ref binding) => { format!( "Invalid binding \"{}\", must be of the form <NAME>:<SERVICE_GROUP> where \ <NAME> is a service name and <SERVICE_GROUP> is a valid service group", binding ) } Error::InvalidBinds(ref e) => format!("Invalid bind(s), {}", e.join(", ")), Error::InvalidCompositeBinding(ref binding) => { format!( "Invalid binding \"{}\", must be of the form <SERVICE_NAME>:<NAME>:<SERVICE_GROUP> where \ <SERVICE_NAME> is the name of a service within the composite, <NAME> is a bind name for \ that service, and <SERVICE_GROUP> is a valid service group", binding ) } Error::InvalidKeyParameter(ref e) => { format!("Invalid parameter for key generation: {:?}", e) } Error::InvalidPidFile => format!("Invalid child process PID file"), Error::InvalidTopology(ref t) => format!("Invalid topology: {}", t), Error::InvalidUpdateStrategy(ref s) => format!("Invalid update strategy: {}", s), Error::Io(ref err) => format!("{}", err), Error::IPFailed => format!("Failed to discover this hosts outbound IP address"), Error::Launcher(ref err) => format!("{}", err), Error::MissingRequiredBind(ref e) => { format!("Missing required bind(s), {}", e.join(", ")) } Error::MissingRequiredIdent => { format!("Missing required ident field: (example: ident = \"core/redis\")") } Error::NameLookup(ref e) => format!("Error resolving a name or IP address: {}", e), Error::NetParseError(ref 
e) => format!("Can't parse ip:port: {}", e), Error::NoLauncher => format!("Supervisor must be run from `hab-launch`"), Error::NotifyCreateError(ref e) => format!("Notify create error: {}", e), Error::NotifyError(ref e) => format!("Notify error: {}", e), Error::NulError(ref e) => format!("{}", e), Error::PackageNotFound(ref pkg) => { if pkg.fully_qualified() { format!("Cannot find package: {}", pkg) } else { format!("Cannot find a release of package: {}", pkg) } } Error::PidFileCorrupt(ref path) => { format!("Unable to decode contents of PID file, {}", path.display()) } Error::PidFileIO(ref path, ref err) => { format!("Unable to read PID file, {}, {}", path.display(), err) } Error::ProcessLockCorrupt => format!("Unable to decode contents of process lock"), Error::ProcessLocked(ref pid) => { format!( "Unable to start Habitat Supervisor because another instance is already \ running with the pid {}. If your intention was to run multiple Supervisors - \ that can be done by setting a value for `--override-name` at startup - but \ it is not recommended.", pid ) } Error::ProcessLockIO(ref path, ref err) => { format!( "Unable to start Habitat Supervisor because we weren't able to write or \ read to a process lock at {}, {}", path.display(), err ) } Error::RecvError(ref err) => format!("{}", err), Error::RenderContextSerialization(ref e) => { format!("Unable to serialize rendering context, {}", e) } Error::ServiceDeserializationError(ref e) => { format!("Can't deserialize service status: {}", e) } Error::ServiceNotLoaded(ref ident) => format!("Service {} not loaded", ident), Error::ServiceLoaded(ref ident) => { format!("Service already loaded, unload '{}' and try again", ident) } Error::ServiceSerializationError(ref e) => { format!("Can't serialize service to file: {}", e) } Error::ServiceSpecFileIO(ref path, ref err) => { format!( "Unable to write or read to a service spec file at {}, {}", path.display(), err ) } Error::ServiceSpecParse(ref err) => { format!("Unable to 
parse contents of service spec file, {}", err) } Error::ServiceSpecRender(ref err) => { format!("Service spec could not be rendered successfully: {}", err) } Error::SignalFailed => format!("Failed to send a signal to the child process"), Error::SpecWatcherDirNotFound(ref path) => { format!( "Spec directory '{}' not created or is not a directory", path ) } Error::SpecWatcherGlob(ref e) => format!("{}", e), Error::StrFromUtf8Error(ref e) => format!("{}", e), Error::StringFromUtf8Error(ref e) => format!("{}", e), Error::TomlEncode(ref e) => format!("Failed to encode TOML: {}", e), Error::TomlMergeError(ref e) => format!("Failed to merge TOML: {}", e), Error::TomlParser(ref err) => format!("Failed to parse TOML: {}", err), Error::TryRecvError(ref err) => format!("{}", err), Error::UnpackFailed => format!("Failed to unpack a package"), Error::UserNotFound(ref e) => format!("No UID for user '{}' could be found", e), }; let progname = PROGRAM_NAME.as_str(); let mut so = StructuredOutput::new( progname, self.logkey, self.line, self.file, self.column, &content, ); so.verbose = Some(true); write!(f, "{}", so) } } impl error::Error for SupError { fn description(&self) -> &str { match self.err { Error::BadCompositesPath(_, _) => "Unable to create the composites directory", Error::Departed => "Supervisor has been manually departed", Error::BadDataFile(_, _) => "Unable to read or write to a data file", Error::BadDataPath(_, _) => "Unable to read or write to data directory", Error::BadElectionStatus(_) => "Unknown election status", Error::BadDesiredState(_) => "Unknown desired state in service spec", Error::BadPackage(_, _) => "Package was malformed or contained malformed contents", Error::BadSpecsPath(_, _) => "Unable to create the specs directory", Error::BadStartStyle(_) => "Unknown start style in service spec", Error::BadEnvConfig(_) => "Unknown syntax in Env Configuration", Error::ButterflyError(ref err) => err.description(), Error::ExecCommandNotFound(_) => "Exec command 
was not found on filesystem or in PATH", Error::GroupNotFound(_) => "No matching GID for group found", Error::TemplateFileError(ref err) => err.description(), Error::TemplateRenderError(ref err) => err.description(), Error::HabitatCommon(ref err) => err.description(), Error::HabitatCore(ref err) => err.description(), Error::DepotClient(ref err) => err.description(), Error::EnvJoinPathsError(ref err) => err.description(), Error::FileNotFound(_) => "File not found", Error::FileWatcherFileIsRoot => "Watched file is root", Error::InvalidBinding(_) => "Invalid binding parameter", Error::InvalidBinds(_) => { "Service binds detected that are neither required nor optional package binds" } Error::InvalidCompositeBinding(_) => "Invalid binding parameter", Error::InvalidKeyParameter(_) => "Key parameter error", Error::InvalidPidFile => "Invalid child process PID file", Error::InvalidTopology(_) => "Invalid topology", Error::InvalidUpdateStrategy(_) => "Invalid update strategy", Error::Io(ref err) => err.description(), Error::IPFailed => "Failed to discover the outbound IP address", Error::Launcher(ref err) => err.description(), Error::MissingRequiredBind(_) => { "A service to start without specifying a service group for all required binds" } Error::MissingRequiredIdent => { "Missing required ident field: (example: ident = \"core/redis\")" } Error::NetParseError(_) => "Can't parse IP:port", Error::NameLookup(_) => "Error resolving a name or IP address", Error::NoLauncher => "Supervisor must be run from `hab-launch`", Error::NotifyCreateError(_) => "Notify create error", Error::NotifyError(_) => "Notify error", Error::NulError(_) => { "An attempt was made to build a CString with a null byte inside it" } Error::PackageNotFound(_) => "Cannot find a package", Error::Permissions(_) => "File system permissions error", Error::PidFileCorrupt(_) => "Unable to decode contents of PID file", Error::PidFileIO(_, _) => "Unable to read or write to PID file", Error::ProcessLockCorrupt => 
"Unable to decode contents of process lock", Error::ProcessLocked(_) => { "Another instance of the Habitat Supervisor is already running" } Error::ProcessLockIO(_, _) => "Unable to read or write to a process lock", Error::RecvError(_) => "A channel failed to receive a response", Error::RenderContextSerialization(_) => "Unable to serialize rendering context", Error::ServiceDeserializationError(_) => "Can't deserialize service status", Error::ServiceNotLoaded(_) => "Service status called when service not loaded", Error::ServiceLoaded(_) => "Service load or start called when service already loaded", Error::ServiceSerializationError(_) => "Can't serialize service to file", Error::ServiceSpecFileIO(_, _) => "Unable to write or read to a service spec file", Error::ServiceSpecParse(_) => "Service spec could not be parsed successfully", Error::ServiceSpecRender(_) => "Service spec TOML could not be rendered successfully", Error::SignalFailed => "Failed to send a signal to the child process", Error::SpecWatcherDirNotFound(_) => "Spec directory not created or is not a directory", Error::SpecWatcherGlob(_) => "Spec watcher file globbing error", Error::StrFromUtf8Error(_) => "Failed to convert a str from a &[u8] as UTF-8", Error::StringFromUtf8Error(_) => "Failed to convert a string from a Vec<u8> as UTF-8", Error::TomlEncode(_) => "Failed to encode toml!", Error::TomlMergeError(_) => "Failed to merge TOML!", Error::TomlParser(_) => "Failed to parse TOML!", Error::TryRecvError(_) => "A channel failed to receive a response", Error::UnpackFailed => "Failed to unpack a package", Error::UserNotFound(_) => "No matching UID for user found", } } } impl From<net::AddrParseError> for SupError { fn from(err: net::AddrParseError) -> SupError { sup_error!(Error::NetParseError(err)) } } impl From<butterfly::error::Error> for SupError { fn from(err: butterfly::error::Error) -> SupError { sup_error!(Error::ButterflyError(err)) } } impl From<common::Error> for SupError { fn from(err: 
common::Error) -> SupError { sup_error!(Error::HabitatCommon(err)) } } impl From<glob::PatternError> for SupError { fn from(err: glob::PatternError) -> SupError { sup_error!(Error::SpecWatcherGlob(err)) } } impl From<handlebars::RenderError> for SupError { fn from(err: handlebars::RenderError) -> SupError { sup_error!(Error::TemplateRenderError(err)) } } impl From<handlebars::TemplateFileError> for SupError { fn from(err: handlebars::TemplateFileError) -> SupError { sup_error!(Error::TemplateFileError(err)) } } impl From<hcore::Error> for SupError { fn from(err: hcore::Error) -> SupError { sup_error!(Error::HabitatCore(err)) } } impl From<depot_client::Error> for SupError { fn from(err: depot_client::Error) -> SupError { sup_error!(Error::DepotClient(err)) } } impl From<ffi::NulError> for SupError { fn from(err: ffi::NulError) -> SupError { sup_error!(Error::NulError(err)) } } impl From<io::Error> for SupError { fn from(err: io::Error) -> SupError { sup_error!(Error::Io(err)) } } impl From<env::JoinPathsError> for SupError { fn from(err: env::JoinPathsError) -> SupError { sup_error!(Error::EnvJoinPathsError(err)) } } impl From<launcher_client::Error> for SupError { fn from(err: launcher_client::Error) -> SupError { sup_error!(Error::Launcher(err)) } } impl From<string::FromUtf8Error> for SupError { fn from(err: string::FromUtf8Error) -> SupError { sup_error!(Error::StringFromUtf8Error(err)) } } impl From<str::Utf8Error> for SupError { fn from(err: str::Utf8Error) -> SupError { sup_error!(Error::StrFromUtf8Error(err)) } } impl From<mpsc::RecvError> for SupError { fn from(err: mpsc::RecvError) -> SupError { sup_error!(Error::RecvError(err)) } } impl From<mpsc::TryRecvError> for SupError { fn from(err: mpsc::TryRecvError) -> SupError { sup_error!(Error::TryRecvError(err)) } } impl From<notify::Error> for SupError { fn from(err: notify::Error) -> SupError { sup_error!(Error::NotifyError(err)) } } impl From<toml::de::Error> for SupError { fn from(err: toml::de::Error) 
-> Self { sup_error!(Error::TomlParser(err)) } } impl From<toml::ser::Error> for SupError { fn from(err: toml::ser::Error) -> Self { sup_error!(Error::TomlEncode(err)) } }
41.937163
572
0.587525
5bd71c05068100bc114e306dae93e41f2380c2a9
18,243
use crate::context::*; use crate::util::*; use std::io::Write; pub enum ProgramItem { Function(Function), Declaration(Declaration), } pub struct Program { pub items: Vec<ProgramItem>, } impl Program { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { let mut entry = false; context.put_directive(".text"); context.put_directive(".globl main"); context.write_data(".data"); context.write_bss(".bss"); for item in self.items.into_iter() { match item { ProgramItem::Function(function) => { if function.name == "main" { entry = true; } function.emit(context); } ProgramItem::Declaration(declaration) => { context.create_global_variable(&declaration.name, &declaration.ty); let mangled = mangle_global_variable(&declaration.name); match &declaration.default { None => { context.write_bss(&format!( ".comm {}, {}, 4", mangled, declaration.ty.measure() )); } Some(expression) => { context.write_data(".align 4"); context.write_data(&format!(".size {}, 4", mangled)); context.write_data(&format!("{}:", mangled)); if let Expression::IntegerLiteral(value) = expression { context.write_data(&format!(".word {}", value)); } else { panic!(); } } }; } } } context.put_label("main".to_string()); context.put_jump("__main".to_string()); assert!(entry); context.check_undefined_symbol(); } } pub struct Function { pub ty: Type, pub name: String, pub parameters: Vec<(String, Type)>, pub body: Option<Vec<BlockItem>>, } impl Function { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { match self.body { Some(body) => { context.define_function( &self.name, self.ty, self.parameters.iter().map(|t| t.1.clone()).collect(), ); context.enter_function(&self.name); context.put_set_frame(self.name); context.enter_scope(); let mut offset = 0; for (parameter_name, parameter_type) in self.parameters.into_iter() { context.create_located(&parameter_name, &parameter_type, offset + 8); offset += parameter_type.measure() as i32; } for item in body.into_iter() { 
item.emit(context); } context.put_push(0); context.put_return(); // default return value: 0 context.leave_scope(); context.exit_function(); context.put_end_frame(); } None => { context.declare_function( &self.name, self.ty, self.parameters.into_iter().map(|tuple| tuple.1).collect(), ); } } } } pub enum Statement { Empty, Return(Expression), Expression(Expression), If { condition: Expression, true_branch: Box<Statement>, false_branch: Option<Box<Statement>>, }, Compound(Compound), Loop { initializer: Option<Box<BlockItem>>, condition: Option<Expression>, body: Box<Statement>, modifier: Option<Expression>, }, Break, Continue, } impl Statement { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { match self { Statement::Empty => {} Statement::Return(expression) => { let rt = expression.emit(context).0; context.check_return_type(rt); context.put_return(); } Statement::Expression(expression) => { expression.emit(context); context.put_pop(); } Statement::If { condition, true_branch, false_branch, } => { let label_1 = context.next_label(); let label_2 = context.next_label(); assert!(condition.emit(context).0.is_primitive()); context.put_jump_zero(label_1.clone()); true_branch.emit(context); context.put_jump(label_2.clone()); context.put_label(label_1); if let Some(false_part) = false_branch { false_part.emit(context); } context.put_label(label_2); } Statement::Compound(compound) => { compound.emit(context); } Statement::Loop { initializer, condition, body, modifier, } => { let label_restart = context.next_label(); let label_break = context.next_label(); let label_continue = context.next_label(); context.enter_loop(label_break.clone(), label_continue.clone()); context.enter_scope(); if let Some(item) = initializer { item.emit(context); } context.put_label(label_restart.clone()); if let Some(expression) = condition { assert!(expression.emit(context).0.is_primitive()); context.put_jump_zero(label_break.clone()); } body.emit(context); 
context.put_label(label_continue); if let Some(expression) = modifier { expression.emit(context); context.put_pop(); } context.put_jump(label_restart); context.put_label(label_break); context.leave_scope(); context.leave_loop(); } Statement::Break => { context.put_jump(context.get_loop_break()); } Statement::Continue => { context.put_jump(context.get_loop_continue()); } } } } pub struct Compound { pub items: Vec<BlockItem>, } impl Compound { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { context.enter_scope(); for item in self.items.into_iter() { item.emit(context); } context.leave_scope(); } } pub struct Declaration { pub ty: Type, pub name: String, pub default: Option<Expression>, } impl Declaration { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { context.create_variable(&self.name, &self.ty); if let Some(expression) = self.default { Expression::Assignment( Box::new(Expression::Identifier(self.name)), Box::new(expression), ) .emit(context); context.put_pop(); } } } pub enum BlockItem { Statement(Statement), Declaration(Declaration), } impl BlockItem { pub fn emit(self, context: &mut Context<impl Write, impl Write, impl Write>) { match self { BlockItem::Statement(statement) => statement.emit(context), BlockItem::Declaration(declaration) => declaration.emit(context), } } } pub enum Expression { IntegerLiteral(i32), Identifier(String), Negation(Box<Expression>), Not(Box<Expression>), LogicalNot(Box<Expression>), Addition(Box<Expression>, Box<Expression>), Subtraction(Box<Expression>, Box<Expression>), Multiplication(Box<Expression>, Box<Expression>), Division(Box<Expression>, Box<Expression>), Modulus(Box<Expression>, Box<Expression>), Equal(Box<Expression>, Box<Expression>), Unequal(Box<Expression>, Box<Expression>), Less(Box<Expression>, Box<Expression>), LessEqual(Box<Expression>, Box<Expression>), Greater(Box<Expression>, Box<Expression>), GreaterEqual(Box<Expression>, Box<Expression>), 
LogicalAnd(Box<Expression>, Box<Expression>), LogicalOr(Box<Expression>, Box<Expression>), Assignment(Box<Expression>, Box<Expression>), Ternary(Box<Expression>, Box<Expression>, Box<Expression>), FunctionCall(String, Vec<Expression>), Reference(Box<Expression>), Dereference(Box<Expression>), Convert(Type, Box<Expression>), Index(Box<Expression>, Vec<Expression>), } impl Expression { pub fn emit( self, context: &mut Context<impl Write, impl Write, impl Write>, ) -> (Type, LeftValue) { macro_rules! make_unary_emitter { ($rhs: ident, $next: ident) => {{ assert!($rhs.emit(context).0.is_primitive()); context.$next(); (Type::make_primitive(), false) }}; } macro_rules! make_binary_operator { ($lhs: ident, $rhs: ident, $instruction: ident) => {{ assert!($lhs.emit(context).0.is_primitive()); assert!($rhs.emit(context).0.is_primitive()); context.$instruction(); (Type::make_primitive(), false) }}; } macro_rules! make_binary_pointer_operator { ($lhs: ident, $rhs: ident, $instruction: ident) => {{ let lt = $lhs.emit(context).0; let rt = $rhs.emit(context).0; assert!(lt == rt && !lt.is_array()); context.$instruction(); (Type::make_primitive(), false) }}; } match self { Expression::IntegerLiteral(value) => { context.put_push(value); (Type::make_primitive(), false) } Expression::Identifier(name) => { let rt = context.access_variable(&name); if !rt.is_array() { context.put_load(); } (rt, true) } Expression::Negation(rhs) => make_unary_emitter!(rhs, put_negate), Expression::Not(rhs) => make_unary_emitter!(rhs, put_not), Expression::LogicalNot(rhs) => make_unary_emitter!(rhs, put_logical_not), Expression::Addition(lhs, rhs) => { let lt = lhs.emit(context).0; let rt = rhs.emit(context).0; let ret = if lt.is_primitive() && rt.is_primitive() { context.put_add(); Type::make_primitive() } else if lt.is_primitive() && rt.is_pointer() { context.put_add_pointer_left(); rt } else if lt.is_pointer() && rt.is_primitive() { context.put_add_pointer_right(); lt } else { panic!(); }; (ret, false) 
} Expression::Subtraction(lhs, rhs) => { let lt = lhs.emit(context).0; let rt = rhs.emit(context).0; let ret = if lt.is_primitive() && rt.is_primitive() { context.put_subtract(); Type::make_primitive() } else if lt.is_pointer() && rt.is_primitive() { context.put_negate(); context.put_add_pointer_right(); lt } else if lt.is_pointer() && rt.is_pointer() { assert!(lt == rt); context.put_subtract(); context.put_push(4); context.put_divide(); Type::make_primitive() } else { panic!(); }; (ret, false) } Expression::Multiplication(lhs, rhs) => make_binary_operator!(lhs, rhs, put_multiply), Expression::Division(lhs, rhs) => make_binary_operator!(lhs, rhs, put_divide), Expression::Modulus(lhs, rhs) => make_binary_operator!(lhs, rhs, put_modulo), Expression::Equal(lhs, rhs) => make_binary_pointer_operator!(lhs, rhs, put_equal), Expression::Unequal(lhs, rhs) => make_binary_pointer_operator!(lhs, rhs, put_unequal), Expression::Less(lhs, rhs) => make_binary_operator!(lhs, rhs, put_less), Expression::LessEqual(lhs, rhs) => make_binary_operator!(lhs, rhs, put_less_equal), Expression::Greater(lhs, rhs) => make_binary_operator!(lhs, rhs, put_greater), Expression::GreaterEqual(lhs, rhs) => { make_binary_operator!(lhs, rhs, put_greater_equal) } Expression::LogicalAnd(lhs, rhs) => make_binary_operator!(lhs, rhs, put_logical_and), Expression::LogicalOr(lhs, rhs) => make_binary_operator!(lhs, rhs, put_logical_or), Expression::Assignment(lhs, rhs) => { let rt = rhs.emit(context).0; let lt = Expression::Reference(lhs).emit(context).0; assert!(rt == lt.unwrap_pointer()); context.put_store(); (rt, false) } Expression::Ternary(condition, true_part, false_part) => { let label_1 = context.next_label(); let label_2 = context.next_label(); assert!(condition.emit(context).0.is_primitive()); context.put_jump_zero(label_1.clone()); let lt = true_part.emit(context).0; context.put_jump(label_2.clone()); context.put_label(label_1); let rt = false_part.emit(context).0; assert!(lt == rt); 
context.put_label(label_2); (lt, false) } Expression::FunctionCall(name, arguments) => { let mut types = Vec::new(); let arg_count = arguments.len(); for argument in arguments.into_iter().rev() { types.push(argument.emit(context).0); } context.check_arguments(&name, &types); context.put_call(&name); context.mark_function_called(&name); for _ in 0..arg_count { context.put_pop(); } context.put_returned_value(); (context.get_function_return_type(&name), false) } Expression::Reference(rhs) => ( match *rhs { Expression::Identifier(name) => { let ty = context.access_variable(&name); assert!(!ty.is_array()); ty.wrap_pointer() } Expression::Dereference(rrhs) => rrhs.emit(context).0, Expression::Index(base, indices) => { let mut ty = base.emit(context).0; let indices_count = indices.len(); let mut i = 0; for index in indices.into_iter() { if ty.is_array() { ty = ty.unwrap_array(); } else if ty.is_pointer() { ty = ty.unwrap_pointer(); } else { panic!(); } assert!(index.emit(context).0.is_primitive()); context.put_push(ty.measure() as i32); context.put_multiply(); context.put_add(); if !ty.is_array() && i + 1 < indices_count { context.put_load(); } i += 1; } ty.wrap_pointer() } Expression::Convert(target, rhs) => { assert!(!target.is_array()); rhs.emit(context); target.wrap_pointer() } _ => unreachable!(), }, false, ), Expression::Dereference(rhs) => { let rt = rhs.emit(context).0; assert!(rt.is_pointer()); context.put_load(); (rt.unwrap_pointer(), true) } Expression::Convert(target, rhs) => { assert!(!target.is_array()); let (_, vt) = rhs.emit(context); (target, vt) } Expression::Index(base, indices) => { let mut ty = base.emit(context).0; for index in indices.into_iter() { if ty.is_array() { ty = ty.unwrap_array(); } else if ty.is_pointer() { ty = ty.unwrap_pointer(); } else { panic!(); } assert!(index.emit(context).0.is_primitive()); context.put_push(ty.measure() as i32); context.put_multiply(); context.put_add(); if !ty.is_array() { context.put_load(); } } (ty, true) } } 
} }
37.848548
98
0.47558
098fa9e0447a46a070e825eda80a36ce02f5eee1
213
// run-pass #![feature(generic_const_exprs)] #![allow(incomplete_features)] fn with_bound<const N: usize>() where [u8; N / 2]: Sized { let _: [u8; N / 2] = [0; N / 2]; } fn main() { with_bound::<4>(); }
17.75
58
0.57277
ac7a7ea7687113b27ef2a97e835beff48b27c123
73
pub mod bert; pub mod byte_level; pub mod metaspace; pub mod whitespace;
14.6
19
0.780822
ac870713a254e3638917ee4e95e94cebcc882cda
54,077
//! Tests running SWFs in a headless Ruffle instance. //! //! Trace output can be compared with correct output from the official Flash Player. use approx::assert_relative_eq; use ruffle_core::backend::render::RenderBackend; use ruffle_core::backend::video::SoftwareVideoBackend; use ruffle_core::backend::video::VideoBackend; use ruffle_core::backend::{ audio::NullAudioBackend, locale::NullLocaleBackend, log::LogBackend, navigator::{NullExecutor, NullNavigatorBackend}, render::NullRenderer, storage::{MemoryStorageBackend, StorageBackend}, ui::NullUiBackend, video::NullVideoBackend, }; use ruffle_core::context::UpdateContext; use ruffle_core::external::Value as ExternalValue; use ruffle_core::external::{ExternalInterfaceMethod, ExternalInterfaceProvider}; use ruffle_core::tag_utils::SwfMovie; use ruffle_core::Player; use ruffle_render_wgpu::target::TextureTarget; use ruffle_render_wgpu::wgpu; use ruffle_render_wgpu::WgpuRenderBackend; use std::cell::RefCell; use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use std::rc::Rc; use std::sync::{Arc, Mutex}; use std::time::Duration; fn get_img_platform_suffix(info: &wgpu::AdapterInfo) -> String { format!("{}-{}", std::env::consts::OS, info.name) } const RUN_IMG_TESTS: bool = cfg!(feature = "imgtests"); fn set_logger() { let _ = env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")) .format_timestamp(None) .is_test(true) .try_init(); } type Error = Box<dyn std::error::Error>; macro_rules! val_or_false { ($val:literal) => { $val }; () => { false }; } // This macro generates test cases for a given list of SWFs. // If 'img' is true, then we will render an image of the final frame // of the SWF, and compare it against a reference image on disk. macro_rules! swf_tests { ($($(#[$attr:meta])* ($name:ident, $path:expr, $num_frames:literal $(, img = $img:literal)? 
),)*) => { $( #[test] $(#[$attr])* fn $name() -> Result<(), Error> { set_logger(); test_swf( concat!("tests/swfs/", $path, "/test.swf"), $num_frames, concat!("tests/swfs/", $path, "/output.txt"), val_or_false!($($img)?) ) } )* }; } // This macro generates test cases for a given list of SWFs using `test_swf_approx`. macro_rules! swf_tests_approx { ($($(#[$attr:meta])* ($name:ident, $path:expr, $num_frames:literal $(, $opt:ident = $val:expr)*),)*) => { $( #[test] $(#[$attr])* fn $name() -> Result<(), Error> { set_logger(); test_swf_approx( concat!("tests/swfs/", $path, "/test.swf"), $num_frames, concat!("tests/swfs/", $path, "/output.txt"), |actual, expected| assert_relative_eq!(actual, expected $(, $opt = $val)*), ) } )* }; } // List of SWFs to test. // Format: (test_name, test_folder, number_of_frames_to_run) // The test folder is a relative to core/tests/swfs // Inside the folder is expected to be "test.swf" and "output.txt" with the correct output. swf_tests! { (add_property, "avm1/add_property", 1), (as_transformed_flag, "avm1/as_transformed_flag", 3), (as_broadcaster, "avm1/as_broadcaster", 1), (as_broadcaster_initialize, "avm1/as_broadcaster_initialize", 1), (as_set_prop_flags, "avm1/as_set_prop_flags", 1), (attach_movie, "avm1/attach_movie", 1), (as2_bitor, "avm1/bitor", 1), (as2_bitand, "avm1/bitand", 1), (as2_bitxor, "avm1/bitxor", 1), (function_base_clip, "avm1/function_base_clip", 2), (call, "avm1/call", 2), (color, "avm1/color", 1, img = true), (clip_events, "avm1/clip_events", 4), (unload_clip_event, "avm1/unload_clip_event", 2), (create_empty_movie_clip, "avm1/create_empty_movie_clip", 2), (empty_movieclip_can_attach_movies, "avm1/empty_movieclip_can_attach_movies", 1), (duplicate_movie_clip, "avm1/duplicate_movie_clip", 1), (mouse_listeners, "avm1/mouse_listeners", 1), (do_init_action, "avm1/do_init_action", 3), (execution_order1, "avm1/execution_order1", 3), (execution_order2, "avm1/execution_order2", 15), (execution_order3, "avm1/execution_order3", 
5), (execution_order4, "avm1/execution_order4", 4), (export_assets, "avm1/export_assets", 1), (single_frame, "avm1/single_frame", 2), (looping, "avm1/looping", 6), (math_min_max, "avm1/math_min_max", 1), (matrix, "avm1/matrix", 1), (point, "avm1/point", 1), (rectangle, "avm1/rectangle", 1), (date_is_special, "avm1/date_is_special", 1), (get_bytes_total, "avm1/get_bytes_total", 1), (goto_advance1, "avm1/goto_advance1", 2), (goto_advance2, "avm1/goto_advance2", 2), (goto_both_ways1, "avm1/goto_both_ways1", 2), (goto_both_ways2, "avm1/goto_both_ways2", 3), (goto_frame, "avm1/goto_frame", 3), (goto_frame2, "avm1/goto_frame2", 5), (goto_frame_number, "avm1/goto_frame_number", 4), (goto_label, "avm1/goto_label", 4), (goto_methods, "avm1/goto_methods", 1), (goto_rewind1, "avm1/goto_rewind1", 4), (goto_rewind2, "avm1/goto_rewind2", 5), (goto_rewind3, "avm1/goto_rewind3", 2), (goto_execution_order, "avm1/goto_execution_order", 3), (goto_execution_order2, "avm1/goto_execution_order2", 2), (greaterthan_swf5, "avm1/greaterthan_swf5", 1), (greaterthan_swf8, "avm1/greaterthan_swf8", 1), (strictly_equals, "avm1/strictly_equals", 1), (tell_target, "avm1/tell_target", 3), (typeofs, "avm1/typeof", 1), (typeof_globals, "avm1/typeof_globals", 1), (closure_scope, "avm1/closure_scope", 1), (variable_args, "avm1/variable_args", 1), (custom_clip_methods, "avm1/custom_clip_methods", 3), (delete, "avm1/delete", 3), (selection, "avm1/selection", 1), (default_names, "avm1/default_names", 6), (array_trivial, "avm1/array_trivial", 1), (array_concat, "avm1/array_concat", 1), (array_slice, "avm1/array_slice", 1), (array_splice, "avm1/array_splice", 1), (array_properties, "avm1/array_properties", 1), (array_prototyping, "avm1/array_prototyping", 1), (array_length, "avm1/array_length", 1), (array_sort, "avm1/array_sort", 1), (array_enumerate, "avm1/array_enumerate", 1), (timeline_function_def, "avm1/timeline_function_def", 3), (root_global_parent, "avm1/root_global_parent", 3), (register_underflow, 
"avm1/register_underflow", 1), (object_prototypes, "avm1/object_prototypes", 1), (movieclip_prototype_extension, "avm1/movieclip_prototype_extension", 1), (movieclip_hittest, "avm1/movieclip_hittest", 1), (movieclip_hittest_shapeflag, "avm1/movieclip_hittest_shapeflag", 10), (movieclip_lockroot, "avm1/movieclip_lockroot", 10), #[ignore] (textfield_text, "avm1/textfield_text", 1), (recursive_prototypes, "avm1/recursive_prototypes", 2), (stage_object_children, "avm1/stage_object_children", 2), (has_own_property, "avm1/has_own_property", 1), (extends_chain, "avm1/extends_chain", 1), (is_prototype_of, "avm1/is_prototype_of", 1), #[ignore] (string_coercion, "avm1/string_coercion", 1), (lessthan_swf4, "avm1/lessthan_swf4", 1), (lessthan2_swf5, "avm1/lessthan2_swf5", 1), (lessthan2_swf6, "avm1/lessthan2_swf6", 1), (lessthan2_swf7, "avm1/lessthan2_swf7", 1), (logical_ops_swf4, "avm1/logical_ops_swf4", 1), (logical_ops_swf8, "avm1/logical_ops_swf8", 1), (movieclip_get_instance_at_depth, "avm1/movieclip_get_instance_at_depth", 1), (movieclip_depth_methods, "avm1/movieclip_depth_methods", 3), (get_variable_in_scope, "avm1/get_variable_in_scope", 1), (movieclip_init_object, "avm1/movieclip_init_object", 1), (greater_swf6, "avm1/greater_swf6", 1), (greater_swf7, "avm1/greater_swf7", 1), (equals_swf4, "avm1/equals_swf4", 1), (equals2_swf5, "avm1/equals2_swf5", 1), (equals2_swf6, "avm1/equals2_swf6", 1), (equals2_swf7, "avm1/equals2_swf7", 1), (escape, "avm1/escape", 1), (unescape, "avm1/unescape", 1), (register_class, "avm1/register_class", 1), (register_class_return_value, "avm1/register_class_return_value", 1), (register_class_swf6, "avm1/register_class_swf6", 1), (register_and_init_order, "avm1/register_and_init_order", 1), (on_construct, "avm1/on_construct", 1), (set_variable_scope, "avm1/set_variable_scope", 1), (slash_syntax, "avm1/slash_syntax", 2), (strictequals_swf6, "avm1/strictequals_swf6", 1), (string_methods, "avm1/string_methods", 1), (string_methods_negative_args, 
"avm1/string_methods_negative_args", 1), (string_ops_swf6, "avm1/string_ops_swf6", 1), (path_string, "avm1/path_string", 1), (global_is_bare, "avm1/global_is_bare", 1), (primitive_type_globals, "avm1/primitive_type_globals", 1), (primitive_instanceof, "avm1/primitive_instanceof", 1), (as2_oop, "avm1/as2_oop", 1), (extends_native_type, "avm1/extends_native_type", 1), (xml, "avm1/xml", 1), (xml_namespaces, "avm1/xml_namespaces", 1), (xml_node_namespaceuri, "avm1/xml_node_namespaceuri", 1), (xml_node_weirdnamespace, "avm1/xml_node_weirdnamespace", 1), (xml_clone_expandos, "avm1/xml_clone_expandos", 1), (xml_has_child_nodes, "avm1/xml_has_child_nodes", 1), (xml_first_last_child, "avm1/xml_first_last_child", 1), (xml_parent_and_child, "avm1/xml_parent_and_child", 1), (xml_siblings, "avm1/xml_siblings", 1), (xml_attributes_read, "avm1/xml_attributes_read", 1), (xml_append_child, "avm1/xml_append_child", 1), (xml_append_child_with_parent, "avm1/xml_append_child_with_parent", 1), (xml_remove_node, "avm1/xml_remove_node", 1), (xml_reparenting, "avm1/xml_reparenting", 1), (xml_insert_before, "avm1/xml_insert_before", 1), (xml_to_string, "avm1/xml_to_string", 1), (xml_to_string_comment, "avm1/xml_to_string_comment", 1), (xml_idmap, "avm1/xml_idmap", 1), (xml_ignore_comments, "avm1/xml_ignore_comments", 1), (xml_ignore_white, "avm1/xml_ignore_white", 1), (xml_inspect_doctype, "avm1/xml_inspect_doctype", 1), (xml_unescaping, "avm1/xml_unescaping", 1), #[ignore] (xml_inspect_xmldecl, "avm1/xml_inspect_xmldecl", 1), (xml_inspect_createmethods, "avm1/xml_inspect_createmethods", 1), (xml_inspect_parsexml, "avm1/xml_inspect_parsexml", 1), (xml_cdata, "avm1/xml_cdata", 1), (funky_function_calls, "avm1/funky_function_calls", 1), (undefined_to_string_swf6, "avm1/undefined_to_string_swf6", 1), (define_function_case_sensitive, "avm1/define_function_case_sensitive", 2), (define_function2_preload, "avm1/define_function2_preload", 1), (define_function2_preload_order, 
"avm1/define_function2_preload_order", 1), (mcl_as_broadcaster, "avm1/mcl_as_broadcaster", 1), (uncaught_exception, "avm1/uncaught_exception", 1), (uncaught_exception_bubbled, "avm1/uncaught_exception_bubbled", 1), (try_catch_finally, "avm1/try_catch_finally", 1), (try_finally_simple, "avm1/try_finally_simple", 1), (loadmovie, "avm1/loadmovie", 2), (loadmovienum, "avm1/loadmovienum", 2), (loadmovie_registerclass, "avm1/loadmovie_registerclass", 2), (loadmovie_replace_root, "avm1/loadmovie_replace_root", 3), (loadmovie_method, "avm1/loadmovie_method", 2), (loadmovie_fail, "avm1/loadmovie_fail", 1), (unloadmovie, "avm1/unloadmovie", 11), (unloadmovienum, "avm1/unloadmovienum", 11), (unloadmovie_method, "avm1/unloadmovie_method", 11), (mcl_loadclip, "avm1/mcl_loadclip", 11), (mcl_unloadclip, "avm1/mcl_unloadclip", 11), (mcl_getprogress, "avm1/mcl_getprogress", 6), (load_vars, "avm1/load_vars", 2), (loadvariables, "avm1/loadvariables", 3), (loadvariablesnum, "avm1/loadvariablesnum", 3), (loadvariables_method, "avm1/loadvariables_method", 3), (xml_load, "avm1/xml_load", 1), (with_return, "avm1/with_return", 1), (watch, "avm1/watch", 1), (watch_textfield, "avm1/watch_textfield", 1), #[ignore] (watch_virtual_property, "avm1/watch_virtual_property", 1), (watch_virtual_property_proto, "avm1/watch_virtual_property_proto", 1), (cross_movie_root, "avm1/cross_movie_root", 5), (roots_and_levels, "avm1/roots_and_levels", 1), (swf5_encoding, "avm1/swf5_encoding", 1), (swf6_case_insensitive, "avm1/swf6_case_insensitive", 1), (swf7_case_sensitive, "avm1/swf7_case_sensitive", 1), (prototype_enumerate, "avm1/prototype_enumerate", 1), (stage_object_enumerate, "avm1/stage_object_enumerate", 1), (new_object_enumerate, "avm1/new_object_enumerate", 1), (as2_super_and_this_v6, "avm1/as2_super_and_this_v6", 1), (as2_super_and_this_v8, "avm1/as2_super_and_this_v8", 1), (as2_super_via_manual_prototype, "avm1/as2_super_via_manual_prototype", 1), (as1_constructor_v6, "avm1/as1_constructor_v6", 
1), (as1_constructor_v7, "avm1/as1_constructor_v7", 1), (issue_710, "avm1/issue_710", 1), (issue_1086, "avm1/issue_1086", 1), (issue_1104, "avm1/issue_1104", 3), (issue_1671, "avm1/issue_1671", 1), (issue_1906, "avm1/issue_1906", 2), (issue_2030, "avm1/issue_2030", 1), (issue_2084, "avm1/issue_2084", 2), (issue_2166, "avm1/issue_2166", 1), (issue_2870, "avm1/issue_2870", 10), (issue_3169, "avm1/issue_3169", 1), (issue_3446, "avm1/issue_3446", 1), (issue_3522, "avm1/issue_3522", 2), (issue_4377, "avm1/issue_4377", 1), (function_as_function, "avm1/function_as_function", 1), (infinite_recursion_function, "avm1/infinite_recursion_function", 1), (infinite_recursion_function_in_setter, "avm1/infinite_recursion_function_in_setter", 1), (infinite_recursion_virtual_property, "avm1/infinite_recursion_virtual_property", 1), (edittext_font_size, "avm1/edittext_font_size", 1), (edittext_default_format, "avm1/edittext_default_format", 1), (edittext_leading, "avm1/edittext_leading", 1), #[ignore] (edittext_newlines, "avm1/edittext_newlines", 1), (edittext_html_entity, "avm1/edittext_html_entity", 1), (edittext_password, "avm1/edittext_password", 1), (edittext_scroll, "avm1/edittext_scroll", 1), #[ignore] (edittext_html_roundtrip, "avm1/edittext_html_roundtrip", 1), (edittext_newline_stripping, "avm1/edittext_newline_stripping", 1), (edittext_width_height, "avm1/edittext_width_height", 1), (define_local, "avm1/define_local", 1), (textfield_properties, "avm1/textfield_properties", 1), (textfield_background_color, "avm1/textfield_background_color", 1), (textfield_border_color, "avm1/textfield_border_color", 1), (textfield_variable, "avm1/textfield_variable", 8), (error, "avm1/error", 1), (color_transform, "avm1/color_transform", 1), (with, "avm1/with", 1), (arguments, "avm1/arguments", 1), (prototype_properties, "avm1/prototype_properties", 1), (stage_object_properties_get_var, "avm1/stage_object_properties_get_var", 1), (set_interval, "avm1/set_interval", 20), (context_menu, 
"avm1/context_menu", 1), (context_menu_item, "avm1/context_menu_item", 1), (constructor_function, "avm1/constructor_function", 1), (global_array, "avm1/global_array", 1), (array_constructor, "avm1/array_constructor", 1), (object_constructor, "avm1/object_constructor", 1), (object_function, "avm1/object_function", 1), (parse_int, "avm1/parse_int", 1), (bitmap_filter, "avm1/bitmap_filter", 1), (blur_filter, "avm1/blur_filter", 1), (glow_filter, "avm1/glow_filter", 1), (date_constructor, "avm1/date/constructor", 1), (removed_clip_halts_script, "avm1/removed_clip_halts_script", 13), (target_clip_removed, "avm1/target_clip_removed", 1), (date_utc, "avm1/date/UTC", 1), (date_set_date, "avm1/date/setDate", 1), (date_set_full_year, "avm1/date/setFullYear", 1), (date_set_hours, "avm1/date/setHours", 1), (date_set_milliseconds, "avm1/date/setMilliseconds", 1), (date_set_minutes, "avm1/date/setMinutes", 1), (date_set_month, "avm1/date/setMonth", 1), (date_set_seconds, "avm1/date/setSeconds", 1), (date_set_time, "avm1/date/setTime", 1), (date_set_utc_date, "avm1/date/setUTCDate", 1), (date_set_utc_full_year, "avm1/date/setUTCFullYear", 1), (date_set_utc_hours, "avm1/date/setUTCHours", 1), (date_set_utc_milliseconds, "avm1/date/setUTCMilliseconds", 1), (date_set_utc_minutes, "avm1/date/setUTCMinutes", 1), (date_set_utc_month, "avm1/date/setUTCMonth", 1), (date_set_utc_seconds, "avm1/date/setUTCSeconds", 1), (date_set_year, "avm1/date/setYear", 1), (this_scoping, "avm1/this_scoping", 1), (bevel_filter, "avm1/bevel_filter", 1), (drop_shadow_filter, "avm1/drop_shadow_filter", 1), (color_matrix_filter, "avm1/color_matrix_filter", 1), (displacement_map_filter, "avm1/displacement_map_filter", 1), (convolution_filter, "avm1/convolution_filter", 1), (gradient_bevel_filter, "avm1/gradient_bevel_filter", 1), (gradient_glow_filter, "avm1/gradient_glow_filter", 1), (bitmap_data, "avm1/bitmap_data", 1), (bitmap_data_max_size_swf9, "avm1/bitmap_data_max_size_swf9", 1), 
(bitmap_data_max_size_swf10, "avm1/bitmap_data_max_size_swf10", 1), (bitmap_data_noise, "avm1/bitmap_data_noise", 1), (array_call_method, "avm1/array_call_method", 1), (bad_placeobject_clipaction, "avm1/bad_placeobject_clipaction", 2), (bad_swf_tag_past_eof, "avm1/bad_swf_tag_past_eof", 1), (sound, "avm1/sound", 1), (action_to_integer, "avm1/action_to_integer", 1), (call_method_empty_name, "avm1/call_method_empty_name", 1), (init_array_invalid, "avm1/init_array_invalid", 1), (init_object_invalid, "avm1/init_array_invalid", 1), (new_object_wrap, "avm1/new_object_wrap", 1), (new_method_wrap, "avm1/new_method_wrap", 1), (as3_hello_world, "avm2/hello_world", 1), (as3_function_call, "avm2/function_call", 1), (as3_function_call_via_call, "avm2/function_call_via_call", 1), (as3_constructor_call, "avm2/constructor_call", 1), (as3_class_methods, "avm2/class_methods", 1), (as3_es3_inheritance, "avm2/es3_inheritance", 1), (as3_es4_inheritance, "avm2/es4_inheritance", 1), (as3_stored_properties, "avm2/stored_properties", 1), (as3_virtual_properties, "avm2/virtual_properties", 1), (as3_es4_oop_prototypes, "avm2/es4_oop_prototypes", 1), (as3_es4_method_binding, "avm2/es4_method_binding", 1), (as3_control_flow_bool, "avm2/control_flow_bool", 1), (as3_control_flow_stricteq, "avm2/control_flow_stricteq", 1), (as3_object_enumeration, "avm2/object_enumeration", 1), (as3_object_prototype, "avm2/object_prototype", 1), (as3_class_enumeration, "avm2/class_enumeration", 1), (as3_is_prototype_of, "avm2/is_prototype_of", 1), (as3_has_own_property, "avm2/has_own_property", 1), (as3_property_is_enumerable, "avm2/property_is_enumerable", 1), (as3_set_property_is_enumerable, "avm2/set_property_is_enumerable", 1), (as3_object_to_string, "avm2/object_to_string", 1), (as3_function_to_string, "avm2/function_to_string", 1), (as3_class_to_string, "avm2/class_to_string", 1), (as3_object_to_locale_string, "avm2/object_to_locale_string", 1), (as3_function_to_locale_string, 
"avm2/function_to_locale_string", 1), (as3_class_to_locale_string, "avm2/class_to_locale_string", 1), (as3_object_value_of, "avm2/object_value_of", 1), (as3_function_value_of, "avm2/function_value_of", 1), (as3_class_value_of, "avm2/class_value_of", 1), (as3_if_stricteq, "avm2/if_stricteq", 1), (as3_if_strictne, "avm2/if_strictne", 1), (as3_strict_equality, "avm2/strict_equality", 1), (as3_es4_interfaces, "avm2/es4_interfaces", 1), (as3_is_finite, "avm2/is_finite", 1), (as3_is_nan, "avm2/is_nan", 1), (as3_istype, "avm2/istype", 1), (as3_istypelate, "avm2/istypelate", 1), (as3_instanceof, "avm2/instanceof", 1), (as3_astype, "avm2/astype", 1), (as3_astypelate, "avm2/astypelate", 1), (as3_truthiness, "avm2/truthiness", 1), (as3_falsiness, "avm2/falsiness", 1), (as3_boolean_negation, "avm2/boolean_negation", 1), (as3_convert_boolean, "avm2/convert_boolean", 1), (as3_convert_number, "avm2/convert_number", 1), (as3_convert_integer, "avm2/convert_integer", 1), (as3_convert_uinteger, "avm2/convert_uinteger", 1), (as3_coerce_string, "avm2/coerce_string", 1), (as3_if_eq, "avm2/if_eq", 1), (as3_if_ne, "avm2/if_ne", 1), (as3_equals, "avm2/equals", 1), (as3_if_lt, "avm2/if_lt", 1), (as3_if_lte, "avm2/if_lte", 1), (as3_if_gte, "avm2/if_gte", 1), (as3_if_gt, "avm2/if_gt", 1), (as3_greaterequals, "avm2/greaterequals", 1), (as3_greaterthan, "avm2/greaterthan", 1), (as3_lessequals, "avm2/lessequals", 1), (as3_lessthan, "avm2/lessthan", 1), (nested_textfields_in_buttons, "avm1/nested_textfields_in_buttons", 1), (conflicting_instance_names, "avm1/conflicting_instance_names", 6), (button_children, "avm1/button_children", 1), (transform, "avm1/transform", 1), (target_clip_swf5, "avm1/target_clip_swf5", 2), (target_clip_swf6, "avm1/target_clip_swf6", 2), (target_path, "avm1/target_path", 1), (remove_movie_clip, "avm1/remove_movie_clip", 2), (as3_add, "avm2/add", 1), (as3_bitor, "avm2/bitor", 1), (as3_bitand, "avm2/bitand", 1), (as3_bitnot, "avm2/bitnot", 1), (as3_bitxor, "avm2/bitxor", 
1), (as3_declocal, "avm2/declocal", 1), (as3_declocal_i, "avm2/declocal_i", 1), (as3_decrement, "avm2/decrement", 1), (as3_decrement_i, "avm2/decrement_i", 1), (as3_inclocal, "avm2/inclocal", 1), (as3_inclocal_i, "avm2/inclocal_i", 1), (as3_increment, "avm2/increment", 1), (as3_increment_i, "avm2/increment_i", 1), (as3_lshift, "avm2/lshift", 1), (as3_modulo, "avm2/modulo", 1), (as3_multiply, "avm2/multiply", 1), (as3_negate, "avm2/negate", 1), (as3_rshift, "avm2/rshift", 1), (as3_subtract, "avm2/subtract", 1), (as3_urshift, "avm2/urshift", 1), (as3_in, "avm2/in", 1), (as3_bytearray, "avm2/bytearray", 1), (as3_date, "avm2/date", 1), (as3_date_parse, "avm2/date_parse", 1), (as3_generate_random_bytes, "avm2/generate_random_bytes", 1), (as3_get_definition_by_name, "avm2/get_definition_by_name", 1), (as3_get_qualified_class_name, "avm2/get_qualified_class_name", 1), (as3_get_qualified_super_class_name, "avm2/get_qualified_super_class_name", 1), (as3_array_constr, "avm2/array_constr", 1), (as3_array_access, "avm2/array_access", 1), (as3_array_storage, "avm2/array_storage", 1), (as3_array_delete, "avm2/array_delete", 1), (as3_array_holes, "avm2/array_holes", 1), (as3_array_literal, "avm2/array_literal", 1), (as3_array_concat, "avm2/array_concat", 1), (as3_array_tostring, "avm2/array_tostring", 1), (as3_array_tolocalestring, "avm2/array_tolocalestring", 1), (as3_array_valueof, "avm2/array_valueof", 1), (as3_array_join, "avm2/array_join", 1), (as3_array_foreach, "avm2/array_foreach", 1), (as3_array_map, "avm2/array_map", 1), (as3_array_filter, "avm2/array_filter", 1), (as3_array_every, "avm2/array_every", 1), (as3_array_some, "avm2/array_some", 1), (as3_array_indexof, "avm2/array_indexof", 1), (as3_array_lastindexof, "avm2/array_lastindexof", 1), (as3_array_push, "avm2/array_push", 1), (as3_array_pop, "avm2/array_pop", 1), (as3_array_reverse, "avm2/array_reverse", 1), (as3_array_shift, "avm2/array_shift", 1), (as3_array_unshift, "avm2/array_unshift", 1), (as3_array_slice, 
"avm2/array_slice", 1), (as3_array_splice, "avm2/array_splice", 1), (as3_array_sort, "avm2/array_sort", 1), (as3_array_sorton, "avm2/array_sorton", 1), (as3_array_hasownproperty, "avm2/array_hasownproperty", 1), (as3_array_length, "avm2/array_length", 1), (stage_property_representation, "avm1/stage_property_representation", 1), (as3_timeline_scripts, "avm2/timeline_scripts", 3), (as3_movieclip_properties, "avm2/movieclip_properties", 4), (as3_movieclip_gotoandplay, "avm2/movieclip_gotoandplay", 5), (as3_movieclip_gotoandstop, "avm2/movieclip_gotoandstop", 5), (as3_movieclip_stop, "avm2/movieclip_stop", 5), (as3_movieclip_prev_frame, "avm2/movieclip_prev_frame", 5), (as3_movieclip_next_frame, "avm2/movieclip_next_frame", 5), (as3_movieclip_prev_scene, "avm2/movieclip_prev_scene", 5), (as3_movieclip_next_scene, "avm2/movieclip_next_scene", 5), (as3_framelabel_constr, "avm2/framelabel_constr", 5), (as3_movieclip_currentlabels, "avm2/movieclip_currentlabels", 5), (as3_scene_constr, "avm2/scene_constr", 5), (as3_movieclip_currentscene, "avm2/movieclip_currentscene", 5), (as3_movieclip_scenes, "avm2/movieclip_scenes", 5), (as3_movieclip_play, "avm2/movieclip_play", 5), (as3_movieclip_constr, "avm2/movieclip_constr", 1), (as3_lazyinit, "avm2/lazyinit", 1), (as3_trace, "avm2/trace", 1), (as3_displayobjectcontainer_getchildat, "avm2/displayobjectcontainer_getchildat", 1), (as3_displayobjectcontainer_getchildbyname, "avm2/displayobjectcontainer_getchildbyname", 1), (as3_displayobjectcontainer_addchild, "avm2/displayobjectcontainer_addchild", 1), (as3_displayobjectcontainer_addchildat, "avm2/displayobjectcontainer_addchildat", 1), (as3_displayobjectcontainer_removechild, "avm2/displayobjectcontainer_removechild", 1), (as3_displayobjectcontainer_removechild_timelinemanip_remove1, "avm2/displayobjectcontainer_removechild_timelinemanip_remove1", 7), (as3_displayobjectcontainer_addchild_timelinepull0, "avm2/displayobjectcontainer_addchild_timelinepull0", 7), 
(as3_displayobjectcontainer_addchild_timelinepull1, "avm2/displayobjectcontainer_addchild_timelinepull1", 7), (as3_displayobjectcontainer_addchild_timelinepull2, "avm2/displayobjectcontainer_addchild_timelinepull2", 7), (as3_displayobjectcontainer_addchildat_timelinelock0, "avm2/displayobjectcontainer_addchildat_timelinelock0", 7), (as3_displayobjectcontainer_addchildat_timelinelock1, "avm2/displayobjectcontainer_addchildat_timelinelock1", 7), (as3_displayobjectcontainer_addchildat_timelinelock2, "avm2/displayobjectcontainer_addchildat_timelinelock2", 7), (as3_displayobjectcontainer_contains, "avm2/displayobjectcontainer_contains", 5), (as3_displayobjectcontainer_getchildindex, "avm2/displayobjectcontainer_getchildindex", 5), (as3_displayobjectcontainer_removechildat, "avm2/displayobjectcontainer_removechildat", 1), (as3_displayobjectcontainer_removechildren, "avm2/displayobjectcontainer_removechildren", 5), (as3_displayobjectcontainer_setchildindex, "avm2/displayobjectcontainer_setchildindex", 1), (as3_displayobjectcontainer_swapchildren, "avm2/displayobjectcontainer_swapchildren", 1), (as3_displayobjectcontainer_swapchildrenat, "avm2/displayobjectcontainer_swapchildrenat", 1), (button_order, "avm1/button_order", 2), (as3_displayobjectcontainer_stopallmovieclips, "avm2/displayobjectcontainer_stopallmovieclips", 2), (as3_displayobjectcontainer_timelineinstance, "avm2/displayobjectcontainer_timelineinstance", 6), (as3_displayobject_alpha, "avm2/displayobject_alpha", 1), (as3_displayobject_x, "avm2/displayobject_x", 1), (as3_displayobject_y, "avm2/displayobject_y", 1), (as3_displayobject_name, "avm2/displayobject_name", 4), (as3_displayobject_parent, "avm2/displayobject_parent", 4), (as3_displayobject_root, "avm2/displayobject_root", 4), (as3_displayobject_visible, "avm2/displayobject_visible", 4), (as3_displayobject_hittestpoint, "avm2/displayobject_hittestpoint", 2), (as3_displayobject_hittestobject, "avm2/displayobject_hittestobject", 1), 
(as3_event_valueof_tostring, "avm2/event_valueof_tostring", 1), (as3_event_bubbles, "avm2/event_bubbles", 1), (as3_event_cancelable, "avm2/event_cancelable", 1), (as3_event_type, "avm2/event_type", 1), (as3_event_clone, "avm2/event_clone", 1), (as3_event_formattostring, "avm2/event_formattostring", 1), (as3_event_isdefaultprevented, "avm2/event_isdefaultprevented", 1), (as3_function_call_via_apply, "avm2/function_call_via_apply", 1), (as3_function_call_arguments, "avm2/function_call_arguments", 1), (as3_function_call_rest, "avm2/function_call_rest", 1), (as3_eventdispatcher_haseventlistener, "avm2/eventdispatcher_haseventlistener", 1), (as3_eventdispatcher_willtrigger, "avm2/eventdispatcher_willtrigger", 1), (as3_movieclip_willtrigger, "avm2/movieclip_willtrigger", 3), (as3_eventdispatcher_dispatchevent, "avm2/eventdispatcher_dispatchevent", 1), (as3_eventdispatcher_dispatchevent_handlerorder, "avm2/eventdispatcher_dispatchevent_handlerorder", 1), (as3_eventdispatcher_dispatchevent_cancel, "avm2/eventdispatcher_dispatchevent_cancel", 1), (as3_eventdispatcher_dispatchevent_this, "avm2/eventdispatcher_dispatchevent_this", 1), (as3_movieclip_dispatchevent, "avm2/movieclip_dispatchevent", 1), (as3_movieclip_dispatchevent_handlerorder, "avm2/movieclip_dispatchevent_handlerorder", 1), (as3_movieclip_dispatchevent_cancel, "avm2/movieclip_dispatchevent_cancel", 1), (as3_movieclip_dispatchevent_target, "avm2/movieclip_dispatchevent_target", 1), (as3_movieclip_dispatchevent_selfadd, "avm2/movieclip_dispatchevent_selfadd", 1), (as3_string_constr, "avm2/string_constr", 1), (as3_string_length, "avm2/string_length", 1), (as3_string_char_at, "avm2/string_char_at", 1), (as3_string_char_code_at, "avm2/string_char_code_at", 1), (as3_string_split, "avm2/string_split", 1), (as3_typeof, "avm2/typeof", 1), (use_hand_cursor, "avm1/use_hand_cursor", 1), (as3_movieclip_displayevents, "avm2/movieclip_displayevents", 9), (as3_movieclip_displayevents_timeline, 
"avm2/movieclip_displayevents_timeline", 5), (as3_movieclip_displayevents_looping, "avm2/movieclip_displayevents_looping", 5), (as3_movieclip_displayevents_dblhandler, "avm2/movieclip_displayevents_dblhandler", 4), (as3_regexp_constr, "avm2/regexp_constr", 1), (as3_regexp_test, "avm2/regexp_test", 1), (as3_regexp_exec, "avm2/regexp_exec", 1), (as3_point, "avm2/point", 1), (as3_edittext_default_format, "avm2/edittext_default_format", 1), (as3_edittext_html_entity, "avm2/edittext_html_entity", 1), #[ignore] (as3_edittext_html_roundtrip, "avm2/edittext_html_roundtrip", 1), (as3_edittext_newline_stripping, "avm2/edittext_newline_stripping", 1), (as3_edittext_width_height, "avm2/edittext_width_height", 1), (as3_shape_drawrect, "avm2/shape_drawrect", 1), (as3_movieclip_drawrect, "avm2/movieclip_drawrect", 1), (as3_get_timer, "avm2/get_timer", 1), (as3_op_escxattr, "avm2/op_escxattr", 1), (as3_op_escxelem, "avm2/op_escxelem", 1), (as3_op_lookupswitch, "avm2/op_lookupswitch", 1), (as3_loaderinfo_properties, "avm2/loaderinfo_properties", 2), (as3_loaderinfo_quine, "avm2/loaderinfo_quine", 2), (nan_scale, "avm1/nan_scale", 1), (as3_nan_scale, "avm2/nan_scale", 1), (as3_documentclass, "avm2/documentclass", 1), (timer_run_actions, "avm1/timer_run_actions", 1), (as3_op_coerce, "avm2/op_coerce", 1), (as3_op_coerce_x, "avm2/op_coerce_x", 1), (as3_domain_memory, "avm2/domain_memory", 1), (as3_movieclip_symbol_constr, "avm2/movieclip_symbol_constr", 1), (as3_stage_access, "avm2/stage_access", 1), (as3_stage_displayobject_properties, "avm2/stage_displayobject_properties", 1), (as3_stage_loaderinfo_properties, "avm2/stage_loaderinfo_properties", 2), (as3_stage_properties, "avm2/stage_properties", 1), (as3_closures, "avm2/closures", 1), (as3_simplebutton_structure, "avm2/simplebutton_structure", 2), (as3_simplebutton_childevents, "avm2/simplebutton_childevents", 2), (as3_simplebutton_childevents_nested, "avm2/simplebutton_childevents_nested", 2), (as3_simplebutton_constr, 
"avm2/simplebutton_constr", 2), (as3_simplebutton_constr_childevents, "avm2/simplebutton_constr_childevents", 2), (as3_simplebutton_childprops, "avm2/simplebutton_childprops", 1), (as3_simplebutton_childshuffle, "avm2/simplebutton_childshuffle", 1), (as3_simplebutton_constr_params, "avm2/simplebutton_constr_params", 1), (as3_place_object_replace, "avm2/place_object_replace", 2), (as3_place_object_replace_2, "avm2/place_object_replace_2", 3), (as3_function_call_default, "avm2/function_call_default", 1), (as3_function_call_types, "avm2/function_call_types", 1), (as3_function_call_coercion, "avm2/function_call_coercion", 1), (as3_istypelate_coerce, "avm2/istypelate_coerce", 1), (as3_class_cast_call, "avm2/class_cast_call", 1), (as3_class_supercalls_mismatched, "avm2/class_supercalls_mismatched", 1), (as3_symbol_class_binary_data, "avm2/symbol_class_binary_data", 1), (as3_rectangle, "avm2/rectangle", 1), (as3_font_embedded, "avm2/font_embedded", 1), (as3_font_hasglyphs, "avm2/font_hasglyphs", 1), (as3_simplebutton_symbolclass, "avm2/simplebutton_symbolclass", 3), (as3_vector_int_access, "avm2/vector_int_access", 1), (as3_vector_int_delete, "avm2/vector_int_delete", 1), (as3_vector_holes, "avm2/vector_holes", 1), (as3_vector_coercion, "avm2/vector_coercion", 1), (as3_vector_concat, "avm2/vector_concat", 1), (as3_vector_join, "avm2/vector_join", 1), (as3_vector_every, "avm2/vector_every", 1), (as3_vector_filter, "avm2/vector_filter", 1), (as3_vector_indexof, "avm2/vector_indexof", 1), (as3_vector_lastindexof, "avm2/vector_lastindexof", 1), (as3_vector_map, "avm2/vector_map", 1), (as3_vector_pushpop, "avm2/vector_pushpop", 1), (as3_vector_shiftunshift, "avm2/vector_shiftunshift", 1), (as3_vector_insertat, "avm2/vector_insertat", 1), (as3_vector_removeat, "avm2/vector_removeat", 1), (as3_vector_reverse, "avm2/vector_reverse", 1), (as3_vector_slice, "avm2/vector_slice", 1), (as3_vector_sort, "avm2/vector_sort", 1), (as3_vector_splice, "avm2/vector_splice", 1), 
(as3_vector_tostring, "avm2/vector_tostring", 1), (as3_vector_constr, "avm2/vector_constr", 1), (as3_vector_legacy, "avm2/vector_legacy", 1), (as3_sound_valueof, "avm2/sound_valueof", 1), (as3_sound_embeddedprops, "avm2/sound_embeddedprops", 1), (as3_soundtransform, "avm2/soundtransform", 1), (as3_movieclip_soundtransform, "avm2/movieclip_soundtransform", 49), (as3_simplebutton_soundtransform, "avm2/simplebutton_soundtransform", 49), (as3_soundmixer_soundtransform, "avm2/soundmixer_soundtransform", 49), (as3_sound_play, "avm2/sound_play", 1), #[ignore] (as3_soundchannel_position, "avm2/soundchannel_position", 75), (as3_soundchannel_soundtransform, "avm2/soundchannel_soundtransform", 49), (as3_soundchannel_stop, "avm2/soundchannel_stop", 4), (as3_soundmixer_stopall, "avm2/soundmixer_stopall", 4), #[ignore] (as3_soundchannel_soundcomplete, "avm2/soundchannel_soundcomplete", 25), (as3_soundmixer_buffertime, "avm2/soundmixer_buffertime", 1), (as3_bitmap_timeline, "avm2/bitmap_timeline", 1), #[ignore] (as3_bitmapdata_embedded, "avm2/bitmapdata_embedded", 1), (as3_bitmapdata_constr, "avm2/bitmapdata_constr", 1), (as3_bitmap_constr, "avm2/bitmap_constr", 1), #[ignore] (as3_bitmap_properties, "avm2/bitmap_properties", 1), (as3_string_concat_fromcharcode, "avm2/string_concat_fromcharcode", 1), (as3_string_indexof_lastindexof, "avm2/string_indexof_lastindexof", 1), (as3_string_match, "avm2/string_match", 1), (as3_string_slice_substr_substring, "avm2/string_slice_substr_substring", 1), (as3_class_is, "avm2/class_is", 1), } // TODO: These tests have some inaccuracies currently, so we use approx_eq to test that numeric values are close enough. // Eventually we can hopefully make some of these match exactly (see #193). // Some will probably always need to be approx. (if they rely on trig functions, etc.) swf_tests_approx! 
{ (local_to_global, "avm1/local_to_global", 1, epsilon = 0.051), (stage_object_properties, "avm1/stage_object_properties", 6, epsilon = 0.051), (stage_object_properties_swf6, "avm1/stage_object_properties_swf6", 4, epsilon = 0.051), (movieclip_getbounds, "avm1/movieclip_getbounds", 1, epsilon = 0.051), (parse_float, "avm1/parse_float", 1, max_relative = 5.0 * f64::EPSILON), (edittext_letter_spacing, "avm1/edittext_letter_spacing", 1, epsilon = 15.0), // TODO: Discrepancy in wrapping in letterSpacing = 0.1 test. (edittext_align, "avm1/edittext_align", 1, epsilon = 3.0), (edittext_autosize, "avm1/edittext_autosize", 1, epsilon = 4.0), // TODO Flash has _width higher by 4.0, probably padding logic mistake (edittext_margins, "avm1/edittext_margins", 1, epsilon = 5.0), // TODO: Discrepancy in wrapping. (edittext_tab_stops, "avm1/edittext_tab_stops", 1, epsilon = 5.0), (edittext_bullet, "avm1/edittext_bullet", 1, epsilon = 3.0), (edittext_underline, "avm1/edittext_underline", 1, epsilon = 4.0), (edittext_hscroll, "avm1/edittext_hscroll", 1, epsilon = 3.0), (as3_coerce_string_precision, "avm2/coerce_string_precision", 1, max_relative = 30.0 * f64::EPSILON), (as3_divide, "avm2/divide", 1, epsilon = 0.0), // TODO: Discrepancy in float formatting. 
(as3_math, "avm2/math", 1, max_relative = 30.0 * f64::EPSILON), (as3_displayobject_height, "avm2/displayobject_height", 7, epsilon = 0.06), // TODO: height/width appears to be off by 1 twip sometimes (as3_displayobject_width, "avm2/displayobject_width", 7, epsilon = 0.06), (as3_displayobject_rotation, "avm2/displayobject_rotation", 1, epsilon = 0.0000000001), (as3_edittext_align, "avm2/edittext_align", 1, epsilon = 3.0), (as3_edittext_autosize, "avm2/edittext_autosize", 1, epsilon = 5.0), // TODO AS3 has _width higher by 5.0, probably padding logic mistake (as3_edittext_bullet, "avm2/edittext_bullet", 1, epsilon = 3.0), (as3_edittext_letter_spacing, "avm2/edittext_letter_spacing", 1, epsilon = 15.0), // TODO: Discrepancy in wrapping in letterSpacing = 0.1 test. (as3_edittext_margins, "avm2/edittext_margins", 1, epsilon = 5.0), // TODO: Discrepancy in wrapping. (as3_edittext_tab_stops, "avm2/edittext_tab_stops", 1, epsilon = 5.0), (as3_edittext_underline, "avm2/edittext_underline", 1, epsilon = 4.0), (as3_edittext_leading, "avm2/edittext_leading", 1, epsilon = 0.3), (as3_edittext_font_size, "avm2/edittext_font_size", 1, epsilon = 0.1), } #[test] fn external_interface_avm1() -> Result<(), Error> { set_logger(); test_swf_with_hooks( "tests/swfs/avm1/external_interface/test.swf", 1, "tests/swfs/avm1/external_interface/output.txt", |player| { player .lock() .unwrap() .add_external_interface(Box::new(ExternalInterfaceTestProvider::new())); Ok(()) }, |player| { let mut player_locked = player.lock().unwrap(); let parroted = player_locked.call_internal_interface("parrot", vec!["Hello World!".into()]); player_locked.log_backend().avm_trace(&format!( "After calling `parrot` with a string: {:?}", parroted )); let mut nested = BTreeMap::new(); nested.insert( "list".to_string(), vec![ "string".into(), 100.into(), false.into(), ExternalValue::Object(BTreeMap::new()), ] .into(), ); let mut root = BTreeMap::new(); root.insert("number".to_string(), (-500.1).into()); 
root.insert("string".to_string(), "A string!".into()); root.insert("true".to_string(), true.into()); root.insert("false".to_string(), false.into()); root.insert("null".to_string(), ExternalValue::Null); root.insert("nested".to_string(), nested.into()); let result = player_locked .call_internal_interface("callWith", vec!["trace".into(), root.into()]); player_locked.log_backend().avm_trace(&format!( "After calling `callWith` with a complex payload: {:?}", result )); Ok(()) }, false, ) } #[test] fn shared_object_avm1() -> Result<(), Error> { set_logger(); // Test SharedObject persistence. Run an SWF that saves data // to a shared object twice and verify that the data is saved. let mut memory_storage_backend: Box<dyn StorageBackend> = Box::new(MemoryStorageBackend::default()); // Initial run; no shared object data. test_swf_with_hooks( "tests/swfs/avm1/shared_object/test.swf", 1, "tests/swfs/avm1/shared_object/output1.txt", |_player| Ok(()), |player| { // Save the storage backend for next run. let mut player = player.lock().unwrap(); std::mem::swap(player.storage_mut(), &mut memory_storage_backend); Ok(()) }, false, )?; // Verify that the flash cookie matches the expected one let expected = std::fs::read("tests/swfs/avm1/shared_object/RuffleTest.sol")?; assert_eq!( expected, memory_storage_backend .get("localhost//RuffleTest") .unwrap_or_default() ); // Re-run the SWF, verifying that the shared object persists. test_swf_with_hooks( "tests/swfs/avm1/shared_object/test.swf", 1, "tests/swfs/avm1/shared_object/output2.txt", |player| { // Swap in the previous storage backend. 
let mut player = player.lock().unwrap(); std::mem::swap(player.storage_mut(), &mut memory_storage_backend); Ok(()) }, |_player| Ok(()), false, )?; Ok(()) } #[test] fn timeout_avm1() -> Result<(), Error> { set_logger(); test_swf_with_hooks( "tests/swfs/avm1/timeout/test.swf", 1, "tests/swfs/avm1/timeout/output.txt", |player| { player .lock() .unwrap() .set_max_execution_duration(Duration::from_secs(5)); Ok(()) }, |_| Ok(()), false, ) } #[test] fn stage_scale_mode() -> Result<(), Error> { set_logger(); test_swf_with_hooks( "tests/swfs/avm1/stage_scale_mode/test.swf", 1, "tests/swfs/avm1/stage_scale_mode/output.txt", |player| { // Simulate a large viewport to test stage size. player .lock() .unwrap() .set_viewport_dimensions(900, 900, 1.0); Ok(()) }, |_| Ok(()), false, ) } /// Wrapper around string slice that makes debug output `{:?}` to print string same way as `{}`. /// Used in different `assert*!` macros in combination with `pretty_assertions` crate to make /// test failures to show nice diffs. /// Courtesy of https://github.com/colin-kiegel/rust-pretty-assertions/issues/24 #[derive(PartialEq, Eq)] #[doc(hidden)] pub struct PrettyString<'a>(pub &'a str); /// Make diff to display string as multi-line string impl<'a> std::fmt::Debug for PrettyString<'a> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { f.write_str(self.0) } } macro_rules! assert_eq { ($left:expr, $right:expr) => { pretty_assertions::assert_eq!(PrettyString($left.as_ref()), PrettyString($right.as_ref())); }; ($left:expr, $right:expr, $message:expr) => { pretty_assertions::assert_eq!( PrettyString($left.as_ref()), PrettyString($right.as_ref()), $message ); }; } /// Loads an SWF and runs it through the Ruffle core for a number of frames. /// Tests that the trace output matches the given expected output. 
fn test_swf( swf_path: &str, num_frames: u32, expected_output_path: &str, check_img: bool, ) -> Result<(), Error> { test_swf_with_hooks( swf_path, num_frames, expected_output_path, |_| Ok(()), |_| Ok(()), check_img, ) } /// Loads an SWF and runs it through the Ruffle core for a number of frames. /// Tests that the trace output matches the given expected output. fn test_swf_with_hooks( swf_path: &str, num_frames: u32, expected_output_path: &str, before_start: impl FnOnce(Arc<Mutex<Player>>) -> Result<(), Error>, before_end: impl FnOnce(Arc<Mutex<Player>>) -> Result<(), Error>, check_img: bool, ) -> Result<(), Error> { let mut expected_output = std::fs::read_to_string(expected_output_path)?.replace("\r\n", "\n"); // Strip a trailing newline if it has one. if expected_output.ends_with('\n') { expected_output = expected_output[0..expected_output.len() - "\n".len()].to_string(); } let trace_log = run_swf(swf_path, num_frames, before_start, before_end, check_img)?; assert_eq!( trace_log, expected_output, "ruffle output != flash player output" ); Ok(()) } /// Loads an SWF and runs it through the Ruffle core for a number of frames. /// Tests that the trace output matches the given expected output. /// If a line has a floating point value, it will be compared approxinmately using the given epsilon. fn test_swf_approx( swf_path: &str, num_frames: u32, expected_output_path: &str, approx_assert_fn: impl Fn(f64, f64), ) -> Result<(), Error> { let trace_log = run_swf(swf_path, num_frames, |_| Ok(()), |_| Ok(()), false)?; let mut expected_data = std::fs::read_to_string(expected_output_path)?; // Strip a trailing newline if it has one. 
if expected_data.ends_with('\n') { expected_data = expected_data[0..expected_data.len() - "\n".len()].to_string(); } std::assert_eq!( trace_log.lines().count(), expected_data.lines().count(), "# of lines of output didn't match" ); for (actual, expected) in trace_log.lines().zip(expected_data.lines()) { // If these are numbers, compare using approx_eq. if let (Ok(actual), Ok(expected)) = (actual.parse::<f64>(), expected.parse::<f64>()) { // NaNs should be able to pass in an approx test. if actual.is_nan() && expected.is_nan() { continue; } // TODO: Lower this epsilon as the accuracy of the properties improves. // if let Some(relative_epsilon) = relative_epsilon { // assert_relative_eq!( // actual, // expected, // epsilon = absolute_epsilon, // max_relative = relative_epsilon // ); // } else { // assert_abs_diff_eq!(actual, expected, epsilon = absolute_epsilon); // } approx_assert_fn(actual, expected); } else { assert_eq!(actual, expected); } } Ok(()) } /// Loads an SWF and runs it through the Ruffle core for a number of frames. /// Tests that the trace output matches the given expected output. 
fn run_swf( swf_path: &str, num_frames: u32, before_start: impl FnOnce(Arc<Mutex<Player>>) -> Result<(), Error>, before_end: impl FnOnce(Arc<Mutex<Player>>) -> Result<(), Error>, mut check_img: bool, ) -> Result<String, Error> { check_img &= RUN_IMG_TESTS; let base_path = Path::new(swf_path).parent().unwrap(); let (mut executor, channel) = NullExecutor::new(); let movie = SwfMovie::from_path(swf_path, None)?; let frame_time = 1000.0 / movie.frame_rate().to_f64(); let trace_output = Rc::new(RefCell::new(Vec::new())); let mut platform_id = None; let backend_bit = wgpu::BackendBit::PRIMARY; let (render_backend, video_backend): (Box<dyn RenderBackend>, Box<dyn VideoBackend>) = if check_img { let instance = wgpu::Instance::new(backend_bit); let descriptors = WgpuRenderBackend::<TextureTarget>::build_descriptors( backend_bit, instance, None, Default::default(), None, )?; platform_id = Some(get_img_platform_suffix(&descriptors.info)); let target = TextureTarget::new( &descriptors.device, ( movie.width().to_pixels() as u32, movie.height().to_pixels() as u32, ), ); let render_backend = Box::new(WgpuRenderBackend::new(descriptors, target)?); let video_backend = Box::new(SoftwareVideoBackend::new()); (render_backend, video_backend) } else { (Box::new(NullRenderer), Box::new(NullVideoBackend::new())) }; let player = Player::new( render_backend, Box::new(NullAudioBackend::new()), Box::new(NullNavigatorBackend::with_base_path(base_path, channel)), Box::new(MemoryStorageBackend::default()), Box::new(NullLocaleBackend::new()), video_backend, Box::new(TestLogBackend::new(trace_output.clone())), Box::new(NullUiBackend::new()), )?; player.lock().unwrap().set_root_movie(Arc::new(movie)); player .lock() .unwrap() .set_max_execution_duration(Duration::from_secs(300)); before_start(player.clone())?; for _ in 0..num_frames { player.lock().unwrap().run_frame(); player.lock().unwrap().update_timers(frame_time); executor.poll_all().unwrap(); } // Render the image to disk // FIXME: Determine 
how we want to compare against on on-disk image if check_img { player.lock().unwrap().render(); let mut player_lock = player.lock().unwrap(); let renderer = player_lock .renderer_mut() .downcast_mut::<WgpuRenderBackend<TextureTarget>>() .unwrap(); let target = renderer.target(); let image = target .capture(renderer.device()) .expect("Failed to capture image"); // The swf path ends in '<swf_name>/test.swf' - extract `swf_name` let mut swf_path_buf = PathBuf::from(swf_path); swf_path_buf.pop(); let swf_name = swf_path_buf.file_name().unwrap().to_string_lossy(); let img_name = format!("{}-{}.png", swf_name, platform_id.unwrap()); let mut img_path = swf_path_buf.clone(); img_path.push(&img_name); let result = match image::open(&img_path) { Ok(existing_img) => { if existing_img .as_rgba8() .expect("Expected 8-bit RGBA image") .as_raw() == image.as_raw() { Ok(()) } else { Err(format!( "Test output does not match existing image `{:?}`", img_path )) } } Err(err) => Err(format!( "Error occured when trying to read existing image `{:?}`: {}", img_path, err )), }; if let Err(err) = result { let new_img_path = img_path.with_file_name(img_name + ".updated"); image.save_with_format(&new_img_path, image::ImageFormat::Png)?; panic!( "Image test failed - saved new image to `{:?}`\n{}", new_img_path, err ); } } before_end(player)?; executor.block_all().unwrap(); let trace = trace_output.borrow().join("\n"); Ok(trace) } struct TestLogBackend { trace_output: Rc<RefCell<Vec<String>>>, } impl TestLogBackend { pub fn new(trace_output: Rc<RefCell<Vec<String>>>) -> Self { Self { trace_output } } } impl LogBackend for TestLogBackend { fn avm_trace(&self, message: &str) { self.trace_output.borrow_mut().push(message.to_string()); } } #[derive(Default)] pub struct ExternalInterfaceTestProvider {} impl ExternalInterfaceTestProvider { pub fn new() -> Self { Default::default() } } fn do_trace(context: &mut UpdateContext<'_, '_, '_>, args: &[ExternalValue]) -> ExternalValue { context .log 
.avm_trace(&format!("[ExternalInterface] trace: {:?}", args)); "Traced!".into() } fn do_ping(context: &mut UpdateContext<'_, '_, '_>, _args: &[ExternalValue]) -> ExternalValue { context.log.avm_trace("[ExternalInterface] ping"); "Pong!".into() } fn do_reentry(context: &mut UpdateContext<'_, '_, '_>, _args: &[ExternalValue]) -> ExternalValue { context .log .avm_trace("[ExternalInterface] starting reentry"); if let Some(callback) = context.external_interface.get_callback("callWith") { callback.call( context, "callWith", vec!["trace".into(), "successful reentry!".into()], ) } else { ExternalValue::Null } } impl ExternalInterfaceProvider for ExternalInterfaceTestProvider { fn get_method(&self, name: &str) -> Option<Box<dyn ExternalInterfaceMethod>> { match name { "trace" => Some(Box::new(do_trace)), "ping" => Some(Box::new(do_ping)), "reentry" => Some(Box::new(do_reentry)), _ => None, } } fn on_callback_available(&self, _name: &str) {} fn on_fs_command(&self, _command: &str, _args: &str) -> bool { false } }
45.328583
147
0.675703
14c66a9f59742b9921efd9c02574e114ec02fee8
28,347
use crate::batch_storage::{BatchStorage, BinaryBatch, BatchFactory}; use crate::chained_error::ChainedError; use crate::batch_records::{RecordsBuilder, RecordsBuilderFactory}; use std::time::{Duration, SystemTime}; use std::{io, thread, mem}; use crate::batch_sender::BatchSender; use std::ops::Deref; use std::thread::JoinHandle; use std::marker::PhantomData; use std::sync::{Arc, Mutex, MutexGuard}; use std::io::{Error, ErrorKind}; use log::*; const DEFAULT_MAX_BATCH_RECORDS: u32 = 10000; const DEFAULT_MAX_BATCH_BYTES: usize = 1024 * 1024; pub trait Batcher<T> { fn start(&self) -> bool; fn stop(self) -> io::Result<()>; fn hard_stop(self) -> io::Result<()>; fn soft_stop(self)-> io::Result<()>; fn is_stopped(&self) -> bool; fn put(&self, record: T) -> io::Result<()>; fn put_all(&self, records: impl Iterator<Item=T>) -> io::Result<()>; fn flush(&self) -> io::Result<()>; fn flush_if_needed(&self) -> io::Result<bool>; } pub struct BatcherSharedState<T, Records, Builder: RecordsBuilder<T, Records>> { stopped: bool, hard_stop: bool, soft_stop: bool, last_flush_time: SystemTime, records_builder: Builder, upload_thread: Option<JoinHandle<()>>, last_upload_result: Arc<io::Result<()>>, phantom_t: PhantomData<T>, phantom_r: PhantomData<Records>, } #[derive(Clone)] pub struct BatcherImpl<T: Clone + Send + 'static, Records: Clone + Send + 'static, Builder, BuilderFactory, Batch, Factory, Storage, Sender> where Builder: RecordsBuilder<T, Records>, BuilderFactory: RecordsBuilderFactory<T, Records, Builder>, Batch: Deref<Target=BinaryBatch> + Clone + Send + 'static, Factory: BatchFactory<Records>, Storage: BatchStorage<Batch>, Sender: BatchSender { builder_factory: BuilderFactory, batch_factory: Factory, batch_storage: Storage, batch_sender: Sender, pub retry_batch_upload: bool, pub clock: fn() -> SystemTime, pub max_batch_records: u32, pub max_batch_bytes: usize, pub flush_period: Duration, pub read_retry_timeout: Duration, pub failed_upload_timeout: Duration, pub shared_state: 
Arc<Mutex<BatcherSharedState<T, Records, Builder>>>, phantom_t: PhantomData<T>, phantom_r: PhantomData<Records>, phantom_b: PhantomData<Batch>, } impl<T: Clone + Send + 'static, Records: Clone + Send + 'static, Builder, BuilderFactory, Batch, Factory, Storage, Sender> BatcherImpl<T, Records, Builder, BuilderFactory, Batch, Factory, Storage, Sender> where Builder: RecordsBuilder<T, Records>, BuilderFactory: RecordsBuilderFactory<T, Records, Builder>, Batch: Deref<Target=BinaryBatch> + Clone + Send + 'static, Factory: BatchFactory<Records>, Storage: BatchStorage<Batch>, Sender: BatchSender { pub fn new(builder_factory: BuilderFactory, batch_factory: Factory, batch_storage: Storage, batch_sender: Sender) -> Self { let records_builder = builder_factory.create_builder(); BatcherImpl { builder_factory, batch_factory, batch_storage, batch_sender, retry_batch_upload: true, clock: || SystemTime::now(), max_batch_records: DEFAULT_MAX_BATCH_RECORDS, max_batch_bytes: DEFAULT_MAX_BATCH_BYTES, flush_period: Duration::from_secs(60), read_retry_timeout: Duration::from_secs(3), failed_upload_timeout: Duration::from_secs(1), shared_state: Arc::new(Mutex::new(BatcherSharedState { stopped: true, hard_stop: false, soft_stop: false, last_flush_time: SystemTime::UNIX_EPOCH, records_builder, upload_thread: None, last_upload_result: Arc::new(Ok(())), phantom_t: PhantomData, phantom_r: PhantomData, })), phantom_t: PhantomData, phantom_r: PhantomData, phantom_b: PhantomData, } } fn upload(self) { info!("Upload starting..."); let mut uploaded_batch_counter = 0; let mut all_batch_counter = 0; let mut send_batches_bytes = 0; 'outer: loop { let batch_read_start = (self.clock)(); let batch_result = self.batch_storage.get(); if let Err(e) = batch_result { if e.kind() == ErrorKind::Interrupted { if self.should_interrupt(self.shared_state.lock().unwrap()) { debug!("The upload thread has been interrupted"); break; } else { continue; } } else { error!("Error while reading batch: {}", e); 
thread::sleep(self.read_retry_timeout); continue; } } all_batch_counter += 1; let batch = batch_result.unwrap(); trace!("{} read time: {:?}", *batch, self.since(batch_read_start)); loop { let batch_upload_start = (self.clock)(); let send_result = self.batch_sender.send_batch(&batch.bytes); if let Err(e) = send_result { error!("Unexpected exception while sending the {}: {}", *batch, e); self.shared_state.lock().unwrap().last_upload_result = Arc::new(Err(e)); self.stop().unwrap(); return; } trace!("{} sending time: {:?}", *batch, self.since(batch_upload_start)); let success = if let Ok(None) = send_result { trace!("{} successfully uploaded", *batch); uploaded_batch_counter += 1; send_batches_bytes += batch.bytes.len(); self.try_remove(&batch); true } else { warn!("Error while sending {}: {}", *batch, send_result.unwrap().unwrap()); thread::sleep(self.failed_upload_timeout); if !self.retry_batch_upload { self.try_remove(&batch); } false }; let mutex_guard = self.shared_state.lock().unwrap(); if mutex_guard.stopped && self.should_interrupt(mutex_guard) { debug!("Upload stopped"); break 'outer; } if success || !self.retry_batch_upload { break; } debug!("Retrying the {}", *batch); } } info!("{} from {} batches uploaded. 
Send {} bytes", uploaded_batch_counter, all_batch_counter, send_batches_bytes); } fn since(&self, since: SystemTime) -> Duration { (self.clock)().duration_since(since).unwrap_or(Duration::from_secs(0)) } fn try_remove(&self, batch: &Batch) { if let Err(e) = self.batch_storage.remove() { error!("Error while removing uploaded {}: {}", **batch, e); } } fn should_interrupt(&self, mutex_guard: MutexGuard<BatcherSharedState<T, Records, Builder>>) -> bool { mutex_guard.hard_stop || (self.batch_storage.is_persistent() && !mutex_guard.soft_stop) || self.batch_storage.is_empty() } fn stop_inner(&self, hard: bool, soft: bool) -> io::Result<()> { info!("Stop with hard: {} and soft: {}", hard, soft); { let mut mutex_guard = self.shared_state.lock().unwrap(); if mutex_guard.stopped { return Ok(()); } mutex_guard.stopped = true; mutex_guard.hard_stop = hard; mutex_guard.soft_stop = soft; self.flush_inner(&mut mutex_guard)?; self.batch_storage.shutdown(); mutex_guard.upload_thread.take() } .map(|upload_thread| upload_thread.join()) .map(|r| r.map_err(|e| if let Ok(error) = e.downcast::<Error>() { Error::new(ErrorKind::Other, ChainedError::new("The upload thread panicked with the reason", error)) } else { Error::new(ErrorKind::Other, "The upload thread panicked with unknown reason") })) .unwrap_or(Ok(())) } fn check_state(&self, mutex_guard: &MutexGuard<BatcherSharedState<T, Records, Builder>>) -> io::Result<()> { if mutex_guard.stopped { return Err(Error::new(ErrorKind::Interrupted, ChainedError::arc_result( "The batcher has been shut down", mutex_guard.last_upload_result.clone()))) } Ok(()) } fn need_flush(&self, mutex_guard: &MutexGuard<BatcherSharedState<T, Records, Builder>>) -> bool { (self.max_batch_bytes > 0 && mutex_guard.records_builder.size() >= self.max_batch_bytes) || (self.max_batch_records > 0 && mutex_guard.records_builder.len() >= self.max_batch_records) || self.since(mutex_guard.last_flush_time) >= self.flush_period } fn flush_inner(&self, mutex_guard: &mut 
MutexGuard<BatcherSharedState<T, Records, Builder>>) -> io::Result<()> { let records_builder = mem::replace(&mut mutex_guard.records_builder, self.builder_factory.create_builder()); let len = records_builder.len(); let size = records_builder.size(); if len == 0 { debug!("Flushing nothing"); return Ok(()); } trace!("Flushing {} bytes in {} records", size, len); let result = self.batch_storage.store(records_builder.build(), &self.batch_factory); if result.is_ok() { debug!("Flushing completed for {} bytes in {} records", size, len); mutex_guard.last_flush_time = (self.clock)(); } else { warn!("Flushing failed for {} bytes in {} records with error: {}", size, len, result.as_ref().unwrap_err()); } result } fn put_inner(&self, record: T, mutex_guard: &mut MutexGuard<BatcherSharedState<T, Records, Builder>>) -> io::Result<()> { if self.need_flush(mutex_guard) { self.flush_inner(mutex_guard)? } mutex_guard.records_builder.add(record); Ok(()) } } impl<T: Clone + Send + 'static, Records: Clone + Send + 'static, Builder, BuilderFactory, Batch, Factory, Storage, Sender> Batcher<T> for BatcherImpl<T, Records, Builder, BuilderFactory, Batch, Factory, Storage, Sender> where Builder: RecordsBuilder<T, Records>, BuilderFactory: RecordsBuilderFactory<T, Records, Builder>, Batch: Deref<Target=BinaryBatch> + Clone + Send + 'static, Factory: BatchFactory<Records>, Storage: BatchStorage<Batch>, Sender: BatchSender { fn start(&self) -> bool { let mut mutex_guard = self.shared_state.lock().unwrap(); if mutex_guard.upload_thread.is_some() { return false; } mutex_guard.stopped = false; mutex_guard.last_flush_time = (self.clock)(); let cloned_batcher = self.clone(); mutex_guard.upload_thread = Some(thread::Builder::new().name("batcher-upload".to_string()).spawn(move || { cloned_batcher.upload(); }).unwrap()); true } fn stop(self) -> io::Result<()> { self.stop_inner(false, false) } fn hard_stop(self) -> io::Result<()> { self.stop_inner(true, false) } fn soft_stop(self) -> io::Result<()> { 
self.stop_inner(false, true) } fn is_stopped(&self) -> bool { self.shared_state.lock().unwrap().stopped } fn put(&self, record: T) -> Result<(), Error> { let mut mutex_guard = self.shared_state.lock().unwrap(); self.check_state(&mutex_guard)?; self.put_inner(record, &mut mutex_guard) } fn put_all(&self, records: impl Iterator<Item=T>) -> Result<(), Error> { let mut mutex_guard = self.shared_state.lock().unwrap(); self.check_state(&mutex_guard)?; for record in records { self.put_inner(record, &mut mutex_guard)? } Ok(()) } fn flush(&self) -> Result<(), Error> { self.flush_inner(&mut self.shared_state.lock().unwrap()) } fn flush_if_needed(&self) -> Result<bool, Error> { let mut mutex_guard = self.shared_state.lock().unwrap(); self.check_state(&mutex_guard)?; if self.need_flush(&mutex_guard) { self.flush_inner(&mut mutex_guard)?; return Ok(true) } return Ok(false) } } #[cfg(test)] mod test { use crate::batch_sender::BatchSender; use std::io::{Error, ErrorKind}; use crate::batcher::{BatcherImpl, Batcher}; use crate::batch_storage::{GzippedJsonDisplayBatchFactory, BinaryBatch, BatchStorage, BatchFactory}; use crate::memory_storage::{MemoryStorage, NonBlockingMemoryStorage}; use crate::batch_records::{RECORDS_BUILDER_FACTORY, JsonArrayRecordsBuilder, JsonArrayRecordsBuilderFactory}; use std::{thread, io}; use std::time::{Duration, SystemTime}; use std::sync::{Once, Arc, Mutex}; use env_logger::{Builder, Env}; use miniz_oxide::inflate::decompress_to_vec; use std::sync::atomic::{AtomicBool, Ordering}; use std::collections::VecDeque; #[derive(Clone)] struct MockBatchSender { batches: Arc<Mutex<Vec<Vec<u8>>>>, result_supplier: Arc<dyn Fn() -> io::Result<Option<Error>> + Send + Sync> } impl MockBatchSender { fn new() -> MockBatchSender { MockBatchSender::with_result(Arc::new(|| Ok(None))) } fn with_result(result_supplier: Arc<dyn Fn() -> io::Result<Option<Error>> + Send + Sync>) -> MockBatchSender { MockBatchSender { batches: Arc::new(Mutex::new(Vec::new())), result_supplier } 
} } impl BatchSender for MockBatchSender { fn send_batch(&self, batch: &[u8]) -> io::Result<Option<Error>> { self.batches.lock().unwrap().push(batch.to_owned()); thread::sleep(Duration::from_millis(1)); (self.result_supplier)() } } #[derive(Clone)] struct NothingBatchSender(Arc<AtomicBool>); impl NothingBatchSender { fn new() -> NothingBatchSender { NothingBatchSender(Arc::new(AtomicBool::new(false))) } } impl BatchSender for NothingBatchSender { fn send_batch(&self, batch: &[u8]) -> Result<Option<Error>, Error> { while !self.0.load(Ordering::Relaxed) { thread::yield_now(); } Ok(None) } } #[derive(Clone, Debug)] pub struct PersistentMemoryStorage(MemoryStorage); impl<'a> BatchStorage<Arc<BinaryBatch>> for PersistentMemoryStorage { fn store<T>(&self, records: T, batch_factory: &impl BatchFactory<T>) -> io::Result<()> { self.0.store(records, batch_factory) } fn get(&self) -> io::Result<Arc<BinaryBatch>> { self.0.get() } fn remove(&self) -> io::Result<()> { self.0.remove() } fn is_persistent(&self) -> bool { true } fn is_empty(&self) -> bool { self.0.is_empty() } fn shutdown(&self) { self.0.shutdown() } } const BATCH1: &str = r#"{"serverId":s1,"batchId":1,"batch":[test1]}"#; const BATCH2: &str = r#"{"serverId":s1,"batchId":2,"batch":[test2]}"#; static INIT: Once = Once::new(); fn init() { INIT.call_once(|| { Builder::from_env(Env::default().default_filter_or("trace")).is_test(true) .format_timestamp_millis() .init(); }); } fn memory_storage() -> MemoryStorage { let mut storage = MemoryStorage::new(); storage.0.clock = || 1; storage } #[test] fn do_not_start() { let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), MemoryStorage::new(), MockBatchSender::new()); let result = batcher.put("test1"); assert!(result.is_err()); let error = result.unwrap_err(); assert_eq!(error.kind(), ErrorKind::Interrupted); assert_eq!(error.get_ref().unwrap().to_string(), "The batcher has been shut down"); } #[test] fn test_drop() { init(); let 
batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), MemoryStorage::new(), MockBatchSender::new()); assert!(batcher.start()); drop(batcher); } #[test] fn send_manually() { init(); let batch_sender = MockBatchSender::new(); let memory_storage = memory_storage(); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage, batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); thread::sleep(Duration::from_millis(1)); batcher.hard_stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 1); validate_batch(&batches_guard[0], BATCH1); } #[test] fn store_by_batch_records() { validate_stored_by(|batcher| batcher.max_batch_records = 1); } #[test] fn store_by_batch_bytes() { validate_stored_by(|batcher| batcher.max_batch_bytes = 1); } #[test] fn store_by_time() { init(); let batch_sender = NothingBatchSender::new(); let memory_storage = memory_storage(); let mut batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); batcher.flush_period = Duration::from_secs(1); batcher.clock = || SystemTime::UNIX_EPOCH; assert!(batcher.start()); batcher.put("test1").unwrap(); validate_batches_queue0(&memory_storage); batcher.clock = || SystemTime::UNIX_EPOCH + Duration::from_secs(1); batcher.put("test2").unwrap(); validate_batches_queue1(&memory_storage); let cloned_batcher = batcher.clone(); let stop_thread = thread::spawn(move || { cloned_batcher.hard_stop().unwrap() }); thread::sleep(Duration::from_millis(40)); validate_batches_queue2(&memory_storage); batch_sender.0.store(true, Ordering::Relaxed); stop_thread.join().unwrap(); } #[test] fn store_discarded() { init(); let batch_sender = NothingBatchSender::new(); let memory_storage = NonBlockingMemoryStorage::with_max_bytes(0); let mut batcher = 
BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender); batcher.max_batch_records = 1; assert!(batcher.start()); batcher.put("test1").unwrap(); let result = batcher.put("test2"); assert!(result.unwrap_err().to_string().starts_with("Storage capacity exceeded")); assert!(memory_storage.is_empty()); } #[test] fn sender_success() { init(); let batch_sender = MockBatchSender::new(); let memory_storage = memory_storage(); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); thread::sleep(Duration::from_millis(40)); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 1); validate_batch(&batches_guard[0], BATCH1); batcher.stop().unwrap(); validate_batches_queue0(&memory_storage); } #[test] fn sender_success_several_batches() { init(); let batch_sender = MockBatchSender::new(); let memory_storage = memory_storage(); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); batcher.put("test2").unwrap(); batcher.stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 2); validate_batch(&batches_guard[0], BATCH1); validate_batch(&batches_guard[1], BATCH2); validate_batches_queue0(&memory_storage); } #[test] fn persistent_storage_and_stop() { init(); let batch_sender = MockBatchSender::new(); let persistent_memory_storage = PersistentMemoryStorage(memory_storage()); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), persistent_memory_storage.clone(), batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); 
batcher.flush().unwrap(); batcher.put("test2").unwrap(); batcher.stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 1); validate_batch(&batches_guard[0], BATCH1); validate_batches_queue(&persistent_memory_storage.0, |batches| { assert_eq!(batches.len(), 1); validate_batch(&batches[0].bytes, BATCH2); }); } #[test] fn persistent_storage_and_soft_stop() { init(); let batch_sender = MockBatchSender::new(); let persistent_memory_storage = PersistentMemoryStorage(memory_storage()); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), persistent_memory_storage.clone(), batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); batcher.put("test2").unwrap(); batcher.soft_stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 2); validate_batch(&batches_guard[0], BATCH1); validate_batch(&batches_guard[1], BATCH2); validate_batches_queue0(&persistent_memory_storage.0); } #[test] fn sender_fail() { init(); let batch_sender = MockBatchSender::with_result(Arc::new( || Ok(Some(Error::new(ErrorKind::Other, "Test error"))))); let memory_storage = memory_storage(); let mut batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); batcher.failed_upload_timeout = Duration::from_millis(1); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); thread::sleep(Duration::from_millis(40)); batcher.hard_stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert!(batches_guard.len() >= 2); validate_batch(&batches_guard[0], BATCH1); validate_batch(&batches_guard[1], BATCH1); validate_batches_queue1(&memory_storage); } #[test] fn sender_fail_do_not_retry_upload() { init(); let batch_sender = MockBatchSender::with_result(Arc::new( || 
Ok(Some(Error::new(ErrorKind::Other, "Test error"))))); let memory_storage = memory_storage(); let mut batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); batcher.failed_upload_timeout = Duration::from_millis(1); batcher.retry_batch_upload = false; assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); thread::sleep(Duration::from_millis(5)); batcher.hard_stop().unwrap(); let batches_guard = batch_sender.batches.lock().unwrap(); assert_eq!(batches_guard.len(), 1); validate_batch(&batches_guard[0], BATCH1); validate_batches_queue0(&memory_storage); } #[test] fn sender_exception() { init(); let batch_sender = MockBatchSender::with_result(Arc::new( || Err(Error::new(ErrorKind::Other, "Test error")))); let memory_storage = memory_storage(); let batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.flush().unwrap(); thread::sleep(Duration::from_millis(40)); let result = batcher.put("test2"); assert!(result.is_err()); batcher.hard_stop().unwrap(); validate_batches_queue1(&memory_storage); } type BatchImplType<'a> = BatcherImpl<&'a str, String, JsonArrayRecordsBuilder, JsonArrayRecordsBuilderFactory, Arc<BinaryBatch>, GzippedJsonDisplayBatchFactory<String>, MemoryStorage, NothingBatchSender>; fn validate_stored_by(batcher_consumer: impl Fn(&mut BatchImplType<'_>)) { init(); let batch_sender = NothingBatchSender::new(); let memory_storage = memory_storage(); let mut batcher = BatcherImpl::new( RECORDS_BUILDER_FACTORY, GzippedJsonDisplayBatchFactory::new("s1"), memory_storage.clone(), batch_sender.clone()); batcher_consumer(&mut batcher); assert!(batcher.start()); batcher.put("test1").unwrap(); batcher.put("test2").unwrap(); validate_batches_queue1(&memory_storage); let cloned_batcher = batcher.clone(); 
let stop_thread = thread::spawn(move || { cloned_batcher.hard_stop().unwrap() }); thread::sleep(Duration::from_millis(40)); validate_batches_queue2(&memory_storage); batch_sender.0.store(true, Ordering::Relaxed); stop_thread.join().unwrap(); } fn validate_batches_queue(memory_storage: &MemoryStorage, f: impl Fn(&VecDeque<Arc<BinaryBatch>>)) { let mutex_guard = memory_storage.0.shared_state.lock(); let batches = &mutex_guard.batches_queue; f(batches); } fn validate_batches_queue0(memory_storage: &MemoryStorage) { validate_batches_queue(&memory_storage, |batches| { assert_eq!(batches.len(), 0); }); } fn validate_batches_queue1(memory_storage: &MemoryStorage) { validate_batches_queue(&memory_storage, |batches| { assert_eq!(batches.len(), 1); validate_batch(&batches[0].bytes, BATCH1); }); } fn validate_batches_queue2(memory_storage: &MemoryStorage) { validate_batches_queue(&memory_storage, |batches| { assert_eq!(batches.len(), 2); validate_batch(&batches[1].bytes, BATCH2); }); } fn validate_batch(bytes: &[u8], batch: &str) { assert_eq!(String::from_utf8(decompress_to_vec(bytes).unwrap()).unwrap(), batch); } }
33.154386
140
0.586588
482482cbe7cfe9a589324af1ce4a67c57b4aca63
1,836
#[allow(deprecated)] use solana_sdk::sysvar::fees::Fees; use { solana_program_test::{processor, ProgramTest}, solana_sdk::{ account_info::AccountInfo, clock::Clock, entrypoint::ProgramResult, epoch_schedule::EpochSchedule, fee_calculator::FeeCalculator, instruction::Instruction, msg, pubkey::Pubkey, rent::Rent, signature::Signer, sysvar::Sysvar, transaction::Transaction, }, }; // Process instruction to invoke into another program fn sysvar_getter_process_instruction( _program_id: &Pubkey, _accounts: &[AccountInfo], _input: &[u8], ) -> ProgramResult { msg!("sysvar_getter"); let clock = Clock::get()?; assert_eq!(42, clock.slot); let epoch_schedule = EpochSchedule::get()?; assert_eq!(epoch_schedule, EpochSchedule::default()); #[allow(deprecated)] { let fees = Fees::get()?; assert_eq!( fees.fee_calculator, FeeCalculator { lamports_per_signature: 5000 } ); } let rent = Rent::get()?; assert_eq!(rent, Rent::default()); Ok(()) } #[tokio::test] async fn get_sysvar() { let program_id = Pubkey::new_unique(); let program_test = ProgramTest::new( "sysvar_getter", program_id, processor!(sysvar_getter_process_instruction), ); let mut context = program_test.start_with_context().await; context.warp_to_slot(42).unwrap(); let instructions = vec![Instruction::new_with_bincode(program_id, &(), vec![])]; let transaction = Transaction::new_signed_with_payer( &instructions, Some(&context.payer.pubkey()), &[&context.payer], context.last_blockhash, ); context .banks_client .process_transaction(transaction) .await .unwrap(); }
26.228571
95
0.627996
c1e2070e7ef640f5112011e1c3efbcdaa897dc17
5,688
//! This modules contains both the `static_loader` and `ArcLoader` //! implementations, as well as the `Loader` trait. Which provides a loader //! agnostic interface. #[cfg(feature = "handlebars")] mod handlebars; #[cfg(feature = "tera")] mod tera; mod shared; use std::collections::HashMap; use crate::FluentBundle; use fluent_bundle::{FluentArgs, FluentResource, FluentValue}; use fluent_langneg::negotiate_languages; pub use unic_langid::{langid, langids, LanguageIdentifier}; mod arc_loader; mod static_loader; pub use arc_loader::{ArcLoader, ArcLoaderBuilder}; pub use static_loader::StaticLoader; /// A loader capable of looking up Fluent keys given a language. pub trait Loader { /// Look up `text_id` for `lang` in Fluent. fn lookup(&self, lang: &LanguageIdentifier, text_id: &str) -> Option<String> { self.lookup_complete::<&str>(lang, text_id, None) } /// Look up `text_id` for `lang` with `args` in Fluent. fn lookup_with_args<T: AsRef<str>>( &self, lang: &LanguageIdentifier, text_id: &str, args: &HashMap<T, FluentValue>, ) -> Option<String> { self.lookup_complete(lang, text_id, Some(args)) } /// Look up `text_id` for `lang` in Fluent, using any `args` if provided. fn lookup_complete<T: AsRef<str>>( &self, lang: &LanguageIdentifier, text_id: &str, args: Option<&HashMap<T, FluentValue>>, ) -> Option<String>; /// Returns an Iterator over the locales that are present. 
fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_>; } impl<L> Loader for std::sync::Arc<L> where L: Loader, { fn lookup_complete<T: AsRef<str>>( &self, lang: &LanguageIdentifier, text_id: &str, args: Option<&HashMap<T, FluentValue>>, ) -> Option<String> { L::lookup_complete(self, lang, text_id, args) } fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_> { L::locales(self) } } impl<'a, L> Loader for &'a L where L: Loader, { fn lookup_complete<T: AsRef<str>>( &self, lang: &LanguageIdentifier, text_id: &str, args: Option<&HashMap<T, FluentValue>>, ) -> Option<String> { L::lookup_complete(self, lang, text_id, args) } fn locales(&self) -> Box<dyn Iterator<Item = &LanguageIdentifier> + '_> { L::locales(self) } } /// A `Loader` agnostic container type with optional trait implementations /// for integrating with different libraries. pub struct FluentLoader<L> { loader: L, #[allow(unused)] default_lang: Option<LanguageIdentifier>, } impl<L> FluentLoader<L> { /// Create a new `FluentLoader`. pub fn new(loader: L) -> Self { Self { loader, default_lang: None, } } /// Set default language for this `FluentLoader`. /// Template engines can use this value when rendering translations. /// So far this feature is only implemented for Tera. pub fn with_default_lang(self, lang: LanguageIdentifier) -> Self { Self { loader: self.loader, default_lang: Some(lang), } } } /// Constructs a map of languages with a list of potential fallback languages. pub fn build_fallbacks( locales: &[LanguageIdentifier], ) -> HashMap<LanguageIdentifier, Vec<LanguageIdentifier>> { let mut map = HashMap::new(); for locale in locales.iter() { map.insert( locale.to_owned(), negotiate_languages( &[locale], locales, None, fluent_langneg::NegotiationStrategy::Filtering, ) .into_iter() .cloned() .collect::<Vec<_>>(), ); } map } /// Creates a new static `FluentBundle` for `lang` using `resources`. 
Optionally /// shared resources can be specified with `core_resource` and the bundle can /// be customized with `customizer`. fn create_bundle( lang: LanguageIdentifier, resources: &'static [FluentResource], core_resource: Option<&'static FluentResource>, customizer: &impl Fn(&mut FluentBundle<&'static FluentResource>), ) -> FluentBundle<&'static FluentResource> { let mut bundle: FluentBundle<&'static FluentResource> = FluentBundle::new_concurrent(vec![lang]); if let Some(core) = core_resource { bundle .add_resource(core) .expect("Failed to add core resource to bundle"); } for res in resources { bundle .add_resource(res) .expect("Failed to add FTL resources to the bundle."); } customizer(&mut bundle); bundle } /// Maps from map of languages containing a list of resources to a map of /// languages containing a `FluentBundle` of those resources. pub fn build_bundles( resources: &'static HashMap<LanguageIdentifier, Vec<FluentResource>>, core_resource: Option<&'static FluentResource>, customizer: impl Fn(&mut FluentBundle<&'static FluentResource>), ) -> HashMap<LanguageIdentifier, FluentBundle<&'static FluentResource>> { let mut bundles = HashMap::new(); for (k, ref v) in resources.iter() { bundles.insert( k.clone(), create_bundle(k.clone(), &v, core_resource, &customizer), ); } bundles } fn map_to_fluent_args<'map, T: AsRef<str>>( map: Option<&'map HashMap<T, FluentValue>>, ) -> Option<FluentArgs<'map>> { let mut new = FluentArgs::new(); if let Some(map) = map { for (key, value) in map { new.set(key.as_ref(), value.clone()); } } Some(new) }
28.582915
101
0.624297
abad49c0928faa83674c48bb312e567d400a0f48
8,530
use super::SearchContext; use crate::{ ast, error::{Error, ErrorKind}, query::BindResult, symbol::Symbol, Query, }; use analogy::Analogy, use artifact::ArtifactId, use claim::{Claim, ClaimId}, use std::rc::Rc; pub enum SearchNode { Artifact { binary_concatenated_atomid_list: Option<Vec<u8>>, artifact_id: ArtifactId, }, Pair { vec: Option<Vec<u8>>, left: Box<SearchNode>, right: Box<SearchNode>, }, Bound { node: Box<SearchNode>, sv: Rc<ast::SymbolVar>, }, // Someone gave us this symbol, and said "use it", so there's nothing to be done Given { vec: Option<Vec<u8>>, }, } impl SearchNode { pub fn search(query: &Query, symz: &Rc<ast::GSymbolizable>) -> Result<SearchNode, MBQLError> { println!("SEARCH {:?}", *symz); let node = match &**symz { ast::GSymbolizable::Artifact(a) => SearchNode::artifact_search(query, a)?, ast::GSymbolizable::GroundPair(a) => SearchNode::pair_search(query, a)?, ast::GSymbolizable::SymbolVar(sv) => SearchNode::symbolvar_search(query, sv)?, ast::GSymbolizable::Ground(_) => { // Shouldn't be able to call this directly with a Ground statement unreachable!() } }; Ok(node) } pub fn artifact_search(query: &Query, artifact: &ast::Artifact) -> Result<Self, MBQLError> { let artifact_id = artifact.apply(query)?; let binary_concatenated_atomid_list = { let mut ctx = query.search_context.lock().unwrap(); ctx.query_atoms_by_artifact(&artifact_id)? }; Ok(SearchNode::Artifact { artifact_id, binary_concatenated_atomid_list: Some(binary_concatenated_atomid_list), }) } /// Search for symbols for a given symbol variable. 
Said variable is either a given, or a Bound variable, depending on how /// it's defined in the query pub fn symbolvar_search(query: &Query, sv: &Rc<ast::SymbolVar>) -> Result<Self, MBQLError> { match query.bind_symbolvar(&sv.var) { Err(e) => { return Err(MBQLError { position: sv.position().clone(), kind: MBQLErrorKind::SymbolVarNotFound { var: sv.var.to_string() }, }); } Ok(BindResult::Bound(gsymz)) => { let node = SearchNode::search(query, &gsymz)?; Ok(SearchNode::Bound { node: Box::new(node), sv: sv.clone(), }) } Ok(BindResult::Symbol(symbol)) => Ok(SearchNode::Given { vec: symbol.as_vec() }), } } pub fn pair_search(query: &Query, gpair: &ast::GPair) -> Result<Self, MBQLError> { // Depth first recursion to find possible leaf symbols let left = SearchNode::search(query, &gpair.left)?; let right = SearchNode::search(query, &gpair.right)?; let union = left.union_vec(&right); match union { None => Ok(SearchNode::Pair { vec: None, left: Box::new(left), right: Box::new(right), }), Some(v) => { // find symbols (Analogies) which refer to BOTH of the above println!("{:?}", v); unimplemented!() } } // I'm searching for Analogies which match both the left and the right // AND I'm also searching for that set of left/right atoms which match said analogies, which I need to call // store_symbol_for_var on if they're GSNode::Bound // let opt_symbol = ctx.find_matching_analogy_symbol(&left, &right, query)?; // if let Some(symbol) = opt_symbol { // println!("FOUND MATCH {}", symbol); // return Ok(SearchNode::Pair { left: Box::new(left), // right: Box::new(right), }); // } } fn intersect(&mut self) {} pub fn stash_bindings(&self, query: &Query) -> Result<(), MBError> { match self { SearchNode::Pair { left, right, .. 
} => { left.stash_bindings(query)?; right.stash_bindings(query)?; Ok(()) } SearchNode::Bound { node, sv } => match node.symbol() { None => Err(MBError::Other), Some(symbol) => { query.stash_symbol_for_var(&sv, symbol)?; Ok(()) } }, _ => Ok(()), } } pub fn symbol(&self) -> Option<Symbol> { match self { SearchNode::Artifact { binary_concatenated_atomid_list: vec, .. } | SearchNode::Pair { vec, .. } | SearchNode::Given { vec, .. } => match vec { None => None, Some(v) => Symbol::new_from_vec(v.clone()), }, SearchNode::Bound { node, .. } => node.symbol(), } } pub fn vivify_symbols(&mut self, query: &Query) -> Result<(), MBError> { match self { SearchNode::Artifact { artifact_id, binary_concatenated_atomid_list: vec, } => { let atom = query.mb.symbolize_atom(&*artifact_id)?; overwrite_vec(vec, atom.id()); } SearchNode::Bound { node, .. } => { node.vivify_symbols(query)?; } SearchNode::Pair { left, right, vec } => { left.vivify_symbols(query)?; right.vivify_symbols(query)?; let atom = query .mb .symbolize_atom(Analogy::declarative(left.symbol().unwrap(), right.symbol().unwrap()))?; overwrite_vec(vec, atom.id()); } SearchNode::Given { .. } => { // Can't resymbolize/vivify a given } }; Ok(()) } fn vec(&self) -> Option<&Vec<u8>> { match self { SearchNode::Artifact { binary_concatenated_atomid_list: vec, .. } | SearchNode::Pair { vec, .. } | SearchNode::Given { vec, .. } => vec.as_ref(), SearchNode::Bound { node, .. } => node.vec(), } } fn vec_mut(&mut self) -> &mut Option<Vec<u8>> { match self { SearchNode::Artifact { binary_concatenated_atomid_list: vec, .. } | SearchNode::Pair { vec, .. } | SearchNode::Given { vec, .. } => vec, SearchNode::Bound { node, .. 
} => node.vec_mut(), } } fn union_vec(&self, other: &Self) -> Option<Vec<u8>> { let a = self.vec(); let b = other.vec(); match (a, b) { (None, None) => None, (Some(a), None) => Some(a.clone()), (None, Some(b)) => Some(b.clone()), (Some(a), Some(b)) => { let mut merged = Vec::with_capacity(a.len() + b.len()); merged.extend(a.iter().copied()); use inverted_index_util::entity_list::insert_entity_mut; use typenum::consts::U16; for chunk in b.chunks(16) { insert_entity_mut::<U16>(&mut merged, chunk) } Some(merged) } } } } fn overwrite_vec(vec: &mut Option<Vec<u8>>, atom: &ClaimId) { match vec { None => { let mut v = Vec::new(); v.extend(atom.as_ref()); *vec = Some(v); } Some(v) => { v.truncate(0); v.extend(atom.as_bytes()); } } } // At each stage, I am searching for a set of // * Instantiated artifacts (ID) // * Associative Analogies (ID) // * Catagorical Analogies (ID) // Lets ignore Given symbols for now // // so what are our cardinalities here? // If I were to start at the root of a given GSymz tree, It would initially encompass all the data in the system // Lets assume this for a moment. What do you do next? // you iterate over each record, then recursively check its contents for matching // Lets just fucking do this, but do it as a module! // Make it work, make it correct, make it fast. Not the reverse :facepalm:
32.43346
126
0.504689
9c218731f4ac0fbbe45d2c6f4e6d91c68e160b32
2,362
// https://leetcode-cn.com/problems/word-break-ii/ // Runtime: 0 ms // Memory Usage: 2 MB use std::{ collections::{hash_map::DefaultHasher, HashSet}, hash::Hasher, }; pub fn word_break(s: String, word_dict: Vec<String>) -> Vec<String> { let n = s.len(); let mut dict = HashSet::new(); let mut alphabet = vec![false; 256]; for word in word_dict { let mut hasher = DefaultHasher::new(); for b in word.bytes() { alphabet[b as usize] = true; hasher.write_u8(b); } dict.insert(hasher.finish()); } let s = s.bytes().collect::<Vec<u8>>(); for i in 0..n { if !alphabet[s[i] as usize] { return Vec::new(); } } let mut cur = Vec::new(); let mut res = Vec::new(); dfs(0, &mut cur, &mut res, &dict, &s, n); res } fn dfs( start: usize, cur: &mut Vec<(usize, usize)>, all: &mut Vec<String>, dict: &HashSet<u64>, s: &[u8], n: usize, ) { if start == n { let mut words = Vec::new(); for &(l, r) in cur.iter() { let mut word = "".to_string(); for &si in s.iter().take(r + 1).skip(l) { word.push(si as char); } words.push(word); } all.push(words.join(" ")); } let mut hasher = DefaultHasher::new(); for i in start..n { hasher.write_u8(s[i]); if dict.contains(&hasher.finish()) { cur.push((start, i)); dfs(i + 1, cur, all, dict, s, n); cur.pop(); } } } // backtracking dynamic_programming #[test] fn test2_140() { use leetcode_prelude::vec_string; assert_eq!( word_break( "catsanddog".to_string(), vec_string!["cat", "cats", "and", "sand", "dog"] ), vec_string!["cat sand dog", "cats and dog"] ); assert_eq!( word_break( "pineapplepenapple".to_string(), vec_string!["apple", "pen", "applepen", "pine", "pineapple"] ), vec_string![ "pine apple pen apple", "pine applepen apple", "pineapple pen apple" ] ); assert_eq!( word_break( "catsandog".to_string(), vec_string!["cats", "dog", "sand", "and", "cat"] ), vec_string![] ); }
25.956044
72
0.489839
ff875e179788f8042ca1016fd91b1b0d87436086
14,884
use std::collections::HashSet; use syn::{punctuated::Punctuated, Expr, Ident, LitInt, LitStr, Path, Token}; use proc_macro2::TokenStream; use quote::{quote, quote_spanned, ToTokens}; use syn::ext::IdentExt as _; use syn::parse::{Parse, ParseStream}; #[derive(Clone, Default, Debug)] pub(crate) struct InstrumentArgs { level: Option<Level>, pub(crate) name: Option<LitStr>, target: Option<LitStr>, pub(crate) parent: Option<Expr>, pub(crate) follows_from: Option<Expr>, pub(crate) skips: HashSet<Ident>, pub(crate) skip_all: bool, pub(crate) fields: Option<Fields>, pub(crate) err_mode: Option<FormatMode>, pub(crate) ret_mode: Option<FormatMode>, /// Errors describing any unrecognized parse inputs that we skipped. parse_warnings: Vec<syn::Error>, } impl InstrumentArgs { pub(crate) fn level(&self) -> impl ToTokens { fn is_level(lit: &LitInt, expected: u64) -> bool { match lit.base10_parse::<u64>() { Ok(value) => value == expected, Err(_) => false, } } match &self.level { Some(Level::Str(ref lit)) if lit.value().eq_ignore_ascii_case("trace") => { quote!(tracing::Level::TRACE) } Some(Level::Str(ref lit)) if lit.value().eq_ignore_ascii_case("debug") => { quote!(tracing::Level::DEBUG) } Some(Level::Str(ref lit)) if lit.value().eq_ignore_ascii_case("info") => { quote!(tracing::Level::INFO) } Some(Level::Str(ref lit)) if lit.value().eq_ignore_ascii_case("warn") => { quote!(tracing::Level::WARN) } Some(Level::Str(ref lit)) if lit.value().eq_ignore_ascii_case("error") => { quote!(tracing::Level::ERROR) } Some(Level::Int(ref lit)) if is_level(lit, 1) => quote!(tracing::Level::TRACE), Some(Level::Int(ref lit)) if is_level(lit, 2) => quote!(tracing::Level::DEBUG), Some(Level::Int(ref lit)) if is_level(lit, 3) => quote!(tracing::Level::INFO), Some(Level::Int(ref lit)) if is_level(lit, 4) => quote!(tracing::Level::WARN), Some(Level::Int(ref lit)) if is_level(lit, 5) => quote!(tracing::Level::ERROR), Some(Level::Path(ref pat)) => quote!(#pat), Some(_) => quote! 
{ compile_error!( "unknown verbosity level, expected one of \"trace\", \ \"debug\", \"info\", \"warn\", or \"error\", or a number 1-5" ) }, None => quote!(tracing::Level::INFO), } } pub(crate) fn target(&self) -> impl ToTokens { if let Some(ref target) = self.target { quote!(#target) } else { quote!(module_path!()) } } /// Generate "deprecation" warnings for any unrecognized attribute inputs /// that we skipped. /// /// For backwards compatibility, we need to emit compiler warnings rather /// than errors for unrecognized inputs. Generating a fake deprecation is /// the only way to do this on stable Rust right now. pub(crate) fn warnings(&self) -> impl ToTokens { let warnings = self.parse_warnings.iter().map(|err| { let msg = format!("found unrecognized input, {}", err); let msg = LitStr::new(&msg, err.span()); // TODO(eliza): This is a bit of a hack, but it's just about the // only way to emit warnings from a proc macro on stable Rust. // Eventually, when the `proc_macro::Diagnostic` API stabilizes, we // should definitely use that instead. quote_spanned! {err.span()=> #[warn(deprecated)] { #[deprecated(since = "not actually deprecated", note = #msg)] const TRACING_INSTRUMENT_WARNING: () = (); let _ = TRACING_INSTRUMENT_WARNING; } } }); quote! { { #(#warnings)* } } } } impl Parse for InstrumentArgs { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let mut args = Self::default(); while !input.is_empty() { let lookahead = input.lookahead1(); if lookahead.peek(kw::name) { if args.name.is_some() { return Err(input.error("expected only a single `name` argument")); } let name = input.parse::<StrArg<kw::name>>()?.value; args.name = Some(name); } else if lookahead.peek(LitStr) { // XXX: apparently we support names as either named args with an // sign, _or_ as unnamed string literals. That's weird, but // changing it is apparently breaking. 
if args.name.is_some() { return Err(input.error("expected only a single `name` argument")); } args.name = Some(input.parse()?); } else if lookahead.peek(kw::target) { if args.target.is_some() { return Err(input.error("expected only a single `target` argument")); } let target = input.parse::<StrArg<kw::target>>()?.value; args.target = Some(target); } else if lookahead.peek(kw::parent) { if args.target.is_some() { return Err(input.error("expected only a single `parent` argument")); } let parent = input.parse::<ExprArg<kw::parent>>()?; args.parent = Some(parent.value); } else if lookahead.peek(kw::follows_from) { if args.target.is_some() { return Err(input.error("expected only a single `follows_from` argument")); } let follows_from = input.parse::<ExprArg<kw::follows_from>>()?; args.follows_from = Some(follows_from.value); } else if lookahead.peek(kw::level) { if args.level.is_some() { return Err(input.error("expected only a single `level` argument")); } args.level = Some(input.parse()?); } else if lookahead.peek(kw::skip) { if !args.skips.is_empty() { return Err(input.error("expected only a single `skip` argument")); } if args.skip_all { return Err(input.error("expected either `skip` or `skip_all` argument")); } let Skips(skips) = input.parse()?; args.skips = skips; } else if lookahead.peek(kw::skip_all) { if args.skip_all { return Err(input.error("expected only a single `skip_all` argument")); } if !args.skips.is_empty() { return Err(input.error("expected either `skip` or `skip_all` argument")); } let _ = input.parse::<kw::skip_all>()?; args.skip_all = true; } else if lookahead.peek(kw::fields) { if args.fields.is_some() { return Err(input.error("expected only a single `fields` argument")); } args.fields = Some(input.parse()?); } else if lookahead.peek(kw::err) { let _ = input.parse::<kw::err>(); let mode = FormatMode::parse(input)?; args.err_mode = Some(mode); } else if lookahead.peek(kw::ret) { let _ = input.parse::<kw::ret>()?; let mode = 
FormatMode::parse(input)?; args.ret_mode = Some(mode); } else if lookahead.peek(Token![,]) { let _ = input.parse::<Token![,]>()?; } else { // We found a token that we didn't expect! // We want to emit warnings for these, rather than errors, so // we'll add it to the list of unrecognized inputs we've seen so // far and keep going. args.parse_warnings.push(lookahead.error()); // Parse the unrecognized token tree to advance the parse // stream, and throw it away so we can keep parsing. let _ = input.parse::<proc_macro2::TokenTree>(); } } Ok(args) } } struct StrArg<T> { value: LitStr, _p: std::marker::PhantomData<T>, } impl<T: Parse> Parse for StrArg<T> { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<T>()?; let _ = input.parse::<Token![=]>()?; let value = input.parse()?; Ok(Self { value, _p: std::marker::PhantomData, }) } } struct ExprArg<T> { value: Expr, _p: std::marker::PhantomData<T>, } impl<T: Parse> Parse for ExprArg<T> { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<T>()?; let _ = input.parse::<Token![=]>()?; let value = input.parse()?; Ok(Self { value, _p: std::marker::PhantomData, }) } } struct Skips(HashSet<Ident>); impl Parse for Skips { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<kw::skip>(); let content; let _ = syn::parenthesized!(content in input); let names: Punctuated<Ident, Token![,]> = content.parse_terminated(Ident::parse_any)?; let mut skips = HashSet::new(); for name in names { if skips.contains(&name) { return Err(syn::Error::new( name.span(), "tried to skip the same field twice", )); } else { skips.insert(name); } } Ok(Self(skips)) } } #[derive(Clone, Debug, Hash, PartialEq, Eq)] pub(crate) enum FormatMode { Default, Display, Debug, } impl Default for FormatMode { fn default() -> Self { FormatMode::Default } } impl Parse for FormatMode { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { if !input.peek(syn::token::Paren) { return 
Ok(FormatMode::default()); } let content; let _ = syn::parenthesized!(content in input); let maybe_mode: Option<Ident> = content.parse()?; maybe_mode.map_or(Ok(FormatMode::default()), |ident| { match ident.to_string().as_str() { "Debug" => Ok(FormatMode::Debug), "Display" => Ok(FormatMode::Display), _ => Err(syn::Error::new( ident.span(), "unknown error mode, must be Debug or Display", )), } }) } } #[derive(Clone, Debug)] pub(crate) struct Fields(pub(crate) Punctuated<Field, Token![,]>); #[derive(Clone, Debug)] pub(crate) struct Field { pub(crate) name: Punctuated<Ident, Token![.]>, pub(crate) value: Option<Expr>, pub(crate) kind: FieldKind, } #[derive(Clone, Debug, Eq, PartialEq)] pub(crate) enum FieldKind { Debug, Display, Value, } impl Parse for Fields { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<kw::fields>(); let content; let _ = syn::parenthesized!(content in input); let fields: Punctuated<_, Token![,]> = content.parse_terminated(Field::parse)?; Ok(Self(fields)) } } impl ToTokens for Fields { fn to_tokens(&self, tokens: &mut TokenStream) { self.0.to_tokens(tokens) } } impl Parse for Field { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let mut kind = FieldKind::Value; if input.peek(Token![%]) { input.parse::<Token![%]>()?; kind = FieldKind::Display; } else if input.peek(Token![?]) { input.parse::<Token![?]>()?; kind = FieldKind::Debug; }; let name = Punctuated::parse_separated_nonempty_with(input, Ident::parse_any)?; let value = if input.peek(Token![=]) { input.parse::<Token![=]>()?; if input.peek(Token![%]) { input.parse::<Token![%]>()?; kind = FieldKind::Display; } else if input.peek(Token![?]) { input.parse::<Token![?]>()?; kind = FieldKind::Debug; }; Some(input.parse()?) } else { None }; Ok(Self { name, value, kind }) } } impl ToTokens for Field { fn to_tokens(&self, tokens: &mut TokenStream) { if let Some(ref value) = self.value { let name = &self.name; let kind = &self.kind; tokens.extend(quote! 
{ #name = #kind#value }) } else if self.kind == FieldKind::Value { // XXX(eliza): I don't like that fields without values produce // empty fields rather than local variable shorthand...but, // we've released a version where field names without values in // `instrument` produce empty field values, so changing it now // is a breaking change. agh. let name = &self.name; tokens.extend(quote!(#name = tracing::field::Empty)) } else { self.kind.to_tokens(tokens); self.name.to_tokens(tokens); } } } impl ToTokens for FieldKind { fn to_tokens(&self, tokens: &mut TokenStream) { match self { FieldKind::Debug => tokens.extend(quote! { ? }), FieldKind::Display => tokens.extend(quote! { % }), _ => {} } } } #[derive(Clone, Debug)] enum Level { Str(LitStr), Int(LitInt), Path(Path), } impl Parse for Level { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _ = input.parse::<kw::level>()?; let _ = input.parse::<Token![=]>()?; let lookahead = input.lookahead1(); if lookahead.peek(LitStr) { Ok(Self::Str(input.parse()?)) } else if lookahead.peek(LitInt) { Ok(Self::Int(input.parse()?)) } else if lookahead.peek(Ident) { Ok(Self::Path(input.parse()?)) } else { Err(lookahead.error()) } } } mod kw { syn::custom_keyword!(fields); syn::custom_keyword!(skip); syn::custom_keyword!(skip_all); syn::custom_keyword!(level); syn::custom_keyword!(target); syn::custom_keyword!(parent); syn::custom_keyword!(follows_from); syn::custom_keyword!(name); syn::custom_keyword!(err); syn::custom_keyword!(ret); }
35.951691
94
0.523515
e8ee1409213f66c490b57d2087a63748fb10869f
39,575
#[cfg(feature = "quic")] use crate::check::check_message; use crate::check::{inappropriate_handshake_message, inappropriate_message}; use crate::cipher; use crate::conn::{ConnectionCommon, ConnectionRandoms}; use crate::error::Error; use crate::hash_hs::HandshakeHash; use crate::key::Certificate; use crate::key_schedule::{KeyScheduleTraffic, KeyScheduleTrafficWithClientFinishedPending}; #[cfg(feature = "logging")] use crate::log::{debug, trace, warn}; use crate::msgs::codec::Codec; use crate::msgs::enums::{AlertDescription, KeyUpdateRequest}; use crate::msgs::enums::{ContentType, HandshakeType, ProtocolVersion}; use crate::msgs::handshake::HandshakeMessagePayload; use crate::msgs::handshake::HandshakePayload; use crate::msgs::handshake::NewSessionTicketPayloadTLS13; use crate::msgs::message::{Message, MessagePayload}; use crate::msgs::persist; use crate::rand; use crate::server::ServerConfig; use crate::suites::Tls13CipherSuite; use crate::verify; #[cfg(feature = "quic")] use crate::{conn::Protocol, msgs::handshake::NewSessionTicketExtension}; use super::hs::{self, HandshakeHashOrBuffer, ServerContext}; use std::sync::Arc; use ring::constant_time; pub(super) use client_hello::CompleteClientHelloHandling; mod client_hello { use crate::key_schedule::{KeyScheduleEarly, KeyScheduleHandshake, KeyScheduleNonSecret}; use crate::kx; use crate::msgs::base::{Payload, PayloadU8}; use crate::msgs::ccs::ChangeCipherSpecPayload; use crate::msgs::enums::{Compression, PSKKeyExchangeMode}; use crate::msgs::enums::{NamedGroup, SignatureScheme}; use crate::msgs::handshake::CertReqExtension; use crate::msgs::handshake::CertificateEntry; use crate::msgs::handshake::CertificateExtension; use crate::msgs::handshake::CertificatePayloadTLS13; use crate::msgs::handshake::CertificateRequestPayloadTLS13; use crate::msgs::handshake::CertificateStatus; use crate::msgs::handshake::ClientHelloPayload; use crate::msgs::handshake::DigitallySignedStruct; use 
crate::msgs::handshake::HelloRetryExtension; use crate::msgs::handshake::HelloRetryRequest; use crate::msgs::handshake::KeyShareEntry; use crate::msgs::handshake::Random; use crate::msgs::handshake::ServerExtension; use crate::msgs::handshake::ServerHelloPayload; use crate::msgs::handshake::SessionID; #[cfg(feature = "quic")] use crate::quic; use crate::server::common::ActiveCertifiedKey; use crate::sign; use super::*; pub(in crate::server) struct CompleteClientHelloHandling { pub(in crate::server) config: Arc<ServerConfig>, pub(in crate::server) transcript: HandshakeHash, pub(in crate::server) suite: &'static Tls13CipherSuite, pub(in crate::server) randoms: ConnectionRandoms, pub(in crate::server) done_retry: bool, pub(in crate::server) send_ticket: bool, pub(in crate::server) extra_exts: Vec<ServerExtension>, } impl CompleteClientHelloHandling { fn check_binder( &self, suite: &'static Tls13CipherSuite, client_hello: &Message, psk: &[u8], binder: &[u8], ) -> bool { let binder_plaintext = match client_hello.payload { MessagePayload::Handshake(ref hmp) => hmp.get_encoding_for_binder_signing(), _ => unreachable!(), }; let handshake_hash = self .transcript .get_hash_given(&binder_plaintext); let key_schedule = KeyScheduleEarly::new(suite.hkdf_algorithm, &psk); let real_binder = key_schedule.resumption_psk_binder_key_and_sign_verify_data(&handshake_hash); constant_time::verify_slices_are_equal(real_binder.as_ref(), binder).is_ok() } fn attempt_tls13_ticket_decryption( &mut self, ticket: &[u8], ) -> Option<persist::ServerSessionValue> { if self.config.ticketer.enabled() { self.config .ticketer .decrypt(ticket) .and_then(|plain| persist::ServerSessionValue::read_bytes(&plain)) } else { self.config .session_storage .take(ticket) .and_then(|plain| persist::ServerSessionValue::read_bytes(&plain)) } } pub(in crate::server) fn handle_client_hello( mut self, cx: &mut ServerContext<'_>, server_key: ActiveCertifiedKey, chm: &Message, ) -> hs::NextStateOrError { let client_hello 
= require_handshake_msg!( chm, HandshakeType::ClientHello, HandshakePayload::ClientHello )?; if client_hello.compression_methods.len() != 1 { return Err(cx .common .illegal_param("client offered wrong compressions")); } let groups_ext = client_hello .get_namedgroups_extension() .ok_or_else(|| hs::incompatible(&mut cx.common, "client didn't describe groups"))?; let mut sigschemes_ext = client_hello .get_sigalgs_extension() .ok_or_else(|| { hs::incompatible(&mut cx.common, "client didn't describe sigschemes") })? .clone(); let tls13_schemes = sign::supported_sign_tls13(); sigschemes_ext.retain(|scheme| tls13_schemes.contains(scheme)); let shares_ext = client_hello .get_keyshare_extension() .ok_or_else(|| hs::incompatible(&mut cx.common, "client didn't send keyshares"))?; if client_hello.has_keyshare_extension_with_duplicates() { return Err(cx .common .illegal_param("client sent duplicate keyshares")); } // choose a share that we support let chosen_share = self .config .kx_groups .iter() .find_map(|group| { shares_ext .iter() .find(|share| share.group == group.name) }); let chosen_share = match chosen_share { Some(s) => s, None => { // We don't have a suitable key share. Choose a suitable group and // send a HelloRetryRequest. 
let retry_group_maybe = self .config .kx_groups .iter() .find(|group| groups_ext.contains(&group.name)) .cloned(); self.transcript.add_message(chm); if let Some(group) = retry_group_maybe { if self.done_retry { return Err(cx .common .illegal_param("did not follow retry request")); } emit_hello_retry_request( &mut self.transcript, self.suite, &mut cx.common, group.name, ); emit_fake_ccs(&mut cx.common); return Ok(Box::new(hs::ExpectClientHello { config: self.config, transcript: HandshakeHashOrBuffer::Hash(self.transcript), session_id: SessionID::empty(), using_ems: false, done_retry: true, send_ticket: self.send_ticket, extra_exts: self.extra_exts, })); } return Err(hs::incompatible( &mut cx.common, "no kx group overlap with client", )); } }; let mut chosen_psk_index = None; let mut resumedata = None; if let Some(psk_offer) = client_hello.get_psk() { if !client_hello.check_psk_ext_is_last() { return Err(cx .common .illegal_param("psk extension in wrong position")); } if psk_offer.binders.is_empty() { return Err(hs::decode_error( &mut cx.common, "psk extension missing binder", )); } if psk_offer.binders.len() != psk_offer.identities.len() { return Err(cx .common .illegal_param("psk extension mismatched ids/binders")); } for (i, psk_id) in psk_offer.identities.iter().enumerate() { let resume = match self .attempt_tls13_ticket_decryption(&psk_id.identity.0) .filter(|resumedata| { hs::can_resume(self.suite.into(), &cx.data.sni, false, resumedata) }) { Some(resume) => resume, None => continue, }; if !self.check_binder( self.suite, chm, &resume.master_secret.0, &psk_offer.binders[i].0, ) { cx.common .send_fatal_alert(AlertDescription::DecryptError); return Err(Error::PeerMisbehavedError( "client sent wrong binder".to_string(), )); } chosen_psk_index = Some(i); resumedata = Some(resume); break; } } if !client_hello.psk_mode_offered(PSKKeyExchangeMode::PSK_DHE_KE) { debug!("Client unwilling to resume, DHE_KE not offered"); self.send_ticket = false; chosen_psk_index = None; 
resumedata = None; } else { self.send_ticket = true; } if let Some(ref resume) = resumedata { cx.data.received_resumption_data = Some(resume.application_data.0.clone()); cx.data.client_cert_chain = resume.client_cert_chain.clone(); } let full_handshake = resumedata.is_none(); self.transcript.add_message(chm); let key_schedule = emit_server_hello( &mut self.transcript, &self.randoms, self.suite, cx, &client_hello.session_id, chosen_share, chosen_psk_index, resumedata .as_ref() .map(|x| &x.master_secret.0[..]), &self.config, )?; if !self.done_retry { emit_fake_ccs(&mut cx.common); } let (mut ocsp_response, mut sct_list) = (server_key.get_ocsp(), server_key.get_sct_list()); emit_encrypted_extensions( &mut self.transcript, self.suite, cx, &mut ocsp_response, &mut sct_list, client_hello, resumedata.as_ref(), self.extra_exts, &self.config, )?; let doing_client_auth = if full_handshake { let client_auth = emit_certificate_req_tls13(&mut self.transcript, cx, &self.config)?; emit_certificate_tls13( &mut self.transcript, &mut cx.common, server_key.get_cert(), ocsp_response, sct_list, ); emit_certificate_verify_tls13( &mut self.transcript, &mut cx.common, server_key.get_key(), &sigschemes_ext, )?; client_auth } else { false }; cx.common.check_aligned_handshake()?; let key_schedule_traffic = emit_finished_tls13( &mut self.transcript, self.suite, &self.randoms, cx, key_schedule, &self.config, ); if doing_client_auth { Ok(Box::new(ExpectCertificate { config: self.config, transcript: self.transcript, suite: self.suite, key_schedule: key_schedule_traffic, send_ticket: self.send_ticket, })) } else { Ok(Box::new(ExpectFinished { config: self.config, transcript: self.transcript, suite: self.suite, key_schedule: key_schedule_traffic, send_ticket: self.send_ticket, })) } } } fn emit_server_hello( transcript: &mut HandshakeHash, randoms: &ConnectionRandoms, suite: &'static Tls13CipherSuite, cx: &mut ServerContext<'_>, session_id: &SessionID, share: &KeyShareEntry, chosen_psk_idx: 
Option<usize>, resuming_psk: Option<&[u8]>, config: &ServerConfig, ) -> Result<KeyScheduleHandshake, Error> { let mut extensions = Vec::new(); // Do key exchange let kxr = kx::KeyExchange::choose(share.group, &config.kx_groups) .and_then(kx::KeyExchange::start) .ok_or(Error::FailedToGetRandomBytes)? .complete(&share.payload.0) .ok_or_else(|| Error::PeerMisbehavedError("key exchange failed".to_string()))?; let kse = KeyShareEntry::new(share.group, kxr.pubkey.as_ref()); extensions.push(ServerExtension::KeyShare(kse)); extensions.push(ServerExtension::SupportedVersions(ProtocolVersion::TLSv1_3)); if let Some(psk_idx) = chosen_psk_idx { extensions.push(ServerExtension::PresharedKey(psk_idx as u16)); } let sh = Message { version: ProtocolVersion::TLSv1_2, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::ServerHello, payload: HandshakePayload::ServerHello(ServerHelloPayload { legacy_version: ProtocolVersion::TLSv1_2, random: Random::from(randoms.server), session_id: *session_id, cipher_suite: suite.common.suite, compression_method: Compression::Null, extensions, }), }), }; cx.common.check_aligned_handshake()?; #[cfg(feature = "quic")] let client_hello_hash = transcript.get_hash_given(&[]); trace!("sending server hello {:?}", sh); transcript.add_message(&sh); cx.common.send_msg(sh, false); // Start key schedule let key_schedule = if let Some(psk) = resuming_psk { let early_key_schedule = KeyScheduleEarly::new(suite.hkdf_algorithm, psk); #[cfg(feature = "quic")] { if cx.common.protocol == Protocol::Quic { let client_early_traffic_secret = early_key_schedule .client_early_traffic_secret( &client_hello_hash, &*config.key_log, &randoms.client, ); // If 0-RTT should be rejected, this will be clobbered by ExtensionProcessing // before the application can see. 
cx.common.quic.early_secret = Some(client_early_traffic_secret); } } early_key_schedule.into_handshake(&kxr.shared_secret) } else { KeyScheduleNonSecret::new(suite.hkdf_algorithm).into_handshake(&kxr.shared_secret) }; let handshake_hash = transcript.get_current_hash(); let (key_schedule, client_key, server_key) = key_schedule.derive_handshake_secrets( handshake_hash, &*config.key_log, &randoms.client, ); // Encrypt with our own key, decrypt with the peer's key cx.common .record_layer .set_message_encrypter(cipher::new_tls13_write(suite, &server_key)); cx.common .record_layer .set_message_decrypter(cipher::new_tls13_read(suite, &client_key)); #[cfg(feature = "quic")] { cx.common.quic.hs_secrets = Some(quic::Secrets { client: client_key, server: server_key, }); } Ok(key_schedule) } fn emit_fake_ccs(common: &mut ConnectionCommon) { if common.is_quic() { return; } let m = Message { version: ProtocolVersion::TLSv1_2, payload: MessagePayload::ChangeCipherSpec(ChangeCipherSpecPayload {}), }; common.send_msg(m, false); } fn emit_hello_retry_request( transcript: &mut HandshakeHash, suite: &'static Tls13CipherSuite, common: &mut ConnectionCommon, group: NamedGroup, ) { let mut req = HelloRetryRequest { legacy_version: ProtocolVersion::TLSv1_2, session_id: SessionID::empty(), cipher_suite: suite.common.suite, extensions: Vec::new(), }; req.extensions .push(HelloRetryExtension::KeyShare(group)); req.extensions .push(HelloRetryExtension::SupportedVersions( ProtocolVersion::TLSv1_3, )); let m = Message { version: ProtocolVersion::TLSv1_2, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::HelloRetryRequest, payload: HandshakePayload::HelloRetryRequest(req), }), }; trace!("Requesting retry {:?}", m); transcript.rollup_for_hrr(); transcript.add_message(&m); common.send_msg(m, false); } fn emit_encrypted_extensions( transcript: &mut HandshakeHash, suite: &'static Tls13CipherSuite, cx: &mut ServerContext<'_>, ocsp_response: &mut Option<&[u8]>, sct_list: 
&mut Option<&[u8]>, hello: &ClientHelloPayload, resumedata: Option<&persist::ServerSessionValue>, extra_exts: Vec<ServerExtension>, config: &ServerConfig, ) -> Result<(), Error> { let mut ep = hs::ExtensionProcessing::new(); ep.process_common( config, cx, suite.into(), ocsp_response, sct_list, hello, resumedata, extra_exts, )?; let ee = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::EncryptedExtensions, payload: HandshakePayload::EncryptedExtensions(ep.exts), }), }; trace!("sending encrypted extensions {:?}", ee); transcript.add_message(&ee); cx.common.send_msg(ee, true); Ok(()) } fn emit_certificate_req_tls13( transcript: &mut HandshakeHash, cx: &mut ServerContext<'_>, config: &ServerConfig, ) -> Result<bool, Error> { if !config.verifier.offer_client_auth() { return Ok(false); } let mut cr = CertificateRequestPayloadTLS13 { context: PayloadU8::empty(), extensions: Vec::new(), }; let schemes = config .verifier .supported_verify_schemes(); cr.extensions .push(CertReqExtension::SignatureAlgorithms(schemes.to_vec())); let names = config .verifier .client_auth_root_subjects(cx.data.get_sni()) .ok_or_else(|| { debug!("could not determine root subjects based on SNI"); cx.common .send_fatal_alert(AlertDescription::AccessDenied); Error::General("client rejected by client_auth_root_subjects".into()) })?; if !names.is_empty() { cr.extensions .push(CertReqExtension::AuthorityNames(names)); } let m = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::CertificateRequest, payload: HandshakePayload::CertificateRequestTLS13(cr), }), }; trace!("Sending CertificateRequest {:?}", m); transcript.add_message(&m); cx.common.send_msg(m, true); Ok(true) } fn emit_certificate_tls13( transcript: &mut HandshakeHash, common: &mut ConnectionCommon, cert_chain: &[Certificate], ocsp_response: Option<&[u8]>, sct_list: Option<&[u8]>, ) { let mut 
cert_entries = vec![]; for cert in cert_chain { let entry = CertificateEntry { cert: cert.to_owned(), exts: Vec::new(), }; cert_entries.push(entry); } if let Some(end_entity_cert) = cert_entries.first_mut() { // Apply OCSP response to first certificate (we don't support OCSP // except for leaf certs). if let Some(ocsp) = ocsp_response { let cst = CertificateStatus::new(ocsp.to_owned()); end_entity_cert .exts .push(CertificateExtension::CertificateStatus(cst)); } // Likewise, SCT if let Some(sct_list) = sct_list { end_entity_cert .exts .push(CertificateExtension::make_sct(sct_list.to_owned())); } } let cert_body = CertificatePayloadTLS13::new(cert_entries); let c = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::Certificate, payload: HandshakePayload::CertificateTLS13(cert_body), }), }; trace!("sending certificate {:?}", c); transcript.add_message(&c); common.send_msg(c, true); } fn emit_certificate_verify_tls13( transcript: &mut HandshakeHash, common: &mut ConnectionCommon, signing_key: &dyn sign::SigningKey, schemes: &[SignatureScheme], ) -> Result<(), Error> { let message = verify::construct_tls13_server_verify_message(&transcript.get_current_hash()); let signer = signing_key .choose_scheme(schemes) .ok_or_else(|| hs::incompatible(common, "no overlapping sigschemes"))?; let scheme = signer.get_scheme(); let sig = signer.sign(&message)?; let cv = DigitallySignedStruct::new(scheme, sig); let m = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::CertificateVerify, payload: HandshakePayload::CertificateVerify(cv), }), }; trace!("sending certificate-verify {:?}", m); transcript.add_message(&m); common.send_msg(m, true); Ok(()) } fn emit_finished_tls13( transcript: &mut HandshakeHash, suite: &'static Tls13CipherSuite, randoms: &ConnectionRandoms, cx: &mut ServerContext<'_>, key_schedule: KeyScheduleHandshake, config: 
&ServerConfig, ) -> KeyScheduleTrafficWithClientFinishedPending { let handshake_hash = transcript.get_current_hash(); let verify_data = key_schedule.sign_server_finish(&handshake_hash); let verify_data_payload = Payload::new(verify_data.as_ref()); let m = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::Finished, payload: HandshakePayload::Finished(verify_data_payload), }), }; trace!("sending finished {:?}", m); transcript.add_message(&m); let hash_at_server_fin = transcript.get_current_hash(); cx.common.send_msg(m, true); // Now move to application data keys. Read key change is deferred until // the Finish message is received & validated. let (key_schedule_traffic, _client_key, server_key) = key_schedule .into_traffic_with_client_finished_pending( hash_at_server_fin, &*config.key_log, &randoms.client, ); cx.common .record_layer .set_message_encrypter(cipher::new_tls13_write(suite, &server_key)); #[cfg(feature = "quic")] { cx.common.quic.traffic_secrets = Some(quic::Secrets { client: _client_key, server: server_key, }); } key_schedule_traffic } } struct ExpectCertificate { config: Arc<ServerConfig>, transcript: HandshakeHash, suite: &'static Tls13CipherSuite, key_schedule: KeyScheduleTrafficWithClientFinishedPending, send_ticket: bool, } impl hs::State for ExpectCertificate { fn handle(mut self: Box<Self>, cx: &mut ServerContext<'_>, m: Message) -> hs::NextStateOrError { let certp = require_handshake_msg!( m, HandshakeType::Certificate, HandshakePayload::CertificateTLS13 )?; self.transcript.add_message(&m); // We don't send any CertificateRequest extensions, so any extensions // here are illegal. 
if certp.any_entry_has_extension() { return Err(Error::PeerMisbehavedError( "client sent unsolicited cert extension".to_string(), )); } let client_cert = certp.convert(); let mandatory = self .config .verifier .client_auth_mandatory(cx.data.get_sni()) .ok_or_else(|| { debug!("could not determine if client auth is mandatory based on SNI"); cx.common .send_fatal_alert(AlertDescription::AccessDenied); Error::General("client rejected by client_auth_mandatory".into()) })?; let (end_entity, intermediates) = match client_cert.split_first() { None => { if !mandatory { debug!("client auth requested but no certificate supplied"); self.transcript.abandon_client_auth(); return Ok(Box::new(ExpectFinished { config: self.config, suite: self.suite, key_schedule: self.key_schedule, transcript: self.transcript, send_ticket: self.send_ticket, })); } cx.common .send_fatal_alert(AlertDescription::CertificateRequired); return Err(Error::NoCertificatesPresented); } Some(chain) => chain, }; let now = std::time::SystemTime::now(); self.config .verifier .verify_client_cert(end_entity, intermediates, cx.data.get_sni(), now) .map_err(|err| { hs::incompatible(&mut cx.common, "certificate invalid"); err })?; Ok(Box::new(ExpectCertificateVerify { config: self.config, suite: self.suite, transcript: self.transcript, key_schedule: self.key_schedule, client_cert, send_ticket: self.send_ticket, })) } } struct ExpectCertificateVerify { config: Arc<ServerConfig>, transcript: HandshakeHash, suite: &'static Tls13CipherSuite, key_schedule: KeyScheduleTrafficWithClientFinishedPending, client_cert: Vec<Certificate>, send_ticket: bool, } impl hs::State for ExpectCertificateVerify { fn handle(mut self: Box<Self>, cx: &mut ServerContext<'_>, m: Message) -> hs::NextStateOrError { let rc = { let sig = require_handshake_msg!( m, HandshakeType::CertificateVerify, HandshakePayload::CertificateVerify )?; let handshake_hash = self.transcript.get_current_hash(); self.transcript.abandon_client_auth(); let certs = 
&self.client_cert; let msg = verify::construct_tls13_client_verify_message(&handshake_hash); self.config .verifier .verify_tls13_signature(&msg, &certs[0], sig) }; if let Err(e) = rc { cx.common .send_fatal_alert(AlertDescription::AccessDenied); return Err(e); } trace!("client CertificateVerify OK"); cx.data.client_cert_chain = Some(self.client_cert); self.transcript.add_message(&m); Ok(Box::new(ExpectFinished { config: self.config, suite: self.suite, key_schedule: self.key_schedule, transcript: self.transcript, send_ticket: self.send_ticket, })) } } // --- Process client's Finished --- fn get_server_session_value( transcript: &mut HandshakeHash, suite: &'static Tls13CipherSuite, key_schedule: &KeyScheduleTraffic, cx: &ServerContext<'_>, nonce: &[u8], ) -> persist::ServerSessionValue { let version = ProtocolVersion::TLSv1_3; let handshake_hash = transcript.get_current_hash(); let secret = key_schedule.resumption_master_secret_and_derive_ticket_psk(&handshake_hash, nonce); persist::ServerSessionValue::new( cx.data.get_sni(), version, suite.common.suite, secret, &cx.data.client_cert_chain, cx.common.alpn_protocol.clone(), cx.data.resumption_data.clone(), ) } struct ExpectFinished { config: Arc<ServerConfig>, transcript: HandshakeHash, suite: &'static Tls13CipherSuite, key_schedule: KeyScheduleTrafficWithClientFinishedPending, send_ticket: bool, } impl ExpectFinished { fn emit_ticket( transcript: &mut HandshakeHash, suite: &'static Tls13CipherSuite, cx: &mut ServerContext<'_>, key_schedule: &KeyScheduleTraffic, config: &ServerConfig, ) -> Result<(), rand::GetRandomFailed> { let nonce = rand::random_vec(32)?; let plain = get_server_session_value(transcript, suite, key_schedule, cx, &nonce).get_encoding(); let stateless = config.ticketer.enabled(); let (ticket, lifetime) = if stateless { let ticket = match config.ticketer.encrypt(&plain) { Some(t) => t, None => return Ok(()), }; (ticket, config.ticketer.lifetime()) } else { let id = rand::random_vec(32)?; let stored = 
config .session_storage .put(id.clone(), plain); if !stored { trace!("resumption not available; not issuing ticket"); return Ok(()); } let stateful_lifetime = 24 * 60 * 60; // this is a bit of a punt (id, stateful_lifetime) }; let age_add = rand::random_u32()?; // nb, we don't do 0-RTT data, so whatever #[allow(unused_mut)] let mut payload = NewSessionTicketPayloadTLS13::new(lifetime, age_add, nonce, ticket); #[cfg(feature = "quic")] { if config.max_early_data_size > 0 && cx.common.protocol == Protocol::Quic { payload .exts .push(NewSessionTicketExtension::EarlyData( config.max_early_data_size, )); } } let m = Message { version: ProtocolVersion::TLSv1_3, payload: MessagePayload::Handshake(HandshakeMessagePayload { typ: HandshakeType::NewSessionTicket, payload: HandshakePayload::NewSessionTicketTLS13(payload), }), }; trace!("sending new ticket {:?} (stateless: {})", m, stateless); transcript.add_message(&m); cx.common.send_msg(m, true); Ok(()) } } impl hs::State for ExpectFinished { fn handle(mut self: Box<Self>, cx: &mut ServerContext<'_>, m: Message) -> hs::NextStateOrError { let finished = require_handshake_msg!(m, HandshakeType::Finished, HandshakePayload::Finished)?; let handshake_hash = self.transcript.get_current_hash(); let (key_schedule_traffic, expect_verify_data, client_key) = self .key_schedule .sign_client_finish(&handshake_hash); let fin = constant_time::verify_slices_are_equal(expect_verify_data.as_ref(), &finished.0) .map_err(|_| { cx.common .send_fatal_alert(AlertDescription::DecryptError); warn!("Finished wrong"); Error::DecryptError }) .map(|_| verify::FinishedMessageVerified::assertion())?; // nb. future derivations include Client Finished, but not the // main application data keying. self.transcript.add_message(&m); cx.common.check_aligned_handshake()?; // Install keying to read future messages. 
cx.common .record_layer .set_message_decrypter(cipher::new_tls13_read(self.suite, &client_key)); if self.send_ticket { Self::emit_ticket( &mut self.transcript, self.suite, cx, &key_schedule_traffic, &self.config, )?; } cx.common.start_traffic(); #[cfg(feature = "quic")] { if cx.common.protocol == Protocol::Quic { return Ok(Box::new(ExpectQuicTraffic { key_schedule: key_schedule_traffic, _fin_verified: fin, })); } } Ok(Box::new(ExpectTraffic { suite: self.suite, key_schedule: key_schedule_traffic, want_write_key_update: false, _fin_verified: fin, })) } } // --- Process traffic --- struct ExpectTraffic { suite: &'static Tls13CipherSuite, key_schedule: KeyScheduleTraffic, want_write_key_update: bool, _fin_verified: verify::FinishedMessageVerified, } impl ExpectTraffic { fn handle_key_update( &mut self, common: &mut ConnectionCommon, kur: &KeyUpdateRequest, ) -> Result<(), Error> { #[cfg(feature = "quic")] { if let Protocol::Quic = common.protocol { common.send_fatal_alert(AlertDescription::UnexpectedMessage); let msg = "KeyUpdate received in QUIC connection".to_string(); warn!("{}", msg); return Err(Error::PeerMisbehavedError(msg)); } } common.check_aligned_handshake()?; match kur { KeyUpdateRequest::UpdateNotRequested => {} KeyUpdateRequest::UpdateRequested => { self.want_write_key_update = true; } _ => { common.send_fatal_alert(AlertDescription::IllegalParameter); return Err(Error::CorruptMessagePayload(ContentType::Handshake)); } } // Update our read-side keys. 
let new_read_key = self .key_schedule .next_client_application_traffic_secret(); common .record_layer .set_message_decrypter(cipher::new_tls13_read(self.suite, &new_read_key)); Ok(()) } } impl hs::State for ExpectTraffic { fn handle(mut self: Box<Self>, cx: &mut ServerContext, m: Message) -> hs::NextStateOrError { match m.payload { MessagePayload::ApplicationData(payload) => cx .common .take_received_plaintext(payload), MessagePayload::Handshake(payload) => match payload.payload { HandshakePayload::KeyUpdate(key_update) => { self.handle_key_update(cx.common, &key_update)? } _ => { return Err(inappropriate_handshake_message( &payload, &[HandshakeType::KeyUpdate], )); } }, _ => { return Err(inappropriate_message( &m, &[ContentType::ApplicationData, ContentType::Handshake], )); } } Ok(self) } fn export_keying_material( &self, output: &mut [u8], label: &[u8], context: Option<&[u8]>, ) -> Result<(), Error> { self.key_schedule .export_keying_material(output, label, context) } fn perhaps_write_key_update(&mut self, common: &mut ConnectionCommon) { if self.want_write_key_update { self.want_write_key_update = false; common.send_msg_encrypt(Message::build_key_update_notify().into()); let write_key = self .key_schedule .next_server_application_traffic_secret(); common .record_layer .set_message_encrypter(cipher::new_tls13_write(self.suite, &write_key)); } } } #[cfg(feature = "quic")] struct ExpectQuicTraffic { key_schedule: KeyScheduleTraffic, _fin_verified: verify::FinishedMessageVerified, } #[cfg(feature = "quic")] impl hs::State for ExpectQuicTraffic { fn handle(self: Box<Self>, _cx: &mut ServerContext<'_>, m: Message) -> hs::NextStateOrError { // reject all messages check_message(&m, &[], &[])?; unreachable!(); } fn export_keying_material( &self, output: &mut [u8], label: &[u8], context: Option<&[u8]>, ) -> Result<(), Error> { self.key_schedule .export_keying_material(output, label, context) } }
34.991158
100
0.543373
22b6c357d9213226a9db771a510709f8912c44e9
18,432
use std::borrow::Borrow; use std::collections; use std::fs; use crate::support::{lines_match, paths, project}; use cargo::core::{enable_nightly_features, Shell}; use cargo::util::config::{self, Config}; use cargo::util::toml::{self, VecStringOrBool as VSOB}; use serde::Deserialize; #[test] fn read_env_vars_for_config() { let p = project() .file( "Cargo.toml", r#" [package] name = "foo" authors = [] version = "0.0.0" build = "build.rs" "#, ) .file("src/lib.rs", "") .file( "build.rs", r#" use std::env; fn main() { assert_eq!(env::var("NUM_JOBS").unwrap(), "100"); } "#, ) .build(); p.cargo("build").env("CARGO_BUILD_JOBS", "100").run(); } fn write_config(config: &str) { let path = paths::root().join(".cargo/config"); fs::create_dir_all(path.parent().unwrap()).unwrap(); fs::write(path, config).unwrap(); } fn new_config(env: &[(&str, &str)]) -> Config { enable_nightly_features(); // -Z advanced-env let output = Box::new(fs::File::create(paths::root().join("shell.out")).unwrap()); let shell = Shell::from_write(output); let cwd = paths::root(); let homedir = paths::home(); let env = env .iter() .map(|(k, v)| (k.to_string(), v.to_string())) .collect(); let mut config = Config::new(shell, cwd, homedir); config.set_env(env); config .configure( 0, None, &None, false, false, false, &None, &["advanced-env".into()], ) .unwrap(); config } fn assert_error<E: Borrow<failure::Error>>(error: E, msgs: &str) { let causes = error .borrow() .iter_chain() .map(|e| e.to_string()) .collect::<Vec<_>>() .join("\n"); if !lines_match(msgs, &causes) { panic!( "Did not find expected:\n{}\nActual error:\n{}\n", msgs, causes ); } } #[test] fn get_config() { write_config( "\ [S] f1 = 123 ", ); let config = new_config(&[]); #[derive(Debug, Deserialize, Eq, PartialEq)] struct S { f1: Option<i64>, } let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: Some(123) }); let config = new_config(&[("CARGO_S_F1", "456")]); let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: Some(456) }); } #[test] 
fn config_unused_fields() { write_config( "\ [S] unused = 456 ", ); let config = new_config(&[("CARGO_S_UNUSED2", "1"), ("CARGO_S2_UNUSED", "2")]); #[derive(Debug, Deserialize, Eq, PartialEq)] struct S { f1: Option<i64>, } // This prints a warning (verified below). let s: S = config.get("S").unwrap(); assert_eq!(s, S { f1: None }); // This does not print anything, we cannot easily/reliably warn for // environment variables. let s: S = config.get("S2").unwrap(); assert_eq!(s, S { f1: None }); // Verify the warnings. drop(config); // Paranoid about flushing the file. let path = paths::root().join("shell.out"); let output = fs::read_to_string(path).unwrap(); let expected = "\ warning: unused key `S.unused` in config file `[..]/.cargo/config` "; if !lines_match(expected, &output) { panic!( "Did not find expected:\n{}\nActual error:\n{}\n", expected, output ); } } #[test] fn config_load_toml_profile() { write_config( "\ [profile.dev] opt-level = 's' lto = true codegen-units=4 debug = true debug-assertions = true rpath = true panic = 'abort' overflow-checks = true incremental = true [profile.dev.build-override] opt-level = 1 [profile.dev.overrides.bar] codegen-units = 9 ", ); let config = new_config(&[ ("CARGO_PROFILE_DEV_CODEGEN_UNITS", "5"), ("CARGO_PROFILE_DEV_BUILD_OVERRIDE_CODEGEN_UNITS", "11"), ("CARGO_PROFILE_DEV_OVERRIDES_env_CODEGEN_UNITS", "13"), ("CARGO_PROFILE_DEV_OVERRIDES_bar_OPT_LEVEL", "2"), ]); // TODO: don't use actual `tomlprofile`. 
let p: toml::TomlProfile = config.get("profile.dev").unwrap(); let mut overrides = collections::BTreeMap::new(); let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("bar").unwrap()); let o_profile = toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("2".to_string())), lto: None, codegen_units: Some(9), debug: None, debug_assertions: None, rpath: None, panic: None, overflow_checks: None, incremental: None, overrides: None, build_override: None, }; overrides.insert(key, o_profile); let key = toml::ProfilePackageSpec::Spec(::cargo::core::PackageIdSpec::parse("env").unwrap()); let o_profile = toml::TomlProfile { opt_level: None, lto: None, codegen_units: Some(13), debug: None, debug_assertions: None, rpath: None, panic: None, overflow_checks: None, incremental: None, overrides: None, build_override: None, }; overrides.insert(key, o_profile); assert_eq!( p, toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("s".to_string())), lto: Some(toml::StringOrBool::Bool(true)), codegen_units: Some(5), debug: Some(toml::U32OrBool::Bool(true)), debug_assertions: Some(true), rpath: Some(true), panic: Some("abort".to_string()), overflow_checks: Some(true), incremental: Some(true), overrides: Some(overrides), build_override: Some(Box::new(toml::TomlProfile { opt_level: Some(toml::TomlOptLevel("1".to_string())), lto: None, codegen_units: Some(11), debug: None, debug_assertions: None, rpath: None, panic: None, overflow_checks: None, incremental: None, overrides: None, build_override: None })) } ); } #[test] fn config_deserialize_any() { // Some tests to exercise deserialize_any for deserializers that need to // be told the format. 
write_config( "\ a = true b = ['b'] c = ['c'] ", ); let config = new_config(&[ ("CARGO_ENVB", "false"), ("CARGO_C", "['d']"), ("CARGO_ENVL", "['a', 'b']"), ]); let a = config.get::<VSOB>("a").unwrap(); match a { VSOB::VecString(_) => panic!("expected bool"), VSOB::Bool(b) => assert_eq!(b, true), } let b = config.get::<VSOB>("b").unwrap(); match b { VSOB::VecString(l) => assert_eq!(l, vec!["b".to_string()]), VSOB::Bool(_) => panic!("expected list"), } let c = config.get::<VSOB>("c").unwrap(); match c { VSOB::VecString(l) => assert_eq!(l, vec!["c".to_string(), "d".to_string()]), VSOB::Bool(_) => panic!("expected list"), } let envb = config.get::<VSOB>("envb").unwrap(); match envb { VSOB::VecString(_) => panic!("expected bool"), VSOB::Bool(b) => assert_eq!(b, false), } let envl = config.get::<VSOB>("envl").unwrap(); match envl { VSOB::VecString(l) => assert_eq!(l, vec!["a".to_string(), "b".to_string()]), VSOB::Bool(_) => panic!("expected list"), } } #[test] fn config_toml_errors() { write_config( "\ [profile.dev] opt-level = 'foo' ", ); let config = new_config(&[]); assert_error( config.get::<toml::TomlProfile>("profile.dev").unwrap_err(), "error in [..]/.cargo/config: \ could not load config key `profile.dev.opt-level`: \ must be an integer, `z`, or `s`, but found: foo", ); let config = new_config(&[("CARGO_PROFILE_DEV_OPT_LEVEL", "asdf")]); assert_error( config.get::<toml::TomlProfile>("profile.dev").unwrap_err(), "error in environment variable `CARGO_PROFILE_DEV_OPT_LEVEL`: \ could not load config key `profile.dev.opt-level`: \ must be an integer, `z`, or `s`, but found: asdf", ); } #[test] fn load_nested() { write_config( "\ [nest.foo] f1 = 1 f2 = 2 [nest.bar] asdf = 3 ", ); let config = new_config(&[ ("CARGO_NEST_foo_f2", "3"), ("CARGO_NESTE_foo_f1", "1"), ("CARGO_NESTE_foo_f2", "3"), ("CARGO_NESTE_bar_asdf", "3"), ]); type Nested = collections::HashMap<String, collections::HashMap<String, u8>>; let n: Nested = config.get("nest").unwrap(); let mut expected = 
collections::HashMap::new(); let mut foo = collections::HashMap::new(); foo.insert("f1".to_string(), 1); foo.insert("f2".to_string(), 3); expected.insert("foo".to_string(), foo); let mut bar = collections::HashMap::new(); bar.insert("asdf".to_string(), 3); expected.insert("bar".to_string(), bar); assert_eq!(n, expected); let n: Nested = config.get("neste").unwrap(); assert_eq!(n, expected); } #[test] fn get_errors() { write_config( "\ [S] f1 = 123 f2 = 'asdf' big = 123456789 ", ); let config = new_config(&[("CARGO_E_S", "asdf"), ("CARGO_E_BIG", "123456789")]); assert_error( config.get::<i64>("foo").unwrap_err(), "missing config key `foo`", ); assert_error( config.get::<i64>("foo.bar").unwrap_err(), "missing config key `foo.bar`", ); assert_error( config.get::<i64>("S.f2").unwrap_err(), "error in [..]/.cargo/config: `S.f2` expected an integer, but found a string", ); assert_error( config.get::<u8>("S.big").unwrap_err(), "error in [..].cargo/config: could not load config key `S.big`: \ invalid value: integer `123456789`, expected u8", ); // Environment variable type errors. 
assert_error( config.get::<i64>("e.s").unwrap_err(), "error in environment variable `CARGO_E_S`: invalid digit found in string", ); assert_error( config.get::<i8>("e.big").unwrap_err(), "error in environment variable `CARGO_E_BIG`: \ could not load config key `e.big`: \ invalid value: integer `123456789`, expected i8", ); #[derive(Debug, Deserialize)] struct S { f1: i64, f2: String, f3: i64, big: i64, } assert_error( config.get::<S>("S").unwrap_err(), "missing config key `S.f3`", ); } #[test] fn config_get_option() { write_config( "\ [foo] f1 = 1 ", ); let config = new_config(&[("CARGO_BAR_ASDF", "3")]); assert_eq!(config.get::<Option<i32>>("a").unwrap(), None); assert_eq!(config.get::<Option<i32>>("a.b").unwrap(), None); assert_eq!(config.get::<Option<i32>>("foo.f1").unwrap(), Some(1)); assert_eq!(config.get::<Option<i32>>("bar.asdf").unwrap(), Some(3)); assert_eq!(config.get::<Option<i32>>("bar.zzzz").unwrap(), None); } #[test] fn config_bad_toml() { write_config("asdf"); let config = new_config(&[]); assert_error( config.get::<i32>("foo").unwrap_err(), "\ could not load Cargo configuration Caused by: could not parse TOML configuration in `[..]/.cargo/config` Caused by: could not parse input as TOML Caused by: expected an equals, found eof at line 1", ); } #[test] fn config_get_list() { write_config( "\ l1 = [] l2 = ['one', 'two'] l3 = 123 l4 = ['one', 'two'] [nested] l = ['x'] [nested2] l = ['y'] [nested-empty] ", ); type L = Vec<String>; let config = new_config(&[ ("CARGO_L4", "['three', 'four']"), ("CARGO_L5", "['a']"), ("CARGO_ENV_EMPTY", "[]"), ("CARGO_ENV_BLANK", ""), ("CARGO_ENV_NUM", "1"), ("CARGO_ENV_NUM_LIST", "[1]"), ("CARGO_ENV_TEXT", "asdf"), ("CARGO_LEPAIR", "['a', 'b']"), ("CARGO_NESTED2_L", "['z']"), ("CARGO_NESTEDE_L", "['env']"), ("CARGO_BAD_ENV", "[zzz]"), ]); assert_eq!(config.get::<L>("unset").unwrap(), vec![] as Vec<String>); assert_eq!(config.get::<L>("l1").unwrap(), vec![] as Vec<String>); assert_eq!(config.get::<L>("l2").unwrap(), 
vec!["one", "two"]); assert_error( config.get::<L>("l3").unwrap_err(), "\ invalid configuration for key `l3` expected a list, but found a integer for `l3` in [..]/.cargo/config", ); assert_eq!( config.get::<L>("l4").unwrap(), vec!["one", "two", "three", "four"] ); assert_eq!(config.get::<L>("l5").unwrap(), vec!["a"]); assert_eq!(config.get::<L>("env-empty").unwrap(), vec![] as Vec<String>); assert_error( config.get::<L>("env-blank").unwrap_err(), "error in environment variable `CARGO_ENV_BLANK`: \ should have TOML list syntax, found ``", ); assert_error( config.get::<L>("env-num").unwrap_err(), "error in environment variable `CARGO_ENV_NUM`: \ should have TOML list syntax, found `1`", ); assert_error( config.get::<L>("env-num-list").unwrap_err(), "error in environment variable `CARGO_ENV_NUM_LIST`: \ expected string, found integer", ); assert_error( config.get::<L>("env-text").unwrap_err(), "error in environment variable `CARGO_ENV_TEXT`: \ should have TOML list syntax, found `asdf`", ); // "invalid number" here isn't the best error, but I think it's just toml.rs. assert_error( config.get::<L>("bad-env").unwrap_err(), "error in environment variable `CARGO_BAD_ENV`: \ could not parse TOML list: invalid number at line 1", ); // Try some other sequence-like types. assert_eq!( config .get::<(String, String, String, String)>("l4") .unwrap(), ( "one".to_string(), "two".to_string(), "three".to_string(), "four".to_string() ) ); assert_eq!(config.get::<(String,)>("l5").unwrap(), ("a".to_string(),)); // Tuple struct #[derive(Debug, Deserialize, Eq, PartialEq)] struct TupS(String, String); assert_eq!( config.get::<TupS>("lepair").unwrap(), TupS("a".to_string(), "b".to_string()) ); // Nested with an option. 
#[derive(Debug, Deserialize, Eq, PartialEq)] struct S { l: Option<Vec<String>>, } assert_eq!(config.get::<S>("nested-empty").unwrap(), S { l: None }); assert_eq!( config.get::<S>("nested").unwrap(), S { l: Some(vec!["x".to_string()]), } ); assert_eq!( config.get::<S>("nested2").unwrap(), S { l: Some(vec!["y".to_string(), "z".to_string()]), } ); assert_eq!( config.get::<S>("nestede").unwrap(), S { l: Some(vec!["env".to_string()]), } ); } #[test] fn config_get_other_types() { write_config( "\ ns = 123 ns2 = 456 ", ); let config = new_config(&[("CARGO_NSE", "987"), ("CARGO_NS2", "654")]); #[derive(Debug, Deserialize, Eq, PartialEq)] struct NewS(i32); assert_eq!(config.get::<NewS>("ns").unwrap(), NewS(123)); assert_eq!(config.get::<NewS>("ns2").unwrap(), NewS(654)); assert_eq!(config.get::<NewS>("nse").unwrap(), NewS(987)); assert_error( config.get::<NewS>("unset").unwrap_err(), "missing config key `unset`", ); } #[test] fn config_relative_path() { write_config(&format!( "\ p1 = 'foo/bar' p2 = '../abc' p3 = 'b/c' abs = '{}' ", paths::home().display(), )); let config = new_config(&[("CARGO_EPATH", "a/b"), ("CARGO_P3", "d/e")]); assert_eq!( config .get::<config::ConfigRelativePath>("p1") .unwrap() .path(), paths::root().join("foo/bar") ); assert_eq!( config .get::<config::ConfigRelativePath>("p2") .unwrap() .path(), paths::root().join("../abc") ); assert_eq!( config .get::<config::ConfigRelativePath>("p3") .unwrap() .path(), paths::root().join("d/e") ); assert_eq!( config .get::<config::ConfigRelativePath>("abs") .unwrap() .path(), paths::home() ); assert_eq!( config .get::<config::ConfigRelativePath>("epath") .unwrap() .path(), paths::root().join("a/b") ); } #[test] fn config_get_integers() { write_config( "\ npos = 123456789 nneg = -123456789 i64max = 9223372036854775807 ", ); let config = new_config(&[ ("CARGO_EPOS", "123456789"), ("CARGO_ENEG", "-1"), ("CARGO_EI64MAX", "9223372036854775807"), ]); assert_eq!( config.get::<u64>("i64max").unwrap(), 
9_223_372_036_854_775_807 ); assert_eq!( config.get::<i64>("i64max").unwrap(), 9_223_372_036_854_775_807 ); assert_eq!( config.get::<u64>("ei64max").unwrap(), 9_223_372_036_854_775_807 ); assert_eq!( config.get::<i64>("ei64max").unwrap(), 9_223_372_036_854_775_807 ); assert_error( config.get::<u32>("nneg").unwrap_err(), "error in [..].cargo/config: \ could not load config key `nneg`: \ invalid value: integer `-123456789`, expected u32", ); assert_error( config.get::<u32>("eneg").unwrap_err(), "error in environment variable `CARGO_ENEG`: \ could not load config key `eneg`: \ invalid value: integer `-1`, expected u32", ); assert_error( config.get::<i8>("npos").unwrap_err(), "error in [..].cargo/config: \ could not load config key `npos`: \ invalid value: integer `123456789`, expected i8", ); assert_error( config.get::<i8>("epos").unwrap_err(), "error in environment variable `CARGO_EPOS`: \ could not load config key `epos`: \ invalid value: integer `123456789`, expected i8", ); }
26.482759
98
0.538303
562e7cef3eb9fc9c4e8ff1e311046ef1f58e2c25
7,266
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. pub(crate) fn reflens_structure_crate_output_describe_account_attributes_output_next_token( input: &crate::output::DescribeAccountAttributesOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_account_limits_output_next_token( input: &crate::output::DescribeAccountLimitsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_configuration_sets_output_next_token( input: &crate::output::DescribeConfigurationSetsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_keywords_output_next_token( input: &crate::output::DescribeKeywordsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_opted_out_numbers_output_next_token( input: &crate::output::DescribeOptedOutNumbersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_opt_out_lists_output_next_token( input: &crate::output::DescribeOptOutListsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_phone_numbers_output_next_token( input: &crate::output::DescribePhoneNumbersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => 
return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_pools_output_next_token( input: &crate::output::DescribePoolsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_sender_ids_output_next_token( input: &crate::output::DescribeSenderIdsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_describe_spend_limits_output_next_token( input: &crate::output::DescribeSpendLimitsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_pool_origination_identities_output_next_token( input: &crate::output::ListPoolOriginationIdentitiesOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_account_attributes_output_account_attributes( input: crate::output::DescribeAccountAttributesOutput, ) -> std::option::Option<std::vec::Vec<crate::model::AccountAttribute>> { let input = match input.account_attributes { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_account_limits_output_account_limits( input: crate::output::DescribeAccountLimitsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::AccountLimit>> { let input = match input.account_limits { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_configuration_sets_output_configuration_sets( input: crate::output::DescribeConfigurationSetsOutput, ) -> 
std::option::Option<std::vec::Vec<crate::model::ConfigurationSetInformation>> { let input = match input.configuration_sets { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_keywords_output_keywords( input: crate::output::DescribeKeywordsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::KeywordInformation>> { let input = match input.keywords { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_opted_out_numbers_output_opted_out_numbers( input: crate::output::DescribeOptedOutNumbersOutput, ) -> std::option::Option<std::vec::Vec<crate::model::OptedOutNumberInformation>> { let input = match input.opted_out_numbers { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_opt_out_lists_output_opt_out_lists( input: crate::output::DescribeOptOutListsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::OptOutListInformation>> { let input = match input.opt_out_lists { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_phone_numbers_output_phone_numbers( input: crate::output::DescribePhoneNumbersOutput, ) -> std::option::Option<std::vec::Vec<crate::model::PhoneNumberInformation>> { let input = match input.phone_numbers { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_pools_output_pools( input: crate::output::DescribePoolsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::PoolInformation>> { let input = match input.pools { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_describe_sender_ids_output_sender_ids( input: crate::output::DescribeSenderIdsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::SenderIdInformation>> { let input = match input.sender_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn 
lens_structure_crate_output_describe_spend_limits_output_spend_limits( input: crate::output::DescribeSpendLimitsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::SpendLimit>> { let input = match input.spend_limits { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_pool_origination_identities_output_origination_identities( input: crate::output::ListPoolOriginationIdentitiesOutput, ) -> std::option::Option<std::vec::Vec<crate::model::OriginationIdentityMetadata>> { let input = match input.origination_identities { None => return None, Some(t) => t, }; Some(input) }
32.877828
105
0.690339
abbdbdf42c2c3e365978f8402c1f0411ab6d150b
934
//! This module implements the [ProofError] type. use thiserror::Error; /// Errors that can arise when creating a proof // TODO(mimoo): move this out of oracle #[derive(Error, Debug, Clone, Copy)] pub enum ProofError { #[error("the circuit is too large")] NoRoomForZkInWitness, #[error("the witness columns are not all the same size")] WitnessCsInconsistent, #[error("the proof could not be constructed: {0}")] Prover(&'static str), #[error("the permutation was not constructed correctly: {0}")] Permutation(&'static str), #[error("the lookup failed to find a match in the table")] ValueNotInTable, } /// Errors that can arise when verifying a proof #[derive(Error, Debug, Clone, Copy)] pub enum VerifyError { #[error("the commitment to {0} is of an unexpected size")] IncorrectCommitmentLength(&'static str), #[error("the opening proof failed to verify")] OpenProof, }
27.470588
66
0.684154
d626601216fef32f357a4f66cdd3b0fc9eac6cd5
799
use quickcheck_macros::quickcheck; use super::bubble_sort; // If only `is_sorted` was already stabilized... fn is_sorted<T: PartialOrd>(data: &Vec<T>) -> bool { if data.len() == 0 { return true; } for i in 0..data.len() - 1 { if !(data[i] <= data[i + 1]) { return false; } } true } #[quickcheck] fn sortedness(mut data: Vec<i32>) -> bool { bubble_sort(&mut data); is_sorted(&data) } #[quickcheck] fn sortedness_preservation(mut data: Vec<i32>) -> bool { let mut data_copy = data.clone(); bubble_sort(&mut data); data_copy.sort_unstable(); data == data_copy } // Edge case(s) #[test] fn no_data() { let mut nothing: Vec<i32> = Vec::new(); bubble_sort(&mut nothing); assert!(nothing == Vec::new()); }
19.02381
56
0.584481
db4012146dff9fe2b4d93fefc71a8d5a7f0b922b
11,077
//! Checks for needless boolean results of if-else expressions //! //! This lint is **warn** by default use crate::utils::sugg::Sugg; use crate::utils::{higher, parent_node_is_if_expr, span_lint, span_lint_and_sugg}; use rustc_errors::Applicability; use rustc_hir::*; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; use rustc_span::source_map::Spanned; use syntax::ast::LitKind; declare_clippy_lint! { /// **What it does:** Checks for expressions of the form `if c { true } else { /// false }` /// (or vice versa) and suggest using the condition directly. /// /// **Why is this bad?** Redundant code. /// /// **Known problems:** Maybe false positives: Sometimes, the two branches are /// painstakingly documented (which we, of course, do not detect), so they *may* /// have some value. Even then, the documentation can be rewritten to match the /// shorter code. /// /// **Example:** /// ```rust,ignore /// if x { /// false /// } else { /// true /// } /// ``` /// Could be written as /// ```rust,ignore /// !x /// ``` pub NEEDLESS_BOOL, complexity, "if-statements with plain booleans in the then- and else-clause, e.g., `if p { true } else { false }`" } declare_clippy_lint! { /// **What it does:** Checks for expressions of the form `x == true`, /// `x != true` and order comparisons such as `x < true` (or vice versa) and /// suggest using the variable directly. /// /// **Why is this bad?** Unnecessary code. /// /// **Known problems:** None. 
/// /// **Example:** /// ```rust,ignore /// if x == true {} // could be `if x { }` /// ``` pub BOOL_COMPARISON, complexity, "comparing a variable to a boolean, e.g., `if x == true` or `if x != true`" } declare_lint_pass!(NeedlessBool => [NEEDLESS_BOOL]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NeedlessBool { fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, e: &'tcx Expr<'_>) { use self::Expression::*; if let Some((ref pred, ref then_block, Some(ref else_expr))) = higher::if_block(&e) { let reduce = |ret, not| { let mut applicability = Applicability::MachineApplicable; let snip = Sugg::hir_with_applicability(cx, pred, "<predicate>", &mut applicability); let mut snip = if not { !snip } else { snip }; if ret { snip = snip.make_return(); } if parent_node_is_if_expr(&e, &cx) { snip = snip.blockify() } span_lint_and_sugg( cx, NEEDLESS_BOOL, e.span, "this if-then-else expression returns a bool literal", "you can reduce it to", snip.to_string(), applicability, ); }; if let ExprKind::Block(ref then_block, _) = then_block.kind { match (fetch_bool_block(then_block), fetch_bool_expr(else_expr)) { (RetBool(true), RetBool(true)) | (Bool(true), Bool(true)) => { span_lint( cx, NEEDLESS_BOOL, e.span, "this if-then-else expression will always return true", ); }, (RetBool(false), RetBool(false)) | (Bool(false), Bool(false)) => { span_lint( cx, NEEDLESS_BOOL, e.span, "this if-then-else expression will always return false", ); }, (RetBool(true), RetBool(false)) => reduce(true, false), (Bool(true), Bool(false)) => reduce(false, false), (RetBool(false), RetBool(true)) => reduce(true, true), (Bool(false), Bool(true)) => reduce(false, true), _ => (), } } else { panic!("IfExpr `then` node is not an `ExprKind::Block`"); } } } } declare_lint_pass!(BoolComparison => [BOOL_COMPARISON]); impl<'a, 'tcx> LateLintPass<'a, 'tcx> for BoolComparison { fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, e: &'tcx Expr<'_>) { if e.span.from_expansion() { return; } if let ExprKind::Binary(Spanned { 
node, .. }, ..) = e.kind { let ignore_case = None::<(fn(_) -> _, &str)>; let ignore_no_literal = None::<(fn(_, _) -> _, &str)>; match node { BinOpKind::Eq => { let true_case = Some((|h| h, "equality checks against true are unnecessary")); let false_case = Some(( |h: Sugg<'_>| !h, "equality checks against false can be replaced by a negation", )); check_comparison(cx, e, true_case, false_case, true_case, false_case, ignore_no_literal) }, BinOpKind::Ne => { let true_case = Some(( |h: Sugg<'_>| !h, "inequality checks against true can be replaced by a negation", )); let false_case = Some((|h| h, "inequality checks against false are unnecessary")); check_comparison(cx, e, true_case, false_case, true_case, false_case, ignore_no_literal) }, BinOpKind::Lt => check_comparison( cx, e, ignore_case, Some((|h| h, "greater than checks against false are unnecessary")), Some(( |h: Sugg<'_>| !h, "less than comparison against true can be replaced by a negation", )), ignore_case, Some(( |l: Sugg<'_>, r: Sugg<'_>| (!l).bit_and(&r), "order comparisons between booleans can be simplified", )), ), BinOpKind::Gt => check_comparison( cx, e, Some(( |h: Sugg<'_>| !h, "less than comparison against true can be replaced by a negation", )), ignore_case, ignore_case, Some((|h| h, "greater than checks against false are unnecessary")), Some(( |l: Sugg<'_>, r: Sugg<'_>| l.bit_and(&(!r)), "order comparisons between booleans can be simplified", )), ), _ => (), } } } } fn check_comparison<'a, 'tcx>( cx: &LateContext<'a, 'tcx>, e: &'tcx Expr<'_>, left_true: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>, left_false: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>, right_true: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>, right_false: Option<(impl FnOnce(Sugg<'a>) -> Sugg<'a>, &str)>, no_literal: Option<(impl FnOnce(Sugg<'a>, Sugg<'a>) -> Sugg<'a>, &str)>, ) { use self::Expression::*; if let ExprKind::Binary(_, ref left_side, ref right_side) = e.kind { let (l_ty, r_ty) = 
(cx.tables.expr_ty(left_side), cx.tables.expr_ty(right_side)); if l_ty.is_bool() && r_ty.is_bool() { let mut applicability = Applicability::MachineApplicable; match (fetch_bool_expr(left_side), fetch_bool_expr(right_side)) { (Bool(true), Other) => left_true.map_or((), |(h, m)| { suggest_bool_comparison(cx, e, right_side, applicability, m, h) }), (Other, Bool(true)) => right_true.map_or((), |(h, m)| { suggest_bool_comparison(cx, e, left_side, applicability, m, h) }), (Bool(false), Other) => left_false.map_or((), |(h, m)| { suggest_bool_comparison(cx, e, right_side, applicability, m, h) }), (Other, Bool(false)) => right_false.map_or((), |(h, m)| { suggest_bool_comparison(cx, e, left_side, applicability, m, h) }), (Other, Other) => no_literal.map_or((), |(h, m)| { let left_side = Sugg::hir_with_applicability(cx, left_side, "..", &mut applicability); let right_side = Sugg::hir_with_applicability(cx, right_side, "..", &mut applicability); span_lint_and_sugg( cx, BOOL_COMPARISON, e.span, m, "try simplifying it as shown", h(left_side, right_side).to_string(), applicability, ) }), _ => (), } } } } fn suggest_bool_comparison<'a, 'tcx>( cx: &LateContext<'a, 'tcx>, e: &'tcx Expr<'_>, expr: &Expr<'_>, mut applicability: Applicability, message: &str, conv_hint: impl FnOnce(Sugg<'a>) -> Sugg<'a>, ) { let hint = Sugg::hir_with_applicability(cx, expr, "..", &mut applicability); span_lint_and_sugg( cx, BOOL_COMPARISON, e.span, message, "try simplifying it as shown", conv_hint(hint).to_string(), applicability, ); } enum Expression { Bool(bool), RetBool(bool), Other, } fn fetch_bool_block(block: &Block<'_>) -> Expression { match (&*block.stmts, block.expr.as_ref()) { (&[], Some(e)) => fetch_bool_expr(&**e), (&[ref e], None) => { if let StmtKind::Semi(ref e) = e.kind { if let ExprKind::Ret(_) = e.kind { fetch_bool_expr(&**e) } else { Expression::Other } } else { Expression::Other } }, _ => Expression::Other, } } fn fetch_bool_expr(expr: &Expr<'_>) -> Expression { match expr.kind { 
ExprKind::Block(ref block, _) => fetch_bool_block(block), ExprKind::Lit(ref lit_ptr) => { if let LitKind::Bool(value) = lit_ptr.node { Expression::Bool(value) } else { Expression::Other } }, ExprKind::Ret(Some(ref expr)) => match fetch_bool_expr(expr) { Expression::Bool(value) => Expression::RetBool(value), _ => Expression::Other, }, _ => Expression::Other, } }
37.805461
108
0.477927
ac78c1bf77ba478b919823e3b9b7c2cdd7bc2819
3,607
// Copyright 2020 Netwarps Ltd. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the "Software"), // to deal in the Software without restriction, including without limitation // the rights to use, copy, modify, merge, publish, distribute, sublicense, // and/or sell copies of the Software, and to permit persons to whom the // Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER // DEALINGS IN THE SOFTWARE. use crate::protocol::FloodsubMessage; use crate::subscription::Subscription; use crate::Topic; use crate::{FloodsubConfig, FloodsubError}; use futures::channel::{mpsc, oneshot}; use futures::SinkExt; use libp2prs_core::PeerId; pub(crate) enum ControlCommand { Publish(FloodsubMessage, oneshot::Sender<()>), Subscribe(Topic, oneshot::Sender<Subscription>), Ls(oneshot::Sender<Vec<Topic>>), GetPeers(Topic, oneshot::Sender<Vec<PeerId>>), } #[derive(Clone)] pub struct Control { config: FloodsubConfig, control_sender: mpsc::UnboundedSender<ControlCommand>, } type Result<T> = std::result::Result<T, FloodsubError>; impl Control { pub(crate) fn new(control_sender: mpsc::UnboundedSender<ControlCommand>, config: FloodsubConfig) -> Self { Control { control_sender, config } } /// Closes the floodsub main loop. 
pub fn close(&mut self) { self.control_sender.close_channel(); } /// Publish publishes data to a given topic. pub async fn publish(&mut self, topic: Topic, data: impl Into<Vec<u8>>) -> Result<()> { let msg = FloodsubMessage { source: self.config.local_peer_id, data: data.into(), // If the sequence numbers are predictable, then an attacker could flood the network // with packets with the predetermined sequence numbers and absorb our legitimate // messages. We therefore use a random number. sequence_number: rand::random::<[u8; 20]>().to_vec(), topics: vec![topic.clone()], }; let (tx, rx) = oneshot::channel(); self.control_sender.send(ControlCommand::Publish(msg, tx)).await?; Ok(rx.await?) } /// Subscribe to messages on a given topic. pub async fn subscribe(&mut self, topic: Topic) -> Result<Subscription> { let (tx, rx) = oneshot::channel(); self.control_sender.send(ControlCommand::Subscribe(topic, tx)).await?; Ok(rx.await?) } /// List subscribed topics by name. pub async fn ls(&mut self) -> Result<Vec<Topic>> { let (tx, rx) = oneshot::channel(); self.control_sender.send(ControlCommand::Ls(tx)).await?; Ok(rx.await?) } /// List peers we are currently pubsubbing with. pub async fn get_peers(&mut self, topic: Topic) -> Result<Vec<PeerId>> { let (tx, rx) = oneshot::channel(); self.control_sender.send(ControlCommand::GetPeers(topic, tx)).await?; Ok(rx.await?) } }
39.206522
110
0.680898
d7271376e594629ec7d279ca0d5917ed9235f6cd
124
use rui::*; fn main() { rui(vstack! { text("This is a test."); text("This is another test.") }); }
13.777778
37
0.459677
6a523952186084e1a0b05b9ae446b3eaf7d1aa93
31,048
/* This tool is part of the WhiteboxTools geospatial analysis library. Authors: Dr. John Lindsay Created: 10/05/2018 Last Modified: 9/12/2019 License: MIT Most IDW tool have the option to work either based on a fixed number of neighbouring /// points or a fixed neighbourhood size. This tool is currently configured to perform the later /// only, using a FixedRadiusSearch structure. Using a fixed number of neighbours will require /// use of a KD-tree structure. I've been testing one Rust KD-tree library but its performance /// does not appear to be satisfactory compared to the FixedRadiusSearch. I will need to explore /// other options here. /// /// Another change that will need to be implemented is the use of a nodal function. The original /// Whitebox GAT tool allows for use of a constant or a quadratic. This tool only allows the /// former. */ use whitebox_raster::*; use whitebox_common::structures::{DistanceMetric, FixedRadiusSearch2D}; use crate::tools::*; use whitebox_vector::{FieldData, ShapeType, Shapefile}; use num_cpus; use std::env; use std::f64; use std::io::{Error, ErrorKind}; use std::path; use std::sync::mpsc; use std::sync::Arc; use std::thread; /// This tool interpolates vector points into a raster surface using an inverse-distance weighted scheme. 
pub struct IdwInterpolation { name: String, description: String, toolbox: String, parameters: Vec<ToolParameter>, example_usage: String, } impl IdwInterpolation { /// public constructor pub fn new() -> IdwInterpolation { let name = "IdwInterpolation".to_string(); let toolbox = "GIS Analysis".to_string(); let description = "Interpolates vector points into a raster surface using an inverse-distance weighted scheme.".to_string(); let mut parameters = vec![]; parameters.push(ToolParameter { name: "Input Vector Points File".to_owned(), flags: vec!["-i".to_owned(), "--input".to_owned()], description: "Input vector Points file.".to_owned(), parameter_type: ParameterType::ExistingFile(ParameterFileType::Vector( VectorGeometryType::Point, )), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "Field Name".to_owned(), flags: vec!["--field".to_owned()], description: "Input field name in attribute table.".to_owned(), parameter_type: ParameterType::VectorAttributeField( AttributeType::Number, "--input".to_string(), ), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "Use z-coordinate instead of field?".to_owned(), flags: vec!["--use_z".to_owned()], description: "Use z-coordinate instead of field?".to_owned(), parameter_type: ParameterType::Boolean, default_value: Some("false".to_string()), optional: true, }); parameters.push(ToolParameter { name: "Output File".to_owned(), flags: vec!["-o".to_owned(), "--output".to_owned()], description: "Output raster file.".to_owned(), parameter_type: ParameterType::NewFile(ParameterFileType::Raster), default_value: None, optional: false, }); parameters.push(ToolParameter { name: "IDW Weight (Exponent) Value".to_owned(), flags: vec!["--weight".to_owned()], description: "IDW weight value.".to_owned(), parameter_type: ParameterType::Float, default_value: Some("2.0".to_owned()), optional: true, }); parameters.push(ToolParameter { name: "Search Radius (map units)".to_owned(), flags: 
vec!["--radius".to_owned()], description: "Search Radius in map units.".to_owned(), parameter_type: ParameterType::Float, default_value: None, optional: true, }); parameters.push(ToolParameter { name: "Min. Number of Points".to_owned(), flags: vec!["--min_points".to_owned()], description: "Minimum number of points.".to_owned(), parameter_type: ParameterType::Integer, default_value: None, optional: true, }); parameters.push(ToolParameter{ name: "Cell Size (optional)".to_owned(), flags: vec!["--cell_size".to_owned()], description: "Optionally specified cell size of output raster. Not used when base raster is specified.".to_owned(), parameter_type: ParameterType::Float, default_value: None, optional: true }); parameters.push(ToolParameter{ name: "Base Raster File (optional)".to_owned(), flags: vec!["--base".to_owned()], description: "Optionally specified input base raster file. Not used when a cell size is specified.".to_owned(), parameter_type: ParameterType::ExistingFile(ParameterFileType::Raster), default_value: None, optional: true }); let sep: String = path::MAIN_SEPARATOR.to_string(); let p = format!("{}", env::current_dir().unwrap().display()); let e = format!("{}", env::current_exe().unwrap().display()); let mut short_exe = e .replace(&p, "") .replace(".exe", "") .replace(".", "") .replace(&sep, ""); if e.contains(".exe") { short_exe += ".exe"; } let usage = format!(">>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i=points.shp --field=ELEV -o=output.tif --weight=2.0 --radius=4.0 --min_points=3 --cell_size=1.0 >>.*{0} -r={1} -v --wd=\"*path*to*data*\" -i=points.shp --use_z -o=output.tif --weight=2.0 --radius=4.0 --min_points=3 --base=existing_raster.tif", short_exe, name).replace("*", &sep); IdwInterpolation { name: name, description: description, toolbox: toolbox, parameters: parameters, example_usage: usage, } } } impl WhiteboxTool for IdwInterpolation { fn get_source_file(&self) -> String { String::from(file!()) } fn get_tool_name(&self) -> String { 
self.name.clone() } fn get_tool_description(&self) -> String { self.description.clone() } fn get_tool_parameters(&self) -> String { match serde_json::to_string(&self.parameters) { Ok(json_str) => return format!("{{\"parameters\":{}}}", json_str), Err(err) => return format!("{:?}", err), } } fn get_example_usage(&self) -> String { self.example_usage.clone() } fn get_toolbox(&self) -> String { self.toolbox.clone() } fn run<'a>( &self, args: Vec<String>, working_directory: &'a str, verbose: bool, ) -> Result<(), Error> { let mut input_file = String::new(); let mut field_name = String::new(); let mut use_z = false; let mut output_file = String::new(); let mut grid_res = 0f64; let mut base_file = String::new(); let mut weight = 2f64; let mut radius = 0f64; let mut min_points = 0usize; // let mut max_dist = f64::INFINITY; if args.len() == 0 { return Err(Error::new( ErrorKind::InvalidInput, "Tool run with no parameters.", )); } for i in 0..args.len() { let mut arg = args[i].replace("\"", ""); arg = arg.replace("\'", ""); let cmd = arg.split("="); // in case an equals sign was used let vec = cmd.collect::<Vec<&str>>(); let mut keyval = false; if vec.len() > 1 { keyval = true; } let flag_val = vec[0].to_lowercase().replace("--", "-"); if flag_val == "-i" || flag_val == "-input" { input_file = if keyval { vec[1].to_string() } else { args[i + 1].to_string() }; } else if flag_val == "-field" { field_name = if keyval { vec[1].to_string() } else { args[i + 1].to_string() }; } else if flag_val == "-use_z" { if vec.len() == 1 || !vec[1].to_string().to_lowercase().contains("false") { use_z = true; } } else if flag_val == "-o" || flag_val == "-output" { output_file = if keyval { vec[1].to_string() } else { args[i + 1].to_string() }; } else if flag_val == "-resolution" || flag_val == "-cell_size" { grid_res = if keyval { vec[1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) } else { args[i + 1] .to_string() .parse::<f64>() .expect(&format!("Error parsing 
{}", flag_val)) }; } else if flag_val == "-base" { base_file = if keyval { vec[1].to_string() } else { args[i + 1].to_string() }; } else if flag_val == "-weight" { weight = if keyval { vec[1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) } else { args[i + 1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) }; } else if flag_val == "-radius" { radius = if keyval { vec[1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) } else { args[i + 1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) }; } else if flag_val == "-min_points" { min_points = if keyval { vec[1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) as usize } else { args[i + 1] .to_string() .parse::<f64>() .expect(&format!("Error parsing {}", flag_val)) as usize }; // } else if flag_val == "-max_dist" { // max_dist = if keyval { // vec[1].to_string().parse::<f64>().expect(&format!("Error parsing {}", flag_val)) // } else { // args[i+1].to_string().parse::<f64>().unwrap() // }; } } if verbose { let tool_name = self.get_tool_name(); let welcome_len = format!("* Welcome to {} *", tool_name).len().max(28); // 28 = length of the 'Powered by' by statement. 
println!("{}", "*".repeat(welcome_len)); println!("* Welcome to {} {}*", tool_name, " ".repeat(welcome_len - 15 - tool_name.len())); println!("* Powered by WhiteboxTools {}*", " ".repeat(welcome_len - 28)); println!("* www.whiteboxgeo.com {}*", " ".repeat(welcome_len - 23)); println!("{}", "*".repeat(welcome_len)); } let sep: String = path::MAIN_SEPARATOR.to_string(); let mut progress: usize; let mut old_progress: usize = 1; if !input_file.contains(&sep) && !input_file.contains("/") { input_file = format!("{}{}", working_directory, input_file); } if !output_file.contains(&sep) && !output_file.contains("/") { output_file = format!("{}{}", working_directory, output_file); } // radius = radius * radius; // squared distances are used // if max_dist != f64::INFINITY { // max_dist = max_dist * max_dist; // square the max dist // } if verbose { println!("Reading data...") }; let vector_data = Shapefile::read(&input_file)?; let start = Instant::now(); // make sure the input vector file is of points type if vector_data.header.shape_type.base_shape_type() != ShapeType::Point { return Err(Error::new( ErrorKind::InvalidInput, "The input vector data must be of point base shape type.", )); } // // Create the kd tree let (mut x, mut y, mut z): (f64, f64, f64); // let mut points = vec![]; // for record_num in 0..vector_data.num_records { // let record = vector_data.get_record(record_num); // for i in 0..record.points.len() { // x = record.points[i].x; // y = record.points[i].y; // points.push([x, y]); // } // } // let kdtree = if !use_z { // // use the specified attribute // // What is the index of the field to be analyzed? // let field_index = match vector_data.attributes.get_field_num(&field_name) { // Some(i) => i, // None => { // // Field not found // return Err(Error::new(ErrorKind::InvalidInput, // "Attribute not found in table.")); // }, // }; // // Is the field numeric? 
// if !vector_data.attributes.is_field_numeric(field_index) { // // Warn user of non-numeric // return Err(Error::new(ErrorKind::InvalidInput, // "Non-numeric attributes cannot be rasterized.")); // } // let mut kdtree = KdTree::new_with_capacity(2, vector_data.num_records); // for record_num in 0..vector_data.num_records { // match vector_data.attributes.get_field_value(record_num, field_index) { // FieldData::Int(val) => { // kdtree.add(points[record_num], val as f64).unwrap(); // }, // FieldData::Int64(val) => { // kdtree.add(points[record_num], val as f64).unwrap(); // }, // FieldData::Real(val) => { // kdtree.add(points[record_num], val as f64).unwrap(); // }, // _ => { // // do nothing; likely due to null value for record. // } // } // if verbose { // progress = (100.0_f64 * record_num as f64 / (vector_data.num_records - 1) as f64) as usize; // if progress != old_progress { // println!("Creating kd-tree: {}%", progress); // old_progress = progress; // } // } // } // kdtree // } else { // // use the z dimension of the point data. 
// if vector_data.header.shape_type != ShapeType::PointZ && // vector_data.header.shape_type != ShapeType::PointM && // vector_data.header.shape_type != ShapeType::MultiPointZ && // vector_data.header.shape_type != ShapeType::MultiPointM { // return Err(Error::new(ErrorKind::InvalidInput, // "The input vector data must be of PointZ, PointM, MultiPointZ, or MultiPointM shape type.")); // } // let mut kdtree = KdTree::new_with_capacity(2, vector_data.num_records); // let mut p = 0; // for record_num in 0..vector_data.num_records { // let record = vector_data.get_record(record_num); // for i in 0..record.z_array.len() { // z = record.z_array[i]; // kdtree.add(points[p], z).unwrap(); // p += 1; // } // if verbose { // progress = (100.0_f64 * record_num as f64 / (vector_data.num_records - 1) as f64) as usize; // if progress != old_progress { // println!("Creating kd-tree: {}%", progress); // old_progress = progress; // } // } // } // kdtree // }; let frs = if !use_z { // use the specified attribute // What is the index of the field to be analyzed? let field_index = match vector_data.attributes.get_field_num(&field_name) { Some(i) => i, None => { // Field not found return Err(Error::new( ErrorKind::InvalidInput, "Attribute not found in table.", )); } }; // Is the field numeric? 
if !vector_data.attributes.is_field_numeric(field_index) { // Warn user of non-numeric return Err(Error::new( ErrorKind::InvalidInput, "Non-numeric attributes cannot be rasterized.", )); } let mut frs: FixedRadiusSearch2D<f64> = FixedRadiusSearch2D::new(radius, DistanceMetric::Euclidean); for record_num in 0..vector_data.num_records { let record = vector_data.get_record(record_num); x = record.points[0].x; y = record.points[0].y; match vector_data.attributes.get_value(record_num, &field_name) { FieldData::Int(val) => { frs.insert(x, y, val as f64); } // FieldData::Int64(val) => { // frs.insert(x, y, val as f64); // }, FieldData::Real(val) => { frs.insert(x, y, val); } _ => { // do nothing; likely due to null value for record. } } if verbose { progress = (100.0_f64 * record_num as f64 / (vector_data.num_records - 1) as f64) as usize; if progress != old_progress { println!("Creating search structure: {}%", progress); old_progress = progress; } } } frs } else { // use the z dimension of the point data. if vector_data.header.shape_type != ShapeType::PointZ && vector_data.header.shape_type != ShapeType::PointM && vector_data.header.shape_type != ShapeType::MultiPointZ && vector_data.header.shape_type != ShapeType::MultiPointM { return Err(Error::new(ErrorKind::InvalidInput, "The input vector data must be of PointZ, PointM, MultiPointZ, or MultiPointM shape type.")); } let mut frs: FixedRadiusSearch2D<f64> = FixedRadiusSearch2D::new(radius, DistanceMetric::Euclidean); // let mut p = 0; for record_num in 0..vector_data.num_records { let record = vector_data.get_record(record_num); for i in 0..record.z_array.len() { x = record.points[i].x; y = record.points[i].y; z = record.z_array[i]; frs.insert(x, y, z); // p += 1; } if verbose { progress = (100.0_f64 * record_num as f64 / (vector_data.num_records - 1) as f64) as usize; if progress != old_progress { println!("Creating search structure: {}%", progress); old_progress = progress; } } } frs }; // Create the output raster. 
The process of doing this will // depend on whether a cell size or a base raster were specified. // If both are specified, the base raster takes priority. let nodata = -32768.0f64; let mut output = if !base_file.trim().is_empty() || grid_res == 0f64 { if !base_file.contains(&sep) && !base_file.contains("/") { base_file = format!("{}{}", working_directory, base_file); } let mut base = Raster::new(&base_file, "r")?; base.configs.nodata = nodata; Raster::initialize_using_file(&output_file, &base) } else { if grid_res == 0f64 { return Err(Error::new( ErrorKind::InvalidInput, "The specified grid resolution is incorrect. Either a non-zero grid resolution \nor an input existing base file name must be used.", )); } // base the output raster on the grid_res and the // extent of the input vector. let west: f64 = vector_data.header.x_min; let north: f64 = vector_data.header.y_max; let rows: isize = (((north - vector_data.header.y_min) / grid_res).ceil()) as isize; let columns: isize = (((vector_data.header.x_max - west) / grid_res).ceil()) as isize; let south: f64 = north - rows as f64 * grid_res; let east = west + columns as f64 * grid_res; let mut configs = RasterConfigs { ..Default::default() }; configs.rows = rows as usize; configs.columns = columns as usize; configs.north = north; configs.south = south; configs.east = east; configs.west = west; configs.resolution_x = grid_res; configs.resolution_y = grid_res; configs.nodata = nodata; configs.data_type = DataType::F32; configs.photometric_interp = PhotometricInterpretation::Continuous; Raster::initialize_using_config(&output_file, &configs) }; let rows = output.configs.rows as isize; let columns = output.configs.columns as isize; let west = output.configs.west; let north = output.configs.north; output.configs.nodata = nodata; // in case a base image is used with a different nodata value. 
let res_x = output.configs.resolution_x; let res_y = output.configs.resolution_y; // let kdtree = Arc::new(kdtree); // wrap FRS in an Arc let frs = Arc::new(frs); let mut num_procs = num_cpus::get() as isize; let configs = whitebox_common::configs::get_configs()?; let max_procs = configs.max_procs; if max_procs > 0 && max_procs < num_procs { num_procs = max_procs; } let (tx, rx) = mpsc::channel(); for tid in 0..num_procs { // let kdtree = kdtree.clone(); let frs = frs.clone(); let tx = tx.clone(); thread::spawn(move || { let (mut x, mut y): (f64, f64); let mut zn: f64; let mut dist: f64; let mut val: f64; let mut sum_weights: f64; // let diff_weight = weight - 2f64; // diff between weight and 2, because distances are returned squared for row in (0..rows).filter(|r| r % num_procs == tid) { let mut data = vec![nodata; columns as usize]; for col in 0..columns { x = west + (col as f64 + 0.5) * res_x; y = north - (row as f64 + 0.5) * res_y; let mut ret = frs.search(x, y); if ret.len() < min_points { ret = frs.knn_search(x, y, min_points); } if ret.len() >= min_points { sum_weights = 0.0; val = 0.0; for j in 0..ret.len() { zn = ret[j].0; dist = ret[j].1 as f64; if dist > 0.0 { val += zn / dist.powf(weight); sum_weights += 1.0 / dist.powf(weight); } else { data[col as usize] = zn; sum_weights = 0.0; break; } } if sum_weights > 0.0 { data[col as usize] = val / sum_weights; } } } tx.send((row, data)).unwrap(); } // if radius > 0f64 { // for row in (0..rows).filter(|r| r % num_procs == tid) { // let mut data = vec![nodata; columns as usize]; // for col in 0..columns { // x = west + col as f64 * grid_res + 0.5; // y = north - row as f64 * grid_res - 0.5; // let ret = kdtree.within(&[x, y], radius, &squared_euclidean).unwrap(); // if ret.len() >= min_points { // sum_weights = 0.0; // val = 0.0; // for j in 0..ret.len() { // zn = *ret[j].1; // dist = ret[j].0; // if dist > 0.0 { // val += zn / (dist * dist.powf(diff_weight)); // sum_weights += 1.0 / (dist * 
dist.powf(diff_weight)); // } else { // data[col as usize] = zn; // sum_weights = 0.0; // break; // } // } // if sum_weights > 0.0 { // data[col as usize] = val / sum_weights; // } // } // } // tx.send((row, data)).unwrap(); // } // } else { // for row in (0..rows).filter(|r| r % num_procs == tid) { // let mut data = vec![nodata; columns as usize]; // for col in 0..columns { // x = west + col as f64 * grid_res + 0.5; // y = north - row as f64 * grid_res - 0.5; // let ret = kdtree.nearest(&[x, y], min_points, &squared_euclidean).unwrap(); // sum_weights = 0.0; // val = 0.0; // for j in 0..ret.len() { // zn = *ret[j].1; // dist = ret[j].0; // if dist < max_dist { // if dist > 0.0 { // val += zn / (dist * dist.powf(diff_weight)); // sum_weights += 1.0 / (dist * dist.powf(diff_weight)); // } else { // data[col as usize] = zn; // sum_weights = 0.0; // break; // } // } else { // // There are fewer than the required number of neighbouring // // points. Assign the output nodata. // sum_weights = 0.0; // break; // } // } // if sum_weights > 0.0 { // data[col as usize] = val / sum_weights; // } // } // tx.send((row, data)).unwrap(); // } // } }); } for row in 0..rows { let data = rx.recv().expect("Error receiving data from thread."); output.set_row_data(data.0, data.1); if verbose { progress = (100.0_f64 * row as f64 / (rows - 1) as f64) as usize; if progress != old_progress { println!("Progress: {}%", progress); old_progress = progress; } } } let elapsed_time = get_formatted_elapsed_time(start); output.add_metadata_entry(format!( "Created by whitebox_tools\' {} tool", self.get_tool_name() )); output.add_metadata_entry(format!("Input file: {}", input_file)); output.add_metadata_entry(format!("Elapsed Time (excluding I/O): {}", elapsed_time)); if verbose { println!("Saving data...") }; let _ = match output.write() { Ok(_) => { if verbose { println!("Output file written") } } Err(e) => return Err(e), }; if verbose { println!( "{}", &format!("Elapsed Time (excluding I/O): {}", 
elapsed_time) ); } Ok(()) } }
41.123179
184
0.451881
39c6c74e45eef62c59028409b2f1875bd3d0d449
14,300
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /*! Synchronous Timers This module exposes the functionality to create timers, block the current task, and create receivers which will receive notifications after a period of time. */ // FIXME: These functions take Durations but only pass ms to the backend impls. use comm::{Receiver, Sender, channel}; use time::Duration; use io::{IoResult, IoError}; use kinds::Send; use boxed::Box; use rt::rtio::{IoFactory, LocalIo, RtioTimer, Callback}; /// A synchronous timer object /// /// Values of this type can be used to put the current task to sleep for a /// period of time. Handles to this timer can also be created in the form of /// receivers which will receive notifications over time. /// /// # Example /// /// ``` /// # fn main() {} /// # fn foo() { /// use std::io::Timer; /// use std::time::Duration; /// /// let mut timer = Timer::new().unwrap(); /// timer.sleep(Duration::milliseconds(10)); // block the task for awhile /// /// let timeout = timer.oneshot(Duration::milliseconds(10)); /// // do some work /// timeout.recv(); // wait for the timeout to expire /// /// let periodic = timer.periodic(Duration::milliseconds(10)); /// loop { /// periodic.recv(); /// // this loop is only executed once every 10ms /// } /// # } /// ``` /// /// If only sleeping is necessary, then a convenience API is provided through /// the `io::timer` module. 
/// /// ``` /// # fn main() {} /// # fn foo() { /// use std::io::timer; /// use std::time::Duration; /// /// // Put this task to sleep for 5 seconds /// timer::sleep(Duration::seconds(5)); /// # } /// ``` pub struct Timer { obj: Box<RtioTimer + Send>, } struct TimerCallback { tx: Sender<()> } /// Sleep the current task for the specified duration. /// /// When provided a zero or negative `duration`, the function will /// return immediately. pub fn sleep(duration: Duration) { let timer = Timer::new(); let mut timer = timer.ok().expect("timer::sleep: could not create a Timer"); timer.sleep(duration) } impl Timer { /// Creates a new timer which can be used to put the current task to sleep /// for a number of milliseconds, or to possibly create channels which will /// get notified after an amount of time has passed. pub fn new() -> IoResult<Timer> { LocalIo::maybe_raise(|io| { io.timer_init().map(|t| Timer { obj: t }) }).map_err(IoError::from_rtio_error) } /// Blocks the current task for the specified duration. /// /// Note that this function will cause any other receivers for this timer to /// be invalidated (the other end will be closed). /// /// When provided a zero or negative `duration`, the function will /// return immediately. pub fn sleep(&mut self, duration: Duration) { // Short-circuit the timer backend for 0 duration let ms = in_ms_u64(duration); if ms == 0 { return } self.obj.sleep(ms); } /// Creates a oneshot receiver which will have a notification sent when /// the specified duration has elapsed. /// /// This does *not* block the current task, but instead returns immediately. /// /// Note that this invalidates any previous receiver which has been created /// by this timer, and that the returned receiver will be invalidated once /// the timer is destroyed (when it falls out of scope). In particular, if /// this is called in method-chaining style, the receiver will be /// invalidated at the end of that statement, and all `recv` calls will /// fail. 
/// /// # Example /// /// ```rust /// use std::io::Timer; /// use std::time::Duration; /// /// let mut timer = Timer::new().unwrap(); /// let ten_milliseconds = timer.oneshot(Duration::milliseconds(10)); /// /// for _ in range(0u, 100) { /* do work */ } /// /// // blocks until 10 ms after the `oneshot` call /// ten_milliseconds.recv(); /// ``` /// /// ```rust /// use std::io::Timer; /// use std::time::Duration; /// /// // Incorrect, method chaining-style: /// let mut five_ms = Timer::new().unwrap().oneshot(Duration::milliseconds(5)); /// // The timer object was destroyed, so this will always fail: /// // five_ms.recv() /// ``` /// /// When provided a zero or negative `duration`, the message will /// be sent immediately. pub fn oneshot(&mut self, duration: Duration) -> Receiver<()> { let (tx, rx) = channel(); // Short-circuit the timer backend for 0 duration if in_ms_u64(duration) != 0 { self.obj.oneshot(in_ms_u64(duration), box TimerCallback { tx: tx }); } else { tx.send(()); } return rx } /// Creates a receiver which will have a continuous stream of notifications /// being sent each time the specified duration has elapsed. /// /// This does *not* block the current task, but instead returns /// immediately. The first notification will not be received immediately, /// but rather after the first duration. /// /// Note that this invalidates any previous receiver which has been created /// by this timer, and that the returned receiver will be invalidated once /// the timer is destroyed (when it falls out of scope). In particular, if /// this is called in method-chaining style, the receiver will be /// invalidated at the end of that statement, and all `recv` calls will /// fail. 
/// /// # Example /// /// ```rust /// use std::io::Timer; /// use std::time::Duration; /// /// let mut timer = Timer::new().unwrap(); /// let ten_milliseconds = timer.periodic(Duration::milliseconds(10)); /// /// for _ in range(0u, 100) { /* do work */ } /// /// // blocks until 10 ms after the `periodic` call /// ten_milliseconds.recv(); /// /// for _ in range(0u, 100) { /* do work */ } /// /// // blocks until 20 ms after the `periodic` call (*not* 10ms after the /// // previous `recv`) /// ten_milliseconds.recv(); /// ``` /// /// ```rust /// use std::io::Timer; /// use std::time::Duration; /// /// // Incorrect, method chaining-style. /// let mut five_ms = Timer::new().unwrap().periodic(Duration::milliseconds(5)); /// // The timer object was destroyed, so this will always fail: /// // five_ms.recv() /// ``` /// /// When provided a zero or negative `duration`, the messages will /// be sent without delay. pub fn periodic(&mut self, duration: Duration) -> Receiver<()> { let ms = in_ms_u64(duration); // FIXME: The backend implementations don't ever send a message // if given a 0 ms duration. Temporarily using 1ms. It's // not clear what use a 0ms period is anyway... 
let ms = if ms == 0 { 1 } else { ms }; let (tx, rx) = channel(); self.obj.period(ms, box TimerCallback { tx: tx }); return rx } } impl Callback for TimerCallback { fn call(&mut self) { let _ = self.tx.send_opt(()); } } fn in_ms_u64(d: Duration) -> u64 { let ms = d.num_milliseconds(); if ms < 0 { return 0 }; return ms as u64; } #[cfg(test)] mod test { iotest!(fn test_io_timer_sleep_simple() { let mut timer = Timer::new().unwrap(); timer.sleep(Duration::milliseconds(1)); }) iotest!(fn test_io_timer_sleep_oneshot() { let mut timer = Timer::new().unwrap(); timer.oneshot(Duration::milliseconds(1)).recv(); }) iotest!(fn test_io_timer_sleep_oneshot_forget() { let mut timer = Timer::new().unwrap(); timer.oneshot(Duration::milliseconds(100000000)); }) iotest!(fn oneshot_twice() { let mut timer = Timer::new().unwrap(); let rx1 = timer.oneshot(Duration::milliseconds(10000)); let rx = timer.oneshot(Duration::milliseconds(1)); rx.recv(); assert_eq!(rx1.recv_opt(), Err(())); }) iotest!(fn test_io_timer_oneshot_then_sleep() { let mut timer = Timer::new().unwrap(); let rx = timer.oneshot(Duration::milliseconds(100000000)); timer.sleep(Duration::milliseconds(1)); // this should invalidate rx assert_eq!(rx.recv_opt(), Err(())); }) iotest!(fn test_io_timer_sleep_periodic() { let mut timer = Timer::new().unwrap(); let rx = timer.periodic(Duration::milliseconds(1)); rx.recv(); rx.recv(); rx.recv(); }) iotest!(fn test_io_timer_sleep_periodic_forget() { let mut timer = Timer::new().unwrap(); timer.periodic(Duration::milliseconds(100000000)); }) iotest!(fn test_io_timer_sleep_standalone() { sleep(Duration::milliseconds(1)) }) iotest!(fn oneshot() { let mut timer = Timer::new().unwrap(); let rx = timer.oneshot(Duration::milliseconds(1)); rx.recv(); assert!(rx.recv_opt().is_err()); let rx = timer.oneshot(Duration::milliseconds(1)); rx.recv(); assert!(rx.recv_opt().is_err()); }) iotest!(fn override() { let mut timer = Timer::new().unwrap(); let orx = 
timer.oneshot(Duration::milliseconds(100)); let prx = timer.periodic(Duration::milliseconds(100)); timer.sleep(Duration::milliseconds(1)); assert_eq!(orx.recv_opt(), Err(())); assert_eq!(prx.recv_opt(), Err(())); timer.oneshot(Duration::milliseconds(1)).recv(); }) iotest!(fn period() { let mut timer = Timer::new().unwrap(); let rx = timer.periodic(Duration::milliseconds(1)); rx.recv(); rx.recv(); let rx2 = timer.periodic(Duration::milliseconds(1)); rx2.recv(); rx2.recv(); }) iotest!(fn sleep() { let mut timer = Timer::new().unwrap(); timer.sleep(Duration::milliseconds(1)); timer.sleep(Duration::milliseconds(1)); }) iotest!(fn oneshot_fail() { let mut timer = Timer::new().unwrap(); let _rx = timer.oneshot(Duration::milliseconds(1)); fail!(); } #[should_fail]) iotest!(fn period_fail() { let mut timer = Timer::new().unwrap(); let _rx = timer.periodic(Duration::milliseconds(1)); fail!(); } #[should_fail]) iotest!(fn normal_fail() { let _timer = Timer::new().unwrap(); fail!(); } #[should_fail]) iotest!(fn closing_channel_during_drop_doesnt_kill_everything() { // see issue #10375 let mut timer = Timer::new().unwrap(); let timer_rx = timer.periodic(Duration::milliseconds(1000)); spawn(proc() { let _ = timer_rx.recv_opt(); }); // when we drop the TimerWatcher we're going to destroy the channel, // which must wake up the task on the other end }) iotest!(fn reset_doesnt_switch_tasks() { // similar test to the one above. let mut timer = Timer::new().unwrap(); let timer_rx = timer.periodic(Duration::milliseconds(1000)); spawn(proc() { let _ = timer_rx.recv_opt(); }); timer.oneshot(Duration::milliseconds(1)); }) iotest!(fn reset_doesnt_switch_tasks2() { // similar test to the one above. 
let mut timer = Timer::new().unwrap(); let timer_rx = timer.periodic(Duration::milliseconds(1000)); spawn(proc() { let _ = timer_rx.recv_opt(); }); timer.sleep(Duration::milliseconds(1)); }) iotest!(fn sender_goes_away_oneshot() { let rx = { let mut timer = Timer::new().unwrap(); timer.oneshot(Duration::milliseconds(1000)) }; assert_eq!(rx.recv_opt(), Err(())); }) iotest!(fn sender_goes_away_period() { let rx = { let mut timer = Timer::new().unwrap(); timer.periodic(Duration::milliseconds(1000)) }; assert_eq!(rx.recv_opt(), Err(())); }) iotest!(fn receiver_goes_away_oneshot() { let mut timer1 = Timer::new().unwrap(); timer1.oneshot(Duration::milliseconds(1)); let mut timer2 = Timer::new().unwrap(); // while sleeping, the previous timer should fire and not have its // callback do something terrible. timer2.sleep(Duration::milliseconds(2)); }) iotest!(fn receiver_goes_away_period() { let mut timer1 = Timer::new().unwrap(); timer1.periodic(Duration::milliseconds(1)); let mut timer2 = Timer::new().unwrap(); // while sleeping, the previous timer should fire and not have its // callback do something terrible. 
timer2.sleep(Duration::milliseconds(2)); }) iotest!(fn sleep_zero() { let mut timer = Timer::new().unwrap(); timer.sleep(Duration::milliseconds(0)); }) iotest!(fn sleep_negative() { let mut timer = Timer::new().unwrap(); timer.sleep(Duration::milliseconds(-1000000)); }) iotest!(fn oneshot_zero() { let mut timer = Timer::new().unwrap(); let rx = timer.oneshot(Duration::milliseconds(0)); rx.recv(); }) iotest!(fn oneshot_negative() { let mut timer = Timer::new().unwrap(); let rx = timer.oneshot(Duration::milliseconds(-1000000)); rx.recv(); }) iotest!(fn periodic_zero() { let mut timer = Timer::new().unwrap(); let rx = timer.periodic(Duration::milliseconds(0)); rx.recv(); rx.recv(); rx.recv(); rx.recv(); }) iotest!(fn periodic_negative() { let mut timer = Timer::new().unwrap(); let rx = timer.periodic(Duration::milliseconds(-1000000)); rx.recv(); rx.recv(); rx.recv(); rx.recv(); }) }
31.777778
84
0.594615
7502869094c35f629ba672615d7f6fa7d91b8793
12,898
use byteorder::ReadBytesExt; use error::{Error, Result}; use marker::Marker; use parser::ScanInfo; use std::io::Read; use std::iter::repeat; const LUT_BITS: u8 = 8; #[derive(Debug)] pub struct HuffmanDecoder { bits: u64, num_bits: u8, marker: Option<Marker>, } impl HuffmanDecoder { pub fn new() -> HuffmanDecoder { HuffmanDecoder { bits: 0, num_bits: 0, marker: None, } } // Section F.2.2.3 // Figure F.16 pub fn decode<R: Read>(&mut self, reader: &mut R, table: &HuffmanTable) -> Result<u8> { if self.num_bits < 16 { self.read_bits(reader)?; } let (value, size) = table.lut[self.peek_bits(LUT_BITS) as usize]; if size > 0 { self.consume_bits(size); Ok(value) } else { let bits = self.peek_bits(16); for i in LUT_BITS .. 16 { let code = (bits >> (15 - i)) as i32; if code <= table.maxcode[i as usize] { self.consume_bits(i + 1); let index = (code + table.delta[i as usize]) as usize; return Ok(table.values[index]); } } Err(Error::Format("failed to decode huffman code".to_owned())) } } pub fn decode_fast_ac<R: Read>(&mut self, reader: &mut R, table: &HuffmanTable) -> Result<Option<(i16, u8)>> { if let Some(ref ac_lut) = table.ac_lut { if self.num_bits < LUT_BITS { self.read_bits(reader)?; } let (value, run_size) = ac_lut[self.peek_bits(LUT_BITS) as usize]; if run_size != 0 { let run = run_size >> 4; let size = run_size & 0x0f; self.consume_bits(size); return Ok(Some((value, run))); } } Ok(None) } #[inline] pub fn get_bits<R: Read>(&mut self, reader: &mut R, count: u8) -> Result<u16> { if self.num_bits < count { self.read_bits(reader)?; } let bits = self.peek_bits(count); self.consume_bits(count); Ok(bits) } #[inline] pub fn receive_extend<R: Read>(&mut self, reader: &mut R, count: u8) -> Result<i16> { let value = self.get_bits(reader, count)?; Ok(extend(value, count)) } pub fn reset(&mut self) { self.bits = 0; self.num_bits = 0; } pub fn take_marker<R: Read>(&mut self, reader: &mut R) -> Result<Option<Marker>> { self.read_bits(reader).map(|_| self.marker.take()) } #[inline] fn 
peek_bits(&mut self, count: u8) -> u16 { debug_assert!(count <= 16); debug_assert!(self.num_bits >= count); ((self.bits >> (64 - count)) & ((1 << count) - 1)) as u16 } #[inline] fn consume_bits(&mut self, count: u8) { debug_assert!(self.num_bits >= count); self.bits <<= count as usize; self.num_bits -= count; } fn read_bits<R: Read>(&mut self, reader: &mut R) -> Result<()> { while self.num_bits <= 56 { // Fill with zero bits if we have reached the end. let byte = match self.marker { Some(_) => 0, None => reader.read_u8()?, }; if byte == 0xFF { let mut next_byte = reader.read_u8()?; // Check for byte stuffing. if next_byte != 0x00 { // We seem to have reached the end of entropy-coded data and encountered a // marker. Since we can't put data back into the reader, we have to continue // reading to identify the marker so we can pass it on. // Section B.1.1.2 // "Any marker may optionally be preceded by any number of fill bytes, which are bytes assigned code X’FF’." while next_byte == 0xFF { next_byte = reader.read_u8()?; } match next_byte { 0x00 => return Err(Error::Format("FF 00 found where marker was expected".to_owned())), _ => self.marker = Some(Marker::from_u8(next_byte).unwrap()), } continue; } } self.bits |= (byte as u64) << (56 - self.num_bits); self.num_bits += 8; } Ok(()) } } // Section F.2.2.1 // Figure F.12 fn extend(value: u16, count: u8) -> i16 { let vt = 1 << (count as u16 - 1); if value < vt { value as i16 + (-1 << count as i16) + 1 } else { value as i16 } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum HuffmanTableClass { DC, AC, } pub struct HuffmanTable { values: Vec<u8>, delta: [i32; 16], maxcode: [i32; 16], lut: [(u8, u8); 1 << LUT_BITS], ac_lut: Option<[(i16, u8); 1 << LUT_BITS]>, } impl HuffmanTable { pub fn new(bits: &[u8; 16], values: &[u8], class: HuffmanTableClass) -> Result<HuffmanTable> { let (huffcode, huffsize) = derive_huffman_codes(bits)?; // Section F.2.2.3 // Figure F.15 // delta[i] is set to VALPTR(I) - MINCODE(I) let mut delta = 
[0i32; 16]; let mut maxcode = [-1i32; 16]; let mut j = 0; for i in 0 .. 16 { if bits[i] != 0 { delta[i] = j as i32 - huffcode[j] as i32; j += bits[i] as usize; maxcode[i] = huffcode[j - 1] as i32; } } // Build a lookup table for faster decoding. let mut lut = [(0u8, 0u8); 1 << LUT_BITS]; for (i, &size) in huffsize.iter().enumerate().filter(|&(_, &size)| size <= LUT_BITS) { let bits_remaining = LUT_BITS - size; let start = (huffcode[i] << bits_remaining) as usize; for j in 0 .. 1 << bits_remaining { lut[start + j] = (values[i], size); } } // Build a lookup table for small AC coefficients which both decodes the value and does the // equivalent of receive_extend. let ac_lut = match class { HuffmanTableClass::DC => None, HuffmanTableClass::AC => { let mut table = [(0i16, 0u8); 1 << LUT_BITS]; for (i, &(value, size)) in lut.iter().enumerate() { let run_length = value >> 4; let magnitude_category = value & 0x0f; if magnitude_category > 0 && size + magnitude_category <= LUT_BITS { let unextended_ac_value = (((i << size) & ((1 << LUT_BITS) - 1)) >> (LUT_BITS - magnitude_category)) as u16; let ac_value = extend(unextended_ac_value, magnitude_category); table[i] = (ac_value, (run_length << 4) | (size + magnitude_category)); } } Some(table) }, }; Ok(HuffmanTable { values: values.to_vec(), delta: delta, maxcode: maxcode, lut: lut, ac_lut: ac_lut, }) } } // Section C.2 fn derive_huffman_codes(bits: &[u8; 16]) -> Result<(Vec<u16>, Vec<u8>)> { // Figure C.1 let huffsize = bits.iter() .enumerate() .fold(Vec::new(), |mut acc, (i, &value)| { let mut repeated_size: Vec<u8> = repeat((i + 1) as u8).take(value as usize).collect(); acc.append(&mut repeated_size); acc }); // Figure C.2 let mut huffcode = vec![0u16; huffsize.len()]; let mut code_size = huffsize[0]; let mut code = 0u32; for (i, &size) in huffsize.iter().enumerate() { while code_size < size { code <<= 1; code_size += 1; } if code >= (1u32 << size) { return Err(Error::Format("bad huffman code length".to_owned())); } 
huffcode[i] = code as u16; code += 1; } Ok((huffcode, huffsize)) } // https://www.loc.gov/preservation/digital/formats/fdd/fdd000063.shtml // "Avery Lee, writing in the rec.video.desktop newsgroup in 2001, commented that "MJPEG, or at // least the MJPEG in AVIs having the MJPG fourcc, is restricted JPEG with a fixed -- and // *omitted* -- Huffman table. The JPEG must be YCbCr colorspace, it must be 4:2:2, and it must // use basic Huffman encoding, not arithmetic or progressive.... You can indeed extract the // MJPEG frames and decode them with a regular JPEG decoder, but you have to prepend the DHT // segment to them, or else the decoder won't have any idea how to decompress the data. // The exact table necessary is given in the OpenDML spec."" pub fn fill_default_mjpeg_tables(scan: &ScanInfo, dc_huffman_tables: &mut[Option<HuffmanTable>], ac_huffman_tables: &mut[Option<HuffmanTable>]) { // Section K.3.3 if dc_huffman_tables[0].is_none() && scan.dc_table_indices.iter().any(|&i| i == 0) { // Table K.3 dc_huffman_tables[0] = Some(HuffmanTable::new( &[0x00, 0x01, 0x05, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], &[0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B], HuffmanTableClass::DC).unwrap()); } if dc_huffman_tables[1].is_none() && scan.dc_table_indices.iter().any(|&i| i == 1) { // Table K.4 dc_huffman_tables[1] = Some(HuffmanTable::new( &[0x00, 0x03, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00], &[0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B], HuffmanTableClass::DC).unwrap()); } if ac_huffman_tables[0].is_none() && scan.ac_table_indices.iter().any(|&i| i == 0) { // Table K.5 ac_huffman_tables[0] = Some(HuffmanTable::new( &[0x00, 0x02, 0x01, 0x03, 0x03, 0x02, 0x04, 0x03, 0x05, 0x05, 0x04, 0x04, 0x00, 0x00, 0x01, 0x7D], &[0x01, 0x02, 0x03, 0x00, 0x04, 0x11, 0x05, 0x12, 0x21, 0x31, 0x41, 0x06, 0x13, 0x51, 0x61, 0x07, 0x22, 0x71, 0x14, 0x32, 0x81, 
0x91, 0xA1, 0x08, 0x23, 0x42, 0xB1, 0xC1, 0x15, 0x52, 0xD1, 0xF0, 0x24, 0x33, 0x62, 0x72, 0x82, 0x09, 0x0A, 0x16, 0x17, 0x18, 0x19, 0x1A, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE1, 0xE2, 0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 0xE8, 0xE9, 0xEA, 0xF1, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA ], HuffmanTableClass::AC).unwrap()); } if ac_huffman_tables[1].is_none() && scan.ac_table_indices.iter().any(|&i| i == 1) { // Table K.6 ac_huffman_tables[1] = Some(HuffmanTable::new( &[0x00, 0x02, 0x01, 0x02, 0x04, 0x04, 0x03, 0x04, 0x07, 0x05, 0x04, 0x04, 0x00, 0x01, 0x02, 0x77], &[0x00, 0x01, 0x02, 0x03, 0x11, 0x04, 0x05, 0x21, 0x31, 0x06, 0x12, 0x41, 0x51, 0x07, 0x61, 0x71, 0x13, 0x22, 0x32, 0x81, 0x08, 0x14, 0x42, 0x91, 0xA1, 0xB1, 0xC1, 0x09, 0x23, 0x33, 0x52, 0xF0, 0x15, 0x62, 0x72, 0xD1, 0x0A, 0x16, 0x24, 0x34, 0xE1, 0x25, 0xF1, 0x17, 0x18, 0x19, 0x1A, 0x26, 0x27, 0x28, 0x29, 0x2A, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4A, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5A, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6A, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7A, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8A, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9A, 0xA2, 0xA3, 0xA4, 0xA5, 0xA6, 0xA7, 0xA8, 0xA9, 0xAA, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB8, 0xB9, 0xBA, 0xC2, 0xC3, 0xC4, 0xC5, 0xC6, 0xC7, 0xC8, 0xC9, 0xCA, 0xD2, 0xD3, 0xD4, 0xD5, 0xD6, 0xD7, 0xD8, 0xD9, 0xDA, 0xE2, 
0xE3, 0xE4, 0xE5, 0xE6, 0xE7, 0xE8, 0xE9, 0xEA, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF8, 0xF9, 0xFA ], HuffmanTableClass::AC).unwrap()); } }
37.494186
132
0.529152
0831f319ce7b9f9fac92ac17c3fa6027c50ffff9
5,880
// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// CrateIds identify crates and include the crate name and optionally a path /// and version. In the full form, they look like relative URLs. Example: /// `github.com/mozilla/rust#std:1.0` would be a package ID with a path of /// `gitub.com/mozilla/rust` and a crate name of `std` with a version of /// `1.0`. If no crate name is given after the hash, the name is inferred to /// be the last component of the path. If no version is given, it is inferred /// to be `0.0`. #[deriving(Clone, Eq)] pub struct CrateId { /// A path which represents the codes origin. By convention this is the /// URL, without `http://` or `https://` prefix, to the crate's repository path: ~str, /// The name of the crate. name: ~str, /// The version of the crate. 
version: Option<~str>, } impl ToStr for CrateId { fn to_str(&self) -> ~str { let version = match self.version { None => "0.0", Some(ref version) => version.as_slice(), }; if self.path == self.name || self.path.ends_with(format!("/{}", self.name)) { format!("{}\\#{}", self.path, version) } else { format!("{}\\#{}:{}", self.path, self.name, version) } } } impl FromStr for CrateId { fn from_str(s: &str) -> Option<CrateId> { let pieces: ~[&str] = s.splitn('#', 1).collect(); let path = pieces[0].to_owned(); if path.starts_with("/") || path.ends_with("/") || path.starts_with(".") || path.is_empty() { return None; } let path_pieces: ~[&str] = path.rsplitn('/', 1).collect(); let inferred_name = path_pieces[0]; let (name, version) = if pieces.len() == 1 { (inferred_name.to_owned(), None) } else { let hash_pieces: ~[&str] = pieces[1].splitn(':', 1).collect(); let (hash_name, hash_version) = if hash_pieces.len() == 1 { ("", hash_pieces[0]) } else { (hash_pieces[0], hash_pieces[1]) }; let name = if !hash_name.is_empty() { hash_name.to_owned() } else { inferred_name.to_owned() }; let version = if !hash_version.is_empty() { if hash_version == "0.0" { None } else { Some(hash_version.to_owned()) } } else { None }; (name, version) }; Some(CrateId { path: path, name: name, version: version, }) } } impl CrateId { pub fn version_or_default<'a>(&'a self) -> &'a str { match self.version { None => "0.0", Some(ref version) => version.as_slice(), } } pub fn short_name_with_version(&self) -> ~str { format!("{}-{}", self.name, self.version_or_default()) } } #[test] fn bare_name() { let crateid: CrateId = from_str("foo").expect("valid crateid"); assert_eq!(crateid.name, ~"foo"); assert_eq!(crateid.version, None); assert_eq!(crateid.path, ~"foo"); } #[test] fn bare_name_single_char() { let crateid: CrateId = from_str("f").expect("valid crateid"); assert_eq!(crateid.name, ~"f"); assert_eq!(crateid.version, None); assert_eq!(crateid.path, ~"f"); } #[test] fn empty_crateid() { let crateid: 
Option<CrateId> = from_str(""); assert!(crateid.is_none()); } #[test] fn simple_path() { let crateid: CrateId = from_str("example.com/foo/bar").expect("valid crateid"); assert_eq!(crateid.name, ~"bar"); assert_eq!(crateid.version, None); assert_eq!(crateid.path, ~"example.com/foo/bar"); } #[test] fn simple_version() { let crateid: CrateId = from_str("foo#1.0").expect("valid crateid"); assert_eq!(crateid.name, ~"foo"); assert_eq!(crateid.version, Some(~"1.0")); assert_eq!(crateid.path, ~"foo"); } #[test] fn absolute_path() { let crateid: Option<CrateId> = from_str("/foo/bar"); assert!(crateid.is_none()); } #[test] fn path_ends_with_slash() { let crateid: Option<CrateId> = from_str("foo/bar/"); assert!(crateid.is_none()); } #[test] fn path_and_version() { let crateid: CrateId = from_str("example.com/foo/bar#1.0").expect("valid crateid"); assert_eq!(crateid.name, ~"bar"); assert_eq!(crateid.version, Some(~"1.0")); assert_eq!(crateid.path, ~"example.com/foo/bar"); } #[test] fn single_chars() { let crateid: CrateId = from_str("a/b#1").expect("valid crateid"); assert_eq!(crateid.name, ~"b"); assert_eq!(crateid.version, Some(~"1")); assert_eq!(crateid.path, ~"a/b"); } #[test] fn missing_version() { let crateid: CrateId = from_str("foo#").expect("valid crateid"); assert_eq!(crateid.name, ~"foo"); assert_eq!(crateid.version, None); assert_eq!(crateid.path, ~"foo"); } #[test] fn path_and_name() { let crateid: CrateId = from_str("foo/rust-bar#bar:1.0").expect("valid crateid"); assert_eq!(crateid.name, ~"bar"); assert_eq!(crateid.version, Some(~"1.0")); assert_eq!(crateid.path, ~"foo/rust-bar"); } #[test] fn empty_name() { let crateid: CrateId = from_str("foo/bar#:1.0").expect("valid crateid"); assert_eq!(crateid.name, ~"bar"); assert_eq!(crateid.version, Some(~"1.0")); assert_eq!(crateid.path, ~"foo/bar"); }
30.153846
87
0.581122
fec23ceb70a1ed39bd4016a4bc3348a32214c74c
4,483
use crate::prelude::*; use crate::resources::ResourceType; use crate::responses::CreateDocumentResponse; use azure_core::errors::UnexpectedHTTPResult; use azure_core::prelude::*; use chrono::{DateTime, Utc}; use http::StatusCode; use serde::Serialize; use std::convert::TryFrom; #[derive(Debug, Clone)] pub struct CreateDocumentBuilder<'a, 'b> { collection_client: &'a CollectionClient, partition_keys: Option<PartitionKeys>, is_upsert: IsUpsert, indexing_directive: IndexingDirective, if_match_condition: Option<IfMatchCondition<'b>>, if_modified_since: Option<IfModifiedSince<'b>>, user_agent: Option<UserAgent<'b>>, activity_id: Option<ActivityId<'b>>, consistency_level: Option<ConsistencyLevel>, allow_tentative_writes: TenativeWritesAllowance, } impl<'a, 'b> CreateDocumentBuilder<'a, 'b> { pub(crate) fn new(collection_client: &'a CollectionClient) -> Self { Self { collection_client, partition_keys: None, is_upsert: IsUpsert::No, indexing_directive: IndexingDirective::Default, if_match_condition: None, if_modified_since: None, user_agent: None, activity_id: None, consistency_level: None, allow_tentative_writes: TenativeWritesAllowance::Deny, } } } impl<'a, 'b> CreateDocumentBuilder<'a, 'b> { setters! 
{ user_agent: &'b str => Some(UserAgent::new(user_agent)), activity_id: &'b str => Some(ActivityId::new(activity_id)), consistency_level: ConsistencyLevel => Some(consistency_level), if_match_condition: IfMatchCondition<'b> => Some(if_match_condition), if_modified_since: &'b DateTime<Utc> => Some(IfModifiedSince::new(if_modified_since)), allow_tentative_writes: TenativeWritesAllowance, is_upsert: bool => if is_upsert { IsUpsert::Yes } else { IsUpsert::No }, indexing_directive: IndexingDirective, partition_keys: PartitionKeys => Some(partition_keys), } } impl<'a, 'b> CreateDocumentBuilder<'a, 'b> { pub async fn execute<T: Serialize>( &self, document: &T, ) -> Result<CreateDocumentResponse, CosmosError> { let mut req = self.collection_client.cosmos_client().prepare_request( &format!( "dbs/{}/colls/{}/docs", self.collection_client.database_client().database_name(), self.collection_client.collection_name() ), http::Method::POST, ResourceType::Documents, ); req = azure_core::headers::add_optional_header(&self.if_match_condition, req); req = azure_core::headers::add_optional_header(&self.if_modified_since, req); req = azure_core::headers::add_optional_header(&self.user_agent, req); req = azure_core::headers::add_optional_header(&self.activity_id, req); req = azure_core::headers::add_optional_header(&self.consistency_level, req); req = azure_core::headers::add_optional_header(&self.partition_keys.as_ref(), req); req = azure_core::headers::add_mandatory_header(&self.is_upsert, req); req = azure_core::headers::add_mandatory_header(&self.indexing_directive, req); req = azure_core::headers::add_mandatory_header(&self.allow_tentative_writes, req); let serialized = azure_core::to_json(document)?; let req = req.body(serialized)?; let response = self .collection_client .http_client() .execute_request(req) .await?; debug!("status_core == {:?}", response.status()); debug!("headers == {:?}", response.headers()); debug!("whole body == {:#?}", response.body()); if self.is_upsert == 
IsUpsert::No && response.status() != StatusCode::CREATED { return Err(UnexpectedHTTPResult::new( StatusCode::CREATED, response.status(), std::str::from_utf8(response.body())?, ) .into()); } else if response.status() != StatusCode::CREATED && response.status() != StatusCode::OK { return Err(UnexpectedHTTPResult::new_multiple( vec![StatusCode::CREATED, StatusCode::OK], response.status(), std::str::from_utf8(response.body())?, ) .into()); } CreateDocumentResponse::try_from(response) } }
39.672566
99
0.632835
e46fee4cac5eefd90aeabbe06886950848938107
3,755
use clippy_utils::diagnostics::span_lint_and_sugg; use clippy_utils::source::{snippet_opt, snippet_with_applicability}; use clippy_utils::ty::match_type; use clippy_utils::{match_def_path, paths}; use if_chain::if_chain; use rustc_errors::Applicability; use rustc_hir::{Expr, ExprKind}; use rustc_lint::{LateContext, LateLintPass}; use rustc_session::{declare_lint_pass, declare_tool_lint}; declare_clippy_lint! { /// ### What it does /// Checks for non-octal values used to set Unix file permissions. /// /// ### Why is this bad? /// They will be converted into octal, creating potentially /// unintended file permissions. /// /// ### Example /// ```rust,ignore /// use std::fs::OpenOptions; /// use std::os::unix::fs::OpenOptionsExt; /// /// let mut options = OpenOptions::new(); /// options.mode(644); /// ``` /// Use instead: /// ```rust,ignore /// use std::fs::OpenOptions; /// use std::os::unix::fs::OpenOptionsExt; /// /// let mut options = OpenOptions::new(); /// options.mode(0o644); /// ``` #[clippy::version = "1.53.0"] pub NON_OCTAL_UNIX_PERMISSIONS, correctness, "use of non-octal value to set unix file permissions, which will be translated into octal" } declare_lint_pass!(NonOctalUnixPermissions => [NON_OCTAL_UNIX_PERMISSIONS]); impl<'tcx> LateLintPass<'tcx> for NonOctalUnixPermissions { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) { match &expr.kind { ExprKind::MethodCall(path, _, [func, param], _) => { let obj_ty = cx.typeck_results().expr_ty(func).peel_refs(); if_chain! { if (path.ident.name == sym!(mode) && (match_type(cx, obj_ty, &paths::OPEN_OPTIONS) || match_type(cx, obj_ty, &paths::DIR_BUILDER))) || (path.ident.name == sym!(set_mode) && match_type(cx, obj_ty, &paths::PERMISSIONS)); if let ExprKind::Lit(_) = param.kind; then { let snip = match snippet_opt(cx, param.span) { Some(s) => s, _ => return, }; if !snip.starts_with("0o") { show_error(cx, param); } } } }, ExprKind::Call(func, [param]) => { if_chain! 
{ if let ExprKind::Path(ref path) = func.kind; if let Some(def_id) = cx.qpath_res(path, func.hir_id).opt_def_id(); if match_def_path(cx, def_id, &paths::PERMISSIONS_FROM_MODE); if let ExprKind::Lit(_) = param.kind; then { let snip = match snippet_opt(cx, param.span) { Some(s) => s, _ => return, }; if !snip.starts_with("0o") { show_error(cx, param); } } } }, _ => {}, }; } } fn show_error(cx: &LateContext<'_>, param: &Expr<'_>) { let mut applicability = Applicability::MachineApplicable; span_lint_and_sugg( cx, NON_OCTAL_UNIX_PERMISSIONS, param.span, "using a non-octal value to set unix file permissions", "consider using an octal literal instead", format!( "0o{}", snippet_with_applicability(cx, param.span, "0o..", &mut applicability,), ), applicability, ); }
35.093458
110
0.511318
db787c78f8c26a6eaef9d35ce187c1e6178bf6a9
4,715
// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use crate::TreeDragSource; use crate::TreeIter; use crate::TreeModel; use crate::TreePath; use crate::TreeSortable; use glib::object::IsA; use glib::translate::*; use std::fmt; glib::wrapper! { #[doc(alias = "GtkTreeModelSort")] pub struct TreeModelSort(Object<ffi::GtkTreeModelSort, ffi::GtkTreeModelSortClass>) @implements TreeDragSource, TreeModel, TreeSortable; match fn { type_ => || ffi::gtk_tree_model_sort_get_type(), } } impl TreeModelSort { pub const NONE: Option<&'static TreeModelSort> = None; #[doc(alias = "gtk_tree_model_sort_new_with_model")] #[doc(alias = "new_with_model")] pub fn new(child_model: &impl IsA<TreeModel>) -> TreeModelSort { skip_assert_initialized!(); unsafe { from_glib_full(ffi::gtk_tree_model_sort_new_with_model( child_model.as_ref().to_glib_none().0, )) } } } pub trait TreeModelSortExt: 'static { #[doc(alias = "gtk_tree_model_sort_clear_cache")] fn clear_cache(&self); #[doc(alias = "gtk_tree_model_sort_convert_child_iter_to_iter")] fn convert_child_iter_to_iter(&self, child_iter: &TreeIter) -> Option<TreeIter>; #[doc(alias = "gtk_tree_model_sort_convert_child_path_to_path")] fn convert_child_path_to_path(&self, child_path: &TreePath) -> Option<TreePath>; #[doc(alias = "gtk_tree_model_sort_convert_iter_to_child_iter")] fn convert_iter_to_child_iter(&self, sorted_iter: &TreeIter) -> TreeIter; #[doc(alias = "gtk_tree_model_sort_convert_path_to_child_path")] fn convert_path_to_child_path(&self, sorted_path: &TreePath) -> Option<TreePath>; #[doc(alias = "gtk_tree_model_sort_get_model")] #[doc(alias = "get_model")] fn model(&self) -> TreeModel; #[doc(alias = "gtk_tree_model_sort_iter_is_valid")] fn iter_is_valid(&self, iter: &TreeIter) -> bool; #[doc(alias = "gtk_tree_model_sort_reset_default_sort_func")] fn reset_default_sort_func(&self); } impl<O: IsA<TreeModelSort>> TreeModelSortExt for O { fn 
clear_cache(&self) { unsafe { ffi::gtk_tree_model_sort_clear_cache(self.as_ref().to_glib_none().0); } } fn convert_child_iter_to_iter(&self, child_iter: &TreeIter) -> Option<TreeIter> { unsafe { let mut sort_iter = TreeIter::uninitialized(); let ret = from_glib(ffi::gtk_tree_model_sort_convert_child_iter_to_iter( self.as_ref().to_glib_none().0, sort_iter.to_glib_none_mut().0, mut_override(child_iter.to_glib_none().0), )); if ret { Some(sort_iter) } else { None } } } fn convert_child_path_to_path(&self, child_path: &TreePath) -> Option<TreePath> { unsafe { from_glib_full(ffi::gtk_tree_model_sort_convert_child_path_to_path( self.as_ref().to_glib_none().0, mut_override(child_path.to_glib_none().0), )) } } fn convert_iter_to_child_iter(&self, sorted_iter: &TreeIter) -> TreeIter { unsafe { let mut child_iter = TreeIter::uninitialized(); ffi::gtk_tree_model_sort_convert_iter_to_child_iter( self.as_ref().to_glib_none().0, child_iter.to_glib_none_mut().0, mut_override(sorted_iter.to_glib_none().0), ); child_iter } } fn convert_path_to_child_path(&self, sorted_path: &TreePath) -> Option<TreePath> { unsafe { from_glib_full(ffi::gtk_tree_model_sort_convert_path_to_child_path( self.as_ref().to_glib_none().0, mut_override(sorted_path.to_glib_none().0), )) } } fn model(&self) -> TreeModel { unsafe { from_glib_none(ffi::gtk_tree_model_sort_get_model( self.as_ref().to_glib_none().0, )) } } fn iter_is_valid(&self, iter: &TreeIter) -> bool { unsafe { from_glib(ffi::gtk_tree_model_sort_iter_is_valid( self.as_ref().to_glib_none().0, mut_override(iter.to_glib_none().0), )) } } fn reset_default_sort_func(&self) { unsafe { ffi::gtk_tree_model_sort_reset_default_sort_func(self.as_ref().to_glib_none().0); } } } impl fmt::Display for TreeModelSort { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("TreeModelSort") } }
32.07483
140
0.623754
d58e56627cafd8da59150f6594bdc8cd347196b9
2,772
//! Component for BLE radio on Apollo3 based platforms. //! //! Usage //! ----- //! ```rust //! let ble_radio = BLEComponent::new(board_kernel, &apollo3::ble::BLE, mux_alarm).finalize(); //! ``` #![allow(dead_code)] // Components are intended to be conditionally included use capsules; use capsules::virtual_alarm::VirtualMuxAlarm; use kernel::capabilities; use kernel::component::Component; use kernel::hil; use kernel::{create_capability, static_init}; /// BLE component for Apollo3 BLE pub struct BLEComponent { board_kernel: &'static kernel::Kernel, driver_num: usize, radio: &'static apollo3::ble::Ble<'static>, mux_alarm: &'static capsules::virtual_alarm::MuxAlarm<'static, apollo3::stimer::STimer<'static>>, } /// BLE component for Apollo3 BLE impl BLEComponent { /// New instance pub fn new( board_kernel: &'static kernel::Kernel, driver_num: usize, radio: &'static apollo3::ble::Ble, mux_alarm: &'static capsules::virtual_alarm::MuxAlarm<'static, apollo3::stimer::STimer>, ) -> BLEComponent { BLEComponent { board_kernel: board_kernel, driver_num: driver_num, radio: radio, mux_alarm: mux_alarm, } } } impl Component for BLEComponent { type StaticInput = (); type Output = &'static capsules::ble_advertising_driver::BLE< 'static, apollo3::ble::Ble<'static>, VirtualMuxAlarm<'static, apollo3::stimer::STimer<'static>>, >; unsafe fn finalize(self, _s: Self::StaticInput) -> Self::Output { let grant_cap = create_capability!(capabilities::MemoryAllocationCapability); let ble_radio_virtual_alarm = static_init!( capsules::virtual_alarm::VirtualMuxAlarm<'static, apollo3::stimer::STimer>, capsules::virtual_alarm::VirtualMuxAlarm::new(self.mux_alarm) ); let ble_radio = static_init!( capsules::ble_advertising_driver::BLE< 'static, apollo3::ble::Ble, VirtualMuxAlarm<'static, apollo3::stimer::STimer>, >, capsules::ble_advertising_driver::BLE::new( self.radio, self.board_kernel.create_grant(self.driver_num, &grant_cap), &mut capsules::ble_advertising_driver::BUF, ble_radio_virtual_alarm ) ); 
kernel::hil::ble_advertising::BleAdvertisementDriver::set_receive_client( self.radio, ble_radio, ); kernel::hil::ble_advertising::BleAdvertisementDriver::set_transmit_client( self.radio, ble_radio, ); hil::time::Alarm::set_alarm_client(ble_radio_virtual_alarm, ble_radio); ble_radio } }
32.611765
96
0.630952
d7d51a154989942886a7f206a4cfa042e8a05b70
8,619
#[allow(dead_code)] #[repr(packed)] pub struct ScratchRegisters { pub r11: usize, pub r10: usize, pub r9: usize, pub r8: usize, pub rsi: usize, pub rdi: usize, pub rdx: usize, pub rcx: usize, pub rax: usize, } impl ScratchRegisters { pub fn dump(&self) { println!("RAX: {:>016X}", { self.rax }); println!("RCX: {:>016X}", { self.rcx }); println!("RDX: {:>016X}", { self.rdx }); println!("RDI: {:>016X}", { self.rdi }); println!("RSI: {:>016X}", { self.rsi }); println!("R8: {:>016X}", { self.r8 }); println!("R9: {:>016X}", { self.r9 }); println!("R10: {:>016X}", { self.r10 }); println!("R11: {:>016X}", { self.r11 }); } } #[macro_export] macro_rules! scratch_push { () => (asm!( "push rax push rcx push rdx push rdi push rsi push r8 push r9 push r10 push r11" : : : : "intel", "volatile" )); } #[macro_export] macro_rules! scratch_pop { () => (asm!( "pop r11 pop r10 pop r9 pop r8 pop rsi pop rdi pop rdx pop rcx pop rax" : : : : "intel", "volatile" )); } #[allow(dead_code)] #[repr(packed)] pub struct PreservedRegisters { pub r15: usize, pub r14: usize, pub r13: usize, pub r12: usize, pub rbp: usize, pub rbx: usize, } impl PreservedRegisters { pub fn dump(&self) { println!("RBX: {:>016X}", { self.rbx }); println!("RBP: {:>016X}", { self.rbp }); println!("R12: {:>016X}", { self.r12 }); println!("R13: {:>016X}", { self.r13 }); println!("R14: {:>016X}", { self.r14 }); println!("R15: {:>016X}", { self.r15 }); } } #[macro_export] macro_rules! preserved_push { () => (asm!( "push rbx push rbp push r12 push r13 push r14 push r15" : : : : "intel", "volatile" )); } #[macro_export] macro_rules! preserved_pop { () => (asm!( "pop r15 pop r14 pop r13 pop r12 pop rbp pop rbx" : : : : "intel", "volatile" )); } #[macro_export] macro_rules! fs_push { () => (asm!( "push fs mov rax, 0x18 mov fs, ax" : : : : "intel", "volatile" )); } #[macro_export] macro_rules! 
fs_pop { () => (asm!( "pop fs" : : : : "intel", "volatile" )); } #[allow(dead_code)] #[repr(packed)] pub struct IretRegisters { pub rip: usize, pub cs: usize, pub rflags: usize, } impl IretRegisters { pub fn dump(&self) { println!("RFLAG: {:>016X}", { self.rflags }); println!("CS: {:>016X}", { self.cs }); println!("RIP: {:>016X}", { self.rip }); } } #[macro_export] macro_rules! iret { () => (asm!( "iretq" : : : : "intel", "volatile" )); } /// Create an interrupt function that can safely run rust code #[macro_export] macro_rules! interrupt { ($name:ident, $func:block) => { #[naked] pub unsafe extern fn $name () { #[inline(never)] unsafe fn inner() { $func } // Push scratch registers scratch_push!(); fs_push!(); // Call inner rust function inner(); // Pop scratch registers and return fs_pop!(); scratch_pop!(); iret!(); } }; } #[allow(dead_code)] #[repr(packed)] pub struct InterruptStack { pub fs: usize, pub scratch: ScratchRegisters, pub iret: IretRegisters, } impl InterruptStack { pub fn dump(&self) { self.iret.dump(); self.scratch.dump(); println!("FS: {:>016X}", { self.fs }); } } #[macro_export] macro_rules! 
interrupt_stack { ($name:ident, $stack: ident, $func:block) => { #[naked] pub unsafe extern fn $name () { #[inline(never)] unsafe fn inner($stack: &mut $crate::interrupt::macros::InterruptStack) { $func } // Push scratch registers scratch_push!(); fs_push!(); // Get reference to stack variables let rsp: usize; asm!("" : "={rsp}"(rsp) : : : "intel", "volatile"); // Call inner rust function inner(&mut *(rsp as *mut $crate::interrupt::macros::InterruptStack)); // Pop scratch registers and return fs_pop!(); scratch_pop!(); iret!(); } }; } #[allow(dead_code)] #[repr(packed)] pub struct InterruptErrorStack { pub fs: usize, pub scratch: ScratchRegisters, pub code: usize, pub iret: IretRegisters, } impl InterruptErrorStack { pub fn dump(&self) { self.iret.dump(); println!("CODE: {:>016X}", { self.code }); self.scratch.dump(); println!("FS: {:>016X}", { self.fs }); } } #[macro_export] macro_rules! interrupt_error { ($name:ident, $stack:ident, $func:block) => { #[naked] pub unsafe extern fn $name () { #[inline(never)] unsafe fn inner($stack: &$crate::interrupt::macros::InterruptErrorStack) { $func } // Push scratch registers scratch_push!(); fs_push!(); // Get reference to stack variables let rsp: usize; asm!("" : "={rsp}"(rsp) : : : "intel", "volatile"); // Call inner rust function inner(&*(rsp as *const $crate::arch::x86_64::macros::InterruptErrorStack)); // Pop scratch registers, error code, and return fs_pop!(); scratch_pop!(); asm!("add rsp, 8" : : : : "intel", "volatile"); iret!(); } }; } #[allow(dead_code)] #[repr(packed)] pub struct InterruptStackP { pub fs: usize, pub preserved: PreservedRegisters, pub scratch: ScratchRegisters, pub iret: IretRegisters, } impl InterruptStackP { pub fn dump(&self) { self.iret.dump(); self.scratch.dump(); self.preserved.dump(); println!("FS: {:>016X}", { self.fs }); } } #[macro_export] macro_rules! 
interrupt_stack_p { ($name:ident, $stack: ident, $func:block) => { #[naked] pub unsafe extern fn $name () { #[inline(never)] unsafe fn inner($stack: &mut $crate::interrupt::macros::InterruptStackP) { $func } // Push scratch registers scratch_push!(); preserved_push!(); fs_push!(); // Get reference to stack variables let rsp: usize; asm!("" : "={rsp}"(rsp) : : : "intel", "volatile"); // Call inner rust function inner(&mut *(rsp as *mut $crate::interrupt::macros::InterruptStackP)); // Pop scratch registers and return fs_pop!(); preserved_pop!(); scratch_pop!(); iret!(); } }; } #[allow(dead_code)] #[repr(packed)] pub struct InterruptErrorStackP { pub fs: usize, pub preserved: PreservedRegisters, pub scratch: ScratchRegisters, pub code: usize, pub iret: IretRegisters, } impl InterruptErrorStackP { pub fn dump(&self) { self.iret.dump(); println!("CODE: {:>016X}", { self.code }); self.scratch.dump(); self.preserved.dump(); println!("FS: {:>016X}", { self.fs }); } } #[macro_export] macro_rules! interrupt_error_p { ($name:ident, $stack:ident, $func:block) => { #[naked] pub unsafe extern fn $name () { #[inline(never)] unsafe fn inner($stack: &$crate::interrupt::macros::InterruptErrorStackP) { $func } // Push scratch registers scratch_push!(); preserved_push!(); fs_push!(); // Get reference to stack variables let rsp: usize; asm!("" : "={rsp}"(rsp) : : : "intel", "volatile"); // Call inner rust function inner(&*(rsp as *const $crate::interrupt::macros::InterruptErrorStackP)); // Pop scratch registers, error code, and return fs_pop!(); preserved_pop!(); scratch_pop!(); asm!("add rsp, 8" : : : : "intel", "volatile"); iret!(); } }; }
23.743802
87
0.490196
29e2a31473721d68b9f3da6911bdc12567dae811
47
#[test] fn test_test() { assert!(true); }
7.833333
18
0.531915
18d9e82f33f395cb39afdf4a3de6d1984155bad5
21,569
// Copyright (c) The Libra Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::file_format::*; use failure::*; use hex; use std::{collections::VecDeque, fmt}; use types::{account_address::AccountAddress, byte_array::ByteArray}; // // Display printing // Display the top level compilation unit (CompiledScript and CompiledModule) in a more // readable format. Essentially the printing resolves all table indexes and is a line by line // for each table and with a reasonable indentation, e.g. // ```text // CompiledModule: { // Struct Handles: [ // ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000,] // Field Handles: [ // ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.item: Value,] // Function Handles: [ // ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.get(): Value, // ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.new(Value): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000,] // Struct Definitions: [ // {public resource ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000 // private ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.item: Value // public ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.get(): Value // static public ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.new(Value): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000},] // Field Definitions: [ // private ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.item: Value,] // Function Definitions: [ // public ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.get(): Value // local(0): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000, // local(1): &Value, // local(2): Value, // CopyLoc(0) // 
BorrowField(ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.item: Value) // StLoc(1) // CopyLoc(1) // ReadRef // StLoc(2) // MoveLoc(2) // Ret, // static public ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000.new(Value): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000 // local(0): Value, // local(1): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000, // MoveLoc(0) // Pack(ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000) // StLoc(1) // MoveLoc(1) // Ret,] // Signatures: [ // Value, // (): Value, // (Value): ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000, // ResourceBox@0x0000000000000000000000000000000000000000000000000000000000000000, // &Value,] // Strings: [ // ResourceBox, // item, // get, // new,] // Addresses: [ // 0x0000000000000000000000000000000000000000000000000000000000000000,] // } // ``` // Trait to access tables for both CompiledScript and CompiledModule. // This is designed mainly for the printer -- public APIs should be based on the accessors in // `access.rs`. 
pub trait TableAccess { fn get_field_def_at(&self, idx: FieldDefinitionIndex) -> Result<&FieldDefinition>; fn get_module_at(&self, idx: ModuleHandleIndex) -> Result<&ModuleHandle>; fn get_struct_at(&self, idx: StructHandleIndex) -> Result<&StructHandle>; fn get_function_at(&self, idx: FunctionHandleIndex) -> Result<&FunctionHandle>; fn get_string_at(&self, idx: StringPoolIndex) -> Result<&String>; fn get_address_at(&self, idx: AddressPoolIndex) -> Result<&AccountAddress>; fn get_type_signature_at(&self, idx: TypeSignatureIndex) -> Result<&TypeSignature>; fn get_function_signature_at(&self, idx: FunctionSignatureIndex) -> Result<&FunctionSignature>; fn get_locals_signature_at(&self, idx: LocalsSignatureIndex) -> Result<&LocalsSignature>; } impl TableAccess for CompiledScript { fn get_field_def_at(&self, _idx: FieldDefinitionIndex) -> Result<&FieldDefinition> { bail!("no field definitions in scripts"); } fn get_module_at(&self, idx: ModuleHandleIndex) -> Result<&ModuleHandle> { match self.module_handles.get(idx.0 as usize) { None => bail!("bad module handle index {}", idx), Some(m) => Ok(m), } } fn get_struct_at(&self, idx: StructHandleIndex) -> Result<&StructHandle> { match self.struct_handles.get(idx.0 as usize) { None => bail!("bad struct handle index {}", idx), Some(s) => Ok(s), } } fn get_function_at(&self, idx: FunctionHandleIndex) -> Result<&FunctionHandle> { match self.function_handles.get(idx.0 as usize) { None => bail!("bad function handle index {}", idx), Some(m) => Ok(m), } } fn get_string_at(&self, idx: StringPoolIndex) -> Result<&String> { match self.string_pool.get(idx.0 as usize) { None => bail!("bad string index {}", idx), Some(s) => Ok(s), } } fn get_address_at(&self, idx: AddressPoolIndex) -> Result<&AccountAddress> { match self.address_pool.get(idx.0 as usize) { None => bail!("bad address index {}", idx), Some(addr) => Ok(addr), } } fn get_type_signature_at(&self, idx: TypeSignatureIndex) -> Result<&TypeSignature> { match 
self.type_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } fn get_function_signature_at(&self, idx: FunctionSignatureIndex) -> Result<&FunctionSignature> { match self.function_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } fn get_locals_signature_at(&self, idx: LocalsSignatureIndex) -> Result<&LocalsSignature> { match self.locals_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } } impl TableAccess for CompiledModule { fn get_field_def_at(&self, idx: FieldDefinitionIndex) -> Result<&FieldDefinition> { match self.field_defs.get(idx.0 as usize) { None => bail!("bad field definition index {}", idx), Some(f) => Ok(f), } } fn get_module_at(&self, idx: ModuleHandleIndex) -> Result<&ModuleHandle> { match self.module_handles.get(idx.0 as usize) { None => bail!("bad module handle index {}", idx), Some(m) => Ok(m), } } fn get_struct_at(&self, idx: StructHandleIndex) -> Result<&StructHandle> { match self.struct_handles.get(idx.0 as usize) { None => bail!("bad struct handle index {}", idx), Some(s) => Ok(s), } } fn get_function_at(&self, idx: FunctionHandleIndex) -> Result<&FunctionHandle> { match self.function_handles.get(idx.0 as usize) { None => bail!("bad function handle index {}", idx), Some(m) => Ok(m), } } fn get_string_at(&self, idx: StringPoolIndex) -> Result<&String> { match self.string_pool.get(idx.0 as usize) { None => bail!("bad string index {}", idx), Some(s) => Ok(s), } } fn get_address_at(&self, idx: AddressPoolIndex) -> Result<&AccountAddress> { match self.address_pool.get(idx.0 as usize) { None => bail!("bad address index {}", idx), Some(addr) => Ok(addr), } } fn get_type_signature_at(&self, idx: TypeSignatureIndex) -> Result<&TypeSignature> { match self.type_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } fn 
get_function_signature_at(&self, idx: FunctionSignatureIndex) -> Result<&FunctionSignature> { match self.function_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } fn get_locals_signature_at(&self, idx: LocalsSignatureIndex) -> Result<&LocalsSignature> { match self.locals_signatures.get(idx.0 as usize) { None => bail!("bad signature index {}", idx), Some(sig) => Ok(sig), } } } impl fmt::Display for CompiledProgram { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "CompiledProgram: {{\nModules: [\n")?; for m in &self.modules { writeln!(f, "{},", m)?; } write!(f, "],\nScript: {}\n}}", self.script) } } impl fmt::Display for CompiledScript { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "CompiledScript: {{\nMain:\n\t")?; display_function_definition(&self.main, self, f)?; display_code(&self.main.code, self, "\n\t\t", f)?; write!(f, "\nStruct Handles: [")?; for struct_handle in &self.struct_handles { write!(f, "\n\t")?; display_struct_handle(struct_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Module Handles: [")?; for module_handle in &self.module_handles { write!(f, "\n\t")?; display_module_handle(module_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Function Handles: [")?; for function_handle in &self.function_handles { write!(f, "\n\t")?; display_function_handle(function_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Type Signatures: [")?; for signature in &self.type_signatures { write!(f, "\n\t")?; display_type_signature(signature, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Function Signatures: [")?; for signature in &self.function_signatures { write!(f, "\n\t")?; display_function_signature(signature, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Locals Signatures: [")?; for signature in &self.locals_signatures { write!(f, "\n\t")?; display_locals_signature(signature, self, f)?; 
write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Strings: [")?; for string in &self.string_pool { write!(f, "\n\t{},", string)?; } writeln!(f, "]")?; write!(f, "ByteArrays: [")?; for byte_array in &self.byte_array_pool { write!(f, "\n\t")?; display_byte_array(byte_array, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Addresses: [")?; for address in &self.address_pool { write!(f, "\n\t")?; display_address(address, f)?; write!(f, ",")?; } writeln!(f, "]")?; writeln!(f, "}}") } } impl fmt::Display for CompiledModule { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { writeln!(f, "CompiledModule: {{")?; write!(f, "Module Handles: [")?; for module_handle in &self.module_handles { write!(f, "\n\t")?; display_module_handle(module_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Struct Handles: [")?; for struct_handle in &self.struct_handles { write!(f, "\n\t")?; display_struct_handle(struct_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Function Handles: [")?; for function_handle in &self.function_handles { write!(f, "\n\t")?; display_function_handle(function_handle, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Struct Definitions: [")?; for struct_def in &self.struct_defs { write!(f, "\n\t{{")?; display_struct_definition(struct_def, self, f)?; let f_start_idx = struct_def.fields; let f_end_idx = f_start_idx.0 as u16 + struct_def.field_count; for idx in f_start_idx.0 as u16..f_end_idx { let field_def = match self.field_defs.get(idx as usize) { None => panic!("bad field definition index {}", idx), Some(f) => f, }; write!(f, "\n\t\t")?; display_field_definition(field_def, self, f)?; } write!(f, "}},")?; } writeln!(f, "]")?; write!(f, "Field Definitions: [")?; for field_def in &self.field_defs { write!(f, "\n\t")?; display_field_definition(field_def, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Function Definitions: [")?; for function_def in &self.function_defs { write!(f, "\n\t")?; 
display_function_definition(function_def, self, f)?; if function_def.flags & CodeUnit::NATIVE == 0 { display_code(&function_def.code, self, "\n\t\t", f)?; } write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Type Signatures: [")?; for signature in &self.type_signatures { write!(f, "\n\t")?; display_type_signature(signature, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Function Signatures: [")?; for signature in &self.function_signatures { write!(f, "\n\t")?; display_function_signature(signature, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Locals Signatures: [")?; for signature in &self.locals_signatures { write!(f, "\n\t")?; display_locals_signature(signature, self, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Strings: [")?; for string in &self.string_pool { write!(f, "\n\t{},", string)?; } writeln!(f, "]")?; write!(f, "ByteArrays: [")?; for byte_array in &self.byte_array_pool { write!(f, "\n\t")?; display_byte_array(byte_array, f)?; write!(f, ",")?; } writeln!(f, "]")?; write!(f, "Addresses: [")?; for address in &self.address_pool { write!(f, "\n\t")?; display_address(address, f)?; write!(f, ",")?; } writeln!(f, "]")?; writeln!(f, "}}") } } fn display_struct_handle<T: TableAccess>( struct_: &StructHandle, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { write!( f, "{} ", if struct_.is_resource { "resource" } else { "struct" } )?; write!(f, "{}@", tables.get_string_at(struct_.name).unwrap())?; display_module_handle(tables.get_module_at(struct_.module).unwrap(), tables, f) } fn display_module_handle<T: TableAccess>( module: &ModuleHandle, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_address(tables.get_address_at(module.address).unwrap(), f)?; write!(f, ".{}", tables.get_string_at(module.name).unwrap()) } fn display_function_handle<T: TableAccess>( function: &FunctionHandle, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_module_handle(tables.get_module_at(function.module).unwrap(), tables, 
f)?; write!(f, ".{}", tables.get_string_at(function.name).unwrap())?; display_function_signature( tables .get_function_signature_at(function.signature) .unwrap(), tables, f, ) } fn display_struct_definition<T: TableAccess>( struct_: &StructDefinition, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_struct_handle( tables.get_struct_at(struct_.struct_handle).unwrap(), tables, f, ) } fn display_field_definition<T: TableAccess>( field: &FieldDefinition, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_struct_handle(tables.get_struct_at(field.struct_).unwrap(), tables, f)?; write!(f, ".{}: ", tables.get_string_at(field.name).unwrap())?; display_type_signature( tables.get_type_signature_at(field.signature).unwrap(), tables, f, ) } fn display_function_definition<T: TableAccess>( function: &FunctionDefinition, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_function_flags(function.flags, f)?; display_function_handle( tables.get_function_at(function.function).unwrap(), tables, f, ) } fn display_code<T: TableAccess>( code: &CodeUnit, tables: &T, indentation: &str, f: &mut fmt::Formatter, ) -> fmt::Result { write!(f, "{}locals({}): ", indentation, code.locals,)?; display_locals_signature( tables.get_locals_signature_at(code.locals).unwrap(), tables, f, )?; write!(f, ",")?; for bytecode in &code.code { write!(f, "{}", indentation)?; display_bytecode(bytecode, tables, f)?; } Ok(()) } fn display_address(addr: &AccountAddress, f: &mut fmt::Formatter) -> fmt::Result { let hex = format!("{:x}", addr); let mut v: VecDeque<char> = hex.chars().collect(); while v.len() > 1 && v[0] == '0' { v.pop_front(); } write!(f, "0x{}", v.into_iter().collect::<String>()) } // Clippy will complain about passing Vec<_> by reference; instead you should pass &[_] // In order to keep the logic of abstracting ByteArray, I think it is alright to ignore the warning #[allow(clippy::ptr_arg)] fn display_byte_array(byte_array: &ByteArray, f: &mut fmt::Formatter) -> 
fmt::Result { write!(f, "0x{}", hex::encode(&byte_array.as_bytes())) } fn display_type_signature<T: TableAccess>( sig: &TypeSignature, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { display_signature_token(&sig.0, tables, f) } fn display_function_signature<T: TableAccess>( sig: &FunctionSignature, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { let mut iter = sig.arg_types.iter().peekable(); write!(f, "(")?; while let Some(token) = iter.next() { display_signature_token(token, tables, f)?; if iter.peek().is_some() { write!(f, ", ")?; } } write!(f, "): ")?; let mut iter = sig.return_types.iter().peekable(); write!(f, "(")?; while let Some(token) = iter.next() { display_signature_token(token, tables, f)?; if iter.peek().is_some() { write!(f, ", ")?; } } write!(f, ")")?; Ok(()) } fn display_locals_signature<T: TableAccess>( sig: &LocalsSignature, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { let mut iter = sig.0.iter().peekable(); while let Some(token) = iter.next() { display_signature_token(token, tables, f)?; if iter.peek().is_some() { write!(f, ", ")?; } } Ok(()) } fn display_signature_token<T: TableAccess>( token: &SignatureToken, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { match token { SignatureToken::Bool => write!(f, "Bool"), SignatureToken::U64 => write!(f, "Integer"), SignatureToken::String => write!(f, "String"), SignatureToken::ByteArray => write!(f, "ByteArray"), SignatureToken::Address => write!(f, "Address"), SignatureToken::Struct(idx) => { display_struct_handle(tables.get_struct_at(*idx).unwrap(), tables, f) } SignatureToken::Reference(token) => { write!(f, "&")?; display_signature_token(token, tables, f) } SignatureToken::MutableReference(token) => { write!(f, "&mut ")?; display_signature_token(token, tables, f) } } } fn display_function_flags(flags: u8, f: &mut fmt::Formatter) -> fmt::Result { if flags & CodeUnit::NATIVE != 0 { write!(f, "native ")?; } if flags & CodeUnit::PUBLIC != 0 { write!(f, "public ")?; } 
Ok(()) } fn display_bytecode<T: TableAccess>( bytecode: &Bytecode, tables: &T, f: &mut fmt::Formatter, ) -> fmt::Result { match bytecode { Bytecode::LdAddr(idx) => { write!(f, "LdAddr(")?; display_address(tables.get_address_at(*idx).unwrap(), f)?; write!(f, ")") } Bytecode::LdStr(idx) => write!(f, "LdStr({})", tables.get_string_at(*idx).unwrap()), Bytecode::BorrowField(idx) => { write!(f, "BorrowField(")?; display_field_definition(tables.get_field_def_at(*idx).unwrap(), tables, f)?; write!(f, ")") } Bytecode::Call(idx) => { write!(f, "Call(")?; display_function_handle(tables.get_function_at(*idx).unwrap(), tables, f)?; write!(f, ")") } _ => write!(f, "{:?}", bytecode), } }
34.676849
195
0.578794
7657265ef3da214c12d1de5c67dbfb21371b3dc1
8,027
use crate::ast::{Enum, Field, Input, Struct, Variant}; use crate::attr::Attrs; use quote::ToTokens; use std::collections::BTreeSet as Set; use syn::{Error, GenericArgument, Member, PathArguments, Result, Type}; impl Input<'_> { pub(crate) fn validate(&self) -> Result<()> { match self { Input::Struct(input) => input.validate(), Input::Enum(input) => input.validate(), } } } impl Struct<'_> { fn validate(&self) -> Result<()> { check_non_field_attrs(&self.attrs)?; if let Some(transparent) = self.attrs.transparent { if self.fields.len() != 1 { return Err(Error::new_spanned( transparent.original, "#[error(transparent)] requires exactly one field", )); } if let Some(source) = self.fields.iter().find_map(|f| f.attrs.source) { return Err(Error::new_spanned( source, "transparent error struct can't contain #[source]", )); } } check_field_attrs(&self.fields)?; for field in &self.fields { field.validate()?; } Ok(()) } } impl Enum<'_> { fn validate(&self) -> Result<()> { check_non_field_attrs(&self.attrs)?; let has_display = self.has_display(); for variant in &self.variants { variant.validate()?; if has_display && variant.attrs.display.is_none() && variant.attrs.transparent.is_none() { return Err(Error::new_spanned( variant.original, "missing #[error(\"...\")] display attribute", )); } } let mut from_types = Set::new(); for variant in &self.variants { if let Some(from_field) = variant.from_field() { let repr = from_field.ty.to_token_stream().to_string(); if !from_types.insert(repr) { return Err(Error::new_spanned( from_field.original, "cannot derive From because another variant has the same source type", )); } } } Ok(()) } } impl Variant<'_> { fn validate(&self) -> Result<()> { check_non_field_attrs(&self.attrs)?; if self.attrs.transparent.is_some() { if self.fields.len() != 1 { return Err(Error::new_spanned( self.original, "#[error(transparent)] requires exactly one field", )); } if let Some(source) = self.fields.iter().find_map(|f| f.attrs.source) { return 
Err(Error::new_spanned( source, "transparent variant can't contain #[source]", )); } } check_field_attrs(&self.fields)?; for field in &self.fields { field.validate()?; } Ok(()) } } impl Field<'_> { fn validate(&self) -> Result<()> { if let Some(display) = &self.attrs.display { return Err(Error::new_spanned( display.original, "not expected here; the #[error(...)] attribute belongs on top of a struct or an enum variant", )); } Ok(()) } } fn check_non_field_attrs(attrs: &Attrs) -> Result<()> { if let Some(from) = &attrs.from { return Err(Error::new_spanned( from, "not expected here; the #[from] attribute belongs on a specific field", )); } if let Some(source) = &attrs.source { return Err(Error::new_spanned( source, "not expected here; the #[source] attribute belongs on a specific field", )); } if let Some(backtrace) = &attrs.backtrace { return Err(Error::new_spanned( backtrace, "not expected here; the #[backtrace] attribute belongs on a specific field", )); } if let Some(display) = &attrs.display { if attrs.transparent.is_some() { return Err(Error::new_spanned( display.original, "cannot have both #[error(transparent)] and a display attribute", )); } } Ok(()) } fn check_field_attrs(fields: &[Field]) -> Result<()> { let mut from_field = None; let mut source_field = None; let mut backtrace_field = None; let mut has_backtrace = false; for field in fields { if let Some(from) = field.attrs.from { if from_field.is_some() { return Err(Error::new_spanned(from, "duplicate #[from] attribute")); } from_field = Some(field); } if let Some(source) = field.attrs.source { if source_field.is_some() { return Err(Error::new_spanned(source, "duplicate #[source] attribute")); } source_field = Some(field); } if let Some(backtrace) = field.attrs.backtrace { if backtrace_field.is_some() { return Err(Error::new_spanned( backtrace, "duplicate #[backtrace] attribute", )); } backtrace_field = Some(field); has_backtrace = true; } if let Some(transparent) = field.attrs.transparent { return 
Err(Error::new_spanned( transparent.original, "#[error(transparent)] needs to go outside the enum or struct, not on an individual field", )); } has_backtrace |= field.is_backtrace(); } if let (Some(from_field), Some(source_field)) = (from_field, source_field) { if !same_member(from_field, source_field) { return Err(Error::new_spanned( from_field.attrs.from, "#[from] is only supported on the source field, not any other field", )); } } if let Some(from_field) = from_field { if fields.len() > 1 + has_backtrace as usize { return Err(Error::new_spanned( from_field.attrs.from, "deriving From requires no fields other than source and backtrace", )); } } if let Some(source_field) = source_field.or(from_field) { if contains_non_static_lifetime(source_field.ty) { return Err(Error::new_spanned( &source_field.original.ty, "non-static lifetimes are not allowed in the source of an error, because std::error::Error requires the source is dyn Error + 'static", )); } } Ok(()) } fn same_member(one: &Field, two: &Field) -> bool { match (&one.member, &two.member) { (Member::Named(one), Member::Named(two)) => one == two, (Member::Unnamed(one), Member::Unnamed(two)) => one.index == two.index, _ => unreachable!(), } } fn contains_non_static_lifetime(ty: &Type) -> bool { match ty { Type::Path(ty) => { let bracketed = match &ty.path.segments.last().unwrap().arguments { PathArguments::AngleBracketed(bracketed) => bracketed, _ => return false, }; for arg in &bracketed.args { match arg { GenericArgument::Type(ty) if contains_non_static_lifetime(ty) => return true, GenericArgument::Lifetime(lifetime) if lifetime.ident != "static" => { return true } _ => {} } } false } Type::Reference(ty) => ty .lifetime .as_ref() .map_or(false, |lifetime| lifetime.ident != "static"), _ => false, // maybe implement later if there are common other cases } }
34.303419
151
0.514638
e43fb916c1380ba5f0407cac9dfa16e21d96e75f
30,976
extern crate env_logger; extern crate hyper; extern crate log; #[macro_use] extern crate serde_json; extern crate webdriver_client; use env_logger::{LogBuilder, LogTarget}; use log::LogLevelFilter; use std::io::Read; use std::env; use std::path::PathBuf; use std::sync::Once; use std::thread::sleep; use std::time::Duration; use webdriver_client::{Driver, DriverSession, HttpDriverBuilder, LocationStrategy}; use webdriver_client::firefox::GeckoDriver; use webdriver_client::chrome::ChromeDriver; use webdriver_client::messages::{ExecuteCmd, NewSessionCmd}; /// The different browsers supported in tests #[derive(Debug)] enum TestBrowser { Firefox, Chrome, } impl TestBrowser { fn session(&self) -> DriverSession { match *self { TestBrowser::Firefox => { GeckoDriver::build() .spawn() .expect("Error starting geckodriver") .session(&self.new_session_cmd()) .expect("Error starting session") } TestBrowser::Chrome => { ChromeDriver::build() .spawn() .expect("Error starting chromedriver") .session(&self.new_session_cmd()) .expect("Error starting session") } } } fn driver(&self) -> Box<dyn Driver> { match *self { TestBrowser::Firefox => { Box::new(GeckoDriver::build() .spawn().expect("Error starting geckodriver")) } TestBrowser::Chrome => { Box::new(ChromeDriver::build() .spawn().expect("Error starting chromedriver")) } } } fn new_session_cmd(&self) -> NewSessionCmd { let mut new = NewSessionCmd::default(); match *self { TestBrowser::Firefox => { new.always_match( "moz:firefoxOptions", json!({ "args": ["-headless"] })) } TestBrowser::Chrome => { // Tests must run in headless mode without a // sandbox (required for Travis CI). new.always_match( "goog:chromeOptions", json!({ "args": ["--no-sandbox", "--headless"], })) } }; new } } /// Tests defined in this macro are run once per browser. See the macro invocations below. macro_rules! 
browser_tests { ($mod_name:ident, $browser_type:expr) => { mod $mod_name { use super::*; fn test_browser() -> TestBrowser { $browser_type } #[test] fn navigation() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); assert_eq!(&sess.get_current_url().expect("Error getting url [1]"), &page1, "Wrong URL [1]"); let page2 = server.url("/page2.html"); sess.go(&page2).expect("Error going to page2"); assert_eq!(&sess.get_current_url().expect("Error getting url [2]"), &page2, "Wrong URL [2]"); sess.back().expect("Error going back"); assert_eq!(&sess.get_current_url().expect("Error getting url [3]"), &page1, "Wrong URL [3]"); sess.forward().expect("Error going forward"); assert_eq!(&sess.get_current_url().expect("Error getting url [4]"), &page2, "Wrong URL [4]"); } #[test] fn title() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); assert_eq!(&sess.get_title().expect("Error getting title"), "Test page 1 title", "Wrong title"); } #[test] fn get_page_source() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let page_source = sess.get_page_source().expect("Error getting page source"); if sess.browser_name() == Some("chrome") { // chrome sets the xmlns attribute in the html element assert!(page_source.contains(r#"<html>"#), "Want page_source to contain <html> but was {}", page_source); } else { assert!(page_source.contains("<html>"), "Want page_source to contain <html> but was {}", page_source); } assert!(page_source.contains("<title>Test page 1 title</title>"), "Want page_source to contain <title>Test page 1 title</title> but was {}", page_source); } #[test] fn find_element_by_css() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("span.red", 
LocationStrategy::Css).expect("Error finding element"); assert_eq!(element.text().expect("Error getting text"), "Red text", "Wrong element found"); sess.find_element("body.red", LocationStrategy::Css).expect_err("Want error"); } #[test] fn find_element_by_link_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("A really handy WebDriver crate", LocationStrategy::LinkText).expect("Error finding element"); assert_eq!(element.text().expect("Error getting text"), "A really handy WebDriver crate", "Wrong element found"); sess.find_element("A link with this text does not appear on the page", LocationStrategy::LinkText).expect_err("Want error"); } #[test] fn find_element_by_partial_link_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("crate", LocationStrategy::PartialLinkText).expect("Error finding element"); assert_eq!(element.text().expect("Error getting text"), "A really handy WebDriver crate", "Wrong element found"); sess.find_element("A link with this text does not appear on the page", LocationStrategy::PartialLinkText).expect_err("Want error"); } #[test] fn find_element_by_xpath() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("//a", LocationStrategy::XPath).expect("Error finding element"); assert_eq!(element.text().expect("Error getting text"), "A really handy WebDriver crate", "Wrong element found"); sess.find_element("//video", LocationStrategy::XPath).expect_err("Want error"); } #[test] fn find_elements_by_css() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let elements = sess.find_elements("span.red", LocationStrategy::Css).expect("Error finding elements"); let element_texts: 
Vec<String> = elements.into_iter().map(|elem| elem.text().expect("Error getting text")).collect(); assert_eq!(element_texts, vec!["Red text".to_owned(), "More red text".to_owned()], "Wrong element texts"); let found_elements = sess.find_elements("body.red", LocationStrategy::Css).expect("Error finding absent elements"); assert!(found_elements.is_empty(), "Want to find no elements, found {:?}", found_elements); } #[test] fn find_elements_by_link_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let elements = sess.find_elements("A really handy WebDriver crate", LocationStrategy::LinkText).expect("Error finding elements"); let element_texts: Vec<String> = elements.into_iter().map(|elem| elem.text().expect("Error getting text")).collect(); assert_eq!(element_texts, vec!["A really handy WebDriver crate".to_owned()], "Wrong element texts"); let found_elements = sess.find_elements("A really bad WebDriver crate", LocationStrategy::LinkText).expect("Error finding absent elements"); assert!(found_elements.is_empty(), "Want to find no elements, found {:?}", found_elements); } #[test] fn find_elements_by_partial_link_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let elements = sess.find_elements("crate", LocationStrategy::PartialLinkText).expect("Error finding elements"); let element_texts: Vec<String> = elements.into_iter().map(|elem| elem.text().expect("Error getting text")).collect(); assert_eq!(element_texts, vec!["A really handy WebDriver crate".to_owned(), "A WebDriver crate with just the server-side".to_owned()], "Wrong element texts"); let found_elements = sess.find_elements("A really bad WebDriver crate", LocationStrategy::PartialLinkText).expect("Error finding absent elements"); assert!(found_elements.is_empty(), "Want to find no elements, found {:?}", found_elements); } #[test] fn find_elements_by_xpath() { let 
(server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let elements = sess.find_elements("//body/span", LocationStrategy::XPath).expect("Error finding elements"); let element_texts: Vec<String> = elements.into_iter().map(|elem| elem.text().expect("Error getting text")).collect(); assert_eq!(element_texts, vec!["Red text".to_owned(), "More red text".to_owned()], "Wrong element texts"); let found_elements = sess.find_elements("//video", LocationStrategy::XPath).expect("Error finding absent elements"); assert!(found_elements.is_empty(), "Want to find no elements, found {:?}", found_elements); } #[test] fn element_attribute_and_property() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); let page2 = server.url("/page2.html"); sess.go(&page1).expect("Error going to page1"); let link = sess.find_element("#link_to_page_2", LocationStrategy::Css).expect("Error finding element"); assert_eq!(&link.attribute("href").expect("Error getting attribute"), "/page2.html"); if sess.browser_name() == Some("chrome") { // FIXME: chrome does not implement the property endpoint } else { assert_eq!(&link.property("href").expect("Error getting property"), &page2); } } #[test] fn element_css_value() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("span.red", LocationStrategy::Css).expect("Error finding element"); if sess.browser_name() == Some("chrome") { assert_eq!(&element.css_value("color").expect("Error getting css value"), "rgba(255, 0, 0, 1)"); } else { assert_eq!(&element.css_value("color").expect("Error getting css value"), "rgb(255, 0, 0)"); } } #[test] fn element_click() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let output = sess.find_element("#set-text-output", LocationStrategy::Css) .expect("Finding output element"); 
assert_eq!(&output.text().expect("Getting output text"), "Unset"); let button = sess.find_element("#set-text-btn", LocationStrategy::Css) .expect("Finding button element"); button.click().expect("Click button"); assert_eq!(&output.text().expect("Getting output text"), "Set"); } #[test] fn element_clear() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("#textfield", LocationStrategy::Css).expect("Error finding element"); assert_eq!(&element.property("value").expect("Error getting value [1]"), "Pre-filled"); element.clear().expect("Error clearing element"); assert_eq!(&element.property("value").expect("Error getting value [2]"), ""); } #[test] fn element_send_keys() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("#textfield", LocationStrategy::Css).expect("Error finding element"); assert_eq!(&element.property("value").expect("Error getting value [1]"), "Pre-filled"); element.send_keys(" hello").expect("Error sending keys to element"); assert_eq!(&element.property("value").expect("Error getting value [2]"), "Pre-filled hello"); } #[test] fn element_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("span.red", LocationStrategy::Css).expect("Error finding element"); assert_eq!(&element.text().expect("Error getting text"), "Red text"); } #[test] fn element_name() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("span.red", LocationStrategy::Css).expect("Error finding element"); assert_eq!(&element.name().expect("Error getting name"), "span"); } #[test] fn element_child() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); 
sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("#parent", LocationStrategy::Css).expect("Error finding parent element"); let child_by_css = element.find_element("span", LocationStrategy::Css).expect("Error finding child by CSS"); assert_eq!(&child_by_css.attribute("id").expect("Error getting id [1]"), "child1"); let child_by_xpath = element.find_element(".//span", LocationStrategy::XPath).expect("Error finding child by XPath"); assert_eq!(&child_by_xpath.attribute("id").expect("Error getting id [2]"), "child1"); } #[test] fn element_children() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let element = sess.find_element("#parent", LocationStrategy::Css).expect("Error finding parent element"); let children = element.find_elements("span", LocationStrategy::Css).expect("Error finding children by CSS"); assert_eq!(children.iter().map(|e| e.attribute("id").expect("Error getting id")).collect::<Vec<_>>(), vec!["child1".to_owned(), "child2".to_owned()]); } #[test] fn refresh() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let elem = sess.find_element("#textfield", LocationStrategy::Css).expect("Error finding element [1]"); assert_eq!(elem.property("value").expect("Error getting value [1]"), "Pre-filled".to_owned()); elem.clear().expect("Error clearing"); assert_eq!(elem.property("value").expect("Error getting value [1]"), "".to_owned()); sess.refresh().expect("Error refreshing"); elem.text().expect_err("Want stale element error"); let elem2 = sess.find_element("#textfield", LocationStrategy::Css).expect("Error finding element [1]"); assert_eq!(elem2.property("value").expect("Error getting value [2]"), "Pre-filled".to_owned()); } #[test] fn execute() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let exec_json = 
sess.execute(ExecuteCmd { script: "return arguments[0] + arguments[1];".to_owned(), args: vec![json!(1), json!(2)], }).expect("Error executing script"); assert_eq!(serde_json::from_value::<i64>(exec_json).expect("Error converting result to i64"), 3); let exec_error = sess.execute(ExecuteCmd { script: "throw 'foo';".to_owned(), args: vec![], }).expect_err("Want error"); // FIXME: Chrome represents errors differently if sess.browser_name() != Some("chrome") { match exec_error { webdriver_client::Error::WebDriverError(err) => assert!(format!("{:?}", err).contains("foo"), "Bad error message: {:?}", err), other => panic!("Wrong error type: {:?}", other), }; } } #[test] fn browser_name() { let sess = test_browser().session(); match test_browser() { TestBrowser::Firefox => assert_eq!(sess.browser_name(), Some("firefox")), TestBrowser::Chrome => assert_eq!(sess.browser_name(), Some("chrome")), } } #[test] fn execute_async() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let exec_json = sess.execute_async(ExecuteCmd { script: "setTimeout(() => arguments[1](arguments[0]), 1000);".to_owned(), args: vec![json!(1)], }).unwrap(); let exec_int = serde_json::from_value::<i64>(exec_json).unwrap(); assert_eq!(exec_int, 1); } // TODO: Test cookies // TODO: Test window handles #[test] fn frame_switch() { let (server, sess) = setup(); let page3 = server.url("/page3.html"); sess.go(&page3).expect("Error going to page3"); // switching to parent from parent is harmless sess.switch_to_parent_frame().unwrap(); let frames = sess.find_elements("iframe", LocationStrategy::Css).unwrap(); assert_eq!(frames.len(), 1); sess.switch_to_frame(frames[0].reference().unwrap()).unwrap(); let frames = sess.find_elements("iframe", LocationStrategy::Css).unwrap(); assert_eq!(frames.len(), 2); for f in &frames { sess.switch_to_frame(f.reference().unwrap()).unwrap(); let childframes = sess.find_elements("iframe", 
LocationStrategy::Css).unwrap(); assert_eq!(childframes.len(), 0); sess.switch_to_parent_frame().unwrap(); } sess.switch_to_parent_frame().unwrap(); let frames = sess.find_elements("iframe", LocationStrategy::Css).unwrap(); assert_eq!(frames.len(), 1); } #[test] fn http_driver() { ensure_logging_init(); let driver = test_browser().driver(); // Hackily sleep a bit until geckodriver is ready, otherwise our session // will fail to connect. // If this is unreliable, we could try: // * Polling for the TCP port to become unavailable. // * Wait for geckodriver to log "Listening on 127.0.0.1:4444". sleep(Duration::from_millis(1000)); let http_driver = HttpDriverBuilder::default() .url(driver.url()) .build().unwrap(); let sess = http_driver.session(&test_browser().new_session_cmd()) .unwrap(); let server = FileServer::new(); let test_url = server.url("/page1.html"); sess.go(&test_url).unwrap(); let url = sess.get_current_url().unwrap(); assert_eq!(url, test_url); } #[test] fn screenshot_frame() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let ss = sess.screenshot().expect("Screenshot"); std::fs::create_dir_all("target/screenshots").expect("Create screenshot dir"); ss.save_file(&format!("target/screenshots/{:?}_frame.png", test_browser())) .expect("Save screenshot"); } #[test] fn screenshot_element() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let ss = sess.find_element("#parent", LocationStrategy::Css).expect("element") .screenshot().expect("Screenshot"); std::fs::create_dir_all("target/screenshots").expect("Create screenshot dir"); ss.save_file(&format!("target/screenshots/{:?}_element.png", test_browser())) .expect("Save screenshot"); } #[test] fn dismiss_alert() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let btn = sess.find_element("#alert-btn", 
LocationStrategy::Css).expect("btn"); btn.click().expect("click"); sess.dismiss_alert().expect("dismiss alert"); } #[test] fn accept_confirm_alert() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let btn = sess.find_element("#confirm-btn", LocationStrategy::Css) .expect("find btn"); btn.click().expect("click"); sess.accept_alert().expect("accept alert"); let out = sess.find_element("#alerts-out", LocationStrategy::Css) .expect("find output"); assert_eq!("true", out.text().expect("output text")); } #[test] fn dismiss_confirm_alert() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let btn = sess.find_element("#confirm-btn", LocationStrategy::Css) .expect("find btn"); btn.click().expect("click"); sess.dismiss_alert().expect("accept alert"); let out = sess.find_element("#alerts-out", LocationStrategy::Css) .expect("find output"); assert_eq!("false", out.text().expect("output text")); } #[test] fn get_alert_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let btn = sess.find_element("#alert-btn", LocationStrategy::Css) .expect("find btn"); btn.click().expect("click"); assert_eq!("Alert", sess.get_alert_text().expect("get_alert_text")); sess.dismiss_alert().expect("accept alert"); } #[test] fn send_alert_text() { let (server, sess) = setup(); let page1 = server.url("/page1.html"); sess.go(&page1).expect("Error going to page1"); let btn = sess.find_element("#prompt-btn", LocationStrategy::Css) .expect("find btn"); btn.click().expect("click"); sess.send_alert_text("foobar").expect("send_alert_text"); sess.accept_alert().expect("accept alert"); let out = sess.find_element("#alerts-out", LocationStrategy::Css) .expect("find output"); assert_eq!("foobar", out.text().expect("output text")); } fn setup() -> (FileServer, DriverSession) { ensure_logging_init(); 
let session = test_browser().session(); let server = FileServer::new(); (server, session) } // End of browser_tests tests } } } browser_tests!(firefox, TestBrowser::Firefox); browser_tests!(chrome, TestBrowser::Chrome); fn ensure_logging_init() { static DONE: Once = Once::new(); DONE.call_once(|| init_logging()); } fn init_logging() { let mut builder = LogBuilder::new(); builder.filter(None, LogLevelFilter::Info); builder.target(LogTarget::Stdout); if let Ok(ev) = env::var("RUST_LOG") { builder.parse(&ev); } builder.init().unwrap(); } struct FileServer { listening: hyper::server::Listening, base_url: String, } impl FileServer { pub fn new() -> FileServer { for i in 0..2000 { let port = 8000 + i; let base_url = format!("http://localhost:{}", port); let server = match hyper::Server::http(("localhost", port)) { Ok(server) => server, Err(_) => { continue; }, }; match server.handle_threads(FileServer::handle, 10) { Ok(listening) => { return FileServer { listening, base_url, }; }, Err(err) => panic!("Error listening: {:?}", err), } } panic!("Could not find free port to serve test pages") } pub fn url(&self, path: &str) -> String { format!("{base_url}{path}", base_url = self.base_url, path = path) } fn handle(req: hyper::server::Request, mut resp: hyper::server::Response) { match FileServer::handle_impl(&req) { Ok(bytes) => { *resp.status_mut() = hyper::status::StatusCode::Ok; resp.send(&bytes).expect("Failed to send HTTP response"); }, Err(err) => { eprintln!("{}", err); *resp.status_mut() = hyper::status::StatusCode::BadRequest; }, }; } fn handle_impl(req: &hyper::server::Request) -> Result<Vec<u8>, String> { let crate_root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let www_root = crate_root.join("tests").join("www"); match req.uri { hyper::uri::RequestUri::AbsolutePath(ref path) => { if path.starts_with("/") { let abs_path = www_root.join(&path[1..]); let file_path = std::fs::canonicalize(&abs_path); match file_path { Ok(realpath) => { if 
realpath.starts_with(&www_root) { let mut contents = Vec::new(); std::fs::File::open(&realpath) .and_then(|mut f| f.read_to_end(&mut contents)) .map_err(|err| format!("Error reading file {:?}: {:?}", realpath, err))?; return Ok(contents); } else { return Err(format!("Rejecting request for path outside of www: {:?}", realpath)); } }, Err(err) => { return Err(format!("Error canonicalizing file {:?}: {:?}", abs_path, err)); }, } } else { return Err(format!("Received bad request for path {:?}", path)); } }, ref path => { return Err(format!("Received request for non-AbsolutePath: {:?}", path)); }, } } } impl Drop for FileServer { fn drop(&mut self) { self.listening.close().expect("FileServer failed to stop listening"); } }
45.620029
174
0.515173
48e83b9b059e7e7effccd1f6746c66c2d3d6d719
8,092
// Copyright 2021 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. use crate::output::{ ArtifactType, DirectoryArtifactType, DirectoryWrite, DynArtifact, DynDirectoryArtifact, EntityId, ReportedOutcome, Reporter, Timestamp, }; use async_trait::async_trait; use futures::future::try_join; use std::{ io::{Error, Write}, path::Path, }; /// A writer that writes to two writers. pub struct MultiplexedWriter<A: Write, B: Write> { a: A, b: B, } impl<A: Write, B: Write> Write for MultiplexedWriter<A, B> { fn write(&mut self, bytes: &[u8]) -> Result<usize, Error> { let bytes_written = self.a.write(bytes)?; // Since write is allowed to only write a portion of the data, // we force a and b to write the same number of bytes. self.b.write_all(&bytes[..bytes_written])?; Ok(bytes_written) } fn flush(&mut self) -> Result<(), Error> { self.a.flush()?; self.b.flush() } } impl<A: Write, B: Write> MultiplexedWriter<A, B> { pub fn new(a: A, b: B) -> Self { Self { a, b } } } /// A reporter that reports results to two contained reporters. pub struct MultiplexedReporter<A: Reporter, B: Reporter> { a: A, b: B, } impl<A: Reporter, B: Reporter> MultiplexedReporter<A, B> { pub fn new(a: A, b: B) -> Self { Self { a, b } } } /// A stub that maps ((), ()) to (). 
fn map_void(_: ((), ())) { () } #[async_trait] impl<A: Reporter, B: Reporter> Reporter for MultiplexedReporter<A, B> { async fn new_entity(&self, entity: &EntityId, name: &str) -> Result<(), Error> { try_join(self.a.new_entity(entity, name), self.b.new_entity(entity, name)) .await .map(map_void) } async fn entity_started(&self, entity: &EntityId, timestamp: Timestamp) -> Result<(), Error> { try_join(self.a.entity_started(entity, timestamp), self.b.entity_started(entity, timestamp)) .await .map(map_void) } async fn entity_stopped( &self, entity: &EntityId, outcome: &ReportedOutcome, timestamp: Timestamp, ) -> Result<(), Error> { try_join( self.a.entity_stopped(entity, outcome, timestamp), self.b.entity_stopped(entity, outcome, timestamp), ) .await .map(map_void) } async fn entity_finished(&self, entity: &EntityId) -> Result<(), Error> { try_join(self.a.entity_finished(entity), self.b.entity_finished(entity)).await.map(map_void) } async fn new_artifact( &self, entity: &EntityId, artifact_type: &ArtifactType, ) -> Result<Box<DynArtifact>, Error> { let (a, b) = try_join( self.a.new_artifact(entity, artifact_type), self.b.new_artifact(entity, artifact_type), ) .await?; Ok(Box::new(MultiplexedWriter::new(a, b))) } async fn new_directory_artifact( &self, entity: &EntityId, artifact_type: &DirectoryArtifactType, component_moniker: Option<String>, ) -> Result<Box<DynDirectoryArtifact>, Error> { let (a, b) = try_join( self.a.new_directory_artifact(entity, artifact_type, component_moniker.clone()), self.b.new_directory_artifact(entity, artifact_type, component_moniker), ) .await?; Ok(Box::new(MultiplexedDirectoryWriter { a, b })) } } /// A directory artifact writer that writes to two contained directory artifact writers. 
pub(super) struct MultiplexedDirectoryWriter { a: Box<DynDirectoryArtifact>, b: Box<DynDirectoryArtifact>, } impl MultiplexedDirectoryWriter { pub(super) fn new(a: Box<DynDirectoryArtifact>, b: Box<DynDirectoryArtifact>) -> Self { Self { a, b } } } impl DirectoryWrite for MultiplexedDirectoryWriter { fn new_file(&self, path: &Path) -> Result<Box<DynArtifact>, Error> { Ok(Box::new(MultiplexedWriter::new(self.a.new_file(path)?, self.b.new_file(path)?))) } } #[cfg(test)] mod test { use super::*; use crate::output::{directory::DirectoryReporter, ArtifactType, RunReporter, SuiteId}; use tempfile::tempdir; use test_output_directory as directory; use test_output_directory::testing::{ assert_run_result, assert_suite_results, parse_json_in_output, ExpectedDirectory, ExpectedSuite, ExpectedTestRun, }; #[fuchsia::test] fn multiplexed_writer() { const WRITTEN: &str = "test output"; let mut buf_1: Vec<u8> = vec![]; let mut buf_2: Vec<u8> = vec![]; let mut multiplexed_writer = MultiplexedWriter::new(&mut buf_1, &mut buf_2); multiplexed_writer.write_all(WRITTEN.as_bytes()).expect("write_all failed"); assert_eq!(std::str::from_utf8(&buf_1).unwrap(), WRITTEN); assert_eq!(std::str::from_utf8(&buf_2).unwrap(), WRITTEN); } #[fuchsia::test] async fn multiplexed_reporter() { let tempdir_1 = tempdir().expect("create temp directory"); let reporter_1 = DirectoryReporter::new(tempdir_1.path().to_path_buf()).expect("Create reporter"); let tempdir_2 = tempdir().expect("create temp directory"); let reporter_2 = DirectoryReporter::new(tempdir_2.path().to_path_buf()).expect("Create reporter"); let multiplexed_reporter = MultiplexedReporter::new(reporter_1, reporter_2); let run_reporter = RunReporter::new_for_test(multiplexed_reporter); run_reporter.started(Timestamp::Unknown).await.expect("start run"); let mut run_artifact = run_reporter.new_artifact(&ArtifactType::Stdout).await.expect("create artifact"); writeln!(run_artifact, "run artifact contents").expect("write to run artifact"); 
run_artifact.flush().expect("flush run artifact"); let suite_reporter = run_reporter.new_suite("suite", &SuiteId(0)).await.expect("create suite"); suite_reporter.started(Timestamp::Unknown).await.expect("start suite"); suite_reporter .stopped(&ReportedOutcome::Passed, Timestamp::Unknown) .await .expect("start suite"); let suite_dir_artifact = suite_reporter .new_directory_artifact(&DirectoryArtifactType::Custom, None) .await .expect("new artifact"); let mut suite_artifact = suite_dir_artifact.new_file("test.txt".as_ref()).expect("create suite artifact file"); writeln!(suite_artifact, "suite artifact contents").expect("write to suite artifact"); suite_artifact.flush().expect("flush suite artifact"); suite_reporter.finished().await.expect("finish suite"); run_reporter.stopped(&ReportedOutcome::Passed, Timestamp::Unknown).await.expect("stop run"); run_reporter.finished().await.expect("finish run"); let expected_run = ExpectedTestRun::new(directory::Outcome::Passed).with_artifact( directory::ArtifactType::Stdout, Option::<&str>::None, "run artifact contents\n", ); let expected_suites = vec![ExpectedSuite::new("suite", directory::Outcome::Passed) .with_directory_artifact( directory::ArtifactType::Custom, Option::<&str>::None, ExpectedDirectory::new().with_file("test.txt", "suite artifact contents\n"), )]; // directories shuold contain identical contents. let (run_result_1, suite_results_1) = parse_json_in_output(tempdir_1.path()); assert_run_result(tempdir_1.path(), &run_result_1, &expected_run); assert_suite_results(tempdir_1.path(), &suite_results_1, &expected_suites); let (run_result_2, suite_results_2) = parse_json_in_output(tempdir_2.path()); assert_run_result(tempdir_2.path(), &run_result_2, &expected_run); assert_suite_results(tempdir_2.path(), &suite_results_2, &expected_suites); } }
36.286996
100
0.640138
23ff965949aa1ce099804f1f0462e470e3f97f45
3,740
mod a_graphic; mod a_graphic_data; mod abstract_numbering; mod based_on; mod bold; mod bold_cs; mod bookmark_end; mod bookmark_start; mod br; mod character_spacing; mod color; mod comment; mod comment_extended; mod comment_range_end; mod comment_range_start; mod default_tab_stop; mod delete; mod delete_text; mod div; mod doc_defaults; mod doc_grid; mod doc_id; mod doc_var; mod drawing; mod font; mod grid_span; mod header_reference; mod highlight; mod indent; mod indent_level; mod insert; mod italic; mod italic_cs; mod justification; mod level; mod level_jc; mod level_override; mod level_restart; mod level_text; mod line_spacing; mod mc_fallback; mod name; mod next; mod number_format; mod numbering; mod numbering_id; mod numbering_property; mod outline_lvl; mod page_margin; mod page_size; mod paragraph; mod paragraph_property; mod paragraph_style; mod pic; mod q_format; mod run; mod run_fonts; mod run_property; mod run_property_default; mod section; mod section_property; mod shading; mod start; mod style; mod sz; mod sz_cs; mod tab; mod table; mod table_borders; mod table_cell; mod table_cell_borders; mod table_cell_margins; mod table_cell_property; mod table_cell_width; mod table_grid; mod table_indent; mod table_layout; mod table_property; mod table_row; mod table_row_property; mod table_style; mod table_width; mod text; mod text_border; mod text_box_content; mod text_direction; mod underline; mod v_align; mod vanish; mod vert_align; mod vertical_merge; mod wp_anchor; mod wps_shape; mod wps_text_box; mod zoom; pub use a_graphic::*; pub use a_graphic_data::*; pub use abstract_numbering::*; pub use based_on::*; pub use bold::*; pub use bold_cs::*; pub use bookmark_end::*; pub use bookmark_start::*; pub use br::*; pub use character_spacing::*; pub use color::*; pub use comment::*; pub use comment_extended::*; pub use comment_range_end::*; pub use comment_range_start::*; pub use default_tab_stop::*; pub use delete::*; pub use delete_text::*; pub use div::*; pub use 
doc_defaults::*; pub use doc_grid::*; pub use doc_id::*; pub use doc_var::*; pub use drawing::*; pub use font::*; pub use grid_span::*; pub use header_reference::*; pub use highlight::*; pub use indent::*; pub use indent_level::*; pub use insert::*; pub use italic::*; pub use italic_cs::*; pub use justification::*; pub use level::*; pub use level_jc::*; pub use level_override::*; pub use level_restart::*; pub use level_text::*; pub use line_spacing::*; pub use mc_fallback::*; pub use name::*; pub use next::*; pub use number_format::*; pub use numbering::*; pub use numbering_id::*; pub use numbering_property::*; pub use outline_lvl::*; pub use page_margin::*; pub use page_size::*; pub use paragraph::*; pub use paragraph_property::*; pub use paragraph_style::*; pub use pic::*; pub use q_format::*; pub use run::*; pub use run_fonts::*; pub use run_property::*; pub use run_property_default::*; pub use section::*; pub use section_property::*; pub use shading::*; pub use start::*; pub use style::*; pub use sz::*; pub use sz_cs::*; pub use tab::*; pub use table::*; pub use table_borders::*; pub use table_cell::*; pub use table_cell_borders::*; pub use table_cell_margins::*; pub use table_cell_property::*; pub use table_cell_width::*; pub use table_grid::*; pub use table_indent::*; pub use table_layout::*; pub use table_property::*; pub use table_row::*; pub use table_row_property::*; pub use table_style::*; pub use table_width::*; pub use text::*; pub use text_border::*; pub use text_box_content::*; pub use text_direction::*; pub use underline::*; pub use v_align::*; pub use vanish::*; pub use vert_align::*; pub use vertical_merge::*; pub use wp_anchor::*; pub use wps_shape::*; pub use wps_text_box::*; pub use zoom::*;
19.479167
32
0.745722
1d6fcfb22f94e08ca97a569f07ae0c6a4e86670e
721
use super::super::error::{Error, Result}; use log::debug; use std::str::FromStr; use tokio_tungstenite::tungstenite::client::IntoClientRequest; use url::Url; use super::super::ShardStream; pub async fn connect(url: &str) -> Result<ShardStream> { let url = Url::from_str(url).map_err(|source| Error::ParsingUrl { source, url: url.to_owned(), })?; let request = url .into_client_request() .map_err(|source| Error::Connecting { source, })?; let (stream, _) = tokio_tungstenite::connect_async(request) .await .map_err(|source| Error::Connecting { source, })?; debug!("Shook hands with remote"); Ok(stream) }
24.033333
69
0.604716
674fc8da96781e46b2cd34551a9b6cb406cec7e3
3,428
// Copyright 2019 The Fuchsia Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #![allow(missing_docs)] use bitfield::bitfield; bitfield! { /// Bitfields for writing and reading segments of the header and payload of /// inspect VMO blocks. /// Represents the header structure of an inspect VMO Block. Not to confuse with /// the HEADER block. pub struct BlockHeader(u64); pub u8, order, set_order: 3, 0; pub u8, block_type, set_block_type: 15, 8; pub u8, block_type_v0, set_block_type_v0: 7, 4; // Only for a HEADER block pub u32, header_version, set_header_version: 31, 16; pub u32, header_version_v0, set_header_version_v0: 31, 8; pub u32, header_magic, set_header_magic: 63, 32; // Only for *_VALUE blocks pub u32, value_name_index, set_value_name_index: 63, 40; pub u32, value_parent_index, set_value_parent_index: 39, 16; pub u32, value_name_index_v0, set_value_name_index_v0: 63, 36; pub u32, value_parent_index_v0, set_value_parent_index_v0: 35, 8; // Only for FREE blocks pub u32, free_next_index, set_free_next_index: 39, 16; pub u32, free_next_index_v0, _: 35, 8; // Only for NAME blocks pub u16, name_length, set_name_length: 27, 16; pub u16, name_length_v0, _: 19, 8; // Only for EXTENT blocks pub u32, extent_next_index, set_extent_next_index: 39, 16; pub u32, extent_next_index_v0, _: 35, 8; pub value, _: 63, 0; } #[allow(missing_docs)] bitfield! { /// Represents the payload of inspect VMO Blocks (except for EXTENT and NAME). pub struct Payload(u64); pub value, _: 63, 0; // Only for PROPERTY blocks pub u32, property_total_length, set_property_total_length: 31, 0; pub u32, property_extent_index, set_property_extent_index: 59, 32; pub u8, property_flags, set_property_flags: 63, 60; // Only for INT/UINT/DOUBLE_VALUE blocks pub numeric_value, set_numeric_value: 63, 0; // Only for HEADER block pub header_generation_count, set_header_generation_count: 63, 0; // Only for ARRAY_VALUE blocks. 
pub u8, array_entry_type, set_array_entry_type: 3, 0; pub u8, array_flags, set_array_flags: 7, 4; pub u8, array_slots_count, set_array_slots_count: 15, 8; // Only for LINK_VALUE blocks. pub u32, content_index, set_content_index: 19, 0; pub u8, disposition_flags, set_disposition_flags: 63, 60; } #[cfg(test)] mod tests { use super::*; #[test] fn test_header() { let mut header = BlockHeader(0); let magic = 0x494e5350; header.set_order(13); header.set_block_type(3); header.set_header_version(1); header.set_header_magic(magic); assert_eq!(header.order(), 13); assert_eq!(header.header_version(), 1); assert_eq!(header.header_magic(), magic); assert_eq!(header.value(), 0x494e53500001030d); } #[test] fn test_payload() { let mut payload = Payload(0); payload.set_property_total_length(0xab); payload.set_property_extent_index(0x1234); payload.set_property_flags(3); assert_eq!(payload.property_total_length(), 0xab); assert_eq!(payload.property_extent_index(), 0x1234); assert_eq!(payload.property_flags(), 3); assert_eq!(payload.value(), 0x30001234000000ab); } }
33.607843
84
0.685531
89320281f52a893e9fc3a1f7a39827182c49af31
221
#[cfg(all( feature = "upgradable", feature = "futures_io", not(feature = "tokio_io") ))] mod http_tunnel_futures_io_tests { #![allow(unused_imports)] use async_stream_packed::HttpTunnelClientGrader; }
22.1
52
0.687783
79a5a1a7acc5b9b510fcc70411296039d56707a6
3,786
use super::module::ModuleIndex; use std::collections::HashMap; use std::fmt; #[derive(PartialEq, Eq, Hash)] pub struct GlobalAddress<T>(usize, std::marker::PhantomData<T>); impl<T> Clone for GlobalAddress<T> { fn clone(&self) -> Self { Self(self.0, self.1) } } impl<T> Copy for GlobalAddress<T> {} impl<T> fmt::Debug for GlobalAddress<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "GlobalAddress({})", self.0) } } #[derive(PartialEq, Eq, Hash)] pub struct LinkableAddress<T>(ModuleIndex, pub(crate) usize, std::marker::PhantomData<T>); impl<T> LinkableAddress<T> { pub fn new_unsafe(module: ModuleIndex, index: usize) -> Self { Self(module, index, std::marker::PhantomData) } pub fn module_index(&self) -> ModuleIndex { self.0 } } impl<T> Clone for LinkableAddress<T> { fn clone(&self) -> Self { Self::new_unsafe(self.0, self.1) } } impl<T> Copy for LinkableAddress<T> {} impl<T> fmt::Debug for LinkableAddress<T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { writeln!(f, "{:?}, func_index: {}", self.0, self.1) } } pub struct LinkableCollection<T> { items: Vec<T>, item_addrs_by_module: HashMap<ModuleIndex, Vec<usize>>, } impl<T> LinkableCollection<T> { pub fn new() -> Self { Self { items: Vec::new(), item_addrs_by_module: HashMap::new(), } } pub fn resolve(&self, address: LinkableAddress<T>) -> Option<GlobalAddress<T>> { let raw_address = self.item_addrs_by_module.get(&address.0)?.get(address.1)?; Some(GlobalAddress(*raw_address, std::marker::PhantomData)) } pub fn link(&mut self, source: GlobalAddress<T>, dist: ModuleIndex) -> LinkableAddress<T> { let index = self .item_addrs_by_module .get(&dist) .map(|c| c.len()) .unwrap_or(0); self.item_addrs_by_module .entry(dist) .or_insert(Vec::new()) .push(source.0); LinkableAddress::new_unsafe(dist, index) } pub fn get_global(&self, address: GlobalAddress<T>) -> &T { // Never panic because GlobalAddress is always valid self.items.get(address.0).unwrap() } pub fn get(&self, address: 
LinkableAddress<T>) -> Option<(&T, GlobalAddress<T>)> { let addr = self.resolve(address)?; Some((self.items.get(addr.0)?, addr)) } pub fn push_global(&mut self, item: T) -> GlobalAddress<T> { let index = self.items.len(); self.items.push(item); GlobalAddress(index, std::marker::PhantomData) } pub fn push(&mut self, module_index: ModuleIndex, item: T) -> LinkableAddress<T> { let globa_index = self.items.len(); self.items.push(item); let addrs = self .item_addrs_by_module .entry(module_index) .or_insert(Vec::new()); let index = addrs.len(); addrs.push(globa_index); LinkableAddress::new_unsafe(module_index, index) } pub fn remove_module(&mut self, index: &ModuleIndex) { // TODO: GC unlinked items self.item_addrs_by_module.remove(index); } pub fn items(&self, module_index: ModuleIndex) -> Option<Vec<GlobalAddress<T>>> { let item_addrs = self.item_addrs_by_module.get(&module_index)?; Some( item_addrs .iter() .map(|index| GlobalAddress(*index, std::marker::PhantomData)) .collect(), ) } pub fn is_empty(&self, module_index: ModuleIndex) -> bool { self.item_addrs_by_module .get(&module_index) .map(|v| v.is_empty()) .unwrap_or(true) } }
29.123077
95
0.590333
9b16febb27114106033cf74b510700c25559e41d
22,185
#[macro_use] extern crate log; use env_logger; #[macro_use] extern crate clap; use humantime; use racer; use clap::{App, AppSettings, Arg, ArgMatches, SubCommand}; use racer::{BytePos, Coordinate, FileCache, Match, MatchType, Session}; use std::fs::File; use std::io::{self, BufRead, Read}; use std::path::{Path, PathBuf}; use std::time::SystemTime; fn point(cfg: &Config) { let cache = FileCache::default(); let session = Session::new(&cache, None); cfg.interface.emit(Message::Coords(cfg.coords())); if let Some(point) = racer::to_point(cfg.coords(), cfg.expect_file(), &session) { cfg.interface.emit(Message::Point(point)); } cfg.interface.emit(Message::End); } fn coord(cfg: &Config) { let cache = FileCache::default(); let session = Session::new(&cache, None); cfg.interface.emit(Message::Point(cfg.point)); if let Some(coords) = racer::to_coords(cfg.point, cfg.expect_file(), &session) { cfg.interface.emit(Message::Coords(coords)); } cfg.interface.emit(Message::End); } fn match_with_snippet_fn(m: Match, session: &Session<'_>, interface: Interface) { let cd = m .coords .expect("[match_with_snipper_fn] failed to get coordinate"); if m.matchstr == "" { panic!("MATCHSTR is empty - waddup?"); } let snippet = racer::snippet_for_match(&m, session); interface.emit(Message::MatchWithSnippet( m.matchstr, snippet, cd, m.filepath.as_path(), m.mtype, m.contextstr, m.docs, )); } fn match_fn(m: Match, interface: Interface) { if let Some(coords) = m.coords { interface.emit(Message::Match( m.matchstr, coords, m.filepath.as_path(), m.mtype, m.contextstr, )); } else { error!("Could not resolve file coords for match {:?}", m); } } fn complete(cfg: Config, print_type: CompletePrinter) { if cfg.fqn.is_some() { return external_complete(&cfg, print_type); } complete_by_line_coords(cfg, print_type); } fn complete_by_line_coords(cfg: Config, print_type: CompletePrinter) { // input: linenum, colnum, fname let tb = std::thread::Builder::new().name("searcher".to_owned()); let interface = 
cfg.interface; // PD: this probably sucks for performance, but lots of plugins // end up failing and leaving tmp files around if racer crashes, // so catch the crash. let res = tb .spawn(move || { run_the_complete_fn(&cfg, print_type); }) .unwrap(); if let Err(e) = res.join() { error!("Search thread panicked: {:?}", e); } interface.emit(Message::End); } #[derive(Debug, Clone, Copy)] enum CompletePrinter { Normal, WithSnippets, } fn read_file_from_stdin() -> String { let mut rawbytes = Vec::new(); let stdin = io::stdin(); stdin .lock() .read_until(0x04, &mut rawbytes) .expect("read until EOT"); String::from_utf8(rawbytes).expect("utf8 from stdin") } fn read_file<P>(path: P) -> io::Result<String> where P: AsRef<Path>, { let mut res = String::new(); let mut f = File::open(path)?; f.read_to_string(&mut res)?; Ok(res) } fn load_query_file<P, S>(path: P, sub: S, session: &Session<'_>) where P: Into<PathBuf>, S: AsRef<Path>, { let path = path.into(); let sub = sub.as_ref(); if sub.to_str() == Some("-") { let contents = read_file_from_stdin(); session.cache_file_contents(path, contents); } else if sub != path { let contents = read_file(sub).unwrap(); session.cache_file_contents(path, contents); } } fn run_the_complete_fn(cfg: &Config, print_type: CompletePrinter) { let fn_path = cfg.fn_name.as_ref().unwrap(); let substitute_file = cfg.substitute_file.as_ref().unwrap_or(fn_path); let cache = FileCache::default(); let session = Session::new(&cache, Some(fn_path)); load_query_file(fn_path, &substitute_file, &session); if let Some(expanded) = racer::expand_ident(&fn_path, cfg.coords(), &session) { cfg.interface.emit(Message::Prefix( expanded.start(), expanded.pos(), expanded.ident(), )); for m in racer::complete_from_file(&fn_path, cfg.coords(), &session) { match print_type { CompletePrinter::Normal => match_fn(m, cfg.interface), CompletePrinter::WithSnippets => match_with_snippet_fn(m, &session, cfg.interface), }; } } } /// Completes a fully qualified name specified on 
command line fn external_complete(cfg: &Config, print_type: CompletePrinter) { let cwd = Path::new("."); let cache = FileCache::default(); let session = Session::new(&cache, Some(cwd)); for m in racer::complete_fully_qualified_name(cfg.fqn.as_ref().unwrap(), &cwd, &session) { match print_type { CompletePrinter::Normal => match_fn(m, cfg.interface), CompletePrinter::WithSnippets => match_with_snippet_fn(m, &session, cfg.interface), } } } fn prefix(cfg: &Config) { let fn_path = cfg.fn_name.as_ref().unwrap(); let substitute_file = cfg.substitute_file.as_ref().unwrap_or(fn_path); let cache = FileCache::default(); let session = Session::new(&cache, Some(fn_path)); // Cache query file in session load_query_file(fn_path, &substitute_file, &session); // print the start, end, and the identifier prefix being matched let expanded = racer::expand_ident(fn_path, cfg.coords(), &session).unwrap(); cfg.interface.emit(Message::Prefix( expanded.start(), expanded.pos(), expanded.ident(), )); } fn find_definition(cfg: &Config) { let fn_path = cfg.fn_name.as_ref().unwrap(); let substitute_file = cfg.substitute_file.as_ref().unwrap_or(fn_path); let cache = FileCache::default(); let session = Session::new(&cache, Some(fn_path)); // Cache query file in session load_query_file(fn_path, &substitute_file, &session); if let Some(m) = racer::find_definition(fn_path, cfg.coords(), &session) { match_fn(m, cfg.interface); } cfg.interface.emit(Message::End); } fn validate_rust_src_path_env_var() { match racer::get_rust_src_path() { Ok(_) => (), Err(err) => { println!("{}", err); std::process::exit(1); } } } fn daemon(cfg: &Config) { let mut input = String::new(); while let Ok(n) = io::stdin().read_line(&mut input) { // '\n' == 1 if n == 1 { break; } // We add the setting NoBinaryName because in daemon mode we won't be passed the preceeding // binary name let cli = build_cli().setting(AppSettings::NoBinaryName); let matches = match cfg.interface { Interface::Text => 
cli.get_matches_from(input.trim_end().split_whitespace()), Interface::TabText => cli.get_matches_from(input.trim_end().split('\t')), }; run(&matches, cfg.interface); input.clear(); } } enum Message<'a> { End, Prefix(BytePos, BytePos, &'a str), Match(String, Coordinate, &'a Path, MatchType, String), MatchWithSnippet( String, String, Coordinate, &'a Path, MatchType, String, String, ), Point(BytePos), Coords(Coordinate), } #[derive(Copy, Clone)] enum Interface { Text, // The original human-readable format. TabText, // Machine-readable format. This is basically the same as Text, except that all field // separators are replaced with tabs. // In `daemon` mode tabs are also used to delimit command arguments. } impl Default for Interface { fn default() -> Self { Interface::Text } } impl Interface { fn leading_space(&self) -> &str { match *self { Interface::Text => " ", Interface::TabText => "\t", } } fn field_separator(&self) -> &str { match *self { Interface::Text => ",", Interface::TabText => "\t", } } fn emit(&self, message: Message<'_>) { match message { Message::End => println!("END"), Message::Prefix(start, pos, text) => match *self { Interface::Text => println!("PREFIX {},{},{}", start, pos, text), Interface::TabText => println!("PREFIX\t{}\t{}\t{}", start, pos, text), }, Message::Point(point) => println!("POINT{}{}", self.leading_space(), point), Message::Coords(coord) => { println!( "COORD{lead}{}{field}{}", coord.row.0, coord.col.0, lead = self.leading_space(), field = self.field_separator() ); } Message::Match(mstr, cd, path, mtype, context) => match *self { Interface::Text => { let context = context.split_whitespace().collect::<Vec<&str>>().join(" "); println!( "MATCH {},{},{},{},{},{}", mstr, cd.row.0, cd.col.0, path.display(), mtype, context ); } Interface::TabText => { let context = context.split_whitespace().collect::<Vec<&str>>().join(" "); println!( "MATCH\t{}\t{}\t{}\t{}\t{}\t{}", mstr, cd.row.0, cd.col.0, path.display(), mtype, context ); } }, 
Message::MatchWithSnippet(mstr, snippet, cd, path, mtype, context, docs) => match *self { Interface::Text => { let context = context .replace(";", "\\;") .split_whitespace() .collect::<Vec<&str>>() .join(" "); let docs = format!("{:?}", docs).replace(";", "\\;"); println!( "MATCH {};{};{};{};{};{};{};{}", mstr, snippet, cd.row.0, cd.col.0, path.display(), mtype, context, docs ); } Interface::TabText => { let context = context .replace("\t", "\\t") .split_whitespace() .collect::<Vec<&str>>() .join(" "); println!( "MATCH\t{}\t{}\t{}\t{}\t{}\t{}\t{}\t{:?}", mstr, snippet, cd.row.0, cd.col.0, path.display(), mtype, context, docs ); } }, } } } #[derive(Default)] struct Config { fqn: Option<String>, linenum: usize, charnum: usize, fn_name: Option<PathBuf>, substitute_file: Option<PathBuf>, interface: Interface, point: BytePos, } impl Config { fn coords(&self) -> Coordinate { Coordinate::new(self.linenum as u32, self.charnum as u32) } fn expect_file(&self) -> &PathBuf { self.fn_name.as_ref().expect("File path required") } } impl<'a> From<&'a ArgMatches<'a>> for Config { fn from(m: &'a ArgMatches<'_>) -> Self { // Check for the presence of the `point` argument that indicates we're // being asked to convert from point to coordinates if m.is_present("point") && m.is_present("path") { return Config { point: value_t_or_exit!(m.value_of("point"), usize).into(), fn_name: m.value_of("path").map(PathBuf::from), ..Default::default() }; } // We check for charnum because it's the second argument, which means more than just // an FQN was used (i.e. 
racer complete <linenum> <charnum> <fn_name> [substitute_file]) if m.is_present("charnum") { let cfg = Config { charnum: value_t_or_exit!(m.value_of("charnum"), usize), fn_name: m.value_of("path").map(PathBuf::from), substitute_file: m.value_of("substitute_file").map(PathBuf::from), ..Default::default() }; if !m.is_present("linenum") { // Because of the hack to allow fqn and linenum to share a single arg we set FQN // to None and set the charnum correctly using the FQN arg so there's no // hackery later return Config { linenum: value_t_or_exit!(m.value_of("fqn"), usize), ..cfg }; } return Config { linenum: value_t_or_exit!(m.value_of("linenum"), usize), ..cfg }; } Config { fqn: m.value_of("fqn").map(ToOwned::to_owned), ..Default::default() } } } fn build_cli<'a, 'b>() -> App<'a, 'b> { // we use the more verbose "Builder Pattern" to create the CLI because it's a littel faster // than the less verbose "Usage String" method...faster, meaning runtime speed since that's // extremely important here App::new("racer") .version(env!("CARGO_PKG_VERSION")) .author("Phil Dawes") .about("A Rust code completion utility") .settings(&[ AppSettings::GlobalVersion, AppSettings::SubcommandRequiredElseHelp, ]) .arg( Arg::with_name("interface") .long("interface") .short("i") .takes_value(true) .possible_value("text") .possible_value("tab-text") .value_name("mode") .help("Interface mode"), ) .subcommand( SubCommand::with_name("complete") .about("performs completion and returns matches") // We set an explicit usage string here, instead of letting `clap` write one due to // using a single arg for multiple purposes .usage( "racer complete <fqn>\n \ racer complete <linenum> <charnum> <path> [substitute_file]", ) // Next we make it an error to run without any args .setting(AppSettings::ArgRequiredElseHelp) // Because we want a single arg to play two roles and be compatible with previous // racer releases, we have to be a little hacky here... 
// // We start by making 'fqn' the first positional arg, which will hold this dual value // of either an FQN as it says, or secretly a line-number .arg( Arg::with_name("fqn") .help("complete with a fully-qualified-name (e.g. std::io::)"), ) .arg( Arg::with_name("charnum") .help("The char number to search for matches") .requires("path"), ) .arg(Arg::with_name("path").help("The path to search for name to match")) .arg(Arg::with_name("substitute_file").help("An optional substitute file")) // 'linenum' **MUST** be last (or have the highest index so that it's never actually // used by the user, but still appears in the help text) .arg(Arg::with_name("linenum").help("The line number at which to find the match")), ) .subcommand( SubCommand::with_name("daemon") .about("start a process that receives the above commands via stdin"), ) .subcommand( SubCommand::with_name("find-definition") .about("finds the definition of a function") .arg( Arg::with_name("linenum") .help("The line number at which to find the match") .required(true), ) .arg( Arg::with_name("charnum") .help("The char number at which to find the match") .required(true), ) .arg( Arg::with_name("path") .help("The path to search for name to match") .required(true), ) .arg(Arg::with_name("substitute_file").help("An optional substitute file")), ) .subcommand( SubCommand::with_name("prefix") .arg( Arg::with_name("linenum") .help("The line number at which to find the match") .required(true), ) .arg( Arg::with_name("charnum") .help("The char number at which to find the match") .required(true), ) .arg( Arg::with_name("path") .help("The path to search for the match to prefix") .required(true), ), ) .subcommand( SubCommand::with_name("complete-with-snippet") .about("performs completion and returns more detailed matches") .usage( "racer complete-with-snippet <fqn>\n \ racer complete-with-snippet <linenum> <charnum> <path> [substitute_file]", ) .setting(AppSettings::ArgRequiredElseHelp) .arg( Arg::with_name("fqn") 
.help("complete with a fully-qualified-name (e.g. std::io::)"), ) .arg( Arg::with_name("charnum") .help("The char number to search for matches") .requires("path"), ) .arg(Arg::with_name("path").help("The path to search for name to match")) .arg(Arg::with_name("substitute_file").help("An optional substitute file")) .arg(Arg::with_name("linenum").help("The line number at which to find the match")), ) .subcommand( SubCommand::with_name("point") .about("converts linenum and charnum in a file to a point") // Next we make it an error to run without any args .setting(AppSettings::ArgRequiredElseHelp) .arg( Arg::with_name("linenum") .help("The line number at which to convert to point") .required(true), ) .arg( Arg::with_name("charnum") .help("The char number at which to convert to point") .required(true), ) .arg( Arg::with_name("path") .help("The path where the line and char occur") .required(true), ), ) .subcommand( SubCommand::with_name("coord") .about("converts a racer point to line and character numbers") // Next we make it an error to run without any args .setting(AppSettings::ArgRequiredElseHelp) .arg( Arg::with_name("point") .help("The point to convert to line and character coordinates") .required(true), ) .arg( Arg::with_name("path") .help("The path where the line and char occur") .required(true), ), ) .after_help("For more information about a specific command try 'racer <command> --help'") } fn main() { use std::io::Write; env_logger::Builder::from_default_env() .format(|f, record| { writeln!( f, "{:>5} {}: {}: {}", record.level(), humantime::format_rfc3339_nanos(SystemTime::now()), record.module_path().unwrap_or("-"), record.args() ) }) .init(); let matches = build_cli().get_matches(); let interface = match matches.value_of("interface") { Some("tab-text") => Interface::TabText, Some("text") | _ => Interface::Text, }; validate_rust_src_path_env_var(); run(&matches, interface); } fn run(m: &ArgMatches<'_>, interface: Interface) { use 
crate::CompletePrinter::{Normal, WithSnippets}; // match raw subcommand, and get it's sub-matches "m" if let (name, Some(sub_m)) = m.subcommand() { let mut cfg = Config::from(sub_m); cfg.interface = interface; match name { "daemon" => daemon(&cfg), "prefix" => prefix(&cfg), "complete" => complete(cfg, Normal), "complete-with-snippet" => complete(cfg, WithSnippets), "find-definition" => find_definition(&cfg), "point" => point(&cfg), "coord" => coord(&cfg), _ => unreachable!(), } } }
34.502333
101
0.503764
898f113231164878756edcdea68af37adbbd5897
2,652
// Copyright 2018 Cargill Incorporated // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #[macro_use] extern crate clap; extern crate crypto; extern crate log4rs; #[macro_use] extern crate log; extern crate grid_sdk; extern crate protobuf; extern crate rustc_serialize; extern crate sawtooth_sdk; mod addressing; mod handler; use log::LogLevelFilter; use log4rs::append::console::ConsoleAppender; use log4rs::config::{Appender, Config, Root}; use log4rs::encode::pattern::PatternEncoder; use std::process; use sawtooth_sdk::processor::TransactionProcessor; use handler::SupplyChainTransactionHandler; fn main() { let matches = clap_app!(intkey => (version: crate_version!()) (about: "SupplyChain Transaction Processor (Rust)") (@arg connect: -C --connect +takes_value "connection endpoint for validator") (@arg verbose: -v --verbose +multiple "increase output verbosity")) .get_matches(); let endpoint = matches .value_of("connect") .unwrap_or("tcp://localhost:4004"); let console_log_level; match matches.occurrences_of("verbose") { 0 => console_log_level = LogLevelFilter::Warn, 1 => console_log_level = LogLevelFilter::Info, 2 => console_log_level = LogLevelFilter::Debug, 3 | _ => console_log_level = LogLevelFilter::Trace, } let stdout = ConsoleAppender::builder() .encoder(Box::new(PatternEncoder::new( "{h({l:5.5})} | {({M}:{L}):20.20} | {m}{n}", ))) .build(); let config = match Config::builder() .appender(Appender::builder().build("stdout", Box::new(stdout))) 
.build(Root::builder().appender("stdout").build(console_log_level)) { Ok(x) => x, Err(_) => process::exit(1), }; match log4rs::init_config(config) { Ok(_) => (), Err(_) => process::exit(1), } let handler = SupplyChainTransactionHandler::new(); let mut processor = TransactionProcessor::new(endpoint); info!("Console logging level: {}", console_log_level); processor.add_handler(&handler); processor.start(); }
30.136364
75
0.66629
9ca42c90a3ab029c55192b922c8fbce1e75b49f4
5,358
use crate::{ text::{Fonts, Galley, TextStyle}, Color32, Mesh, Stroke, }; use emath::*; /// A paint primitive such as a circle or a piece of text. /// Coordinates are all screen space points (not physical pixels). #[must_use = "Add a Shape to a Painter"] #[derive(Clone, Debug)] pub enum Shape { /// Paint nothing. This can be useful as a placeholder. Noop, /// Recursively nest more shapes - sometimes a convenience to be able to do. /// For performance reasons it is better to avoid it. Vec(Vec<Shape>), Circle { center: Pos2, radius: f32, fill: Color32, stroke: Stroke, }, LineSegment { points: [Pos2; 2], stroke: Stroke, }, Path { points: Vec<Pos2>, /// If true, connect the first and last of the points together. /// This is required if `fill != TRANSPARENT`. closed: bool, fill: Color32, stroke: Stroke, }, Rect { rect: Rect, /// How rounded the corners are. Use `0.0` for no rounding. corner_radius: f32, fill: Color32, stroke: Stroke, }, Text { /// Top left corner of the first character. pos: Pos2, /// The layed out text galley: Galley, text_style: TextStyle, // TODO: Font? 
color: Color32, /// If true, tilt the letters for an ugly italics effect fake_italics: bool, }, Mesh(Mesh), } /// ## Constructors impl Shape { pub fn line_segment(points: [Pos2; 2], stroke: impl Into<Stroke>) -> Self { Self::LineSegment { points, stroke: stroke.into(), } } pub fn line(points: Vec<Pos2>, stroke: impl Into<Stroke>) -> Self { Self::Path { points, closed: false, fill: Default::default(), stroke: stroke.into(), } } pub fn closed_line(points: Vec<Pos2>, stroke: impl Into<Stroke>) -> Self { Self::Path { points, closed: true, fill: Default::default(), stroke: stroke.into(), } } pub fn polygon(points: Vec<Pos2>, fill: impl Into<Color32>, stroke: impl Into<Stroke>) -> Self { Self::Path { points, closed: true, fill: fill.into(), stroke: stroke.into(), } } pub fn circle_filled(center: Pos2, radius: f32, fill_color: impl Into<Color32>) -> Self { Self::Circle { center, radius, fill: fill_color.into(), stroke: Default::default(), } } pub fn circle_stroke(center: Pos2, radius: f32, stroke: impl Into<Stroke>) -> Self { Self::Circle { center, radius, fill: Default::default(), stroke: stroke.into(), } } pub fn rect_filled(rect: Rect, corner_radius: f32, fill_color: impl Into<Color32>) -> Self { Self::Rect { rect, corner_radius, fill: fill_color.into(), stroke: Default::default(), } } pub fn rect_stroke(rect: Rect, corner_radius: f32, stroke: impl Into<Stroke>) -> Self { Self::Rect { rect, corner_radius, fill: Default::default(), stroke: stroke.into(), } } pub fn text( fonts: &Fonts, pos: Pos2, anchor: Align2, text: impl Into<String>, text_style: TextStyle, color: Color32, ) -> Self { let font = &fonts[text_style]; let galley = font.layout_multiline(text.into(), f32::INFINITY); let rect = anchor.anchor_rect(Rect::from_min_size(pos, galley.size)); Self::Text { pos: rect.min, galley, text_style, color, fake_italics: false, } } } /// ## Operations impl Shape { pub fn mesh(mesh: Mesh) -> Self { debug_assert!(mesh.is_valid()); Self::Mesh(mesh) } #[deprecated = "Renamed 
`mesh`"] pub fn triangles(mesh: Mesh) -> Self { Self::mesh(mesh) } pub fn texture_id(&self) -> super::TextureId { if let Shape::Mesh(mesh) = self { mesh.texture_id } else { super::TextureId::Egui } } /// Translate location by this much, in-place pub fn translate(&mut self, delta: Vec2) { match self { Shape::Noop => {} Shape::Vec(shapes) => { for shape in shapes { shape.translate(delta); } } Shape::Circle { center, .. } => { *center += delta; } Shape::LineSegment { points, .. } => { for p in points { *p += delta; } } Shape::Path { points, .. } => { for p in points { *p += delta; } } Shape::Rect { rect, .. } => { *rect = rect.translate(delta); } Shape::Text { pos, .. } => { *pos += delta; } Shape::Mesh(mesh) => { mesh.translate(delta); } } } }
26.524752
100
0.484323
236f5bb1bfdfb2871e0428f891b4f90f630ca3d6
3,387
//! Declare various LLVM values. //! //! Prefer using functions and methods from this module rather than calling LLVM //! functions directly. These functions do some additional work to ensure we do //! the right thing given the preconceptions of codegen. //! //! Some useful guidelines: //! //! * Use declare_* family of methods if you are declaring, but are not //! interested in defining the Value they return. //! * Use define_* family of methods when you might be defining the Value. //! * When in doubt, define. use crate::abi::{FnAbi, FnAbiLlvmExt}; use crate::attributes; use crate::context::CodegenCx; use crate::llvm; use crate::llvm::AttributePlace::Function; use crate::type_::Type; use crate::value::Value; use log::debug; use rustc::ty::Ty; use rustc_codegen_ssa::traits::*; /// Declare a function. /// /// If there’s a value with the same name already declared, the function will /// update the declaration and return existing Value instead. fn declare_raw_fn( cx: &CodegenCx<'ll, '_>, name: &str, callconv: llvm::CallConv, ty: &'ll Type, ) -> &'ll Value { debug!("declare_raw_fn(name={:?}, ty={:?})", name, ty); let llfn = unsafe { llvm::LLVMRustGetOrInsertFunction(cx.llmod, name.as_ptr().cast(), name.len(), ty) }; llvm::SetFunctionCallConv(llfn, callconv); // Function addresses in Rust are never significant, allowing functions to // be merged. 
llvm::SetUnnamedAddress(llfn, llvm::UnnamedAddr::Global); if cx.tcx.sess.opts.cg.no_redzone.unwrap_or(cx.tcx.sess.target.target.options.disable_redzone) { llvm::Attribute::NoRedZone.apply_llfn(Function, llfn); } attributes::default_optimisation_attrs(cx.tcx.sess, llfn); attributes::non_lazy_bind(cx.sess(), llfn); llfn } impl DeclareMethods<'tcx> for CodegenCx<'ll, 'tcx> { fn declare_global(&self, name: &str, ty: &'ll Type) -> &'ll Value { debug!("declare_global(name={:?})", name); unsafe { llvm::LLVMRustGetOrInsertGlobal(self.llmod, name.as_ptr().cast(), name.len(), ty) } } fn declare_cfn(&self, name: &str, fn_type: &'ll Type) -> &'ll Value { declare_raw_fn(self, name, llvm::CCallConv, fn_type) } fn declare_fn(&self, name: &str, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> &'ll Value { debug!("declare_rust_fn(name={:?}, fn_abi={:?})", name, fn_abi); let llfn = declare_raw_fn(self, name, fn_abi.llvm_cconv(), fn_abi.llvm_type(self)); fn_abi.apply_attrs_llfn(self, llfn); llfn } fn define_global(&self, name: &str, ty: &'ll Type) -> Option<&'ll Value> { if self.get_defined_value(name).is_some() { None } else { Some(self.declare_global(name, ty)) } } fn define_private_global(&self, ty: &'ll Type) -> &'ll Value { unsafe { llvm::LLVMRustInsertPrivateGlobal(self.llmod, ty) } } fn get_declared_value(&self, name: &str) -> Option<&'ll Value> { debug!("get_declared_value(name={:?})", name); unsafe { llvm::LLVMRustGetNamedValue(self.llmod, name.as_ptr().cast(), name.len()) } } fn get_defined_value(&self, name: &str) -> Option<&'ll Value> { self.get_declared_value(name).and_then(|val| { let declaration = unsafe { llvm::LLVMIsDeclaration(val) != 0 }; if !declaration { Some(val) } else { None } }) } }
35.28125
100
0.643933
294eee3e6c3733ed8bb1f22853b7d86687a238c7
15,384
use pest::iterators::Pair; use pest::Parser; use tera::{to_value, Context, Map, Value}; use regex::Regex; use context::RenderContext; use errors::{Result, ResultExt}; // This include forces recompiling this source file if the grammar file changes. // Uncomment it when doing changes to the .pest file const _GRAMMAR: &str = include_str!("content.pest"); #[derive(Parser)] #[grammar = "content.pest"] pub struct ContentParser; lazy_static! { static ref MULTIPLE_NEWLINE_RE: Regex = Regex::new(r"\n\s*\n").unwrap(); } fn replace_string_markers(input: &str) -> String { match input.chars().next().unwrap() { '"' => input.replace('"', "").to_string(), '\'' => input.replace('\'', "").to_string(), '`' => input.replace('`', "").to_string(), _ => unreachable!("How did you even get there"), } } fn parse_literal(pair: Pair<Rule>) -> Value { let mut val = None; for p in pair.into_inner() { match p.as_rule() { Rule::boolean => match p.as_str() { "true" => val = Some(Value::Bool(true)), "false" => val = Some(Value::Bool(false)), _ => unreachable!(), }, Rule::string => val = Some(Value::String(replace_string_markers(p.as_str()))), Rule::float => { val = Some(to_value(p.as_str().parse::<f64>().unwrap()).unwrap()); } Rule::int => { val = Some(to_value(p.as_str().parse::<i64>().unwrap()).unwrap()); } _ => unreachable!("Unknown literal: {:?}", p), }; } val.unwrap() } /// Returns (shortcode_name, kwargs) fn parse_shortcode_call(pair: Pair<Rule>) -> (String, Map<String, Value>) { let mut name = None; let mut args = Map::new(); for p in pair.into_inner() { match p.as_rule() { Rule::ident => { name = Some(p.into_span().as_str().to_string()); } Rule::kwarg => { let mut arg_name = None; let mut arg_val = None; for p2 in p.into_inner() { match p2.as_rule() { Rule::ident => { arg_name = Some(p2.into_span().as_str().to_string()); } Rule::literal => { arg_val = Some(parse_literal(p2)); } Rule::array => { let mut vals = vec![]; for p3 in p2.into_inner() { match p3.as_rule() { Rule::literal => 
vals.push(parse_literal(p3)), _ => unreachable!( "Got something other than literal in an array: {:?}", p3 ), } } arg_val = Some(Value::Array(vals)); } _ => unreachable!("Got something unexpected in a kwarg: {:?}", p2), } } args.insert(arg_name.unwrap(), arg_val.unwrap()); } _ => unreachable!("Got something unexpected in a shortcode: {:?}", p), } } (name.unwrap(), args) } fn render_shortcode( name: &str, args: &Map<String, Value>, context: &RenderContext, body: Option<&str>, ) -> Result<String> { let mut tera_context = Context::new(); for (key, value) in args.iter() { tera_context.insert(key, value); } if let Some(ref b) = body { // Trimming right to avoid most shortcodes with bodies ending up with a HTML new line tera_context.insert("body", b.trim_right()); } tera_context.extend(context.tera_context.clone()); let tpl_name = format!("shortcodes/{}.html", name); let res = context .tera .render(&tpl_name, &tera_context) .chain_err(|| format!("Failed to render {} shortcode", name))?; // Small hack to avoid having multiple blank lines because of Tera tags for example // A blank like will cause the markdown parser to think we're out of HTML and start looking // at indentation, making the output a code block. 
let res = MULTIPLE_NEWLINE_RE.replace_all(&res, "\n"); Ok(res.to_string()) } pub fn render_shortcodes(content: &str, context: &RenderContext) -> Result<String> { let mut res = String::with_capacity(content.len()); let mut pairs = match ContentParser::parse(Rule::page, content) { Ok(p) => p, Err(e) => { let fancy_e = e.renamed_rules(|rule| match *rule { Rule::int => "an integer".to_string(), Rule::float => "a float".to_string(), Rule::string => "a string".to_string(), Rule::literal => "a literal (int, float, string, bool)".to_string(), Rule::array => "an array".to_string(), Rule::kwarg => "a keyword argument".to_string(), Rule::ident => "an identifier".to_string(), Rule::inline_shortcode => "an inline shortcode".to_string(), Rule::ignored_inline_shortcode => "an ignored inline shortcode".to_string(), Rule::sc_body_start => "the start of a shortcode".to_string(), Rule::ignored_sc_body_start => "the start of an ignored shortcode".to_string(), Rule::text => "some text".to_string(), Rule::EOI => "end of input".to_string(), Rule::double_quoted_string => "double quoted string".to_string(), Rule::single_quoted_string => "single quoted string".to_string(), Rule::backquoted_quoted_string => "backquoted quoted string".to_string(), Rule::boolean => "a boolean (true, false)".to_string(), Rule::all_chars => "a alphanumerical character".to_string(), Rule::kwargs => "a list of keyword arguments".to_string(), Rule::sc_def => "a shortcode definition".to_string(), Rule::shortcode_with_body => "a shortcode with body".to_string(), Rule::ignored_shortcode_with_body => "an ignored shortcode with body".to_string(), Rule::sc_body_end => "{% end %}".to_string(), Rule::ignored_sc_body_end => "{%/* end */%}".to_string(), Rule::text_in_body_sc => "text in a shortcode body".to_string(), Rule::text_in_ignored_body_sc => "text in an ignored shortcode body".to_string(), Rule::content => "some content".to_string(), Rule::page => "a page".to_string(), Rule::WHITESPACE => "whitespace".to_string(), 
}); bail!("{}", fancy_e); } }; // We have at least a `page` pair for p in pairs.next().unwrap().into_inner() { match p.as_rule() { Rule::text => res.push_str(p.into_span().as_str()), Rule::inline_shortcode => { let (name, args) = parse_shortcode_call(p); res.push_str(&render_shortcode(&name, &args, context, None)?); } Rule::shortcode_with_body => { let mut inner = p.into_inner(); // 3 items in inner: call, body, end // we don't care about the closing tag let (name, args) = parse_shortcode_call(inner.next().unwrap()); let body = inner.next().unwrap().into_span().as_str(); res.push_str(&render_shortcode(&name, &args, context, Some(body))?); } Rule::ignored_inline_shortcode => { res.push_str( &p.into_span().as_str().replacen("{{/*", "{{", 1).replacen("*/}}", "}}", 1), ); } Rule::ignored_shortcode_with_body => { for p2 in p.into_inner() { match p2.as_rule() { Rule::ignored_sc_body_start | Rule::ignored_sc_body_end => { res.push_str( &p2.into_span() .as_str() .replacen("{%/*", "{%", 1) .replacen("*/%}", "%}", 1), ); } Rule::text_in_ignored_body_sc => res.push_str(p2.into_span().as_str()), _ => unreachable!("Got something weird in an ignored shortcode: {:?}", p2), } } } Rule::EOI => (), _ => unreachable!("unexpected page rule: {:?}", p.as_rule()), } } Ok(res) } #[cfg(test)] mod tests { use std::collections::HashMap; use super::*; use config::Config; use front_matter::InsertAnchor; use tera::Tera; macro_rules! 
assert_lex_rule { ($rule: expr, $input: expr) => { let res = ContentParser::parse($rule, $input); println!("{:?}", $input); println!("{:#?}", res); if res.is_err() { println!("{}", res.unwrap_err()); panic!(); } assert!(res.is_ok()); assert_eq!(res.unwrap().last().unwrap().into_span().end(), $input.len()); }; } fn render_shortcodes(code: &str, tera: &Tera) -> String { let config = Config::default(); let permalinks = HashMap::new(); let context = RenderContext::new(&tera, &config, "", &permalinks, InsertAnchor::None); super::render_shortcodes(code, &context).unwrap() } #[test] fn lex_text() { let inputs = vec!["Hello world", "HEllo \n world", "Hello 1 2 true false 'hey'"]; for i in inputs { assert_lex_rule!(Rule::text, i); } } #[test] fn lex_inline_shortcode() { let inputs = vec![ "{{ youtube() }}", "{{ youtube(id=1, autoplay=true, url='hey') }}", "{{ youtube(id=1, \nautoplay=true, url='hey') }}", ]; for i in inputs { assert_lex_rule!(Rule::inline_shortcode, i); } } #[test] fn lex_inline_ignored_shortcode() { let inputs = vec![ "{{/* youtube() */}}", "{{/* youtube(id=1, autoplay=true, url='hey') */}}", "{{/* youtube(id=1, \nautoplay=true, \nurl='hey') */}}", ]; for i in inputs { assert_lex_rule!(Rule::ignored_inline_shortcode, i); } } #[test] fn lex_shortcode_with_body() { let inputs = vec![ r#"{% youtube() %} Some text {% end %}"#, r#"{% youtube(id=1, autoplay=true, url='hey') %} Some text {% end %}"#, ]; for i in inputs { assert_lex_rule!(Rule::shortcode_with_body, i); } } #[test] fn lex_ignored_shortcode_with_body() { let inputs = vec![ r#"{%/* youtube() */%} Some text {%/* end */%}"#, r#"{%/* youtube(id=1, autoplay=true, url='hey') */%} Some text {%/* end */%}"#, ]; for i in inputs { assert_lex_rule!(Rule::ignored_shortcode_with_body, i); } } #[test] fn lex_page() { let inputs = vec![ "Some text and a shortcode `{{/* youtube() */}}`", "{{ youtube(id=1, autoplay=true, url='hey') }}", "{{ youtube(id=1, \nautoplay=true, url='hey') }} that's it", r#" This is a test 
{% hello() %} Body {{ var }} {% end %} "#, ]; for i in inputs { assert_lex_rule!(Rule::page, i); } } #[test] fn does_nothing_with_no_shortcodes() { let res = render_shortcodes("Hello World", &Tera::default()); assert_eq!(res, "Hello World"); } #[test] fn can_unignore_inline_shortcode() { let res = render_shortcodes("Hello World {{/* youtube() */}}", &Tera::default()); assert_eq!(res, "Hello World {{ youtube() }}"); } #[test] fn can_unignore_shortcode_with_body() { let res = render_shortcodes( r#" Hello World {%/* youtube() */%}Some body {{ hello() }}{%/* end */%}"#, &Tera::default(), ); assert_eq!(res, "\nHello World\n{% youtube() %}Some body {{ hello() }}{% end %}"); } // https://github.com/Keats/gutenberg/issues/383 #[test] fn unignore_shortcode_with_body_does_not_swallow_initial_whitespace() { let res = render_shortcodes( r#" Hello World {%/* youtube() */%} Some body {{ hello() }}{%/* end */%}"#, &Tera::default(), ); assert_eq!(res, "\nHello World\n{% youtube() %}\nSome body {{ hello() }}{% end %}"); } #[test] fn can_parse_shortcode_arguments() { let inputs = vec![ ("{{ youtube() }}", "youtube", Map::new()), ("{{ youtube(id=1, autoplay=true, hello='salut', float=1.2) }}", "youtube", { let mut m = Map::new(); m.insert("id".to_string(), to_value(1).unwrap()); m.insert("autoplay".to_string(), to_value(true).unwrap()); m.insert("hello".to_string(), to_value("salut").unwrap()); m.insert("float".to_string(), to_value(1.2).unwrap()); m }), ("{{ gallery(photos=['something', 'else'], fullscreen=true) }}", "gallery", { let mut m = Map::new(); m.insert("photos".to_string(), to_value(["something", "else"]).unwrap()); m.insert("fullscreen".to_string(), to_value(true).unwrap()); m }), ]; for (i, n, a) in inputs { let mut res = ContentParser::parse(Rule::inline_shortcode, i).unwrap(); let (name, args) = parse_shortcode_call(res.next().unwrap()); assert_eq!(name, n); assert_eq!(args, a); } } #[test] fn can_render_inline_shortcodes() { let mut tera = Tera::default(); 
tera.add_raw_template("shortcodes/youtube.html", "Hello {{id}}").unwrap(); let res = render_shortcodes("Inline {{ youtube(id=1) }}.", &tera); assert_eq!(res, "Inline Hello 1."); } #[test] fn can_render_shortcodes_with_body() { let mut tera = Tera::default(); tera.add_raw_template("shortcodes/youtube.html", "{{body}}").unwrap(); let res = render_shortcodes("Body\n {% youtube() %}Hey!{% end %}", &tera); assert_eq!(res, "Body\n Hey!"); } // https://github.com/Keats/gutenberg/issues/462 #[test] fn shortcodes_with_body_do_not_eat_newlines() { let mut tera = Tera::default(); tera.add_raw_template("shortcodes/youtube.html", "{{body | safe}}").unwrap(); let res = render_shortcodes("Body\n {% youtube() %}\nHello \n World{% end %}", &tera); assert_eq!(res, "Body\n Hello \n World"); } }
36.980769
99
0.49259
d6ec68d83674d88b19e18477f52a410c60223e9f
4,571
// Copyright (c) The Move Contributors // SPDX-License-Identifier: Apache-2.0 use std::fmt::{self, Display}; use anyhow::bail; use move_core_types::{ account_address::AccountAddress, identifier::{self, Identifier}, language_storage::{StructTag, TypeTag}, }; use crate::{address::ParsedAddress, parser::Token}; #[derive(Eq, PartialEq, Debug, Clone, Copy)] pub enum TypeToken { Whitespace, Ident, AddressIdent, ColonColon, Lt, Gt, Comma, } #[derive(Eq, PartialEq, Debug, Clone)] pub struct ParsedStructType { pub address: ParsedAddress, pub module: String, pub name: String, pub type_args: Vec<ParsedType>, } #[derive(Eq, PartialEq, Debug, Clone)] pub enum ParsedType { U8, U64, U128, Bool, Address, Signer, Vector(Box<ParsedType>), Struct(ParsedStructType), } impl Display for TypeToken { fn fmt<'f>(&self, formatter: &mut fmt::Formatter<'f>) -> Result<(), fmt::Error> { let s = match *self { TypeToken::Whitespace => "[whitespace]", TypeToken::Ident => "[identifier]", TypeToken::AddressIdent => "[address]", TypeToken::ColonColon => "::", TypeToken::Lt => "<", TypeToken::Gt => ">", TypeToken::Comma => ",", }; fmt::Display::fmt(s, formatter) } } impl Token for TypeToken { fn is_whitespace(&self) -> bool { matches!(self, Self::Whitespace) } fn next_token(s: &str) -> anyhow::Result<Option<(Self, usize)>> { let mut chars = s.chars().peekable(); let c = match chars.next() { None => return Ok(None), Some(c) => c, }; Ok(Some(match c { '<' => (Self::Lt, 1), '>' => (Self::Gt, 1), ',' => (Self::Comma, 1), ':' => match chars.next() { Some(':') => (Self::ColonColon, 2), _ => bail!("unrecognized token: {}", s), }, '0' if matches!(chars.peek(), Some('x') | Some('X')) => { chars.next().unwrap(); match chars.next() { Some(c) if c.is_ascii_hexdigit() => { // 0x + c + remaining let len = 3 + chars.take_while(char::is_ascii_hexdigit).count(); (Self::AddressIdent, len) } _ => bail!("unrecognized token: {}", s), } } c if c.is_ascii_digit() => { // c + remaining let len = 1 + 
chars.take_while(char::is_ascii_digit).count(); (Self::AddressIdent, len) } c if c.is_ascii_whitespace() => { // c + remaining let len = 1 + chars.take_while(char::is_ascii_whitespace).count(); (Self::Whitespace, len) } c if c.is_ascii_alphabetic() => { // c + remaining let len = 1 + chars .take_while(|c| identifier::is_valid_identifier_char(*c)) .count(); (Self::Ident, len) } _ => bail!("unrecognized token: {}", s), })) } } impl ParsedStructType { pub fn into_struct_tag( self, mapping: &impl Fn(&str) -> Option<AccountAddress>, ) -> anyhow::Result<StructTag> { let Self { address, module, name, type_args, } = self; Ok(StructTag { address: address.into_account_address(mapping)?, module: Identifier::new(module)?, name: Identifier::new(name)?, type_params: type_args .into_iter() .map(|t| t.into_type_tag(mapping)) .collect::<anyhow::Result<_>>()?, }) } } impl ParsedType { pub fn into_type_tag( self, mapping: &impl Fn(&str) -> Option<AccountAddress>, ) -> anyhow::Result<TypeTag> { Ok(match self { ParsedType::U8 => TypeTag::U8, ParsedType::U64 => TypeTag::U64, ParsedType::U128 => TypeTag::U128, ParsedType::Bool => TypeTag::Bool, ParsedType::Address => TypeTag::Address, ParsedType::Signer => TypeTag::Signer, ParsedType::Vector(inner) => TypeTag::Vector(Box::new(inner.into_type_tag(mapping)?)), ParsedType::Struct(s) => TypeTag::Struct(s.into_struct_tag(mapping)?), }) } }
29.681818
98
0.496609
acf9ff75f580e77e6eaf826984fa3bceee8ebfba
2,565
use std::collections::HashMap; use std::io; use async_trait::async_trait; use futures::TryFutureExt; use tokio_tungstenite::client_async_with_config; use tungstenite::protocol::WebSocketConfig; use url::Url; use crate::{proxy::*, session::Session}; use super::stream; pub struct Handler { pub path: String, pub headers: HashMap<String, String>, } struct Request<'a> { pub uri: &'a str, pub headers: &'a HashMap<String, String>, } impl<'a> tungstenite::client::IntoClientRequest for Request<'a> { fn into_client_request( self, ) -> tungstenite::error::Result<tungstenite::handshake::client::Request> { let mut builder = http::Request::builder() .method("GET") .uri(self.uri) .header("User-Agent", &*crate::option::HTTP_USER_AGENT); for (k, v) in self.headers.iter() { if k != "Host" { builder = builder.header(k, v); } } Ok(builder.body(())?) } } #[async_trait] impl TcpOutboundHandler for Handler { type Stream = AnyStream; fn connect_addr(&self) -> Option<OutboundConnect> { None } async fn handle<'a>( &'a self, sess: &'a Session, stream: Option<Self::Stream>, ) -> io::Result<Self::Stream> { if let Some(stream) = stream { let host = if let Some(host) = self.headers.get("Host") { host.to_owned() } else { sess.destination.host() }; let mut url = Url::parse(&format!("ws://{}", host)).unwrap(); url = url.join(self.path.as_str()).unwrap(); let req = Request { uri: &url.to_string(), headers: &self.headers, }; let ws_config = WebSocketConfig { max_send_queue: Some(4), max_message_size: Some(64 << 20), max_frame_size: Some(16 << 20), accept_unmasked_frames: false, }; let (socket, _) = client_async_with_config(req, stream, Some(ws_config)) .map_err(|e| { io::Error::new( io::ErrorKind::Other, format!("connect ws {} failed: {}", &url, e), ) }) .await?; let ws_stream = stream::WebSocketToStream::new(socket); Ok(Box::new(ws_stream)) } else { Err(io::Error::new(io::ErrorKind::Other, "invalid input")) } } }
29.482759
84
0.519688
018e5826a8c11f7ab7a2cde384e593566c6fc7bb
59
mod backend; pub use backend::{CairoBackend, CairoError};
14.75
44
0.762712
16bd02e44c225e230b7d3e4d6af1b5ce51404036
1,804
#![allow( dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals )] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct Rooted { pub ptr: MaybeWrapped<::std::os::raw::c_int>, } #[test] fn bindgen_test_layout_Rooted() { assert_eq!( ::std::mem::size_of::<Rooted>(), 4usize, concat!("Size of: ", stringify!(Rooted)) ); assert_eq!( ::std::mem::align_of::<Rooted>(), 4usize, concat!("Alignment of ", stringify!(Rooted)) ); assert_eq!( unsafe { let uninit = ::std::mem::MaybeUninit::<Rooted>::uninit(); let ptr = uninit.as_ptr(); ::std::ptr::addr_of!((*ptr).ptr) as usize - ptr as usize }, 0usize, concat!( "Offset of field: ", stringify!(Rooted), "::", stringify!(ptr) ) ); } impl Default for Rooted { fn default() -> Self { let mut s = ::std::mem::MaybeUninit::<Self>::uninit(); unsafe { ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); s.assume_init() } } } /// <div rustbindgen replaces="MaybeWrapped"></div> pub type MaybeWrapped<a> = a; #[test] fn __bindgen_test_layout_MaybeWrapped_open0_int_close0_instantiation() { assert_eq!( ::std::mem::size_of::<MaybeWrapped<::std::os::raw::c_int>>(), 4usize, concat!( "Size of template specialization: ", stringify!(MaybeWrapped<::std::os::raw::c_int>) ) ); assert_eq!( ::std::mem::align_of::<MaybeWrapped<::std::os::raw::c_int>>(), 4usize, concat!( "Alignment of template specialization: ", stringify!(MaybeWrapped<::std::os::raw::c_int>) ) ); }
25.771429
72
0.527716
ef65f1871dbb06e968018fad171e125a1c866f02
9,748
use std::{collections::HashMap, mem}; use log::debug; use nickel_lang::{ identifier::Ident, term::{MetaValue, RichTerm, Term}, typecheck::{ linearization::{LinearizationState, Scope}, TypeWrapper, }, types::AbsType, }; use crate::linearization::interface::{TermKind, UsageState}; use super::{ interface::{Unresolved, ValueState}, Environment, IdGen, LinearizationItem, }; /// A concrete [LinearizationState] /// Holds any inner datatype that can be used as stable resource /// while recording terms. #[derive(Default)] pub struct Building { pub linearization: Vec<LinearizationItem<Unresolved>>, pub scope: HashMap<Scope, Vec<ID>>, } pub type ID = usize; impl Building { pub(super) fn push(&mut self, item: LinearizationItem<Unresolved>) { self.scope .remove(&item.scope) .map(|mut s| { s.push(item.id); s }) .or_else(|| Some(vec![item.id])) .into_iter() .for_each(|l| { self.scope.insert(item.scope.clone(), l); }); self.linearization.push(item); } pub(super) fn add_usage(&mut self, decl: usize, usage: usize) { match self .linearization .get_mut(decl) .expect("Could not find parent") .kind { TermKind::Structure => unreachable!(), TermKind::Usage(_) => unreachable!(), TermKind::Record(_) => unreachable!(), TermKind::Declaration(_, ref mut usages, _) | TermKind::RecordField { ref mut usages, .. } => usages.push(usage), }; } pub(super) fn inform_declaration(&mut self, declaration: ID, value: ID) { match self.linearization.get_mut(declaration) { Some(LinearizationItem { kind: TermKind::Declaration(_, _, value_state), .. 
}) => *value_state = ValueState::Known(value), _ => (), } } pub(super) fn register_fields( &mut self, record_fields: &HashMap<Ident, RichTerm>, record: usize, scope: Scope, env: &mut Environment, ) { for (ident, value) in record_fields.iter() { let id = self.id_gen().get_and_advance(); self.push(LinearizationItem { id, pos: ident.pos.unwrap(), // temporary, the actual type is resolved later and the item retyped ty: TypeWrapper::Concrete(AbsType::Dyn()), kind: TermKind::RecordField { record, ident: ident.clone(), usages: Vec::new(), value: ValueState::Unknown, }, scope: scope.clone(), meta: match value.term.as_ref() { Term::MetaValue(meta @ MetaValue { .. }) => Some(MetaValue { value: None, ..meta.clone() }), _ => None, }, }); env.insert(ident.clone(), id); self.add_record_field(record, (ident.clone(), id)) } } pub(super) fn add_record_field( &mut self, record: usize, (field_ident, reference_id): (Ident, usize), ) { match self .linearization .get_mut(record) .expect("Could not find record") .kind { TermKind::Record(ref mut fields) => { fields.insert(field_ident, reference_id); } _ => unreachable!(), } } pub(super) fn resolve_reference<'a>( &'a self, item: &'a LinearizationItem<TypeWrapper>, ) -> Option<&'a LinearizationItem<TypeWrapper>> { // if item is a usage, resolve the usage first match item.kind { TermKind::Usage(UsageState::Resolved(pointed)) => self.linearization.get(pointed), _ => Some(item), } // load referenced value, either from record field or declaration .and_then(|item_pointer| { match &item_pointer.kind { // if declaration is a record field, resolve its value TermKind::RecordField { value, .. } => { debug!("parent referenced a record field {:?}", value); value // retrieve record .as_option() .and_then(|value_index| self.linearization.get(value_index)) } // if declaration is a let binding resolve its value TermKind::Declaration(_, _, ValueState::Known(value)) => { self.linearization.get(*value) } // if something else was referenced, stop. 
_ => Some(item_pointer), } }) } pub(super) fn resolve_record_references(&mut self, mut defers: Vec<(usize, usize, Ident)>) { let mut unresolved: Vec<(usize, usize, Ident)> = Vec::new(); while let Some(deferred) = defers.pop() { // child_item: current deferred usage item // i.e.: root.<child> // parent_accessor_id: id of the parent usage // i.e.: <parent>.child // child_ident: identifier the child item references let (child_item, parent_accessor_id, child_ident) = &deferred; // resolve the value referenced by the parent accessor element // get the parent accessor, and read its resolved reference let parent_referenced = self.linearization.get(*parent_accessor_id); if let Some(LinearizationItem { kind: TermKind::Usage(UsageState::Deferred { .. }), .. }) = parent_referenced { debug!("parent references deferred usage"); unresolved.push(deferred.clone()); continue; } // load the parent referenced declaration (i.e.: a declaration or record field term) let parent_declaration = parent_referenced .and_then(|parent_usage_value| self.resolve_reference(parent_usage_value)); if let Some(LinearizationItem { kind: TermKind::Usage(UsageState::Deferred { .. }), .. }) = parent_declaration { debug!("parent references deferred usage"); unresolved.push(deferred.clone()); continue; } let referenced_declaration = parent_declaration // resolve indirection by following the usage .and_then(|parent_declaration| self.resolve_reference(parent_declaration)) // get record field .and_then(|parent_declaration| match &parent_declaration.kind { TermKind::Record(fields) => { fields.get(child_ident).and_then(|child_declaration_id| { self.linearization.get(*child_declaration_id) }) } _ => None, }); let referenced_declaration = referenced_declaration.and_then(|referenced| match &referenced.kind { TermKind::Usage(UsageState::Resolved(pointed)) => { self.linearization.get(*pointed) } TermKind::RecordField { value, .. 
} => value // retrieve record .as_option() .and_then(|value_index| self.linearization.get(value_index)) // retrieve field .and_then(|record| match &record.kind { TermKind::Record(fields) => { debug!( "parent referenced a nested record indirectly`: {:?}", fields ); fields .get(child_ident) .and_then(|accessor_id| self.linearization.get(*accessor_id)) } TermKind::Usage(UsageState::Resolved(pointed)) => { self.linearization.get(*pointed) } _ => None, }) .or(Some(referenced)), _ => Some(referenced), }); let referenced_id = referenced_declaration.map(|reference| reference.id); debug!( "Associating child {} to value {:?}", child_ident, referenced_declaration ); { let child: &mut LinearizationItem<TypeWrapper> = self.linearization.get_mut(*child_item).unwrap(); child.kind = TermKind::Usage(UsageState::from(referenced_id)); } if let Some(referenced_id) = referenced_id { self.add_usage(referenced_id, *child_item); } if defers.is_empty() && !unresolved.is_empty() { debug!("unresolved references: {:?}", unresolved); defers = mem::take(&mut unresolved); } } } pub(super) fn id_gen(&self) -> IdGen { IdGen::new(self.linearization.len()) } } impl LinearizationState for Building {}
36.509363
97
0.496615
01afd92abcae6752b3e252a2e296589334db8d2a
233
// macros2.rs // Make me compile! Execute `rustlings hint macros2` for hints :) // I AM NOT DONE fn main() { my_macro!(); } #[macro_export] macro_rules! my_macro { () => { println!("Check out my macro!"); }; }
14.5625
65
0.575107
eb8ced0813e4c27ea147de3ce4da5bb651a55c6f
7,087
//! Internal traits that define the Isahc configuration system. use super::{proxy::Proxy, *}; use curl::easy::Easy2; /// Base trait for any object that can be configured for requests, such as an /// HTTP request builder or an HTTP client. #[doc(hidden)] pub trait WithRequestConfig: Sized { /// Invoke a function to mutate the request configuration for this object. fn with_config(self, f: impl FnOnce(&mut RequestConfig)) -> Self; } /// A helper trait for applying a configuration value to a given curl handle. pub(crate) trait SetOpt { /// Apply this configuration property to the given curl handle. fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error>; } // Define this struct inside a macro to reduce some boilerplate. macro_rules! define_request_config { ($($field:ident: $t:ty,)*) => { /// Configuration for an HTTP request. /// /// This struct is not exposed directly, but rather is interacted with /// via the [`Configurable`] trait. #[derive(Clone, Debug, Default)] pub struct RequestConfig { $( pub(crate) $field: $t, )* } impl RequestConfig { pub(crate) fn client_defaults() -> Self { Self { // Always start out with latest compatible HTTP version. version_negotiation: Some(VersionNegotiation::default()), // Enable automatic decompression by default for convenience // (and maintain backwards compatibility). automatic_decompression: Some(true), // Erase curl's default auth method of Basic. authentication: Some(Authentication::default()), ..Default::default() } } /// Merge another request configuration into this one. Unspecified /// values in this config are replaced with values in the given /// config. pub(crate) fn merge(&mut self, defaults: &Self) { $( if self.$field.is_none() { if let Some(value) = defaults.$field.as_ref() { self.$field = Some(value.clone()); } } )* } } }; } define_request_config! 
{ // Used by curl timeout: Option<Duration>, connect_timeout: Option<Duration>, expect_100_timeout: Option<Duration>, version_negotiation: Option<VersionNegotiation>, automatic_decompression: Option<bool>, authentication: Option<Authentication>, credentials: Option<Credentials>, tcp_keepalive: Option<Duration>, tcp_nodelay: Option<bool>, interface: Option<NetworkInterface>, ip_version: Option<IpVersion>, dial: Option<Dialer>, proxy: Option<Option<http::Uri>>, proxy_blacklist: Option<proxy::Blacklist>, proxy_authentication: Option<Proxy<Authentication>>, proxy_credentials: Option<Proxy<Credentials>>, max_upload_speed: Option<u64>, max_download_speed: Option<u64>, ssl_client_certificate: Option<ClientCertificate>, ssl_ca_certificate: Option<CaCertificate>, ssl_ciphers: Option<ssl::Ciphers>, ssl_options: Option<SslOption>, enable_metrics: Option<bool>, // Used by interceptors redirect_policy: Option<RedirectPolicy>, auto_referer: Option<bool>, title_case_headers: Option<bool>, } impl SetOpt for RequestConfig { fn set_opt<H>(&self, easy: &mut Easy2<H>) -> Result<(), curl::Error> { if let Some(timeout) = self.timeout { easy.timeout(timeout)?; } if let Some(timeout) = self.connect_timeout { easy.connect_timeout(timeout)?; } if let Some(timeout) = self.expect_100_timeout { easy.expect_100_timeout(timeout)?; } if let Some(negotiation) = self.version_negotiation.as_ref() { negotiation.set_opt(easy)?; } #[allow(unsafe_code)] { if let Some(enable) = self.automatic_decompression { if enable { // Enable automatic decompression, and also populate the // Accept-Encoding header with all supported encodings if not // explicitly set. easy.accept_encoding("")?; } else { // Use raw FFI because safe wrapper doesn't let us set to null. 
unsafe { match curl_sys::curl_easy_setopt( easy.raw(), curl_sys::CURLOPT_ACCEPT_ENCODING, 0, ) { curl_sys::CURLE_OK => {} code => return Err(curl::Error::new(code)), } } } } } if let Some(auth) = self.authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(interval) = self.tcp_keepalive { easy.tcp_keepalive(true)?; easy.tcp_keepintvl(interval)?; } if let Some(enable) = self.tcp_nodelay { easy.tcp_nodelay(enable)?; } if let Some(interface) = self.interface.as_ref() { interface.set_opt(easy)?; } if let Some(version) = self.ip_version.as_ref() { version.set_opt(easy)?; } if let Some(dialer) = self.dial.as_ref() { dialer.set_opt(easy)?; } if let Some(proxy) = self.proxy.as_ref() { match proxy { Some(uri) => easy.proxy(&format!("{}", uri))?, None => easy.proxy("")?, } } if let Some(blacklist) = self.proxy_blacklist.as_ref() { blacklist.set_opt(easy)?; } if let Some(auth) = self.proxy_authentication.as_ref() { auth.set_opt(easy)?; } if let Some(credentials) = self.proxy_credentials.as_ref() { credentials.set_opt(easy)?; } if let Some(max) = self.max_upload_speed { easy.max_send_speed(max)?; } if let Some(max) = self.max_download_speed { easy.max_recv_speed(max)?; } if let Some(cert) = self.ssl_client_certificate.as_ref() { cert.set_opt(easy)?; } if let Some(cert) = self.ssl_ca_certificate.as_ref() { cert.set_opt(easy)?; } if let Some(ciphers) = self.ssl_ciphers.as_ref() { ciphers.set_opt(easy)?; } if let Some(options) = self.ssl_options.as_ref() { options.set_opt(easy)?; } if let Some(enable) = self.enable_metrics { easy.progress(enable)?; } Ok(()) } }
32.658986
83
0.547199
87f867d7f90acdfe3347e00070711fa3760ad4fe
32,744
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use middle::const_eval; use middle::def; use middle::infer; use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding}; use middle::pat_util::pat_is_resolved_const; use middle::privacy::{AllPublic, LastMod}; use middle::subst::Substs; use middle::ty::{self, Ty, HasTypeFlags}; use check::{check_expr, check_expr_has_type, check_expr_with_expectation}; use check::{check_expr_coercable_to_type, demand, FnCtxt, Expectation}; use check::{check_expr_with_lvalue_pref, LvaluePreference}; use check::{instantiate_path, resolve_ty_and_def_ufcs, structurally_resolved_type}; use require_same_types; use util::nodemap::FnvHashMap; use std::cmp::{self, Ordering}; use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::ast_util; use syntax::codemap::{Span, Spanned}; use syntax::parse::token; use syntax::print::pprust; use syntax::ptr::P; pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &'tcx ast::Pat, expected: Ty<'tcx>) { let fcx = pcx.fcx; let tcx = pcx.fcx.ccx.tcx; debug!("check_pat(pat={:?},expected={:?})", pat, expected); match pat.node { ast::PatWild(_) => { fcx.write_ty(pat.id, expected); } ast::PatLit(ref lt) => { check_expr(fcx, &**lt); let expr_ty = fcx.expr_ty(&**lt); // Byte string patterns behave the same way as array patterns // They can denote both statically and dynamically sized byte arrays let mut pat_ty = expr_ty; if let ast::ExprLit(ref lt) = lt.node { if let ast::LitBinary(_) = lt.node { let expected_ty = structurally_resolved_type(fcx, pat.span, expected); 
if let ty::TyRef(_, mt) = expected_ty.sty { if let ty::TySlice(_) = mt.ty.sty { pat_ty = tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), tcx.mk_slice(tcx.types.u8)) } } } } fcx.write_ty(pat.id, pat_ty); // somewhat surprising: in this case, the subtyping // relation goes the opposite way as the other // cases. Actually what we really want is not a subtyping // relation at all but rather that there exists a LUB (so // that they can be compared). However, in practice, // constants are always scalars or strings. For scalars // subtyping is irrelevant, and for strings `expr_ty` is // type is `&'static str`, so if we say that // // &'static str <: expected // // that's equivalent to there existing a LUB. demand::suptype(fcx, pat.span, expected, pat_ty); } ast::PatRange(ref begin, ref end) => { check_expr(fcx, begin); check_expr(fcx, end); let lhs_ty = fcx.expr_ty(begin); let rhs_ty = fcx.expr_ty(end); // Check that both end-points are of numeric or char type. let numeric_or_char = |ty: Ty| ty.is_numeric() || ty.is_char(); let lhs_compat = numeric_or_char(lhs_ty); let rhs_compat = numeric_or_char(rhs_ty); if !lhs_compat || !rhs_compat { let span = if !lhs_compat && !rhs_compat { pat.span } else if !lhs_compat { begin.span } else { end.span }; // Note: spacing here is intentional, we want a space before "start" and "end". span_err!(tcx.sess, span, E0029, "only char and numeric types are allowed in range patterns\n \ start type: {}\n end type: {}", fcx.infcx().ty_to_string(lhs_ty), fcx.infcx().ty_to_string(rhs_ty) ); return; } // Check that the types of the end-points can be unified. let types_unify = require_same_types( tcx, Some(fcx.infcx()), false, pat.span, rhs_ty, lhs_ty, || "mismatched types in range".to_string() ); // It's ok to return without a message as `require_same_types` prints an error. if !types_unify { return; } // Now that we know the types can be unified we find the unified type and use // it to type the entire expression. 
let common_type = fcx.infcx().resolve_type_vars_if_possible(&lhs_ty); fcx.write_ty(pat.id, common_type); // Finally we evaluate the constants and check that the range is non-empty. let get_substs = |id| fcx.item_substs()[&id].substs.clone(); match const_eval::compare_lit_exprs(tcx, begin, end, Some(&common_type), get_substs) { Some(Ordering::Less) | Some(Ordering::Equal) => {} Some(Ordering::Greater) => { span_err!(tcx.sess, begin.span, E0030, "lower range bound must be less than or equal to upper"); } None => tcx.sess.span_bug(begin.span, "literals of different types in range pat") } // subtyping doesn't matter here, as the value is some kind of scalar demand::eqtype(fcx, pat.span, expected, lhs_ty); } ast::PatEnum(..) | ast::PatIdent(..) if pat_is_resolved_const(&tcx.def_map, pat) => { let const_did = tcx.def_map.borrow().get(&pat.id).unwrap().def_id(); let const_scheme = tcx.lookup_item_type(const_did); assert!(const_scheme.generics.is_empty()); let const_ty = pcx.fcx.instantiate_type_scheme(pat.span, &Substs::empty(), &const_scheme.ty); fcx.write_ty(pat.id, const_ty); // FIXME(#20489) -- we should limit the types here to scalars or something! // As with PatLit, what we really want here is that there // exist a LUB, but for the cases that can occur, subtype // is good enough. demand::suptype(fcx, pat.span, expected, const_ty); } ast::PatIdent(bm, ref path, ref sub) if pat_is_binding(&tcx.def_map, pat) => { let typ = fcx.local_ty(pat.span, pat.id); match bm { ast::BindByRef(mutbl) => { // if the binding is like // ref x | ref const x | ref mut x // then `x` is assigned a value of type `&M T` where M is the mutability // and T is the expected type. let region_var = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); let mt = ty::mt { ty: expected, mutbl: mutbl }; let region_ty = tcx.mk_ref(tcx.mk_region(region_var), mt); // `x` is assigned a value of type `&M T`, hence `&M T <: typeof(x)` is // required. However, we use equality, which is stronger. 
See (*) for // an explanation. demand::eqtype(fcx, pat.span, region_ty, typ); } // otherwise the type of x is the expected type T ast::BindByValue(_) => { // As above, `T <: typeof(x)` is required but we // use equality, see (*) below. demand::eqtype(fcx, pat.span, expected, typ); } } fcx.write_ty(pat.id, typ); // if there are multiple arms, make sure they all agree on // what the type of the binding `x` ought to be let canon_id = *pcx.map.get(&path.node).unwrap(); if canon_id != pat.id { let ct = fcx.local_ty(pat.span, canon_id); demand::eqtype(fcx, pat.span, ct, typ); } if let Some(ref p) = *sub { check_pat(pcx, &**p, expected); } } ast::PatIdent(_, ref path, _) => { let path = ast_util::ident_to_path(path.span, path.node); check_pat_enum(pcx, pat, &path, Some(&[]), expected); } ast::PatEnum(ref path, ref subpats) => { let subpats = subpats.as_ref().map(|v| &v[..]); check_pat_enum(pcx, pat, path, subpats, expected); } ast::PatQPath(ref qself, ref path) => { let self_ty = fcx.to_ty(&qself.ty); let path_res = if let Some(&d) = tcx.def_map.borrow().get(&pat.id) { d } else if qself.position == 0 { def::PathResolution { // This is just a sentinel for finish_resolving_def_to_ty. 
base_def: def::DefMod(ast_util::local_def(ast::CRATE_NODE_ID)), last_private: LastMod(AllPublic), depth: path.segments.len() } } else { tcx.sess.span_bug(pat.span, &format!("unbound path {:?}", pat)) }; if let Some((opt_ty, segments, def)) = resolve_ty_and_def_ufcs(fcx, path_res, Some(self_ty), path, pat.span, pat.id) { if check_assoc_item_is_const(pcx, def, pat.span) { let scheme = tcx.lookup_item_type(def.def_id()); let predicates = tcx.lookup_predicates(def.def_id()); instantiate_path(fcx, segments, scheme, &predicates, opt_ty, def, pat.span, pat.id); let const_ty = fcx.node_ty(pat.id); demand::suptype(fcx, pat.span, expected, const_ty); } else { fcx.write_error(pat.id) } } } ast::PatStruct(ref path, ref fields, etc) => { check_pat_struct(pcx, pat, path, fields, etc, expected); } ast::PatTup(ref elements) => { let element_tys: Vec<_> = (0..elements.len()).map(|_| fcx.infcx().next_ty_var()) .collect(); let pat_ty = tcx.mk_tup(element_tys.clone()); fcx.write_ty(pat.id, pat_ty); demand::eqtype(fcx, pat.span, expected, pat_ty); for (element_pat, element_ty) in elements.iter().zip(element_tys) { check_pat(pcx, &**element_pat, element_ty); } } ast::PatBox(ref inner) => { let inner_ty = fcx.infcx().next_ty_var(); let uniq_ty = tcx.mk_box(inner_ty); if check_dereferencable(pcx, pat.span, expected, &**inner) { // Here, `demand::subtype` is good enough, but I don't // think any errors can be introduced by using // `demand::eqtype`. 
demand::eqtype(fcx, pat.span, expected, uniq_ty); fcx.write_ty(pat.id, uniq_ty); check_pat(pcx, &**inner, inner_ty); } else { fcx.write_error(pat.id); check_pat(pcx, &**inner, tcx.types.err); } } ast::PatRegion(ref inner, mutbl) => { let inner_ty = fcx.infcx().next_ty_var(); let mt = ty::mt { ty: inner_ty, mutbl: mutbl }; let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); let rptr_ty = tcx.mk_ref(tcx.mk_region(region), mt); if check_dereferencable(pcx, pat.span, expected, &**inner) { // `demand::subtype` would be good enough, but using // `eqtype` turns out to be equally general. See (*) // below for details. demand::eqtype(fcx, pat.span, expected, rptr_ty); fcx.write_ty(pat.id, rptr_ty); check_pat(pcx, &**inner, inner_ty); } else { fcx.write_error(pat.id); check_pat(pcx, &**inner, tcx.types.err); } } ast::PatVec(ref before, ref slice, ref after) => { let expected_ty = structurally_resolved_type(fcx, pat.span, expected); let inner_ty = fcx.infcx().next_ty_var(); let pat_ty = match expected_ty.sty { ty::TyArray(_, size) => tcx.mk_array(inner_ty, { let min_len = before.len() + after.len(); match *slice { Some(_) => cmp::max(min_len, size), None => min_len } }), _ => { let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); tcx.mk_ref(tcx.mk_region(region), ty::mt { ty: tcx.mk_slice(inner_ty), mutbl: expected_ty.builtin_deref(true).map(|mt| mt.mutbl) .unwrap_or(ast::MutImmutable) }) } }; fcx.write_ty(pat.id, pat_ty); // `demand::subtype` would be good enough, but using // `eqtype` turns out to be equally general. See (*) // below for details. 
demand::eqtype(fcx, pat.span, expected, pat_ty); for elt in before { check_pat(pcx, &**elt, inner_ty); } if let Some(ref slice) = *slice { let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); let mutbl = expected_ty.builtin_deref(true) .map_or(ast::MutImmutable, |mt| mt.mutbl); let slice_ty = tcx.mk_ref(tcx.mk_region(region), ty::mt { ty: tcx.mk_slice(inner_ty), mutbl: mutbl }); check_pat(pcx, &**slice, slice_ty); } for elt in after { check_pat(pcx, &**elt, inner_ty); } } ast::PatMac(_) => tcx.sess.bug("unexpanded macro") } // (*) In most of the cases above (literals and constants being // the exception), we relate types using strict equality, evewn // though subtyping would be sufficient. There are a few reasons // for this, some of which are fairly subtle and which cost me // (nmatsakis) an hour or two debugging to remember, so I thought // I'd write them down this time. // // 1. There is no loss of expressiveness here, though it does // cause some inconvenience. What we are saying is that the type // of `x` becomes *exactly* what is expected. This can cause unnecessary // errors in some cases, such as this one: // it will cause errors in a case like this: // // ``` // fn foo<'x>(x: &'x int) { // let a = 1; // let mut z = x; // z = &a; // } // ``` // // The reason we might get an error is that `z` might be // assigned a type like `&'x int`, and then we would have // a problem when we try to assign `&a` to `z`, because // the lifetime of `&a` (i.e., the enclosing block) is // shorter than `'x`. // // HOWEVER, this code works fine. The reason is that the // expected type here is whatever type the user wrote, not // the initializer's type. In this case the user wrote // nothing, so we are going to create a type variable `Z`. // Then we will assign the type of the initializer (`&'x // int`) as a subtype of `Z`: `&'x int <: Z`. 
And hence we // will instantiate `Z` as a type `&'0 int` where `'0` is // a fresh region variable, with the constraint that `'x : // '0`. So basically we're all set. // // Note that there are two tests to check that this remains true // (`regions-reassign-{match,let}-bound-pointer.rs`). // // 2. Things go horribly wrong if we use subtype. The reason for // THIS is a fairly subtle case involving bound regions. See the // `givens` field in `region_inference`, as well as the test // `regions-relate-bound-regions-on-closures-to-inference-variables.rs`, // for details. Short version is that we must sometimes detect // relationships between specific region variables and regions // bound in a closure signature, and that detection gets thrown // off when we substitute fresh region variables here to enable // subtyping. } fn check_assoc_item_is_const(pcx: &pat_ctxt, def: def::Def, span: Span) -> bool { match def { def::DefAssociatedConst(..) => true, def::DefMethod(..) => { span_err!(pcx.fcx.ccx.tcx.sess, span, E0327, "associated items in match patterns must be constants"); false } _ => { pcx.fcx.ccx.tcx.sess.span_bug(span, "non-associated item in check_assoc_item_is_const"); } } } pub fn check_dereferencable<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, span: Span, expected: Ty<'tcx>, inner: &ast::Pat) -> bool { let fcx = pcx.fcx; let tcx = pcx.fcx.ccx.tcx; if pat_is_binding(&tcx.def_map, inner) { let expected = fcx.infcx().shallow_resolve(expected); expected.builtin_deref(true).map_or(true, |mt| match mt.ty.sty { ty::TyTrait(_) => { // This is "x = SomeTrait" being reduced from // "let &x = &SomeTrait" or "let box x = Box<SomeTrait>", an error. 
span_err!(tcx.sess, span, E0033, "type `{}` cannot be dereferenced", fcx.infcx().ty_to_string(expected)); false } _ => true }) } else { true } } pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx ast::Expr, discrim: &'tcx ast::Expr, arms: &'tcx [ast::Arm], expected: Expectation<'tcx>, match_src: ast::MatchSource) { let tcx = fcx.ccx.tcx; // Not entirely obvious: if matches may create ref bindings, we // want to use the *precise* type of the discriminant, *not* some // supertype, as the "discriminant type" (issue #23116). let contains_ref_bindings = arms.iter() .filter_map(|a| tcx.arm_contains_ref_binding(a)) .max_by(|m| match *m { ast::MutMutable => 1, ast::MutImmutable => 0, }); let discrim_ty; if let Some(m) = contains_ref_bindings { check_expr_with_lvalue_pref(fcx, discrim, LvaluePreference::from_mutbl(m)); discrim_ty = fcx.expr_ty(discrim); } else { // ...but otherwise we want to use any supertype of the // discriminant. This is sort of a workaround, see note (*) in // `check_pat` for some details. discrim_ty = fcx.infcx().next_ty_var(); check_expr_has_type(fcx, discrim, discrim_ty); }; // Typecheck the patterns first, so that we get types for all the // bindings. for arm in arms { let mut pcx = pat_ctxt { fcx: fcx, map: pat_id_map(&tcx.def_map, &*arm.pats[0]), }; for p in &arm.pats { check_pat(&mut pcx, &**p, discrim_ty); } } // Now typecheck the blocks. // // The result of the match is the common supertype of all the // arms. Start out the value as bottom, since it's the, well, // bottom the type lattice, and we'll be moving up the lattice as // we process each arm. 
(Note that any match with 0 arms is matching // on any empty type and is therefore unreachable; should the flow // of execution reach it, we will panic, so bottom is an appropriate // type in that case) let expected = expected.adjust_for_branches(fcx); let result_ty = arms.iter().fold(fcx.infcx().next_diverging_ty_var(), |result_ty, arm| { let bty = match expected { // We don't coerce to `()` so that if the match expression is a // statement it's branches can have any consistent type. That allows // us to give better error messages (pointing to a usually better // arm for inconsistent arms or to the whole match when a `()` type // is required). Expectation::ExpectHasType(ety) if ety != fcx.tcx().mk_nil() => { check_expr_coercable_to_type(fcx, &*arm.body, ety); ety } _ => { check_expr_with_expectation(fcx, &*arm.body, expected); fcx.node_ty(arm.body.id) } }; if let Some(ref e) = arm.guard { check_expr_has_type(fcx, &**e, tcx.types.bool); } if result_ty.references_error() || bty.references_error() { tcx.types.err } else { let (origin, expected, found) = match match_src { /* if-let construct without an else block */ ast::MatchSource::IfLetDesugar { contains_else_clause } if !contains_else_clause => ( infer::IfExpressionWithNoElse(expr.span), bty, result_ty, ), _ => ( infer::MatchExpressionArm(expr.span, arm.body.span), result_ty, bty, ), }; infer::common_supertype( fcx.infcx(), origin, true, expected, found, ) } }); fcx.write_ty(expr.id, result_ty); } pub struct pat_ctxt<'a, 'tcx: 'a> { pub fcx: &'a FnCtxt<'a, 'tcx>, pub map: PatIdMap, } pub fn check_pat_struct<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &'tcx ast::Pat, path: &ast::Path, fields: &'tcx [Spanned<ast::FieldPat>], etc: bool, expected: Ty<'tcx>) { let fcx = pcx.fcx; let tcx = pcx.fcx.ccx.tcx; let def = tcx.def_map.borrow().get(&pat.id).unwrap().full_def(); let (enum_def_id, variant_def_id) = match def { def::DefTrait(_) => { let name = pprust::path_to_string(path); span_err!(tcx.sess, pat.span, E0168, "use of 
trait `{}` in a struct pattern", name); fcx.write_error(pat.id); for field in fields { check_pat(pcx, &*field.node.pat, tcx.types.err); } return; }, _ => { let def_type = tcx.lookup_item_type(def.def_id()); match def_type.ty.sty { ty::TyStruct(struct_def_id, _) => (struct_def_id, struct_def_id), ty::TyEnum(enum_def_id, _) if def == def::DefVariant(enum_def_id, def.def_id(), true) => (enum_def_id, def.def_id()), _ => { let name = pprust::path_to_string(path); span_err!(tcx.sess, pat.span, E0163, "`{}` does not name a struct or a struct variant", name); fcx.write_error(pat.id); for field in fields { check_pat(pcx, &*field.node.pat, tcx.types.err); } return; } } } }; instantiate_path(pcx.fcx, &path.segments, tcx.lookup_item_type(enum_def_id), &tcx.lookup_predicates(enum_def_id), None, def, pat.span, pat.id); let pat_ty = fcx.node_ty(pat.id); demand::eqtype(fcx, pat.span, expected, pat_ty); let item_substs = fcx .item_substs() .get(&pat.id) .map(|substs| substs.substs.clone()) .unwrap_or_else(|| Substs::empty()); let struct_fields = tcx.struct_fields(variant_def_id, &item_substs); check_struct_pat_fields(pcx, pat.span, fields, &struct_fields, variant_def_id, etc); } pub fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &ast::Pat, path: &ast::Path, subpats: Option<&'tcx [P<ast::Pat>]>, expected: Ty<'tcx>) { // Typecheck the path. let fcx = pcx.fcx; let tcx = pcx.fcx.ccx.tcx; let path_res = *tcx.def_map.borrow().get(&pat.id).unwrap(); let (opt_ty, segments, def) = match resolve_ty_and_def_ufcs(fcx, path_res, None, path, pat.span, pat.id) { Some(resolution) => resolution, // Error handling done inside resolve_ty_and_def_ufcs, so if // resolution fails just return. None => {return;} }; // Items that were partially resolved before should have been resolved to // associated constants (i.e. not methods). 
if path_res.depth != 0 && !check_assoc_item_is_const(pcx, def, pat.span) { fcx.write_error(pat.id); return; } let enum_def = def.variant_def_ids() .map_or_else(|| def.def_id(), |(enum_def, _)| enum_def); let ctor_scheme = tcx.lookup_item_type(enum_def); let ctor_predicates = tcx.lookup_predicates(enum_def); let path_scheme = if ctor_scheme.ty.is_fn() { let fn_ret = tcx.no_late_bound_regions(&ctor_scheme.ty.fn_ret()).unwrap(); ty::TypeScheme { ty: fn_ret.unwrap(), generics: ctor_scheme.generics, } } else { ctor_scheme }; instantiate_path(pcx.fcx, segments, path_scheme, &ctor_predicates, opt_ty, def, pat.span, pat.id); // If we didn't have a fully resolved path to start with, we had an // associated const, and we should quit now, since the rest of this // function uses checks specific to structs and enums. if path_res.depth != 0 { let pat_ty = fcx.node_ty(pat.id); demand::suptype(fcx, pat.span, expected, pat_ty); return; } let pat_ty = fcx.node_ty(pat.id); demand::eqtype(fcx, pat.span, expected, pat_ty); let real_path_ty = fcx.node_ty(pat.id); let (arg_tys, kind_name): (Vec<_>, &'static str) = match real_path_ty.sty { ty::TyEnum(enum_def_id, expected_substs) if def == def::DefVariant(enum_def_id, def.def_id(), false) => { let variant = tcx.enum_variant_with_id(enum_def_id, def.def_id()); (variant.args.iter() .map(|t| fcx.instantiate_type_scheme(pat.span, expected_substs, t)) .collect(), "variant") } ty::TyStruct(struct_def_id, expected_substs) => { let struct_fields = tcx.struct_fields(struct_def_id, expected_substs); (struct_fields.iter() .map(|field| fcx.instantiate_type_scheme(pat.span, expected_substs, &field.mt.ty)) .collect(), "struct") } _ => { let name = pprust::path_to_string(path); span_err!(tcx.sess, pat.span, E0164, "`{}` does not name a non-struct variant or a tuple struct", name); fcx.write_error(pat.id); if let Some(subpats) = subpats { for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } return; } }; if let Some(subpats) = subpats { if 
subpats.len() == arg_tys.len() { for (subpat, arg_ty) in subpats.iter().zip(arg_tys) { check_pat(pcx, &**subpat, arg_ty); } } else if arg_tys.is_empty() { span_err!(tcx.sess, pat.span, E0024, "this pattern has {} field{}, but the corresponding {} has no fields", subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name); for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } else { span_err!(tcx.sess, pat.span, E0023, "this pattern has {} field{}, but the corresponding {} has {} field{}", subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name, arg_tys.len(), if arg_tys.len() == 1 {""} else {"s"}); for pat in subpats { check_pat(pcx, &**pat, tcx.types.err); } } } } /// `path` is the AST path item naming the type of this struct. /// `fields` is the field patterns of the struct pattern. /// `struct_fields` describes the type of each field of the struct. /// `struct_id` is the ID of the struct. /// `etc` is true if the pattern said '...' and false otherwise. pub fn check_struct_pat_fields<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, span: Span, fields: &'tcx [Spanned<ast::FieldPat>], struct_fields: &[ty::field<'tcx>], struct_id: ast::DefId, etc: bool) { let tcx = pcx.fcx.ccx.tcx; // Index the struct fields' types. let field_type_map = struct_fields .iter() .map(|field| (field.name, field.mt.ty)) .collect::<FnvHashMap<_, _>>(); // Keep track of which fields have already appeared in the pattern. let mut used_fields = FnvHashMap(); // Typecheck each field. 
for &Spanned { node: ref field, span } in fields { let field_type = match used_fields.entry(field.ident.name) { Occupied(occupied) => { span_err!(tcx.sess, span, E0025, "field `{}` bound multiple times in the pattern", token::get_ident(field.ident)); span_note!(tcx.sess, *occupied.get(), "field `{}` previously bound here", token::get_ident(field.ident)); tcx.types.err } Vacant(vacant) => { vacant.insert(span); field_type_map.get(&field.ident.name).cloned() .unwrap_or_else(|| { span_err!(tcx.sess, span, E0026, "struct `{}` does not have a field named `{}`", tcx.item_path_str(struct_id), token::get_ident(field.ident)); tcx.types.err }) } }; let field_type = pcx.fcx.normalize_associated_types_in(span, &field_type); check_pat(pcx, &*field.pat, field_type); } // Report an error if not all the fields were specified. if !etc { for field in struct_fields .iter() .filter(|field| !used_fields.contains_key(&field.name)) { span_err!(tcx.sess, span, E0027, "pattern does not mention field `{}`", token::get_name(field.name)); } } }
41.606099
98
0.51359
296411a8c2f50e923c629525fbc7fd39fa35177d
2,208
// Copyright 2021 - Nym Technologies SA <[email protected]> // SPDX-License-Identifier: Apache-2.0 use crate::nymd::error::NymdError; use cosmrs::proto::cosmos::base::query::v1beta1::{PageRequest, PageResponse}; use cosmrs::rpc::endpoint::broadcast; use flate2::write::GzEncoder; use flate2::Compression; use std::io::Write; pub(crate) trait CheckResponse: Sized { fn check_response(self) -> Result<Self, NymdError>; } impl CheckResponse for broadcast::tx_commit::Response { fn check_response(self) -> Result<Self, NymdError> { if self.check_tx.code.is_err() { return Err(NymdError::BroadcastTxErrorCheckTx { hash: self.hash, height: self.height, code: self.check_tx.code.value(), raw_log: self.check_tx.log.value().to_owned(), }); } if self.deliver_tx.code.is_err() { return Err(NymdError::BroadcastTxErrorDeliverTx { hash: self.hash, height: self.height, code: self.deliver_tx.code.value(), raw_log: self.deliver_tx.log.value().to_owned(), }); } Ok(self) } } pub(crate) fn compress_wasm_code(code: &[u8]) -> Result<Vec<u8>, NymdError> { // using compression level 9, same as cosmjs, that optimises for size let mut encoder = GzEncoder::new(Vec::new(), Compression::best()); encoder .write_all(code) .map_err(NymdError::WasmCompressionError)?; encoder.finish().map_err(NymdError::WasmCompressionError) } pub(crate) fn create_pagination(key: Vec<u8>) -> PageRequest { PageRequest { key, offset: 0, limit: 0, count_total: false, } } pub(crate) fn next_page_key(pagination_info: Option<PageResponse>) -> Option<Vec<u8>> { if let Some(next_page_info) = pagination_info { // it turns out, even though `PageResponse` is always returned wrapped in an `Option`, // the `next_key` can still be empty, so check whether we actually need to perform another call if !next_page_info.next_key.is_empty() { return Some(next_page_info.next_key); } } None }
32.470588
103
0.625453
1dd7d3021647d46a519d058aba283c9646087493
33
pub mod main; pub mod functions;
11
18
0.757576
1890540be1ba743917520b76a24b4e055e91bd18
1,031
// Copyright 2013-2017, The Gtk-rs Project Developers. // See the COPYRIGHT file at the top-level directory of this distribution. // Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT> use cairo; use gdk_sys; use glib; use glib::translate::*; use Screen; impl Screen { pub fn get_font_options(&self) -> Option<cairo::FontOptions> { unsafe { from_glib_none(mut_override(gdk_sys::gdk_screen_get_font_options( self.to_glib_none().0, ))) } } pub fn get_setting(&self, name: &str) -> Option<glib::Value> { unsafe { let mut value = glib::Value::uninitialized(); let done: bool = from_glib(gdk_sys::gdk_screen_get_setting( self.to_glib_none().0, name.to_glib_none().0, value.to_glib_none_mut().0, )); if done == true { Some(value) } else { None } } } }
27.864865
95
0.559651
ab4566c178c697e7a2991f45327010cec8ee2166
3,579
#[doc = r" Value read from the register"] pub struct R { bits: u32, } #[doc = r" Value to write to the register"] pub struct W { bits: u32, } impl super::AESTAGOUT { #[doc = r" Modifies the contents of the register"] #[inline] pub fn modify<F>(&self, f: F) where for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W, { let bits = self.register.get(); let r = R { bits: bits }; let mut w = W { bits: bits }; f(&r, &mut w); self.register.set(w.bits); } #[doc = r" Reads the contents of the register"] #[inline] pub fn read(&self) -> R { R { bits: self.register.get(), } } #[doc = r" Writes to the register"] #[inline] pub fn write<F>(&self, f: F) where F: FnOnce(&mut W) -> &mut W, { let mut w = W::reset_value(); f(&mut w); self.register.set(w.bits); } #[doc = r" Writes the reset value to the register"] #[inline] pub fn reset(&self) { self.write(|w| w) } } #[doc = r" Value of the field"] pub struct AES_TAGR { bits: u32, } impl AES_TAGR { #[doc = r" Value of the field as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } } #[doc = r" Proxy"] pub struct _AES_TAGW<'a> { w: &'a mut W, } impl<'a> _AES_TAGW<'a> { #[doc = r" Writes raw bits to the field"] #[inline] pub unsafe fn bits(self, value: u32) -> &'a mut W { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; self.w.bits &= !((MASK as u32) << OFFSET); self.w.bits |= ((value & MASK) as u32) << OFFSET; self.w } } impl R { #[doc = r" Value of the register as raw bits"] #[inline] pub fn bits(&self) -> u32 { self.bits } #[doc = "Bits 0:31 - 31:0\\] AES_TAG\\[31:0\\] Bits \\[31:0\\] of this register stores the authentication value for the combined and authentication only modes. For a host read operation, these registers contain the last 128-bit TAG output of the EIP-120t; the TAG is available until the next context is written. This register will only contain valid data if the TAG is available and when the AESCTL.SAVED_CONTEXT_RDY register is set. 
During processing or for operations/modes that do not return a TAG, reads from this register return data from the IV register."] #[inline] pub fn aes_tag(&self) -> AES_TAGR { let bits = { const MASK: u32 = 4294967295; const OFFSET: u8 = 0; ((self.bits >> OFFSET) & MASK as u32) as u32 }; AES_TAGR { bits } } } impl W { #[doc = r" Reset value of the register"] #[inline] pub fn reset_value() -> W { W { bits: 0 } } #[doc = r" Writes raw bits to the register"] #[inline] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.bits = bits; self } #[doc = "Bits 0:31 - 31:0\\] AES_TAG\\[31:0\\] Bits \\[31:0\\] of this register stores the authentication value for the combined and authentication only modes. For a host read operation, these registers contain the last 128-bit TAG output of the EIP-120t; the TAG is available until the next context is written. This register will only contain valid data if the TAG is available and when the AESCTL.SAVED_CONTEXT_RDY register is set. During processing or for operations/modes that do not return a TAG, reads from this register return data from the IV register."] #[inline] pub fn aes_tag(&mut self) -> _AES_TAGW { _AES_TAGW { w: self } } }
33.764151
566
0.58508
dbb02b2815bf94e7a5b7f16a09b860d01168457d
27,153
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT. /// All possible error types for this service. #[non_exhaustive] #[derive(std::fmt::Debug)] pub enum Error { /// <p>The request is missing required parameters or has invalid parameters.</p> InvalidRequestException(crate::error::InvalidRequestException), /// <p>The resource was not found. Verify that the name or Amazon Resource Name (ARN) of the resource is /// correct.</p> ResourceNotFoundException(crate::error::ResourceNotFoundException), /// <p>You have reached the maximum number of sampling rules.</p> RuleLimitExceededException(crate::error::RuleLimitExceededException), /// <p>The request exceeds the maximum number of requests per second.</p> ThrottledException(crate::error::ThrottledException), /// <p>You have exceeded the maximum number of tags you can apply to this resource.</p> TooManyTagsException(crate::error::TooManyTagsException), /// An unhandled error occurred. Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>), } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Error::InvalidRequestException(inner) => inner.fmt(f), Error::ResourceNotFoundException(inner) => inner.fmt(f), Error::RuleLimitExceededException(inner) => inner.fmt(f), Error::ThrottledException(inner) => inner.fmt(f), Error::TooManyTagsException(inner) => inner.fmt(f), Error::Unhandled(inner) => inner.fmt(f), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::BatchGetTracesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::BatchGetTracesError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::BatchGetTracesErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::BatchGetTracesErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::BatchGetTracesErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::CreateGroupError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::CreateGroupError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::CreateGroupErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::CreateGroupErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::CreateGroupErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::CreateSamplingRuleError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::CreateSamplingRuleError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::CreateSamplingRuleErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::CreateSamplingRuleErrorKind::RuleLimitExceededException(inner) => { Error::RuleLimitExceededException(inner) } crate::error::CreateSamplingRuleErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::CreateSamplingRuleErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteGroupError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::DeleteGroupError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::DeleteGroupErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::DeleteGroupErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::DeleteGroupErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteSamplingRuleError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::DeleteSamplingRuleError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::DeleteSamplingRuleErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::DeleteSamplingRuleErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::DeleteSamplingRuleErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetEncryptionConfigError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetEncryptionConfigError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetEncryptionConfigErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetEncryptionConfigErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetEncryptionConfigErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetGroupError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::GetGroupError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetGroupErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetGroupErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetGroupErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetGroupsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::GetGroupsError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetGroupsErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetGroupsErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetGroupsErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetInsightError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::GetInsightError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetInsightErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetInsightErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetInsightErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetInsightEventsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetInsightEventsError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetInsightEventsErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetInsightEventsErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetInsightEventsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetInsightImpactGraphError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetInsightImpactGraphError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetInsightImpactGraphErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetInsightImpactGraphErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetInsightImpactGraphErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetInsightSummariesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetInsightSummariesError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetInsightSummariesErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetInsightSummariesErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetInsightSummariesErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetSamplingRulesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetSamplingRulesError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetSamplingRulesErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetSamplingRulesErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetSamplingRulesErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetSamplingStatisticSummariesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetSamplingStatisticSummariesError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetSamplingStatisticSummariesErrorKind::InvalidRequestException( inner, ) => Error::InvalidRequestException(inner), crate::error::GetSamplingStatisticSummariesErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetSamplingStatisticSummariesErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetSamplingTargetsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetSamplingTargetsError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetSamplingTargetsErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetSamplingTargetsErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetSamplingTargetsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetServiceGraphError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::GetServiceGraphError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetServiceGraphErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetServiceGraphErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetServiceGraphErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetTimeSeriesServiceStatisticsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError< crate::error::GetTimeSeriesServiceStatisticsError, R, >, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetTimeSeriesServiceStatisticsErrorKind::InvalidRequestException( inner, ) => Error::InvalidRequestException(inner), crate::error::GetTimeSeriesServiceStatisticsErrorKind::ThrottledException( inner, ) => Error::ThrottledException(inner), crate::error::GetTimeSeriesServiceStatisticsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetTraceGraphError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::GetTraceGraphError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::GetTraceGraphErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetTraceGraphErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetTraceGraphErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetTraceSummariesError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::GetTraceSummariesError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::GetTraceSummariesErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::GetTraceSummariesErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::GetTraceSummariesErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::ListTagsForResourceErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::ListTagsForResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::ListTagsForResourceErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::ListTagsForResourceErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutEncryptionConfigError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::PutEncryptionConfigError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::PutEncryptionConfigErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::PutEncryptionConfigErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::PutEncryptionConfigErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutTelemetryRecordsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::PutTelemetryRecordsError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::PutTelemetryRecordsErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::PutTelemetryRecordsErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::PutTelemetryRecordsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutTraceSegmentsError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::PutTraceSegmentsError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::PutTraceSegmentsErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::PutTraceSegmentsErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::PutTraceSegmentsErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::TagResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::TagResourceError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::TagResourceErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::TagResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::TagResourceErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::TagResourceErrorKind::TooManyTagsException(inner) => { Error::TooManyTagsException(inner) } crate::error::TagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::UntagResourceError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::UntagResourceError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::UntagResourceErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::UntagResourceErrorKind::ResourceNotFoundException(inner) => { Error::ResourceNotFoundException(inner) } crate::error::UntagResourceErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::UntagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::UpdateGroupError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from(err: aws_smithy_http::result::SdkError<crate::error::UpdateGroupError, R>) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. 
} => match err.kind { crate::error::UpdateGroupErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::UpdateGroupErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::UpdateGroupErrorKind::Unhandled(inner) => Error::Unhandled(inner), }, _ => Error::Unhandled(err.into()), } } } impl<R> From<aws_smithy_http::result::SdkError<crate::error::UpdateSamplingRuleError, R>> for Error where R: Send + Sync + std::fmt::Debug + 'static, { fn from( err: aws_smithy_http::result::SdkError<crate::error::UpdateSamplingRuleError, R>, ) -> Self { match err { aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind { crate::error::UpdateSamplingRuleErrorKind::InvalidRequestException(inner) => { Error::InvalidRequestException(inner) } crate::error::UpdateSamplingRuleErrorKind::ThrottledException(inner) => { Error::ThrottledException(inner) } crate::error::UpdateSamplingRuleErrorKind::Unhandled(inner) => { Error::Unhandled(inner) } }, _ => Error::Unhandled(err.into()), } } } impl std::error::Error for Error {}
42.963608
108
0.576548
cc7ca9c9bb761606158434251f27364841f2c626
8,135
// Copyright 2018-2021 Cargill Incorporated // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Contains the implementation of `NodeBuilder`. use std::time::Duration; use cylinder::Verifier; use scabbard::service::ScabbardFactory; use splinter::admin::rest_api::CircuitResourceProvider; use splinter::admin::service::AdminServiceBuilder; use splinter::circuit::routing::RoutingTableWriter; use splinter::error::InternalError; use splinter::events::Reactor; use splinter::orchestrator::ServiceOrchestratorBuilder; use splinter::peer::PeerManagerConnector; use splinter::registry::{LocalYamlRegistry, RegistryReader, UnifiedRegistry}; use splinter::rest_api::actix_web_1::RestResourceProvider as _; use splinter::service::ServiceProcessorBuilder; use splinter::store::StoreFactory; use splinter::transport::{inproc::InprocTransport, Transport}; use crate::node::builder::admin::AdminServiceEventClientVariant; use crate::node::running::admin::{self as running_admin, AdminSubsystem}; pub struct RunnableAdminSubsystem { pub node_id: String, pub admin_timeout: Duration, pub store_factory: Box<dyn StoreFactory>, pub peer_connector: PeerManagerConnector, pub routing_writer: Box<dyn RoutingTableWriter>, pub service_transport: InprocTransport, pub admin_service_verifier: Box<dyn Verifier>, pub scabbard_service_factory: Option<ScabbardFactory>, pub registries: Option<Vec<String>>, pub admin_service_event_client_variant: AdminServiceEventClientVariant, } impl RunnableAdminSubsystem { pub fn 
run(self) -> Result<AdminSubsystem, InternalError> { let node_id = self.node_id; let store_factory = self.store_factory; let admin_timeout = self.admin_timeout; let peer_connector = self.peer_connector; let mut service_transport = self.service_transport; let routing_writer = self.routing_writer; let mut registry = store_factory.get_registry_store(); if let Some(external_registries) = self.registries { let read_only_registries = external_registries .iter() .map(|registry| { let mut iter = registry.splitn(2, "://"); match (iter.next(), iter.next()) { (Some(scheme), Some(path)) => match scheme { "file" => { debug!( "Attempting to add local read-only registry from file: {}", path ); match LocalYamlRegistry::new(path) { Ok(registry) => { Ok(Box::new(registry) as Box<dyn RegistryReader>) } Err(err) => Err(InternalError::from_source_with_message( Box::new(err), format!( "Failed to add read-only LocalYamlRegistry '{}'", path ), )), } } _ => Err(InternalError::with_message(format!( "Invalid registry provided ({}): must be valid 'file://' URI", registry ))), }, (Some(_), None) => Err(InternalError::with_message( "Failed to parse registry argument: no URI scheme provided".to_string(), )), _ => unreachable!(), // splitn always returns at least one item } }) .collect::<Result<Vec<_>, _>>()?; registry = Box::new(UnifiedRegistry::new(registry, read_only_registries)); } let orchestrator_connection = service_transport .connect("inproc://orchestator") .map_err(|err| InternalError::from_source(Box::new(err)))?; let mut orchestrator_builder = ServiceOrchestratorBuilder::new().with_connection(orchestrator_connection); if let Some(scabbard_service_factory) = self.scabbard_service_factory { orchestrator_builder = orchestrator_builder.with_service_factory(Box::new(scabbard_service_factory)); } let orchestrator = orchestrator_builder .build() .map_err(|e| InternalError::from_source(Box::new(e)))? 
.run() .map_err(|e| InternalError::from_source(Box::new(e)))?; let mut orchestrator_resources = orchestrator.resources(); let mut admin_service_builder = AdminServiceBuilder::new(); admin_service_builder = admin_service_builder .with_node_id(node_id.clone()) .with_service_orchestrator(orchestrator) .with_peer_manager_connector(peer_connector) .with_admin_service_store(store_factory.get_admin_service_store()) .with_admin_event_store(store_factory.get_admin_service_store()) .with_signature_verifier(self.admin_service_verifier) .with_admin_key_verifier(Box::new(registry.clone_box_as_reader())) .with_key_permission_manager(Box::new( splinter::keys::insecure::AllowAllKeyPermissionManager, )) .with_coordinator_timeout(admin_timeout) .with_routing_table_writer(routing_writer) .with_admin_event_store(store_factory.get_admin_service_store()); let circuit_resource_provider = CircuitResourceProvider::new(node_id, store_factory.get_admin_service_store()); let admin_service = admin_service_builder .build() .map_err(|err| InternalError::from_source(Box::new(err)))?; let mut actix1_resources = vec![]; actix1_resources.append(&mut admin_service.resources()); actix1_resources.append(&mut circuit_resource_provider.resources()); actix1_resources.append(&mut registry.resources()); actix1_resources.append(&mut orchestrator_resources); // set up inproc connections let admin_connection = service_transport .connect("inproc://admin-service") .map_err(|err| InternalError::from_source(Box::new(err)))?; let admin_service_processor = ServiceProcessorBuilder::new() .with_connection(admin_connection) .with_circuit("admin".into()) .with_service(Box::new(admin_service)) .build() .map_err(|e| InternalError::from_source(Box::new(e)))?; let admin_service_shutdown = admin_service_processor .start() .map_err(|e| InternalError::from_source(Box::new(e)))?; let running_admin_service_event_client_variant = match self.admin_service_event_client_variant { AdminServiceEventClientVariant::ActixWebClient => { 
running_admin::AdminServiceEventClientVariant::ActixWebClient(Reactor::new()) } }; Ok(AdminSubsystem { registry_writer: registry.clone_box_as_writer(), admin_service_shutdown, actix1_resources, store_factory, admin_service_event_client_variant: running_admin_service_event_client_variant, }) } }
44.453552
100
0.607498
f7c8fbe88f1505ce730495bfbf5b04b58af9e044
6,949
#![allow(unused_imports, non_camel_case_types)] use crate::models::r4::Element::Element; use crate::models::r4::Extension::Extension; use crate::models::r4::Period::Period; use crate::models::r4::Reference::Reference; use serde_json::json; use serde_json::value::Value; use std::borrow::Cow; /// A financial tool for tracking value accrued for a particular purpose. In the /// healthcare field, used to track charges for a patient, cost centers, etc. #[derive(Debug)] pub struct Account_Guarantor<'a> { pub(crate) value: Cow<'a, Value>, } impl Account_Guarantor<'_> { pub fn new(value: &Value) -> Account_Guarantor { Account_Guarantor { value: Cow::Borrowed(value), } } pub fn to_json(&self) -> Value { (*self.value).clone() } /// Extensions for onHold pub fn _on_hold(&self) -> Option<Element> { if let Some(val) = self.value.get("_onHold") { return Some(Element { value: Cow::Borrowed(val), }); } return None; } /// May be used to represent additional information that is not part of the basic /// definition of the element. To make the use of extensions safe and manageable, /// there is a strict set of governance applied to the definition and use of /// extensions. Though any implementer can define an extension, there is a set of /// requirements that SHALL be met as part of the definition of the extension. pub fn extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("extension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// Unique id for the element within a resource (for internal references). This may be /// any string value that does not contain spaces. 
pub fn id(&self) -> Option<&str> { if let Some(Value::String(string)) = self.value.get("id") { return Some(string); } return None; } /// May be used to represent additional information that is not part of the basic /// definition of the element and that modifies the understanding of the element /// in which it is contained and/or the understanding of the containing element's /// descendants. Usually modifier elements provide negation or qualification. To make /// the use of extensions safe and manageable, there is a strict set of governance /// applied to the definition and use of extensions. Though any implementer can define /// an extension, there is a set of requirements that SHALL be met as part of the /// definition of the extension. Applications processing a resource are required to /// check for modifier extensions. Modifier extensions SHALL NOT change the meaning /// of any elements on Resource or DomainResource (including cannot change the meaning /// of modifierExtension itself). pub fn modifier_extension(&self) -> Option<Vec<Extension>> { if let Some(Value::Array(val)) = self.value.get("modifierExtension") { return Some( val.into_iter() .map(|e| Extension { value: Cow::Borrowed(e), }) .collect::<Vec<_>>(), ); } return None; } /// A guarantor may be placed on credit hold or otherwise have their role temporarily /// suspended. pub fn on_hold(&self) -> Option<bool> { if let Some(val) = self.value.get("onHold") { return Some(val.as_bool().unwrap()); } return None; } /// The entity who is responsible. pub fn party(&self) -> Reference { Reference { value: Cow::Borrowed(&self.value["party"]), } } /// The timeframe during which the guarantor accepts responsibility for the account. 
pub fn period(&self) -> Option<Period> { if let Some(val) = self.value.get("period") { return Some(Period { value: Cow::Borrowed(val), }); } return None; } pub fn validate(&self) -> bool { if let Some(_val) = self._on_hold() { if !_val.validate() { return false; } } if let Some(_val) = self.extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.id() {} if let Some(_val) = self.modifier_extension() { if !_val.into_iter().map(|e| e.validate()).all(|x| x == true) { return false; } } if let Some(_val) = self.on_hold() {} if !self.party().validate() { return false; } if let Some(_val) = self.period() { if !_val.validate() { return false; } } return true; } } #[derive(Debug)] pub struct Account_GuarantorBuilder { pub(crate) value: Value, } impl Account_GuarantorBuilder { pub fn build(&self) -> Account_Guarantor { Account_Guarantor { value: Cow::Owned(self.value.clone()), } } pub fn with(existing: Account_Guarantor) -> Account_GuarantorBuilder { Account_GuarantorBuilder { value: (*existing.value).clone(), } } pub fn new(party: Reference) -> Account_GuarantorBuilder { let mut __value: Value = json!({}); __value["party"] = json!(party.value); return Account_GuarantorBuilder { value: __value }; } pub fn _on_hold<'a>(&'a mut self, val: Element) -> &'a mut Account_GuarantorBuilder { self.value["_onHold"] = json!(val.value); return self; } pub fn extension<'a>(&'a mut self, val: Vec<Extension>) -> &'a mut Account_GuarantorBuilder { self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn id<'a>(&'a mut self, val: &str) -> &'a mut Account_GuarantorBuilder { self.value["id"] = json!(val); return self; } pub fn modifier_extension<'a>( &'a mut self, val: Vec<Extension>, ) -> &'a mut Account_GuarantorBuilder { self.value["modifierExtension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>()); return self; } pub fn on_hold<'a>(&'a mut self, val: bool) -> 
&'a mut Account_GuarantorBuilder { self.value["onHold"] = json!(val); return self; } pub fn period<'a>(&'a mut self, val: Period) -> &'a mut Account_GuarantorBuilder { self.value["period"] = json!(val.value); return self; } }
33.897561
97
0.574903
649bcd8aad98a60cace7ccc6df786edb4e18eaae
24,661
use super::{GetProcAddress, _sapp_opengl32, _sapp_wglGetProcAddress}; pub type GLenum = ::std::os::raw::c_uint; pub type GLboolean = ::std::os::raw::c_uchar; pub type GLbitfield = ::std::os::raw::c_uint; pub type GLvoid = ::std::os::raw::c_void; pub type GLbyte = ::std::os::raw::c_schar; pub type GLshort = ::std::os::raw::c_short; pub type GLint = ::std::os::raw::c_int; pub type GLubyte = ::std::os::raw::c_uchar; pub type GLushort = ::std::os::raw::c_ushort; pub type GLuint = ::std::os::raw::c_uint; pub type GLuint64 = ::std::os::raw::c_ulonglong; pub type GLsizei = ::std::os::raw::c_int; pub type GLchar = ::std::os::raw::c_char; pub type khronos_ssize_t = ::std::os::raw::c_long; pub type khronos_usize_t = ::std::os::raw::c_ulong; pub type khronos_intptr_t = ::std::os::raw::c_long; pub type GLsizeiptr = khronos_ssize_t; pub type GLintptr = khronos_intptr_t; pub type GLfloat = f32; pub type GLclampf = f32; pub type GLdouble = f64; pub type GLclampd = f64; pub const GL_INT_2_10_10_10_REV: u32 = 0x8D9F; pub const GL_PROGRAM_POINT_SIZE: u32 = 0x8642; pub const GL_STENCIL_ATTACHMENT: u32 = 0x8D20; pub const GL_DEPTH_ATTACHMENT: u32 = 0x8D00; pub const GL_COLOR_ATTACHMENT2: u32 = 0x8CE2; pub const GL_COLOR_ATTACHMENT0: u32 = 0x8CE0; pub const GL_COLOR_ATTACHMENT22: u32 = 0x8CF6; pub const GL_DRAW_FRAMEBUFFER: u32 = 0x8CA9; pub const GL_FRAMEBUFFER_COMPLETE: u32 = 0x8CD5; pub const GL_NUM_EXTENSIONS: u32 = 0x821D; pub const GL_INFO_LOG_LENGTH: u32 = 0x8B84; pub const GL_VERTEX_SHADER: u32 = 0x8B31; pub const GL_INCR: u32 = 0x1E02; pub const GL_DYNAMIC_DRAW: u32 = 0x88E8; pub const GL_STATIC_DRAW: u32 = 0x88E4; pub const GL_TEXTURE_CUBE_MAP_POSITIVE_Z: u32 = 0x8519; pub const GL_TEXTURE_CUBE_MAP: u32 = 0x8513; pub const GL_FUNC_SUBTRACT: u32 = 0x800A; pub const GL_FUNC_REVERSE_SUBTRACT: u32 = 0x800B; pub const GL_CONSTANT_COLOR: u32 = 0x8001; pub const GL_DECR_WRAP: u32 = 0x8508; pub const GL_LINEAR_MIPMAP_LINEAR: u32 = 0x2703; pub const GL_ELEMENT_ARRAY_BUFFER: u32 = 
0x8893; pub const GL_SHORT: u32 = 0x1402; pub const GL_DEPTH_TEST: u32 = 0x0B71; pub const GL_TEXTURE_CUBE_MAP_NEGATIVE_Y: u32 = 0x8518; pub const GL_LINK_STATUS: u32 = 0x8B82; pub const GL_TEXTURE_CUBE_MAP_POSITIVE_Y: u32 = 0x8517; pub const GL_SAMPLE_ALPHA_TO_COVERAGE: u32 = 0x809E; pub const GL_RGBA16F: u32 = 0x881A; pub const GL_CONSTANT_ALPHA: u32 = 0x8003; pub const GL_READ_FRAMEBUFFER: u32 = 0x8CA8; pub const GL_TEXTURE0: u32 = 0x84C0; pub const GL_TEXTURE_MIN_LOD: u32 = 0x813A; pub const GL_CLAMP_TO_EDGE: u32 = 0x812F; pub const GL_UNSIGNED_SHORT_5_6_5: u32 = 0x8363; pub const GL_TEXTURE_WRAP_R: u32 = 0x8072; pub const GL_UNSIGNED_SHORT_5_5_5_1: u32 = 0x8034; pub const GL_NEAREST_MIPMAP_NEAREST: u32 = 0x2700; pub const GL_UNSIGNED_SHORT_4_4_4_4: u32 = 0x8033; pub const GL_SRC_ALPHA_SATURATE: u32 = 0x0308; pub const GL_STREAM_DRAW: u32 = 0x88E0; pub const GL_ONE: u32 = 1; pub const GL_NEAREST_MIPMAP_LINEAR: u32 = 0x2702; pub const GL_RGB10_A2: u32 = 0x8059; pub const GL_RGBA8: u32 = 0x8058; pub const GL_COLOR_ATTACHMENT1: u32 = 0x8CE1; pub const GL_RGBA4: u32 = 0x8056; pub const GL_RGB8: u32 = 0x8051; pub const GL_ARRAY_BUFFER: u32 = 0x8892; pub const GL_STENCIL: u32 = 0x1802; pub const GL_TEXTURE_2D: u32 = 0x0DE1; pub const GL_DEPTH: u32 = 0x1801; pub const GL_FRONT: u32 = 0x0404; pub const GL_STENCIL_BUFFER_BIT: u32 = 0x00000400; pub const GL_REPEAT: u32 = 0x2901; pub const GL_RGBA: u32 = 0x1908; pub const GL_TEXTURE_CUBE_MAP_POSITIVE_X: u32 = 0x8515; pub const GL_DECR: u32 = 0x1E03; pub const GL_FRAGMENT_SHADER: u32 = 0x8B30; pub const GL_FLOAT: u32 = 0x1406; pub const GL_TEXTURE_MAX_LOD: u32 = 0x813B; pub const GL_DEPTH_COMPONENT: u32 = 0x1902; pub const GL_ONE_MINUS_DST_ALPHA: u32 = 0x0305; pub const GL_COLOR: u32 = 0x1800; pub const GL_TEXTURE_2D_ARRAY: u32 = 0x8C1A; pub const GL_TRIANGLES: u32 = 0x0004; pub const GL_UNSIGNED_BYTE: u32 = 0x1401; pub const GL_TEXTURE_MAG_FILTER: u32 = 0x2800; pub const GL_ONE_MINUS_CONSTANT_ALPHA: u32 = 0x8004; pub 
const GL_NONE: u32 = 0; pub const GL_SRC_COLOR: u32 = 0x0300; pub const GL_BYTE: u32 = 0x1400; pub const GL_TEXTURE_CUBE_MAP_NEGATIVE_Z: u32 = 0x851A; pub const GL_LINE_STRIP: u32 = 0x0003; pub const GL_TEXTURE_3D: u32 = 0x806F; pub const GL_CW: u32 = 0x0900; pub const GL_LINEAR: u32 = 0x2601; pub const GL_RENDERBUFFER: u32 = 0x8D41; pub const GL_GEQUAL: u32 = 0x0206; pub const GL_COLOR_BUFFER_BIT: u32 = 0x00004000; pub const GL_RGBA32F: u32 = 0x8814; pub const GL_BLEND: u32 = 0x0BE2; pub const GL_ONE_MINUS_SRC_ALPHA: u32 = 0x0303; pub const GL_ONE_MINUS_CONSTANT_COLOR: u32 = 0x8002; pub const GL_TEXTURE_WRAP_T: u32 = 0x2803; pub const GL_TEXTURE_WRAP_S: u32 = 0x2802; pub const GL_TEXTURE_MIN_FILTER: u32 = 0x2801; pub const GL_LINEAR_MIPMAP_NEAREST: u32 = 0x2701; pub const GL_EXTENSIONS: u32 = 0x1F03; pub const GL_NO_ERROR: u32 = 0; pub const GL_REPLACE: u32 = 0x1E01; pub const GL_KEEP: u32 = 0x1E00; pub const GL_CCW: u32 = 0x0901; pub const GL_TEXTURE_CUBE_MAP_NEGATIVE_X: u32 = 0x8516; pub const GL_RGB: u32 = 0x1907; pub const GL_TRIANGLE_STRIP: u32 = 0x0005; pub const GL_FALSE: u32 = 0; pub const GL_ZERO: u32 = 0; pub const GL_CULL_FACE: u32 = 0x0B44; pub const GL_INVERT: u32 = 0x150A; pub const GL_INT: u32 = 0x1404; pub const GL_UNSIGNED_INT: u32 = 0x1405; pub const GL_UNSIGNED_SHORT: u32 = 0x1403; pub const GL_NEAREST: u32 = 0x2600; pub const GL_SCISSOR_TEST: u32 = 0x0C11; pub const GL_LEQUAL: u32 = 0x0203; pub const GL_STENCIL_TEST: u32 = 0x0B90; pub const GL_DITHER: u32 = 0x0BD0; pub const GL_DEPTH_COMPONENT16: u32 = 0x81A5; pub const GL_EQUAL: u32 = 0x0202; pub const GL_FRAMEBUFFER: u32 = 0x8D40; pub const GL_RGB5: u32 = 0x8050; pub const GL_LINES: u32 = 0x0001; pub const GL_DEPTH_BUFFER_BIT: u32 = 0x00000100; pub const GL_SRC_ALPHA: u32 = 0x0302; pub const GL_INCR_WRAP: u32 = 0x8507; pub const GL_LESS: u32 = 0x0201; pub const GL_MULTISAMPLE: u32 = 0x809D; pub const GL_FRAMEBUFFER_BINDING: u32 = 0x8CA6; pub const GL_BACK: u32 = 0x0405; pub const GL_ALWAYS: 
u32 = 0x0207; pub const GL_FUNC_ADD: u32 = 0x8006; pub const GL_ONE_MINUS_DST_COLOR: u32 = 0x0307; pub const GL_NOTEQUAL: u32 = 0x0205; pub const GL_DST_COLOR: u32 = 0x0306; pub const GL_COMPILE_STATUS: u32 = 0x8B81; pub const GL_RED: u32 = 0x1903; pub const GL_GREEN: u32 = 6404; pub const GL_BLUE: u32 = 6405; pub const GL_ALPHA: u32 = 6406; pub const GL_LUMINANCE: u32 = 6409; pub const GL_LUMINANCE_ALPHA: u32 = 6410; pub const GL_ALPHA_BITS: u32 = 3413; pub const GL_RED_BITS: u32 = 3410; pub const GL_GREEN_BITS: u32 = 3411; pub const GL_BLUE_BITS: u32 = 3412; pub const GL_INDEX_BITS: u32 = 3409; pub const GL_SUBPIXEL_BITS: u32 = 3408; pub const GL_AUX_BUFFERS: u32 = 3072; pub const GL_READ_BUFFER: u32 = 3074; pub const GL_DRAW_BUFFER: u32 = 3073; pub const GL_DOUBLEBUFFER: u32 = 3122; pub const GL_COLOR_ATTACHMENT3: u32 = 0x8CE3; pub const GL_DST_ALPHA: u32 = 0x0304; pub const GL_RGB5_A1: u32 = 0x8057; pub const GL_GREATER: u32 = 0x0204; pub const GL_POLYGON_OFFSET_FILL: u32 = 0x8037; pub const GL_TRUE: u32 = 1; pub const GL_NEVER: u32 = 0x0200; pub const GL_POINTS: u32 = 0x0000; pub const GL_ONE_MINUS_SRC_COLOR: u32 = 0x0301; pub const GL_MIRRORED_REPEAT: u32 = 0x8370; pub const GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS: u32 = 0x8B4D; pub const GL_R11F_G11F_B10F: u32 = 0x8C3A; pub const GL_UNSIGNED_INT_10F_11F_11F_REV: u32 = 0x8C3B; pub const GL_RGBA32UI: u32 = 0x8D70; pub const GL_RGB32UI: u32 = 0x8D71; pub const GL_RGBA16UI: u32 = 0x8D76; pub const GL_RGB16UI: u32 = 0x8D77; pub const GL_RGBA8UI: u32 = 0x8D7C; pub const GL_RGB8UI: u32 = 0x8D7D; pub const GL_RGBA32I: u32 = 0x8D82; pub const GL_RGB32I: u32 = 0x8D83; pub const GL_RGBA16I: u32 = 0x8D88; pub const GL_RGB16I: u32 = 0x8D89; pub const GL_RGBA8I: u32 = 0x8D8E; pub const GL_RGB8I: u32 = 0x8D8F; pub const GL_RED_INTEGER: u32 = 0x8D94; pub const GL_RG: u32 = 0x8227; pub const GL_RG_INTEGER: u32 = 0x8228; pub const GL_R8: u32 = 0x8229; pub const GL_R16: u32 = 0x822A; pub const GL_RG8: u32 = 0x822B; pub const 
GL_RG16: u32 = 0x822C; pub const GL_R16F: u32 = 0x822D; pub const GL_R32F: u32 = 0x822E; pub const GL_RG16F: u32 = 0x822F; pub const GL_RG32F: u32 = 0x8230; pub const GL_R8I: u32 = 0x8231; pub const GL_R8UI: u32 = 0x8232; pub const GL_R16I: u32 = 0x8233; pub const GL_R16UI: u32 = 0x8234; pub const GL_R32I: u32 = 0x8235; pub const GL_R32UI: u32 = 0x8236; pub const GL_RG8I: u32 = 0x8237; pub const GL_RG8UI: u32 = 0x8238; pub const GL_RG16I: u32 = 0x8239; pub const GL_RG16UI: u32 = 0x823A; pub const GL_RG32I: u32 = 0x823B; pub const GL_RG32UI: u32 = 0x823C; pub const GL_RGBA_INTEGER: u32 = 0x8D99; pub const GL_R8_SNORM: u32 = 0x8F94; pub const GL_RG8_SNORM: u32 = 0x8F95; pub const GL_RGB8_SNORM: u32 = 0x8F96; pub const GL_RGBA8_SNORM: u32 = 0x8F97; pub const GL_R16_SNORM: u32 = 0x8F98; pub const GL_RG16_SNORM: u32 = 0x8F99; pub const GL_RGB16_SNORM: u32 = 0x8F9A; pub const GL_RGBA16_SNORM: u32 = 0x8F9B; pub const GL_RGBA16: u32 = 0x805B; pub const GL_MAX_TEXTURE_SIZE: u32 = 0x0D33; pub const GL_MAX_CUBE_MAP_TEXTURE_SIZE: u32 = 0x851C; pub const GL_MAX_3D_TEXTURE_SIZE: u32 = 0x8073; pub const GL_MAX_ARRAY_TEXTURE_LAYERS: u32 = 0x88FF; pub const GL_MAX_VERTEX_ATTRIBS: u32 = 0x8869; pub const GL_CLAMP_TO_BORDER: u32 = 0x812D; pub const GL_TEXTURE_BORDER_COLOR: u32 = 0x1004; pub const GL_UNPACK_ALIGNMENT: u32 = 3317; pub const GL_TEXTURE_SWIZZLE_R: u32 = 36418; pub const GL_TEXTURE_SWIZZLE_G: u32 = 36419; pub const GL_TEXTURE_SWIZZLE_B: u32 = 36420; pub const GL_TEXTURE_SWIZZLE_A: u32 = 36421; pub const GL_TEXTURE_SWIZZLE_RGBA: u32 = 36422; pub const GL_DRAW_FRAMEBUFFER_BINDING: u32 = 36006; pub const GL_TIME_ELAPSED: u32 = 35007; pub const GL_QUERY_RESULT: u32 = 34918; pub const GL_QUERY_RESULT_AVAILABLE: u32 = 34919; pub const GL_VENDOR: u32 = 0x1F00; pub const GL_VERSION: u32 = 0x1F02; pub const GL_TEXTURE_BINDING_2D: u32 = 32873u32; pub const GL_TEXTURE_COMPARE_MODE: u32 = 0x884C; pub const GL_COMPARE_REF_TO_TEXTURE: u32 = 0x884E; pub const 
WGL_NUMBER_PIXEL_FORMATS_ARB: u32 = 0x2000; pub const WGL_SUPPORT_OPENGL_ARB: u32 = 0x2010; pub const WGL_DRAW_TO_WINDOW_ARB: u32 = 0x2001; pub const WGL_PIXEL_TYPE_ARB: u32 = 0x2013; pub const WGL_TYPE_RGBA_ARB: u32 = 0x202b; pub const WGL_ACCELERATION_ARB: u32 = 0x2003; pub const WGL_NO_ACCELERATION_ARB: u32 = 0x2025; pub const WGL_RED_BITS_ARB: u32 = 0x2015; pub const WGL_RED_SHIFT_ARB: u32 = 0x2016; pub const WGL_GREEN_BITS_ARB: u32 = 0x2017; pub const WGL_GREEN_SHIFT_ARB: u32 = 0x2018; pub const WGL_BLUE_BITS_ARB: u32 = 0x2019; pub const WGL_BLUE_SHIFT_ARB: u32 = 0x201a; pub const WGL_ALPHA_BITS_ARB: u32 = 0x201b; pub const WGL_ALPHA_SHIFT_ARB: u32 = 0x201c; pub const WGL_ACCUM_BITS_ARB: u32 = 0x201d; pub const WGL_ACCUM_RED_BITS_ARB: u32 = 0x201e; pub const WGL_ACCUM_GREEN_BITS_ARB: u32 = 0x201f; pub const WGL_ACCUM_BLUE_BITS_ARB: u32 = 0x2020; pub const WGL_ACCUM_ALPHA_BITS_ARB: u32 = 0x2021; pub const WGL_DEPTH_BITS_ARB: u32 = 0x2022; pub const WGL_STENCIL_BITS_ARB: u32 = 0x2023; pub const WGL_AUX_BUFFERS_ARB: u32 = 0x2024; pub const WGL_STEREO_ARB: u32 = 0x2012; pub const WGL_DOUBLE_BUFFER_ARB: u32 = 0x2011; pub const WGL_SAMPLES_ARB: u32 = 0x2042; pub const WGL_FRAMEBUFFER_SRGB_CAPABLE_ARB: u32 = 0x20a9; pub const WGL_CONTEXT_DEBUG_BIT_ARB: u32 = 0x00000001; pub const WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB: u32 = 0x00000002; pub const WGL_CONTEXT_PROFILE_MASK_ARB: u32 = 0x9126; pub const WGL_CONTEXT_CORE_PROFILE_BIT_ARB: u32 = 0x00000001; pub const WGL_CONTEXT_COMPATIBILITY_PROFILE_BIT_ARB: u32 = 0x00000002; pub const WGL_CONTEXT_MAJOR_VERSION_ARB: u32 = 0x2091; pub const WGL_CONTEXT_MINOR_VERSION_ARB: u32 = 0x2092; pub const WGL_CONTEXT_FLAGS_ARB: u32 = 0x2094; pub const WGL_CONTEXT_ROBUST_ACCESS_BIT_ARB: u32 = 0x00000004; pub const WGL_LOSE_CONTEXT_ON_RESET_ARB: u32 = 0x8252; pub const WGL_CONTEXT_RESET_NOTIFICATION_STRATEGY_ARB: u32 = 0x8256; pub const WGL_NO_RESET_NOTIFICATION_ARB: u32 = 0x8261; pub const WGL_CONTEXT_RELEASE_BEHAVIOR_ARB: u32 = 
0x2097; pub const WGL_CONTEXT_RELEASE_BEHAVIOR_NONE_ARB: u32 = 0; pub const WGL_CONTEXT_RELEASE_BEHAVIOR_FLUSH_ARB: u32 = 0x2098; pub const WGL_COLORSPACE_EXT: u32 = 0x309d; pub const WGL_COLORSPACE_SRGB_EXT: u32 = 0x3089; pub const ERROR_INVALID_VERSION_ARB: u32 = 0x2095; pub const ERROR_INVALID_PROFILE_ARB: u32 = 0x2096; pub const ERROR_INCOMPATIBLE_DEVICE_CONTEXTS_ARB: u32 = 0x2054; macro_rules! gl_loader { ( $( fn $fn:ident ( $($arg:ident : $t:ty),* ) -> $res:ty ),* ) => { mod __pfns { use super::*; $( pub static mut $fn: Option<extern "C" fn ($($arg: $t),*) -> $res> = None; )* } $( pub unsafe fn $fn($($arg: $t),*) -> $res { __pfns::$fn.unwrap()( $($arg),* ) } )* pub fn load_gl_funcs() { $( unsafe { let fn_name = concat!(stringify!($fn), '\0').as_ptr() as *const std::os::raw::c_char; let mut proc_ptr = _sapp_wglGetProcAddress.unwrap()(fn_name); if proc_ptr.is_null() { proc_ptr = GetProcAddress(_sapp_opengl32, fn_name); } assert!(proc_ptr.is_null() == false, "Load GL func {:?} failed.", stringify!($fn)); __pfns::$fn = Some(std::mem::transmute(proc_ptr)); } )* } }; } gl_loader!( fn glGetStringi(name: GLenum, index: GLuint) -> *const GLubyte, fn glGetString(name: GLenum) -> *const GLubyte, fn glFramebufferTextureLayer( target: GLenum, attachment: GLenum, texture: GLuint, level: GLint, layer: GLint ) -> (), fn glGenFramebuffers(n: GLsizei, framebuffers: *mut GLuint) -> (), fn glBindFramebuffer(target: GLenum, framebuffer: GLuint) -> (), fn glBindRenderbuffer(target: GLenum, renderbuffer: GLuint) -> (), fn glClearBufferfi(buffer: GLenum, drawbuffer: GLint, depth: GLfloat, stencil: GLint) -> (), fn glClearBufferfv(buffer: GLenum, drawbuffer: GLint, value: *const GLfloat) -> (), fn glClearBufferuiv(buffer: GLenum, drawbuffer: GLint, value: *const GLuint) -> (), fn glDeleteRenderbuffers(n: GLsizei, renderbuffers: *const GLuint) -> (), fn glUniform4fv(location: GLint, count: GLsizei, value: *const GLfloat) -> (), fn glUniform3fv(location: GLint, count: GLsizei, value: 
*const GLfloat) -> (), fn glUniform2fv(location: GLint, count: GLsizei, value: *const GLfloat) -> (), fn glUniform1fv(location: GLint, count: GLsizei, value: *const GLfloat) -> (), fn glUniform1iv(location: GLint, count: GLsizei, value: *const GLint) -> (), fn glUniform2iv(location: GLint, count: GLsizei, value: *const GLint) -> (), fn glUniform3iv(location: GLint, count: GLsizei, value: *const GLint) -> (), fn glUniform4iv(location: GLint, count: GLsizei, value: *const GLint) -> (), fn glUniform1i(location: GLint, v0: GLint) -> (), fn glUniform2i(location: GLint, v0: GLint, v1: GLint) -> (), fn glUniform3i(location: GLint, v0: GLint, v1: GLint, v2: GLint) -> (), fn glUniform4i(location: GLint, v0: GLint, v1: GLint, v2: GLint, v3: GLint) -> (), fn glUniform1f(location: GLint, v0: GLfloat) -> (), fn glUniform2f(location: GLint, v0: GLfloat, v1: GLfloat) -> (), fn glUniform3f(location: GLint, v0: GLfloat, v1: GLfloat, v2: GLfloat) -> (), fn glUniform4f(location: GLint, v0: GLfloat, v1: GLfloat, v2: GLfloat, v3: GLfloat) -> (), fn glUseProgram(program: GLuint) -> (), fn glShaderSource( shader: GLuint, count: GLsizei, string: *const *const GLchar, length: *const GLint ) -> (), fn glLinkProgram(program: GLuint) -> (), fn glPixelStorei(pname: GLenum, param: GLint) -> (), fn glGetUniformLocation(program: GLuint, name: *const GLchar) -> GLint, fn glGetShaderiv(shader: GLuint, pname: GLenum, params: *mut GLint) -> (), fn glGetProgramInfoLog( program: GLuint, bufSize: GLsizei, length: *mut GLsizei, infoLog: *mut GLchar ) -> (), fn glGetAttribLocation(program: GLuint, name: *const GLchar) -> GLint, fn glDisableVertexAttribArray(index: GLuint) -> (), fn glDeleteShader(shader: GLuint) -> (), fn glDeleteProgram(program: GLuint) -> (), fn glCompileShader(shader: GLuint) -> (), fn glStencilFuncSeparate(face: GLenum, func: GLenum, ref_: GLint, mask: GLuint) -> (), fn glStencilOpSeparate(face: GLenum, sfail: GLenum, dpfail: GLenum, dppass: GLenum) -> (), fn 
glRenderbufferStorageMultisample( target: GLenum, samples: GLsizei, internalformat: GLenum, width: GLsizei, height: GLsizei ) -> (), fn glDrawBuffers(n: GLsizei, bufs: *const GLenum) -> (), fn glVertexAttribDivisor(index: GLuint, divisor: GLuint) -> (), fn glBufferSubData( target: GLenum, offset: GLintptr, size: GLsizeiptr, data: *const ::std::os::raw::c_void ) -> (), fn glGenBuffers(n: GLsizei, buffers: *mut GLuint) -> (), fn glCheckFramebufferStatus(target: GLenum) -> GLenum, fn glFramebufferRenderbuffer( target: GLenum, attachment: GLenum, renderbuffertarget: GLenum, renderbuffer: GLuint ) -> (), fn glCompressedTexImage2D( target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, border: GLint, imageSize: GLsizei, data: *const GLvoid ) -> (), fn glCompressedTexImage3D( target: GLenum, level: GLint, internalformat: GLenum, width: GLsizei, height: GLsizei, depth: GLsizei, border: GLint, imageSize: GLsizei, data: *const GLvoid ) -> (), fn glActiveTexture(texture: GLenum) -> (), fn glTexSubImage3D( target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, zoffset: GLint, width: GLsizei, height: GLsizei, depth: GLsizei, format: GLenum, type_: GLenum, pixels: *const GLvoid ) -> (), fn glUniformMatrix2fv( location: GLint, count: GLsizei, transpose: GLboolean, value: *const GLfloat ) -> (), fn glUniformMatrix3fv( location: GLint, count: GLsizei, transpose: GLboolean, value: *const GLfloat ) -> (), fn glUniformMatrix4fv( location: GLint, count: GLsizei, transpose: GLboolean, value: *const GLfloat ) -> (), fn glRenderbufferStorage( target: GLenum, internalformat: GLenum, width: GLsizei, height: GLsizei ) -> (), fn glPolygonOffset(factor: GLfloat, units: GLfloat) -> (), fn glDrawElements(mode: GLenum, count: GLsizei, type_: GLenum, indices: *const GLvoid) -> (), fn glDeleteFramebuffers(n: GLsizei, framebuffers: *const GLuint) -> (), fn glBlendEquationSeparate(modeRGB: GLenum, modeAlpha: GLenum) -> (), fn glDeleteTextures(n: GLsizei, 
textures: *const GLuint) -> (), fn glGetProgramiv(program: GLuint, pname: GLenum, params: *mut GLint) -> (), fn glBindTexture(target: GLenum, texture: GLuint) -> (), fn glTexImage3D( target: GLenum, level: GLint, internalFormat: GLint, width: GLsizei, height: GLsizei, depth: GLsizei, border: GLint, format: GLenum, type_: GLenum, pixels: *const GLvoid ) -> (), fn glCreateShader(type_: GLenum) -> GLuint, fn glTexSubImage2D( target: GLenum, level: GLint, xoffset: GLint, yoffset: GLint, width: GLsizei, height: GLsizei, format: GLenum, type_: GLenum, pixels: *const GLvoid ) -> (), fn glCopyTexImage2D( target: GLenum, level: GLint, internalformat: GLenum, x: GLint, y: GLint, width: GLsizei, height: GLsizei, border: GLint ) -> (), fn glClearDepthf(d: GLfloat) -> (), fn glClearDepth(depth: GLclampd) -> (), fn glFramebufferTexture2D( target: GLenum, attachment: GLenum, textarget: GLenum, texture: GLuint, level: GLint ) -> (), fn glCreateProgram() -> GLuint, fn glViewport(x: GLint, y: GLint, width: GLsizei, height: GLsizei) -> (), fn glDeleteBuffers(n: GLsizei, buffers: *const GLuint) -> (), fn glDrawArrays(mode: GLenum, first: GLint, count: GLsizei) -> (), fn glDrawElementsInstanced( mode: GLenum, count: GLsizei, type_: GLenum, indices: *const ::std::os::raw::c_void, instancecount: GLsizei ) -> (), fn glVertexAttribPointer( index: GLuint, size: GLint, type_: GLenum, normalized: GLboolean, stride: GLsizei, pointer: *const ::std::os::raw::c_void ) -> (), fn glDisable(cap: GLenum) -> (), fn glColorMask(red: GLboolean, green: GLboolean, blue: GLboolean, alpha: GLboolean) -> (), fn glBindBuffer(target: GLenum, buffer: GLuint) -> (), fn glBindVertexArray(array: GLuint) -> (), fn glDeleteVertexArrays(n: GLsizei, arrays: *const GLuint) -> (), fn glDepthMask(flag: GLboolean) -> (), fn glDrawArraysInstanced( mode: GLenum, first: GLint, count: GLsizei, instancecount: GLsizei ) -> (), fn glClearStencil(s: GLint) -> (), fn glScissor(x: GLint, y: GLint, width: GLsizei, height: GLsizei) 
-> (), fn glGenRenderbuffers(n: GLsizei, renderbuffers: *mut GLuint) -> (), fn glBufferData( target: GLenum, size: GLsizeiptr, data: *const ::std::os::raw::c_void, usage: GLenum ) -> (), fn glBlendFuncSeparate( sfactorRGB: GLenum, dfactorRGB: GLenum, sfactorAlpha: GLenum, dfactorAlpha: GLenum ) -> (), fn glTexParameteri(target: GLenum, pname: GLenum, param: GLint) -> (), fn glGetIntegerv(pname: GLenum, params: *mut GLint) -> (), fn glEnable(cap: GLenum) -> (), fn glBlitFramebuffer( srcX0: GLint, srcY0: GLint, srcX1: GLint, srcY1: GLint, dstX0: GLint, dstY0: GLint, dstX1: GLint, dstY1: GLint, mask: GLbitfield, filter: GLenum ) -> (), fn glStencilMask(mask: GLuint) -> (), fn glStencilMaskSeparate(face: GLenum, mask: GLuint) -> (), fn glAttachShader(program: GLuint, shader: GLuint) -> (), fn glGetError() -> GLenum, fn glClearColor(red: GLclampf, green: GLclampf, blue: GLclampf, alpha: GLclampf) -> (), fn glBlendColor(red: GLclampf, green: GLclampf, blue: GLclampf, alpha: GLclampf) -> (), fn glTexParameterf(target: GLenum, pname: GLenum, param: GLfloat) -> (), fn glTexParameterfv(target: GLenum, pname: GLenum, params: *const GLfloat) -> (), fn glGetShaderInfoLog( shader: GLuint, bufSize: GLsizei, length: *mut GLsizei, infoLog: *mut GLchar ) -> (), fn glDepthFunc(func: GLenum) -> (), fn glStencilOp(fail: GLenum, zfail: GLenum, zpass: GLenum) -> (), fn glStencilFunc(func: GLenum, ref_: GLint, mask: GLuint) -> (), fn glEnableVertexAttribArray(index: GLuint) -> (), fn glBlendFunc(sfactor: GLenum, dfactor: GLenum) -> (), fn glReadBuffer(mode: GLenum) -> (), fn glClear(mask: GLbitfield) -> (), fn glTexImage2D( target: GLenum, level: GLint, internalFormat: GLint, width: GLsizei, height: GLsizei, border: GLint, format: GLenum, type_: GLenum, pixels: *const GLvoid ) -> (), fn glGenVertexArrays(n: GLsizei, arrays: *mut GLuint) -> (), fn glFrontFace(mode: GLenum) -> (), fn glCullFace(mode: GLenum) -> (), fn glGenTextures(n: GLsizei, textures: *mut GLuint) -> (), fn glReadPixels( 
x: GLint, y: GLint, width: GLsizei, height: GLsizei, format: GLenum, type_: GLenum, pixels: *mut GLvoid ) -> (), fn glBeginQuery(target: GLenum, id: GLuint) -> (), fn glDeleteQueries(n: GLsizei, ids: *const GLuint) -> (), fn glEndQuery(target: GLenum) -> (), fn glGenQueries(n: GLsizei, ids: *mut GLuint) -> (), fn glGetQueryObjectiv(id: GLuint, pname: GLenum, params: *mut GLint) -> (), fn glGetQueryObjectui64v(id: GLuint, pname: GLenum, params: *mut GLuint64) -> (), fn glFinish() -> (), fn glGenerateMipmap(target: u32) -> () );
38.412773
105
0.677466
4a9922e2a6ec4be5b3f6ad9ccb4b7c94df0bccd4
700
// move_semantics2.rs
// Make me compile without changing line 13!
// Execute `rustlings hint move_semantics2` for hints :)

fn main() {
    // `fill_vec` takes its argument by value, so hand it a clone of `vec0`;
    // the original vector then remains usable after the call.
    let vec0 = Vec::new();

    let mut vec1 = fill_vec(vec0.clone());

    // Do not change the following line!
    println!("{} has length {} content `{:?}`", "vec0", vec0.len(), vec0);

    vec1.push(88);

    println!("{} has length {} content `{:?}`", "vec1", vec1.len(), vec1);
}

/// Appends 22, 44 and 66 to the given vector and returns it.
fn fill_vec(mut vec: Vec<i32>) -> Vec<i32> {
    vec.extend_from_slice(&[22, 44, 66]);
    vec
}
23.333333
74
0.578571
7164a246c4ffb672c75d848ad1010976303fd0eb
5,258
use anyhow::anyhow; use std::path::PathBuf; use aya_gen::{bindgen, write_to_file}; use crate::codegen::{Architecture, Options}; pub fn codegen(opts: &Options) -> Result<(), anyhow::Error> { codegen_internal_btf_bindings(opts)?; codegen_bindings(opts) } fn codegen_internal_btf_bindings(opts: &Options) -> Result<(), anyhow::Error> { let dir = PathBuf::from("aya"); let generated = dir.join("src/generated"); let mut bindgen = bindgen::user_builder() .clang_arg(format!( "-I{}", opts.libbpf_dir .join("include/uapi") .canonicalize() .unwrap() .to_string_lossy() )) .clang_arg(format!( "-I{}", opts.libbpf_dir .join("include") .canonicalize() .unwrap() .to_string_lossy() )) .header( opts.libbpf_dir .join("src/libbpf_internal.h") .to_string_lossy(), ) .constified_enum_module("bpf_core_relo_kind"); let types = ["bpf_core_relo", "btf_ext_header"]; for x in &types { bindgen = bindgen.allowlist_type(x); } let bindings = bindgen .generate() .map_err(|_| anyhow!("bindgen failed"))? .to_string(); // write the bindings, with the original helpers removed write_to_file(&generated.join("btf_internal_bindings.rs"), &bindings)?; Ok(()) } fn codegen_bindings(opts: &Options) -> Result<(), anyhow::Error> { let types = [ // BPF "BPF_TYPES", "bpf_cmd", "bpf_insn", "bpf_attr", "bpf_map_type", "bpf_prog_type", "bpf_attach_type", "bpf_prog_info", // BTF "btf_header", "btf_ext_info", "btf_ext_info_sec", "btf_type", "btf_enum", "btf_array", "btf_member", "btf_param", "btf_var", "btf_var_secinfo", // PERF "perf_event_attr", "perf_sw_ids", "perf_hw_id", "perf_hw_cache_id", "perf_hw_cache_op_id", "perf_hw_cache_op_result_id", "perf_event_sample_format", "perf_event_mmap_page", "perf_event_header", "perf_type_id", "perf_event_type", // NETLINK "ifinfomsg", "tcmsg", ]; let vars = [ // BPF "BPF_PSEUDO_.*", "BPF_ALU", "BPF_ALU64", "BPF_LDX", "BPF_ST", "BPF_STX", "BPF_LD", "BPF_K", "BPF_DW", "BPF_W", "BPF_H", "BPF_B", "BPF_JMP", "BPF_CALL", "SO_ATTACH_BPF", "SO_DETACH_BPF", // BTF "BTF_INT_.*", 
"BTF_KIND_.*", // PERF "PERF_FLAG_.*", "PERF_EVENT_.*", "PERF_MAX_.*", // see linux_wrapper.h, these are to workaround the IOC macros "AYA_PERF_EVENT_.*", // NETLINK "NLMSG_ALIGNTO", "IFLA_XDP_FD", "TCA_KIND", "TCA_OPTIONS", "TCA_BPF_FD", "TCA_BPF_NAME", "TCA_BPF_FLAGS", "TCA_BPF_FLAG_ACT_DIRECT", "XDP_FLAGS_.*", "TC_H_MAJ_MASK", "TC_H_MIN_MASK", "TC_H_UNSPEC", "TC_H_ROOT", "TC_H_INGRESS", "TC_H_CLSACT", "TC_H_MIN_PRIORITY", "TC_H_MIN_INGRESS", "TC_H_MIN_EGRESS", ]; let dir = PathBuf::from("aya"); let generated = dir.join("src/generated"); let builder = || { bindgen::user_builder() .header(dir.join("include/linux_wrapper.h").to_string_lossy()) .clang_args(&[ "-I", &*opts.libbpf_dir.join("include/uapi").to_string_lossy(), ]) .clang_args(&["-I", &*opts.libbpf_dir.join("include").to_string_lossy()]) }; for arch in Architecture::supported() { let mut bindgen = builder(); for x in &types { bindgen = bindgen.allowlist_type(x); } for x in &vars { bindgen = bindgen.allowlist_var(x).constified_enum("BTF_KIND_.*"); } // FIXME: this stuff is probably debian/ubuntu specific match arch { Architecture::X86_64 => { bindgen = bindgen.clang_args(&["-I", "/usr/include/x86_64-linux-gnu"]); } Architecture::ARMv7 => { bindgen = bindgen.clang_args(&["-I", "/usr/arm-linux-gnueabi/include"]); } Architecture::AArch64 => { bindgen = bindgen.clang_args(&["-I", "/usr/aarch64-linux-gnu/include"]); } }; for x in &types { bindgen = bindgen.allowlist_type(x); } for x in &vars { bindgen = bindgen.allowlist_var(x); } let bindings = bindgen .generate() .map_err(|_| anyhow!("bindgen failed"))? .to_string(); // write the bindings, with the original helpers removed write_to_file( &generated.join(format!("linux_bindings_{}.rs", arch)), &bindings.to_string(), )?; } Ok(()) }
26.029703
88
0.506657
ebe228a914738dbb09bcdd7c1c5cb5fe7021da74
9,391
use crate::debugger_command::DebuggerCommand; use crate::inferior::{Inferior, Status}; use crate::dwarf_data::{DwarfData, Error as DwarfError}; use rustyline::error::ReadlineError; use rustyline::Editor; use std::collections::HashMap; pub struct Debugger { target: String, history_path: String, readline: Editor<()>, inferior: Option<Inferior>, debug_data: DwarfData, breakpoints: HashMap<usize, u8> } impl Debugger { /// Initializes the debugger. pub fn new(target: &str) -> Debugger { let debug_data = match DwarfData::from_file(target) { Ok(val) => val, Err(DwarfError::ErrorOpeningFile) => { println!("Could not open file {}", target); std::process::exit(1); } Err(DwarfError::DwarfFormatError(err)) => { println!("Could not debugging symbols from {}: {:?}", target, err); std::process::exit(1); } }; debug_data.print(); let history_path = format!("{}/.deet_history", std::env::var("HOME").unwrap()); let mut readline = Editor::<()>::new(); // Attempt to load history from ~/.deet_history if it exists let _ = readline.load_history(&history_path); let breakpoints = HashMap::new(); Debugger { target: target.to_string(), history_path, readline, inferior: None, debug_data, breakpoints } } pub fn run(&mut self) { loop { match self.get_next_command() { DebuggerCommand::Run(args) => { if self.inferior.is_some() { self.inferior.as_mut().unwrap().kill(); self.inferior = None; } if let Some(inferior) = Inferior::new(&self.target, &args, &self.breakpoints) { // Create the inferior self.inferior = Some(inferior); let status = self.inferior.as_mut().unwrap().continue_run(); self.check_status(status); } else { println!("Error starting subprocess"); } } DebuggerCommand::Continue => { if self.inferior.is_some() { let status = self.inferior.as_mut().unwrap().continue_run(); self.check_status(status); } else { println!("Error no inferior running"); } } DebuggerCommand::Quit => { if self.inferior.is_some() { self.inferior.as_mut().unwrap().kill(); self.inferior = None; } return; } 
DebuggerCommand::Backtrace => { if self.inferior.is_some() { self.inferior.as_mut().unwrap().print_backtrace(&self.debug_data).unwrap(); } else { println!("Error no inferior running") } } DebuggerCommand::Breakpoint(location) => { let bp_addr; if location.starts_with("*") { if let Some(address) = self.parse_address(&location[1..]) { bp_addr = address; } else { println!("Invalid address"); continue; } } else if let Some(line_number) = usize::from_str_radix(&location, 10).ok() { if let Some(address) = self.debug_data.get_addr_for_line(None, line_number) { bp_addr = address; } else { println!("Invalid line number"); continue; } } else if let Some(address) = self.debug_data.get_addr_for_function(None, &location) { bp_addr = address; } else { println!("Usage: b|break|breakpoint *address|line|func"); continue; } if self.inferior.is_some() { println!("Set breakpoint {} at {:#x}", self.inferior.as_mut().unwrap().breakpoints.len(), bp_addr); self.inferior.as_mut().unwrap().set_breakpoint(bp_addr); } else { println!("Set breakpoint {} at {:#x}", self.breakpoints.len(), bp_addr); self.breakpoints.insert(bp_addr, 0); } } DebuggerCommand::Step => { if self.inferior.is_some() { self.inferior.as_mut().unwrap().step_in(&self.debug_data); } else { println!("Error no inferior running"); } } DebuggerCommand::Next => { if self.inferior.is_some() { let status = self.inferior.as_mut().unwrap().step_over(&self.debug_data); self.check_status(status); } else { println!("Error no inferior running"); } } DebuggerCommand::Finish => { if self.inferior.is_some() { let status = self.inferior.as_mut().unwrap().step_out(); self.check_status(status); } else { println!("Error no inferior running"); } } DebuggerCommand::Print(name) => { if self.inferior.is_some() { self.inferior.as_mut().unwrap().print_variable(&self.debug_data, name); } else { println!("Error no inferior running"); } } } } } fn parse_address(&self, addr: &str) -> Option<usize> { let addr_without_0x = if 
addr.to_lowercase().starts_with("0x") { &addr[2..] } else { &addr }; usize::from_str_radix(addr_without_0x, 16).ok() } fn check_status(&mut self, status: Result<Status, nix::Error>) { match status.unwrap() { Status::Stopped(signal, rip) => { println!("Child stopped (signal {})", signal); match self.debug_data.get_line_from_addr(rip) { Some(line) => { println!("Stopped at {}", line); self.inferior.as_mut().unwrap().print_source(&line); }, None => { println!("Stopped at {:#x}", rip) }, } }, Status::Exited(exit_code) => { println!("Child exited (status {})", exit_code); self.inferior = None; }, Status::Signaled(signal) => { println!("Child exited (signal {})", signal); self.inferior = None; }, } } /// This function prompts the user to enter a command, and continues re-prompting until the user /// enters a valid command. It uses DebuggerCommand::from_tokens to do the command parsing. /// /// You don't need to read, understand, or modify this function. fn get_next_command(&mut self) -> DebuggerCommand { loop { // Print prompt and get next line of user input match self.readline.readline("(deet) ") { Err(ReadlineError::Interrupted) => { // User pressed ctrl+c. We're going to ignore it println!("Type \"quit\" to exit"); } Err(ReadlineError::Eof) => { // User pressed ctrl+d, which is the equivalent of "quit" for our purposes return DebuggerCommand::Quit; } Err(err) => { panic!("Unexpected I/O error: {:?}", err); } Ok(line) => { if line.trim().len() == 0 { continue; } self.readline.add_history_entry(line.as_str()); if let Err(err) = self.readline.save_history(&self.history_path) { println!( "Warning: failed to save history file at {}: {}", self.history_path, err ); } let tokens: Vec<&str> = line.split_whitespace().collect(); if let Some(cmd) = DebuggerCommand::from_tokens(&tokens) { return cmd; } else { println!("Unrecognized command."); } } } } } }
40.830435
123
0.438398
d74629afa3bf10734fdbf3f837346dff1027d7d6
369
//! Tests auto-converted from "sass-spec/spec/libsass-closed-issues/issue_1419/unquoted.hrx" #[allow(unused)] fn runner() -> crate::TestRunner { super::runner() } #[test] fn test() { assert_eq!( runner().ok("foo {\ \n foo: to-upper-case(ab\\63 d);\ \n}\n"), "foo {\ \n foo: ABCD;\ \n}\n" ); }
19.421053
92
0.498645
56c6afc9accf3555133a0e43d5cd2f7ef3e6b50f
1,243
use crate::manifest::State; use crate::utils; use clap::ArgMatches; use std::path::PathBuf; /// Given a file and a script, remove the script /// from the file. pub fn rm(matches: &ArgMatches, state: &mut State) -> Result<(), Box<dyn std::error::Error>> { let target_script = PathBuf::from(matches.value_of("script").unwrap()); let target_file = PathBuf::from(matches.value_of("file").unwrap()); let specified_script = utils::relative_to_special(&utils::relative_to_script( state, &PathBuf::from(target_script), ))?; let specified_file = utils::relative_to_special(&target_file)?; // Like a birds nest. Fix? if let Some(files) = &mut state.data.files { for file in files { if file.path == specified_file { if let Some(scripts) = &mut file.scripts { scripts.retain(|e| { println!("{} == {}", e.display(), &specified_script.display()); e != &specified_script }); if scripts.len() == 0 { file.scripts = None; } } } } } state.data.populate_file(&state); Ok(()) }
31.871795
94
0.538214
1e239f0636b6117334f8275cc284e4da66a05b80
12,139
// Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License in the LICENSE-APACHE file or at: // https://www.apache.org/licenses/LICENSE-2.0 //! Gallery of all widgets //! //! This is a test-bed to demonstrate most toolkit functionality //! (excepting custom graphics). use kas::event::{Command, VoidResponse}; use kas::prelude::*; use kas::widget::*; use kas::{dir::Right, Future}; #[derive(Clone, Debug, VoidMsg)] enum Item { Button, Check(bool), Combo(i32), Radio(u32), Edit(String), Slider(i32), Scroll(i32), } #[derive(Debug)] struct Guard; impl EditGuard for Guard { type Msg = Item; fn activate(edit: &mut EditField<Self>, _: &mut Manager) -> Option<Self::Msg> { Some(Item::Edit(edit.get_string())) } fn edit(edit: &mut EditField<Self>, _: &mut Manager) -> Option<Self::Msg> { // 7a is the colour of *magic*! edit.set_error_state(edit.get_str().len() % (7 + 1) == 0); None } } #[derive(Debug, Widget)] #[widget(config=noauto)] #[layout(grid)] #[handler(handle=noauto)] struct TextEditPopup { #[widget_core] core: CoreData, #[layout_data] layout_data: <Self as kas::LayoutData>::Data, #[widget(cspan = 3)] edit: EditBox, #[widget(row = 1, col = 0)] fill: Filler, #[widget(row=1, col=1, handler = close)] cancel: TextButton<bool>, #[widget(row=1, col=2, handler = close)] save: TextButton<bool>, commit: bool, } impl TextEditPopup { fn new<S: ToString>(text: S) -> Self { TextEditPopup { core: Default::default(), layout_data: Default::default(), edit: EditBox::new(text).multi_line(true), fill: Filler::maximize(), cancel: TextButton::new_msg("&Cancel", false), save: TextButton::new_msg("&Save", true), commit: false, } } fn close(&mut self, mgr: &mut Manager, commit: bool) -> VoidResponse { self.commit = commit; mgr.send_action(TkAction::CLOSE); Response::None } } impl WidgetConfig for TextEditPopup { fn configure(&mut self, mgr: &mut Manager) { 
mgr.register_nav_fallback(self.id()); } } impl Handler for TextEditPopup { type Msg = VoidMsg; fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> { match event { Event::Command(Command::Escape, _) => self.close(mgr, false), Event::Command(Command::Return, _) => self.close(mgr, true), _ => Response::Unhandled, } } } fn main() -> Result<(), kas_wgpu::Error> { env_logger::init(); #[cfg(feature = "stack_dst")] let theme = kas_theme::MultiTheme::builder() .add("shaded", kas_theme::ShadedTheme::new()) .add("flat", kas_theme::FlatTheme::new()) .build(); #[cfg(not(feature = "stack_dst"))] let theme = kas_theme::ShadedTheme::new(); let mut toolkit = kas_wgpu::Toolkit::new(theme)?; #[derive(Clone, Debug, VoidMsg)] enum Menu { Theme(&'static str), Colour(String), Disabled(bool), Quit, } let themes = vec![ MenuEntry::new("&Shaded", Menu::Theme("shaded")).boxed(), MenuEntry::new("&Flat", Menu::Theme("flat")).boxed(), ]; // Enumerate colour schemes. Access through the toolkit since this handles // config loading. let colours = toolkit .theme() .list_schemes() .iter() .map(|name| { let mut title = String::with_capacity(name.len() + 1); match name { &"" => title.push_str("&Default"), &"dark" => title.push_str("Dar&k"), name => { let mut iter = name.char_indices(); if let Some((_, c)) = iter.next() { title.push('&'); for c in c.to_uppercase() { title.push(c); } if let Some((i, _)) = iter.next() { title.push_str(&name[i..]); } } } } MenuEntry::new(title, Menu::Colour(name.to_string())) }) .collect(); let styles = vec![ SubMenu::right("&Colours", colours).boxed(), Separator::infer().boxed(), MenuToggle::new("&Disabled") .on_toggle(|_, state| Some(Menu::Disabled(state))) .boxed(), ]; let menubar = MenuBar::<_>::new(vec![ SubMenu::new("&App", vec![MenuEntry::new("&Quit", Menu::Quit).boxed()]), SubMenu::new("&Theme", themes), SubMenu::new("&Style", styles), ]); let popup_edit_box = make_widget! 
{ #[layout(row)] #[handler(handle = noauto)] struct { #[widget] label: StringLabel = Label::from("Use button to edit →"), #[widget(handler = edit)] edit = TextButton::new_msg("&Edit", ()), future: Option<Future<Option<String>>> = None, } impl { fn edit(&mut self, mgr: &mut Manager, _: ()) -> VoidResponse { if self.future.is_none() { let text = self.label.get_string(); let mut window = Window::new("Edit text", TextEditPopup::new(text)); let (future, update) = window.on_drop(|w: &mut TextEditPopup| if w.commit { Some(w.edit.get_string()) } else { None }); self.future = Some(future); mgr.update_on_handle(update, self.id()); mgr.add_window(Box::new(window)); } Response::None } } impl Handler { type Msg = VoidMsg; fn handle(&mut self, mgr: &mut Manager, event: Event) -> Response<Self::Msg> { match event { Event::HandleUpdate { .. } => { // There should be no other source of this event, // so we can assume our future is finished if let Some(future) = self.future.take() { let result = future.try_finish().unwrap(); if let Some(text) = result { *mgr |= self.label.set_string(text); } } Response::None } _ => Response::Unhandled, } } } }; let radio = UpdateHandle::new(); let widgets = make_widget! 
{ #[layout(grid)] #[handler(msg = Item)] struct { #[widget(row=0, col=0)] _ = Label::new("Label"), #[widget(row=0, col=1)] _ = Label::new("Hello world"), #[widget(row=1, col=0)] _ = Label::new("EditBox"), #[widget(row=1, col=1)] _ = EditBox::new("edit me").with_guard(Guard), #[widget(row=2, col=0)] _ = Label::new("TextButton"), #[widget(row=2, col=1)] _ = TextButton::new_msg("&Press me", Item::Button), #[widget(row=3, col=0)] _ = Label::new("CheckBox"), #[widget(row=3, col=1)] _ = CheckBox::new("&Check me") .with_state(true) .on_toggle(|_, check| Some(Item::Check(check))), #[widget(row=4, col=0)] _ = Label::new("RadioBox"), #[widget(row=4, col=1)] _ = RadioBox::new("radio box &1", radio) .on_select(|_| Some(Item::Radio(1))), #[widget(row=5, col=0)] _ = Label::new("RadioBox"), #[widget(row=5, col=1)] _ = RadioBox::new("radio box &2", radio) .with_state(true) .on_select(|_| Some(Item::Radio(2))), #[widget(row=6, col=0)] _ = Label::new("ComboBox"), #[widget(row=6, col=1)] _ = ComboBox::new(&["&One", "T&wo", "Th&ree"], 0) .on_select(|_, index| Some(Item::Combo((index + 1).cast()))), #[widget(row=7, col=0)] _ = Label::new("Slider"), #[widget(row=7, col=1, handler = handle_slider)] s = Slider::<i32, Right>::new(-2, 2, 1).with_value(0), #[widget(row=8, col=0)] _ = Label::new("ScrollBar"), #[widget(row=8, col=1, handler = handle_scroll)] sc: ScrollBar<Right> = ScrollBar::new().with_limits(100, 20), #[widget(row=9, col=1)] pg: ProgressBar<Right> = ProgressBar::new(), #[widget(row=9, col=0)] _ = Label::new("ProgressBar"), #[widget(row=10, col=0)] _ = Label::new("Image"), #[widget(row=10, col=1, align=centre)] _ = Image::new("res/rustacean-flat-happy.png").with_stretch(Stretch::Low), #[widget(row=11, col=0)] _ = Label::new("Child window"), #[widget(row=11, col=1)] _ = popup_edit_box, } impl { fn handle_slider(&mut self, _: &mut Manager, msg: i32) -> Response<Item> { Response::Msg(Item::Slider(msg)) } fn handle_scroll(&mut self, mgr: &mut Manager, msg: i32) -> Response<Item> { 
let ratio = msg as f32 / self.sc.max_value() as f32; *mgr |= self.pg.set_value(ratio); Response::Msg(Item::Scroll(msg)) } } }; let head = make_widget! { #[layout(row)] #[handler(msg = VoidMsg)] struct { #[widget] _ = Label::new("Widget Gallery"), #[widget] _ = Image::new("res/gallery.png"), } }; let window = Window::new( "Widget Gallery", make_widget! { #[layout(column)] #[handler(msg = VoidMsg)] struct { #[widget(handler = menu)] _ = menubar, #[widget(halign = centre)] _ = Frame::new(head), #[widget(handler = activations)] gallery: for<W: Widget<Msg = Item>> ScrollBarRegion<W> = ScrollBarRegion::new2(widgets), } impl { fn menu(&mut self, mgr: &mut Manager, msg: Menu) -> VoidResponse { match msg { Menu::Theme(name) => { println!("Theme: {:?}", name); #[cfg(not(feature = "stack_dst"))] println!("Warning: switching themes requires feature 'stack_dst'"); mgr.adjust_theme(|theme| theme.set_theme(name)); } Menu::Colour(name) => { println!("Colour scheme: {:?}", name); mgr.adjust_theme(|theme| theme.set_scheme(&name)); } Menu::Disabled(state) => { *mgr |= self.gallery.set_disabled(state); } Menu::Quit => { *mgr |= TkAction::EXIT; } } Response::None } fn activations(&mut self, _: &mut Manager, item: Item) -> VoidResponse { match item { Item::Button => println!("Clicked!"), Item::Check(b) => println!("CheckBox: {}", b), Item::Combo(c) => println!("ComboBox: {}", c), Item::Radio(id) => println!("RadioBox: {}", id), Item::Edit(s) => println!("Edited: {}", s), Item::Slider(p) => println!("Slider: {}", p), Item::Scroll(p) => println!("ScrollBar: {}", p), }; Response::None } } }, ); toolkit.add(window)?; toolkit.run() }
37.698758
95
0.476398
18a34b1567daeff13c64876dc8acbc4f6f1654ca
853
/*! Macro construction benchmarks. This is taken from [issue #28], which noted that the `bitvec![bit; rep]` expansion was horribly inefficient. This benchmark crate should be used for all macro performance recording, and compare the macros against `vec!`. While `vec!` will always be faster, because `bitvec!` does more work than `vec!`, they should at least be close. Original performance was 10,000x slower. Performance after the fix for #28 was within 20ns. [issue #28]: https://github.com/myrrlyn/bitvec/issues/28 !*/ #![feature(test)] extern crate test; use test::Bencher; use bitvec::prelude::*; #[bench] fn bitvec_init(b: &mut Bencher) { b.iter(|| bitvec![0; 16 * 16 * 9]); b.iter(|| bitvec![1; 16 * 16 * 9]); } #[bench] fn vec_init(b: &mut Bencher) { b.iter(|| vec![0u8; 16 * 16 * 9 / 8]); b.iter(|| vec![-1i8; 16 * 16 * 9 / 8]); }
25.088235
78
0.675264
d975038b010b9dabdd320349117a621a5fc8ad49
5,957
//! This module handles the relationships between "free regions", i.e., lifetime parameters. //! Ordinarily, free regions are unrelated to one another, but they can be related via implied //! or explicit bounds. In that case, we track the bounds using the `TransitiveRelation` type, //! and use that to decide when one free region outlives another, and so forth. use rustc_data_structures::transitive_relation::TransitiveRelation; use rustc_hir::def_id::DefId; use rustc_middle::ty::{self, Lift, Region, TyCtxt}; /// Combines a `region::ScopeTree` (which governs relationships between /// scopes) and a `FreeRegionMap` (which governs relationships between /// free regions) to yield a complete relation between concrete /// regions. /// /// This stuff is a bit convoluted and should be refactored, but as we /// transition to NLL, it'll all go away anyhow. pub struct RegionRelations<'a, 'tcx> { pub tcx: TyCtxt<'tcx>, /// The context used to fetch the region maps. pub context: DefId, /// Free-region relationships. pub free_regions: &'a FreeRegionMap<'tcx>, } impl<'a, 'tcx> RegionRelations<'a, 'tcx> { pub fn new(tcx: TyCtxt<'tcx>, context: DefId, free_regions: &'a FreeRegionMap<'tcx>) -> Self { Self { tcx, context, free_regions } } pub fn lub_free_regions(&self, r_a: Region<'tcx>, r_b: Region<'tcx>) -> Region<'tcx> { self.free_regions.lub_free_regions(self.tcx, r_a, r_b) } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default, HashStable)] pub struct FreeRegionMap<'tcx> { // Stores the relation `a < b`, where `a` and `b` are regions. // // Invariant: only free regions like `'x` or `'static` are stored // in this relation, not scopes. relation: TransitiveRelation<Region<'tcx>>, } impl<'tcx> FreeRegionMap<'tcx> { pub fn elements(&self) -> impl Iterator<Item = &Region<'tcx>> { self.relation.elements() } pub fn is_empty(&self) -> bool { self.relation.is_empty() } // Record that `'sup:'sub`. Or, put another way, `'sub <= 'sup`. 
// (with the exception that `'static: 'x` is not notable) pub fn relate_regions(&mut self, sub: Region<'tcx>, sup: Region<'tcx>) { debug!("relate_regions(sub={:?}, sup={:?})", sub, sup); if self.is_free_or_static(sub) && self.is_free(sup) { self.relation.add(sub, sup) } } /// Tests whether `r_a <= r_b`. /// /// Both regions must meet `is_free_or_static`. /// /// Subtle: one tricky case that this code gets correct is as /// follows. If we know that `r_b: 'static`, then this function /// will return true, even though we don't know anything that /// directly relates `r_a` and `r_b`. /// /// Also available through the `FreeRegionRelations` trait below. pub fn sub_free_regions( &self, tcx: TyCtxt<'tcx>, r_a: Region<'tcx>, r_b: Region<'tcx>, ) -> bool { assert!(self.is_free_or_static(r_a) && self.is_free_or_static(r_b)); let re_static = tcx.lifetimes.re_static; if self.check_relation(re_static, r_b) { // `'a <= 'static` is always true, and not stored in the // relation explicitly, so check if `'b` is `'static` (or // equivalent to it) true } else { self.check_relation(r_a, r_b) } } /// Check whether `r_a <= r_b` is found in the relation. fn check_relation(&self, r_a: Region<'tcx>, r_b: Region<'tcx>) -> bool { r_a == r_b || self.relation.contains(&r_a, &r_b) } /// True for free regions other than `'static`. pub fn is_free(&self, r: Region<'_>) -> bool { match *r { ty::ReEarlyBound(_) | ty::ReFree(_) => true, _ => false, } } /// True if `r` is a free region or static of the sort that this /// free region map can be used with. pub fn is_free_or_static(&self, r: Region<'_>) -> bool { match *r { ty::ReStatic => true, _ => self.is_free(r), } } /// Computes the least-upper-bound of two free regions. In some /// cases, this is more conservative than necessary, in order to /// avoid making arbitrary choices. See /// `TransitiveRelation::postdom_upper_bound` for more details. 
pub fn lub_free_regions( &self, tcx: TyCtxt<'tcx>, r_a: Region<'tcx>, r_b: Region<'tcx>, ) -> Region<'tcx> { debug!("lub_free_regions(r_a={:?}, r_b={:?})", r_a, r_b); assert!(self.is_free(r_a)); assert!(self.is_free(r_b)); let result = if r_a == r_b { r_a } else { match self.relation.postdom_upper_bound(&r_a, &r_b) { None => tcx.lifetimes.re_static, Some(r) => *r, } }; debug!("lub_free_regions(r_a={:?}, r_b={:?}) = {:?}", r_a, r_b, result); result } } /// The NLL region handling code represents free region relations in a /// slightly different way; this trait allows functions to be abstract /// over which version is in use. pub trait FreeRegionRelations<'tcx> { /// Tests whether `r_a <= r_b`. Both must be free regions or /// `'static`. fn sub_free_regions( &self, tcx: TyCtxt<'tcx>, shorter: ty::Region<'tcx>, longer: ty::Region<'tcx>, ) -> bool; } impl<'tcx> FreeRegionRelations<'tcx> for FreeRegionMap<'tcx> { fn sub_free_regions(&self, tcx: TyCtxt<'tcx>, r_a: Region<'tcx>, r_b: Region<'tcx>) -> bool { // invoke the "inherent method" self.sub_free_regions(tcx, r_a, r_b) } } impl<'a, 'tcx> Lift<'tcx> for FreeRegionMap<'a> { type Lifted = FreeRegionMap<'tcx>; fn lift_to_tcx(&self, tcx: TyCtxt<'tcx>) -> Option<FreeRegionMap<'tcx>> { self.relation.maybe_map(|&fr| tcx.lift(&fr)).map(|relation| FreeRegionMap { relation }) } }
35.670659
98
0.605674
e411f4e5aad641a28a8b4a6fd84bb21a5c695add
632
use crate::models::response::ResponseBody; use actix_web::{ HttpResponse, http::StatusCode, }; pub struct ServiceError { pub http_status: StatusCode, pub body: ResponseBody<String>, } impl ServiceError { pub fn new(http_status: StatusCode, message: String) -> ServiceError { ServiceError { http_status, body: ResponseBody { message, status: "ERROR".to_string(), data: String::new(), } } } pub fn response(&self) -> HttpResponse { HttpResponse::build(self.http_status).json(&self.body) } }
22.571429
74
0.575949
01c68ee275f585cce42b90f00b8a53a1a3df085a
333
//! The main module containing all necessary structures and traits for reading and searching //! a logfile for patterns. #[macro_use] pub mod callback; pub mod archive; pub mod config; pub mod global; pub mod logfiledef; pub mod logsource; pub mod options; pub mod pattern; pub mod script; pub mod search; pub mod tag; pub mod vars;
20.8125
92
0.765766
0ae49d8d40addc34201433019d0e15eb5b9b36d4
3,396
use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::Read; use std::path::Path; use std::slice::SliceIndex; type PolymerTemplate = String; type Rules = HashMap<(char, char), char>; #[derive(Debug, PartialEq, Clone)] struct Manual { template: PolymerTemplate, rules: Rules, } impl Manual { fn new(content: String) -> Self { let mut rules = Rules::new(); let mut split = content.split("\n\n"); let template = split.next().unwrap().to_string(); let rules_str = split.next().unwrap(); for rule in rules_str.lines() { let mut iter = rule.split(" -> "); let src: String = iter.next().unwrap().to_string().parse().unwrap(); let dest = iter.next().unwrap().to_string().parse().unwrap(); rules.insert( (src.chars().nth(0).unwrap(), src.chars().nth(1).unwrap()), dest, ); } Manual { template, rules } } fn step(&mut self) { let mut insertions = HashMap::new(); for i in 0..self.template.len() - 1 { let first = self.template[i..i + 1].chars().nth(0).unwrap(); let second = self.template[i + 1..i + 2].chars().nth(0).unwrap_or('*'); if let Some(dest) = self.rules.get(&(first, second)) { // println!("match found {} {} => {:?}", first, second, dest); // We keep the place and element that should be inserted insertions.insert(i + 1, dest); } } // insertions has positions & elemnts that should be inserted // println!("insertions {:?}", insertions); let capa = self.template.len() + insertions.len(); let mut new_template = String::with_capacity(capa); self.template.chars().enumerate().for_each(|(i, c)| { if insertions.contains_key(&i) { new_template.push(**insertions.get(&i).unwrap()); } new_template.push(c); }); self.template = new_template; } fn frequencies(&self) -> HashMap<char, u32> { let mut freq = HashMap::new(); for ch in self.template.chars() { let counter = freq.entry(ch).or_insert(0); *counter += 1; } freq } } fn main() { // Create a path to the desired file let path = Path::new("input.txt"); let display = path.display(); // Open the path in read-only mode, returns 
`io::Result<File>` let mut file = match File::open(&path) { Err(why) => panic!("couldn't open {}: {}", display, why), Ok(file) => file, }; // Read the file contents into a string, returns `io::Result<usize>` let mut content = String::new(); if let Err(why) = file.read_to_string(&mut content) { panic!("couldn't read {}: {}", display, why) } // Part 1 let mut manual: Manual = Manual::new(content); // println!("Part1 solution is {:?}", manual); for i in 1..=10 { manual.step(); println!("after step {:?}", i); // println!("after step {} template is {:?} ", i, manual.template); } let freq = manual.frequencies(); let max = freq.values().max().unwrap(); let min = freq.values().min().unwrap(); println!("max is {:?}, min is {:?}", max, min); println!("Part1 result is {:?}", *max - *min); }
32.342857
83
0.53828
f86af94cfec0c01a087d721ce8a4444243d70ac7
2,083
// Copyright (c) SimpleStaking, Viable Systems and Tezedge Contributors // SPDX-License-Identifier: MIT #![forbid(unsafe_code)] use rand::{distributions::Alphanumeric, prelude::*, seq::SliceRandom}; use std::iter; use tezos_encoding::binary_reader::BinaryReader; use tezos_encoding::encoding::{Encoding, Field}; use honggfuzz::fuzz; use log::debug; fn main() { loop { const ENCODING_LIFETIME: usize = 100_000; for _ in 0..ENCODING_LIFETIME { let encoding = generate_random_encoding(); fuzz!(|data: &[u8]| { if let Err(e) = BinaryReader::new().read(data, &encoding) { debug!( "BinaryReader produced error for input: {:?}\nError:\n{:?}", data, e ); } }); } } } fn generate_random_encoding() -> Encoding { // 1. start with tuple or object let is_tuple: bool = random(); if is_tuple { let mut fields = Vec::new(); for _ in 0..10 { fields.push(gen_random_member()); } Encoding::Tup(fields) } else { let mut fields = Vec::new(); for _ in 0..10 { fields.push(gen_random_field()); } Encoding::Obj("", fields) } } fn gen_random_member() -> Encoding { use Encoding::*; let mut rng = rand::thread_rng(); let encodings = [ Unit, Int8, Uint8, Int16, Uint16, Int31, Int32, Uint32, Int64, RangedInt, Z, Mutez, Float, RangedFloat, Bool, String, Bytes, Timestamp, // TODO: Add implement for complex sub-types ]; encodings.choose(&mut rng).unwrap().clone() } fn gen_random_field() -> Field { Field::new(&gen_random_name(), gen_random_member()) } fn gen_random_name() -> String { let mut rng = rand::thread_rng(); iter::repeat(()) .map(|_| rng.sample(Alphanumeric)) .take(15) .collect() }
23.670455
84
0.528565
6138da09ed588f9dce52f4e0ed29a6451a103ce2
6,094
// Copyright (c) 2016 The vulkano developers
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. All files in the project carrying such
// notice may not be copied, modified, or distributed except
// according to those terms.

//! Descriptor sets creation and management
//!
//! This module is dedicated to managing descriptor sets. There are three concepts in Vulkan
//! related to descriptor sets:
//!
//! - A `DescriptorSetLayout` is a Vulkan object that describes to the Vulkan implementation the
//!   layout of a future descriptor set. When you allocate a descriptor set, you have to pass an
//!   instance of this object. This is represented with the `UnsafeDescriptorSetLayout` type in
//!   vulkano.
//! - A `DescriptorPool` is a Vulkan object that holds the memory of descriptor sets and that can
//!   be used to allocate and free individual descriptor sets. This is represented with the
//!   `UnsafeDescriptorPool` type in vulkano.
//! - A `DescriptorSet` contains the bindings to resources and is allocated from a pool. This is
//!   represented with the `UnsafeDescriptorSet` type in vulkano.
//!
//! In addition to this, vulkano defines the following:
//!
//! - The `DescriptorPool` trait can be implemented on types from which you can allocate and free
//!   descriptor sets. However it is different from Vulkan descriptor pools in the sense that an
//!   implementation of the `DescriptorPool` trait can manage multiple Vulkan descriptor pools.
//! - The `StdDescriptorPool` type is a default implementation of the `DescriptorPool` trait.
//! - The `DescriptorSet` trait is implemented on types that wrap around Vulkan descriptor sets in
//!   a safe way. A Vulkan descriptor set is inherently unsafe, so we need safe wrappers around
//!   them.
//! - The `SimpleDescriptorSet` type is a default implementation of the `DescriptorSet` trait.
//! - The `DescriptorSetsCollection` trait is implemented on collections of types that implement
//!   `DescriptorSet`. It is what you pass to the draw functions.

use SafeDeref;
use buffer::BufferAccess;
use descriptor::descriptor::DescriptorDesc;
use image::ImageViewAccess;

// Re-exports: the safe, high-level descriptor-set types.
pub use self::collection::DescriptorSetsCollection;
pub use self::fixed_size_pool::FixedSizeDescriptorSet;
pub use self::fixed_size_pool::FixedSizeDescriptorSetBuilder;
pub use self::fixed_size_pool::FixedSizeDescriptorSetBuilderArray;
pub use self::fixed_size_pool::FixedSizeDescriptorSetsPool;
pub use self::persistent::PersistentDescriptorSet;
pub use self::persistent::PersistentDescriptorSetBuf;
pub use self::persistent::PersistentDescriptorSetBufView;
pub use self::persistent::PersistentDescriptorSetBuildError;
pub use self::persistent::PersistentDescriptorSetBuilder;
pub use self::persistent::PersistentDescriptorSetBuilderArray;
pub use self::persistent::PersistentDescriptorSetError;
pub use self::persistent::PersistentDescriptorSetImg;
pub use self::persistent::PersistentDescriptorSetSampler;
pub use self::std_pool::StdDescriptorPool;
pub use self::std_pool::StdDescriptorPoolAlloc;
// Re-exports: the low-level ("unsafe") building blocks.
pub use self::sys::DescriptorPool;
pub use self::sys::DescriptorPoolAlloc;
pub use self::sys::DescriptorPoolAllocError;
pub use self::sys::DescriptorWrite;
pub use self::sys::DescriptorsCount;
pub use self::sys::UnsafeDescriptorPool;
pub use self::sys::UnsafeDescriptorPoolAllocIter;
pub use self::sys::UnsafeDescriptorSet;
pub use self::unsafe_layout::UnsafeDescriptorSetLayout;

pub mod collection;

mod fixed_size_pool;
mod persistent;
mod std_pool;
mod sys;
mod unsafe_layout;

/// Trait for objects that contain a collection of resources that will be accessible by shaders.
///
/// Objects of this type can be passed when submitting a draw command.
///
/// The trait is `unsafe` to implement: implementors must uphold invariants that
/// the draw-submission machinery relies on (not visible in this module —
/// NOTE(review): confirm the exact contract against the trait's consumers).
pub unsafe trait DescriptorSet: DescriptorSetDesc {
    /// Returns the inner `UnsafeDescriptorSet`.
    fn inner(&self) -> &UnsafeDescriptorSet;

    /// Returns the number of buffers within this descriptor set.
    fn num_buffers(&self) -> usize;

    /// Returns the `index`th buffer of this descriptor set, or `None` if out of range. Also
    /// returns the index of the descriptor that uses this buffer.
    ///
    /// The valid range is between 0 and `num_buffers()`.
    fn buffer(&self, index: usize) -> Option<(&dyn BufferAccess, u32)>;

    /// Returns the number of images within this descriptor set.
    fn num_images(&self) -> usize;

    /// Returns the `index`th image of this descriptor set, or `None` if out of range. Also returns
    /// the index of the descriptor that uses this image.
    ///
    /// The valid range is between 0 and `num_images()`.
    fn image(&self, index: usize) -> Option<(&dyn ImageViewAccess, u32)>;
}

// Blanket impl: any pointer-like wrapper (e.g. `Arc<T>`, `Box<T>`, `&T`) whose
// target implements `DescriptorSet` is itself a `DescriptorSet`, forwarding
// every method to the pointee. `SafeDeref` presumably guarantees a stable
// `deref()` target, which keeps the forwarding sound — see its definition.
unsafe impl<T> DescriptorSet for T
    where T: SafeDeref,
          T::Target: DescriptorSet
{
    #[inline]
    fn inner(&self) -> &UnsafeDescriptorSet {
        (**self).inner()
    }

    #[inline]
    fn num_buffers(&self) -> usize {
        (**self).num_buffers()
    }

    #[inline]
    fn buffer(&self, index: usize) -> Option<(&dyn BufferAccess, u32)> {
        (**self).buffer(index)
    }

    #[inline]
    fn num_images(&self) -> usize {
        (**self).num_images()
    }

    #[inline]
    fn image(&self, index: usize) -> Option<(&dyn ImageViewAccess, u32)> {
        (**self).image(index)
    }
}

/// Trait for objects that describe the layout of the descriptors of a set.
pub unsafe trait DescriptorSetDesc {
    /// Returns the number of binding slots in the set.
    fn num_bindings(&self) -> usize;

    /// Returns a description of a descriptor, or `None` if out of range.
    fn descriptor(&self, binding: usize) -> Option<DescriptorDesc>;
}

// Same forwarding blanket impl as above, for the layout-description trait.
unsafe impl<T> DescriptorSetDesc for T
    where T: SafeDeref,
          T::Target: DescriptorSetDesc
{
    #[inline]
    fn num_bindings(&self) -> usize {
        (**self).num_bindings()
    }

    #[inline]
    fn descriptor(&self, binding: usize) -> Option<DescriptorDesc> {
        (**self).descriptor(binding)
    }
}
39.064103
99
0.726124
fed2a2d8fc141438c63683a04f30ed3b3bee4b64
535
use capnp_rpc::{rpc_twoparty_capnp, twoparty, RpcSystem}; use tokio_io::{AsyncRead, AsyncWrite}; pub fn new_rpc_system<Stream>( stream: Stream, bootstrap: Option<::capnp::capability::Client>, ) -> RpcSystem<twoparty::VatId> where Stream: AsyncRead + AsyncWrite + 'static, { let (reader, writer) = stream.split(); let network = Box::new(twoparty::VatNetwork::new( reader, writer, rpc_twoparty_capnp::Side::Client, Default::default(), )); RpcSystem::new(network, bootstrap) }
26.75
57
0.659813
9cfbe361891cd7ee634847b1ee5eb4f8bc456f5f
207
fn main() { use std::env; let key = "HOME"; match env::var(key) { Ok(val) => println!("{}: {:?}", key, val), Err(e) => println!("couldn't interpret {}: {}", key, e), } }
20.7
64
0.425121
8f29119e047f31487736e05f590388c353c56b68
476
// Take a look at the license at the top of the repository in the LICENSE file. use crate::{DNDEvent, EventType}; use std::fmt; define_event! { DNDEvent, ffi::GdkDNDEvent, &[EventType::DragEnter, EventType::DragLeave, EventType::DragMotion, EventType::DropStart] } impl fmt::Debug for DNDEvent { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("DNDEvent") .field("drop", &self.drop()) .finish() } }
25.052632
94
0.634454
f4d6846427246f6c5fd3e8c599ade303c441538d
375
use bytes::BytesMut; #[test] fn bytes_memory() { let mut buffer = BytesMut::new(); buffer.extend_from_slice("hello, world".as_bytes()); let len = buffer.len(); assert_eq!(buffer.capacity(), len); { let _ = buffer.split(); buffer.extend_from_slice("hello, world".as_bytes()); } println!("buffer capacity:{}", buffer.capacity()); }
25
60
0.613333
bfe956794fafb942a37611d2608d5db899d5b0d0
1,760
use rand::{self, Rng}; use crossover::Crossover; #[derive(Copy, Clone)] pub struct PrecedencePreservative; impl PrecedencePreservative { pub fn new() -> Self { PrecedencePreservative } } impl<T> Crossover<T> for PrecedencePreservative where T: Clone + PartialEq { fn parents(&self) -> usize { 2 } fn children(&self) -> usize { 1 } fn cross<U>(&self, parents: &[U]) -> Vec<Vec<T>> where U: AsRef<[T]> { let mut rng = rand::thread_rng(); let (parent1, parent2) = (parents[0].as_ref(), parents[1].as_ref()); let length = parent2.len(); let (mut genes1, mut genes2) = (parent1.into(), parent2.into()); let mut child = Vec::with_capacity(length); for _ in 0..length { if rng.gen_range(0, 2) == 0 { precedence_preservative(&mut genes1, &mut genes2, &mut child); } else { precedence_preservative(&mut genes2, &mut genes1, &mut child); } } vec![child] } } fn precedence_preservative<T>(p0: &mut Vec<T>, p1: &mut Vec<T>, c: &mut Vec<T>) where T: Clone + PartialEq { let gene = p0[0].clone(); p0.retain(|g| *g != gene); p1.retain(|g| *g != gene); c.push(gene); } #[cfg(test)] mod tests { test_crossover_panic!(precedence_preservative_different_length, i32, PrecedencePreservative::new(), parent(8, 4, 7, 3, 6, 2, 5, 1), parent(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)); bench_crossover!(precedence_preservative_bench, i32, PrecedencePreservative::new(), parent(8, 4, 7, 3, 6, 2, 5, 1, 9, 0), parent(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)); }
28.387097
87
0.535795
d9893b0ef8eda8a4c9857c35ff1ea4916606a0eb
1,737
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license. use crate::diagnostics::Diagnostics; use crate::program_state::ProgramState; use crate::source_maps::get_orig_position; use crate::source_maps::CachedMaps; use deno_core::error::AnyError; use deno_core::serde_json; use deno_core::serde_json::json; use deno_core::serde_json::Value; use deno_core::OpState; use deno_core::ZeroCopyBuf; use serde::Deserialize; use std::collections::HashMap; use std::sync::Arc; pub fn init(rt: &mut deno_core::JsRuntime) { super::reg_json_sync(rt, "op_apply_source_map", op_apply_source_map); super::reg_json_sync(rt, "op_format_diagnostic", op_format_diagnostic); } #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct ApplySourceMap { file_name: String, line_number: i32, column_number: i32, } fn op_apply_source_map( state: &mut OpState, args: Value, _zero_copy: &mut [ZeroCopyBuf], ) -> Result<Value, AnyError> { let args: ApplySourceMap = serde_json::from_value(args)?; let mut mappings_map: CachedMaps = HashMap::new(); let program_state = state.borrow::<Arc<ProgramState>>().clone(); let (orig_file_name, orig_line_number, orig_column_number) = get_orig_position( args.file_name, args.line_number.into(), args.column_number.into(), &mut mappings_map, program_state, ); Ok(json!({ "fileName": orig_file_name, "lineNumber": orig_line_number as u32, "columnNumber": orig_column_number as u32, })) } fn op_format_diagnostic( _state: &mut OpState, args: Value, _zero_copy: &mut [ZeroCopyBuf], ) -> Result<Value, AnyError> { let diagnostic: Diagnostics = serde_json::from_value(args)?; Ok(json!(diagnostic.to_string())) }
27.140625
74
0.727691
71dc02922861370311a28f105022699eca6c1b11
770
// SPDX-License-Identifier: MIT OR Apache-2.0 // // Copyright (c) 2018-2020 Andre Richter <[email protected]> //! Memory Management. use core::ops::Range; //-------------------------------------------------------------------------------------------------- // Public Code //-------------------------------------------------------------------------------------------------- /// Zero out a memory region. /// /// # Safety /// /// - `range.start` and `range.end` must be valid. /// - `range.start` and `range.end` must be `T` aligned. pub unsafe fn zero_volatile<T>(range: Range<*mut T>) where T: From<u8>, { let mut ptr = range.start; while ptr < range.end { core::ptr::write_volatile(ptr, T::from(0)); ptr = ptr.offset(1); } }
25.666667
100
0.451948
d5a6b36f1fcf90329f69cf7d16726be3f1db3f34
7,207
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Debuginfo test: checks that variables introduced through every form of
// destructuring `let` (tuples, struct patterns, references, boxes, tuple
// structs, univariant enums, ref bindings) are visible with the expected
// values in both GDB and LLDB at the `#break` point near the end of `main`.
// The `gdb-command`/`gdb-check` and `lldb-command`/`lldb-check` comment lines
// below are directives consumed by the compiletest harness — do not edit them.

// ignore-android: FIXME(#10381)
// min-lldb-version: 310

// compile-flags:-g

// === GDB TESTS ===================================================================================

// gdb-command:run

// gdb-command:print a
// gdb-check:$1 = 1
// gdb-command:print b
// gdb-check:$2 = false

// gdb-command:print c
// gdb-check:$3 = 2
// gdb-command:print d
// gdb-check:$4 = 3
// gdb-command:print e
// gdb-check:$5 = 4

// gdb-command:print f
// gdb-check:$6 = 5
// gdb-command:print g
// gdb-check:$7 = {6, 7}

// gdb-command:print h
// gdb-check:$8 = 8
// gdb-command:print i
// gdb-check:$9 = {a = 9, b = 10}
// gdb-command:print j
// gdb-check:$10 = 11

// gdb-command:print k
// gdb-check:$11 = 12
// gdb-command:print l
// gdb-check:$12 = 13

// gdb-command:print m
// gdb-check:$13 = 14
// gdb-command:print n
// gdb-check:$14 = 16

// gdb-command:print o
// gdb-check:$15 = 18

// gdb-command:print p
// gdb-check:$16 = 19
// gdb-command:print q
// gdb-check:$17 = 20
// gdb-command:print r
// gdb-check:$18 = {a = 21, b = 22}

// gdb-command:print s
// gdb-check:$19 = 24
// gdb-command:print t
// gdb-check:$20 = 23

// gdb-command:print u
// gdb-check:$21 = 25
// gdb-command:print v
// gdb-check:$22 = 26
// gdb-command:print w
// gdb-check:$23 = 27
// gdb-command:print x
// gdb-check:$24 = 28
// gdb-command:print y
// gdb-check:$25 = 29
// gdb-command:print z
// gdb-check:$26 = 30
// gdb-command:print ae
// gdb-check:$27 = 31
// gdb-command:print oe
// gdb-check:$28 = 32
// gdb-command:print ue
// gdb-check:$29 = 33

// gdb-command:print aa
// gdb-check:$30 = {34, 35}

// gdb-command:print bb
// gdb-check:$31 = {36, 37}

// gdb-command:print cc
// gdb-check:$32 = 38

// gdb-command:print dd
// gdb-check:$33 = {40, 41, 42}

// gdb-command:print *ee
// gdb-check:$34 = {43, 44, 45}

// gdb-command:print *ff
// gdb-check:$35 = 46
// gdb-command:print gg
// gdb-check:$36 = {47, 48}

// gdb-command:print *hh
// gdb-check:$37 = 50

// gdb-command:print ii
// gdb-check:$38 = 51

// gdb-command:print *jj
// gdb-check:$39 = 52

// gdb-command:print kk
// gdb-check:$40 = 53
// gdb-command:print ll
// gdb-check:$41 = 54

// gdb-command:print mm
// gdb-check:$42 = 55
// gdb-command:print *nn
// gdb-check:$43 = 56


// === LLDB TESTS ==================================================================================

// lldb-command:run

// lldb-command:print a
// lldb-check:[...]$0 = 1
// lldb-command:print b
// lldb-check:[...]$1 = false

// lldb-command:print c
// lldb-check:[...]$2 = 2
// lldb-command:print d
// lldb-check:[...]$3 = 3
// lldb-command:print e
// lldb-check:[...]$4 = 4

// lldb-command:print f
// lldb-check:[...]$5 = 5
// lldb-command:print g
// lldb-check:[...]$6 = (6, 7)

// lldb-command:print h
// lldb-check:[...]$7 = 8
// lldb-command:print i
// lldb-check:[...]$8 = Struct { a: 9, b: 10 }
// lldb-command:print j
// lldb-check:[...]$9 = 11

// lldb-command:print k
// lldb-check:[...]$10 = 12
// lldb-command:print l
// lldb-check:[...]$11 = 13

// lldb-command:print m
// lldb-check:[...]$12 = 14
// lldb-command:print n
// lldb-check:[...]$13 = 16

// lldb-command:print o
// lldb-check:[...]$14 = 18

// lldb-command:print p
// lldb-check:[...]$15 = 19
// lldb-command:print q
// lldb-check:[...]$16 = 20
// lldb-command:print r
// lldb-check:[...]$17 = Struct { a: 21, b: 22 }

// lldb-command:print s
// lldb-check:[...]$18 = 24
// lldb-command:print t
// lldb-check:[...]$19 = 23

// lldb-command:print u
// lldb-check:[...]$20 = 25
// lldb-command:print v
// lldb-check:[...]$21 = 26
// lldb-command:print w
// lldb-check:[...]$22 = 27
// lldb-command:print x
// lldb-check:[...]$23 = 28
// lldb-command:print y
// lldb-check:[...]$24 = 29
// lldb-command:print z
// lldb-check:[...]$25 = 30
// lldb-command:print ae
// lldb-check:[...]$26 = 31
// lldb-command:print oe
// lldb-check:[...]$27 = 32
// lldb-command:print ue
// lldb-check:[...]$28 = 33

// lldb-command:print aa
// lldb-check:[...]$29 = (34, 35)

// lldb-command:print bb
// lldb-check:[...]$30 = (36, 37)

// lldb-command:print cc
// lldb-check:[...]$31 = 38

// lldb-command:print dd
// lldb-check:[...]$32 = (40, 41, 42)

// lldb-command:print *ee
// lldb-check:[...]$33 = (43, 44, 45)

// lldb-command:print *ff
// lldb-check:[...]$34 = 46
// lldb-command:print gg
// lldb-check:[...]$35 = (47, 48)

// lldb-command:print *hh
// lldb-check:[...]$36 = 50

// lldb-command:print ii
// lldb-check:[...]$37 = 51

// lldb-command:print *jj
// lldb-check:[...]$38 = 52

// lldb-command:print kk
// lldb-check:[...]$39 = 53
// lldb-command:print ll
// lldb-check:[...]$40 = 54

// lldb-command:print mm
// lldb-check:[...]$41 = 55
// lldb-command:print *nn
// lldb-check:[...]$42 = 56

// NOTE(review): this file uses pre-1.0 Rust syntax (`int`, `14i` suffixes,
// `box() (...)`) and must be compiled by the matching historical compiler.
#![allow(unused_variables)]
#![omit_gdb_pretty_printer_section]

use self::Univariant::Unit;

struct Struct {
    a: i64,
    b: i32
}

enum Univariant {
    Unit(i32)
}

struct TupleStruct (f64, int);


fn main() {
    // simple tuple
    let (a, b) : (int, bool) = (1, false);

    // nested tuple
    let (c, (d, e)) : (int, (u16, u16)) = (2, (3, 4));

    // bind tuple-typed value to one name (destructure only first level)
    let (f, g) : (int, (u32, u32)) = (5, (6, 7));

    // struct as tuple element
    let (h, i, j) : (i16, Struct, i16) = (8, Struct { a: 9, b: 10 }, 11);

    // struct pattern
    let Struct { a: k, b: l } = Struct { a: 12, b: 13 };

    // ignored tuple element
    let (m, _, n) = (14i, 15i, 16i);

    // ignored struct field
    let Struct { b: o, .. } = Struct { a: 17, b: 18 };

    // one struct destructured, one not
    let (Struct { a: p, b: q }, r) = (Struct { a: 19, b: 20 }, Struct { a: 21, b: 22 });

    // different order of struct fields
    let Struct { b: s, a: t } = Struct { a: 23, b: 24 };

    // complex nesting
    let ((u, v), ((w, (x, Struct { a: y, b: z})), Struct { a: ae, b: oe }), ue) =
        ((25i, 26i), ((27i, (28i, Struct { a: 29, b: 30})), Struct { a: 31, b: 32 }), 33i);

    // reference
    let &aa = &(34i, 35i);

    // reference
    let &bb = &(36i, 37i);

    // contained reference
    let (&cc, _) = (&38i, 39i);

    // unique pointer
    let box dd = box() (40i, 41i, 42i);

    // ref binding
    let ref ee = (43i, 44i, 45i);

    // ref binding in tuple
    let (ref ff, gg) = (46i, (47i, 48i));

    // ref binding in struct
    let Struct { b: ref hh, .. } = Struct { a: 49, b: 50 };

    // univariant enum
    let Unit(ii) = Unit(51);

    // univariant enum with ref binding
    let &Unit(ref jj) = &Unit(52);

    // tuple struct
    let &TupleStruct(kk, ll) = &TupleStruct(53.0, 54);

    // tuple struct with ref binding
    let &TupleStruct(mm, ref nn) = &TupleStruct(55.0, 56);

    zzz(); // #break
}

fn zzz() { () }
21.707831
100
0.561954
d96f055fe324709d49ca8295f64d472e7a3a4033
759
use std::vec::Vec; use std::collections::HashSet; use std::hash::Hash; use std::cmp::Eq; fn main() { let mut sample_elements = vec![0u8, 0, 1, 1, 2, 3, 2]; println!("Before removal of duplicates : {:?}", sample_elements); remove_duplicate_elements(&mut sample_elements); println!("After removal of duplicates : {:?}", sample_elements); } fn remove_duplicate_elements<T: Hash + Eq>(elements: &mut Vec<T>) { let set: HashSet<_> = elements.drain(..).collect(); elements.extend(set.into_iter()); } #[test] fn test_remove_duplicate_elements() { let mut sample_elements = vec![0u8, 0, 1, 1, 2, 3, 2]; remove_duplicate_elements(&mut sample_elements); sample_elements.sort(); assert_eq!(sample_elements, vec![0, 1, 2, 3]) }
30.36
69
0.667984
2219353b055ad9dbe069c26e9af0e28155003434
38,197
#![stable(feature = "duration_core", since = "1.25.0")] //! Temporal quantification. //! //! Example: //! //! ``` //! use std::time::Duration; //! //! let five_seconds = Duration::new(5, 0); //! // both declarations are equivalent //! assert_eq!(Duration::new(5, 0), Duration::from_secs(5)); //! ``` use crate::fmt; use crate::iter::Sum; use crate::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Sub, SubAssign}; const NANOS_PER_SEC: u32 = 1_000_000_000; const NANOS_PER_MILLI: u32 = 1_000_000; const NANOS_PER_MICRO: u32 = 1_000; const MILLIS_PER_SEC: u64 = 1_000; const MICROS_PER_SEC: u64 = 1_000_000; /// A `Duration` type to represent a span of time, typically used for system /// timeouts. /// /// Each `Duration` is composed of a whole number of seconds and a fractional part /// represented in nanoseconds. If the underlying system does not support /// nanosecond-level precision, APIs binding a system timeout will typically round up /// the number of nanoseconds. /// /// [`Duration`]s implement many common traits, including [`Add`], [`Sub`], and other /// [`ops`] traits. It implements [`Default`] by returning a zero-length `Duration`. /// /// [`ops`]: crate::ops /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let five_seconds = Duration::new(5, 0); /// let five_seconds_and_five_nanos = five_seconds + Duration::new(0, 5); /// /// assert_eq!(five_seconds_and_five_nanos.as_secs(), 5); /// assert_eq!(five_seconds_and_five_nanos.subsec_nanos(), 5); /// /// let ten_millis = Duration::from_millis(10); /// ``` /// /// # Formatting `Duration` values /// /// `Duration` intentionally does not have a `Display` impl, as there are a /// variety of ways to format spans of time for human readability. `Duration` /// provides a `Debug` impl that shows the full precision of the value. /// /// The `Debug` output uses the non-ASCII "µs" suffix for microseconds. 
If your /// program output may appear in contexts that cannot rely on full Unicode /// compatibility, you may wish to format `Duration` objects yourself or use a /// crate to do so. #[stable(feature = "duration", since = "1.3.0")] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] pub struct Duration { secs: u64, nanos: u32, // Always 0 <= nanos < NANOS_PER_SEC } impl Duration { /// The duration of one second. /// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::SECOND, Duration::from_secs(1)); /// ``` #[unstable(feature = "duration_constants", issue = "57391")] pub const SECOND: Duration = Duration::from_secs(1); /// The duration of one millisecond. /// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::MILLISECOND, Duration::from_millis(1)); /// ``` #[unstable(feature = "duration_constants", issue = "57391")] pub const MILLISECOND: Duration = Duration::from_millis(1); /// The duration of one microsecond. /// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::MICROSECOND, Duration::from_micros(1)); /// ``` #[unstable(feature = "duration_constants", issue = "57391")] pub const MICROSECOND: Duration = Duration::from_micros(1); /// The duration of one nanosecond. /// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::NANOSECOND, Duration::from_nanos(1)); /// ``` #[unstable(feature = "duration_constants", issue = "57391")] pub const NANOSECOND: Duration = Duration::from_nanos(1); /// A duration of zero time. 
/// /// # Examples /// /// ``` /// #![feature(duration_zero)] /// use std::time::Duration; /// /// let duration = Duration::ZERO; /// assert!(duration.is_zero()); /// assert_eq!(duration.as_nanos(), 0); /// ``` #[unstable(feature = "duration_zero", issue = "73544")] pub const ZERO: Duration = Duration::from_nanos(0); /// The maximum duration. /// /// It is roughly equal to a duration of 584,942,417,355 years. /// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::MAX, Duration::new(u64::MAX, 1_000_000_000 - 1)); /// ``` #[unstable(feature = "duration_constants", issue = "57391")] pub const MAX: Duration = Duration::new(u64::MAX, NANOS_PER_SEC - 1); /// Creates a new `Duration` from the specified number of whole seconds and /// additional nanoseconds. /// /// If the number of nanoseconds is greater than 1 billion (the number of /// nanoseconds in a second), then it will carry over into the seconds provided. /// /// # Panics /// /// This constructor will panic if the carry from the nanoseconds overflows /// the seconds counter. /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let five_seconds = Duration::new(5, 0); /// ``` #[stable(feature = "duration", since = "1.3.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn new(secs: u64, nanos: u32) -> Duration { let secs = match secs.checked_add((nanos / NANOS_PER_SEC) as u64) { Some(secs) => secs, None => panic!("overflow in Duration::new"), }; let nanos = nanos % NANOS_PER_SEC; Duration { secs, nanos } } /// Creates a new `Duration` from the specified number of whole seconds. 
/// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_secs(5); /// /// assert_eq!(5, duration.as_secs()); /// assert_eq!(0, duration.subsec_nanos()); /// ``` #[stable(feature = "duration", since = "1.3.0")] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] pub const fn from_secs(secs: u64) -> Duration { Duration { secs, nanos: 0 } } /// Creates a new `Duration` from the specified number of milliseconds. /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_millis(2569); /// /// assert_eq!(2, duration.as_secs()); /// assert_eq!(569_000_000, duration.subsec_nanos()); /// ``` #[stable(feature = "duration", since = "1.3.0")] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] pub const fn from_millis(millis: u64) -> Duration { Duration { secs: millis / MILLIS_PER_SEC, nanos: ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI, } } /// Creates a new `Duration` from the specified number of microseconds. /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_micros(1_000_002); /// /// assert_eq!(1, duration.as_secs()); /// assert_eq!(2000, duration.subsec_nanos()); /// ``` #[stable(feature = "duration_from_micros", since = "1.27.0")] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] pub const fn from_micros(micros: u64) -> Duration { Duration { secs: micros / MICROS_PER_SEC, nanos: ((micros % MICROS_PER_SEC) as u32) * NANOS_PER_MICRO, } } /// Creates a new `Duration` from the specified number of nanoseconds. 
/// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_nanos(1_000_000_123); /// /// assert_eq!(1, duration.as_secs()); /// assert_eq!(123, duration.subsec_nanos()); /// ``` #[stable(feature = "duration_extras", since = "1.27.0")] #[inline] #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")] pub const fn from_nanos(nanos: u64) -> Duration { Duration { secs: nanos / (NANOS_PER_SEC as u64), nanos: (nanos % (NANOS_PER_SEC as u64)) as u32, } } /// Returns true if this `Duration` spans no time. /// /// # Examples /// /// ``` /// #![feature(duration_zero)] /// use std::time::Duration; /// /// assert!(Duration::ZERO.is_zero()); /// assert!(Duration::new(0, 0).is_zero()); /// assert!(Duration::from_nanos(0).is_zero()); /// assert!(Duration::from_secs(0).is_zero()); /// /// assert!(!Duration::new(1, 1).is_zero()); /// assert!(!Duration::from_nanos(1).is_zero()); /// assert!(!Duration::from_secs(1).is_zero()); /// ``` #[unstable(feature = "duration_zero", issue = "73544")] #[inline] pub const fn is_zero(&self) -> bool { self.secs == 0 && self.nanos == 0 } /// Returns the number of _whole_ seconds contained by this `Duration`. /// /// The returned value does not include the fractional (nanosecond) part of the /// duration, which can be obtained using [`subsec_nanos`]. 
/// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::new(5, 730023852); /// assert_eq!(duration.as_secs(), 5); /// ``` /// /// To determine the total number of seconds represented by the `Duration`, /// use `as_secs` in combination with [`subsec_nanos`]: /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::new(5, 730023852); /// /// assert_eq!(5.730023852, /// duration.as_secs() as f64 /// + duration.subsec_nanos() as f64 * 1e-9); /// ``` /// /// [`subsec_nanos`]: Duration::subsec_nanos #[stable(feature = "duration", since = "1.3.0")] #[rustc_const_stable(feature = "duration", since = "1.32.0")] #[inline] pub const fn as_secs(&self) -> u64 { self.secs } /// Returns the fractional part of this `Duration`, in whole milliseconds. /// /// This method does **not** return the length of the duration when /// represented by milliseconds. The returned number always represents a /// fractional portion of a second (i.e., it is less than one thousand). /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_millis(5432); /// assert_eq!(duration.as_secs(), 5); /// assert_eq!(duration.subsec_millis(), 432); /// ``` #[stable(feature = "duration_extras", since = "1.27.0")] #[rustc_const_stable(feature = "duration_extras", since = "1.32.0")] #[inline] pub const fn subsec_millis(&self) -> u32 { self.nanos / NANOS_PER_MILLI } /// Returns the fractional part of this `Duration`, in whole microseconds. /// /// This method does **not** return the length of the duration when /// represented by microseconds. The returned number always represents a /// fractional portion of a second (i.e., it is less than one million). 
/// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_micros(1_234_567); /// assert_eq!(duration.as_secs(), 1); /// assert_eq!(duration.subsec_micros(), 234_567); /// ``` #[stable(feature = "duration_extras", since = "1.27.0")] #[rustc_const_stable(feature = "duration_extras", since = "1.32.0")] #[inline] pub const fn subsec_micros(&self) -> u32 { self.nanos / NANOS_PER_MICRO } /// Returns the fractional part of this `Duration`, in nanoseconds. /// /// This method does **not** return the length of the duration when /// represented by nanoseconds. The returned number always represents a /// fractional portion of a second (i.e., it is less than one billion). /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::from_millis(5010); /// assert_eq!(duration.as_secs(), 5); /// assert_eq!(duration.subsec_nanos(), 10_000_000); /// ``` #[stable(feature = "duration", since = "1.3.0")] #[rustc_const_stable(feature = "duration", since = "1.32.0")] #[inline] pub const fn subsec_nanos(&self) -> u32 { self.nanos } /// Returns the total number of whole milliseconds contained by this `Duration`. /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::new(5, 730023852); /// assert_eq!(duration.as_millis(), 5730); /// ``` #[stable(feature = "duration_as_u128", since = "1.33.0")] #[rustc_const_stable(feature = "duration_as_u128", since = "1.33.0")] #[inline] pub const fn as_millis(&self) -> u128 { self.secs as u128 * MILLIS_PER_SEC as u128 + (self.nanos / NANOS_PER_MILLI) as u128 } /// Returns the total number of whole microseconds contained by this `Duration`. 
/// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::new(5, 730023852); /// assert_eq!(duration.as_micros(), 5730023); /// ``` #[stable(feature = "duration_as_u128", since = "1.33.0")] #[rustc_const_stable(feature = "duration_as_u128", since = "1.33.0")] #[inline] pub const fn as_micros(&self) -> u128 { self.secs as u128 * MICROS_PER_SEC as u128 + (self.nanos / NANOS_PER_MICRO) as u128 } /// Returns the total number of nanoseconds contained by this `Duration`. /// /// # Examples /// /// ``` /// use std::time::Duration; /// /// let duration = Duration::new(5, 730023852); /// assert_eq!(duration.as_nanos(), 5730023852); /// ``` #[stable(feature = "duration_as_u128", since = "1.33.0")] #[rustc_const_stable(feature = "duration_as_u128", since = "1.33.0")] #[inline] pub const fn as_nanos(&self) -> u128 { self.secs as u128 * NANOS_PER_SEC as u128 + self.nanos as u128 } /// Checked `Duration` addition. Computes `self + other`, returning [`None`] /// if overflow occurred. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)), Some(Duration::new(0, 1))); /// assert_eq!(Duration::new(1, 0).checked_add(Duration::new(u64::MAX, 0)), None); /// ``` #[stable(feature = "duration_checked_ops", since = "1.16.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn checked_add(self, rhs: Duration) -> Option<Duration> { if let Some(mut secs) = self.secs.checked_add(rhs.secs) { let mut nanos = self.nanos + rhs.nanos; if nanos >= NANOS_PER_SEC { nanos -= NANOS_PER_SEC; if let Some(new_secs) = secs.checked_add(1) { secs = new_secs; } else { return None; } } debug_assert!(nanos < NANOS_PER_SEC); Some(Duration { secs, nanos }) } else { None } } /// Saturating `Duration` addition. Computes `self + other`, returning [`Duration::MAX`] /// if overflow occurred. 
/// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 0).saturating_add(Duration::new(0, 1)), Duration::new(0, 1)); /// assert_eq!(Duration::new(1, 0).saturating_add(Duration::new(u64::MAX, 0)), Duration::MAX); /// ``` #[stable(feature = "duration_saturating_ops", since = "1.53.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn saturating_add(self, rhs: Duration) -> Duration { match self.checked_add(rhs) { Some(res) => res, None => Duration::MAX, } } /// Checked `Duration` subtraction. Computes `self - other`, returning [`None`] /// if the result would be negative or if overflow occurred. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 1).checked_sub(Duration::new(0, 0)), Some(Duration::new(0, 1))); /// assert_eq!(Duration::new(0, 0).checked_sub(Duration::new(0, 1)), None); /// ``` #[stable(feature = "duration_checked_ops", since = "1.16.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn checked_sub(self, rhs: Duration) -> Option<Duration> { if let Some(mut secs) = self.secs.checked_sub(rhs.secs) { let nanos = if self.nanos >= rhs.nanos { self.nanos - rhs.nanos } else { if let Some(sub_secs) = secs.checked_sub(1) { secs = sub_secs; self.nanos + NANOS_PER_SEC - rhs.nanos } else { return None; } }; debug_assert!(nanos < NANOS_PER_SEC); Some(Duration { secs, nanos }) } else { None } } /// Saturating `Duration` subtraction. Computes `self - other`, returning [`Duration::ZERO`] /// if the result would be negative or if overflow occurred. 
/// /// # Examples /// /// ``` /// #![feature(duration_zero)] /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 1).saturating_sub(Duration::new(0, 0)), Duration::new(0, 1)); /// assert_eq!(Duration::new(0, 0).saturating_sub(Duration::new(0, 1)), Duration::ZERO); /// ``` #[stable(feature = "duration_saturating_ops", since = "1.53.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn saturating_sub(self, rhs: Duration) -> Duration { match self.checked_sub(rhs) { Some(res) => res, None => Duration::ZERO, } } /// Checked `Duration` multiplication. Computes `self * other`, returning /// [`None`] if overflow occurred. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 500_000_001).checked_mul(2), Some(Duration::new(1, 2))); /// assert_eq!(Duration::new(u64::MAX - 1, 0).checked_mul(2), None); /// ``` #[stable(feature = "duration_checked_ops", since = "1.16.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn checked_mul(self, rhs: u32) -> Option<Duration> { // Multiply nanoseconds as u64, because it cannot overflow that way. let total_nanos = self.nanos as u64 * rhs as u64; let extra_secs = total_nanos / (NANOS_PER_SEC as u64); let nanos = (total_nanos % (NANOS_PER_SEC as u64)) as u32; if let Some(s) = self.secs.checked_mul(rhs as u64) { if let Some(secs) = s.checked_add(extra_secs) { debug_assert!(nanos < NANOS_PER_SEC); return Some(Duration { secs, nanos }); } } None } /// Saturating `Duration` multiplication. Computes `self * other`, returning /// [`Duration::MAX`] if overflow occurred. 
/// /// # Examples /// /// ``` /// #![feature(duration_constants)] /// use std::time::Duration; /// /// assert_eq!(Duration::new(0, 500_000_001).saturating_mul(2), Duration::new(1, 2)); /// assert_eq!(Duration::new(u64::MAX - 1, 0).saturating_mul(2), Duration::MAX); /// ``` #[stable(feature = "duration_saturating_ops", since = "1.53.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn saturating_mul(self, rhs: u32) -> Duration { match self.checked_mul(rhs) { Some(res) => res, None => Duration::MAX, } } /// Checked `Duration` division. Computes `self / other`, returning [`None`] /// if `other == 0`. /// /// # Examples /// /// Basic usage: /// /// ``` /// use std::time::Duration; /// /// assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0))); /// assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000))); /// assert_eq!(Duration::new(2, 0).checked_div(0), None); /// ``` #[stable(feature = "duration_checked_ops", since = "1.16.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn checked_div(self, rhs: u32) -> Option<Duration> { if rhs != 0 { let secs = self.secs / (rhs as u64); let carry = self.secs - secs * (rhs as u64); let extra_nanos = carry * (NANOS_PER_SEC as u64) / (rhs as u64); let nanos = self.nanos / rhs + (extra_nanos as u32); debug_assert!(nanos < NANOS_PER_SEC); Some(Duration { secs, nanos }) } else { None } } /// Returns the number of seconds contained by this `Duration` as `f64`. /// /// The returned value does include the fractional (nanosecond) part of the duration. 
/// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// assert_eq!(dur.as_secs_f64(), 2.7); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn as_secs_f64(&self) -> f64 { (self.secs as f64) + (self.nanos as f64) / (NANOS_PER_SEC as f64) } /// Returns the number of seconds contained by this `Duration` as `f32`. /// /// The returned value does include the fractional (nanosecond) part of the duration. /// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// assert_eq!(dur.as_secs_f32(), 2.7); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn as_secs_f32(&self) -> f32 { (self.secs as f32) + (self.nanos as f32) / (NANOS_PER_SEC as f32) } /// Creates a new `Duration` from the specified number of seconds represented /// as `f64`. /// /// # Panics /// This constructor will panic if `secs` is not finite, negative or overflows `Duration`. 
/// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::from_secs_f64(2.7); /// assert_eq!(dur, Duration::new(2, 700_000_000)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn from_secs_f64(secs: f64) -> Duration { const MAX_NANOS_F64: f64 = ((u64::MAX as u128 + 1) * (NANOS_PER_SEC as u128)) as f64; let nanos = secs * (NANOS_PER_SEC as f64); if !nanos.is_finite() { panic!("got non-finite value when converting float to duration"); } if nanos >= MAX_NANOS_F64 { panic!("overflow when converting float to duration"); } if nanos < 0.0 { panic!("underflow when converting float to duration"); } let nanos = nanos as u128; Duration { secs: (nanos / (NANOS_PER_SEC as u128)) as u64, nanos: (nanos % (NANOS_PER_SEC as u128)) as u32, } } /// Creates a new `Duration` from the specified number of seconds represented /// as `f32`. /// /// # Panics /// This constructor will panic if `secs` is not finite, negative or overflows `Duration`. /// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::from_secs_f32(2.7); /// assert_eq!(dur, Duration::new(2, 700_000_000)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn from_secs_f32(secs: f32) -> Duration { const MAX_NANOS_F32: f32 = ((u64::MAX as u128 + 1) * (NANOS_PER_SEC as u128)) as f32; let nanos = secs * (NANOS_PER_SEC as f32); if !nanos.is_finite() { panic!("got non-finite value when converting float to duration"); } if nanos >= MAX_NANOS_F32 { panic!("overflow when converting float to duration"); } if nanos < 0.0 { panic!("underflow when converting float to duration"); } let nanos = nanos as u128; Duration { secs: (nanos / (NANOS_PER_SEC as u128)) as u64, nanos: (nanos % (NANOS_PER_SEC as u128)) as u32, } } /// Multiplies `Duration` by `f64`. 
/// /// # Panics /// This method will panic if result is not finite, negative or overflows `Duration`. /// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// assert_eq!(dur.mul_f64(3.14), Duration::new(8, 478_000_000)); /// assert_eq!(dur.mul_f64(3.14e5), Duration::new(847_800, 0)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn mul_f64(self, rhs: f64) -> Duration { Duration::from_secs_f64(rhs * self.as_secs_f64()) } /// Multiplies `Duration` by `f32`. /// /// # Panics /// This method will panic if result is not finite, negative or overflows `Duration`. /// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// // note that due to rounding errors result is slightly different /// // from 8.478 and 847800.0 /// assert_eq!(dur.mul_f32(3.14), Duration::new(8, 478_000_640)); /// assert_eq!(dur.mul_f32(3.14e5), Duration::new(847799, 969_120_256)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn mul_f32(self, rhs: f32) -> Duration { Duration::from_secs_f32(rhs * self.as_secs_f32()) } /// Divide `Duration` by `f64`. /// /// # Panics /// This method will panic if result is not finite, negative or overflows `Duration`. 
/// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// assert_eq!(dur.div_f64(3.14), Duration::new(0, 859_872_611)); /// // note that truncation is used, not rounding /// assert_eq!(dur.div_f64(3.14e5), Duration::new(0, 8_598)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn div_f64(self, rhs: f64) -> Duration { Duration::from_secs_f64(self.as_secs_f64() / rhs) } /// Divide `Duration` by `f32`. /// /// # Panics /// This method will panic if result is not finite, negative or overflows `Duration`. /// /// # Examples /// ``` /// use std::time::Duration; /// /// let dur = Duration::new(2, 700_000_000); /// // note that due to rounding errors result is slightly /// // different from 0.859_872_611 /// assert_eq!(dur.div_f32(3.14), Duration::new(0, 859_872_576)); /// // note that truncation is used, not rounding /// assert_eq!(dur.div_f32(3.14e5), Duration::new(0, 8_598)); /// ``` #[stable(feature = "duration_float", since = "1.38.0")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn div_f32(self, rhs: f32) -> Duration { Duration::from_secs_f32(self.as_secs_f32() / rhs) } /// Divide `Duration` by `Duration` and return `f64`. /// /// # Examples /// ``` /// #![feature(div_duration)] /// use std::time::Duration; /// /// let dur1 = Duration::new(2, 700_000_000); /// let dur2 = Duration::new(5, 400_000_000); /// assert_eq!(dur1.div_duration_f64(dur2), 0.5); /// ``` #[unstable(feature = "div_duration", issue = "63139")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn div_duration_f64(self, rhs: Duration) -> f64 { self.as_secs_f64() / rhs.as_secs_f64() } /// Divide `Duration` by `Duration` and return `f32`. 
/// /// # Examples /// ``` /// #![feature(div_duration)] /// use std::time::Duration; /// /// let dur1 = Duration::new(2, 700_000_000); /// let dur2 = Duration::new(5, 400_000_000); /// assert_eq!(dur1.div_duration_f32(dur2), 0.5); /// ``` #[unstable(feature = "div_duration", issue = "63139")] #[inline] #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")] pub const fn div_duration_f32(self, rhs: Duration) -> f32 { self.as_secs_f32() / rhs.as_secs_f32() } } #[stable(feature = "duration", since = "1.3.0")] impl Add for Duration { type Output = Duration; fn add(self, rhs: Duration) -> Duration { self.checked_add(rhs).expect("overflow when adding durations") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl AddAssign for Duration { fn add_assign(&mut self, rhs: Duration) { *self = *self + rhs; } } #[stable(feature = "duration", since = "1.3.0")] impl Sub for Duration { type Output = Duration; fn sub(self, rhs: Duration) -> Duration { self.checked_sub(rhs).expect("overflow when subtracting durations") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl SubAssign for Duration { fn sub_assign(&mut self, rhs: Duration) { *self = *self - rhs; } } #[stable(feature = "duration", since = "1.3.0")] impl Mul<u32> for Duration { type Output = Duration; fn mul(self, rhs: u32) -> Duration { self.checked_mul(rhs).expect("overflow when multiplying duration by scalar") } } #[stable(feature = "symmetric_u32_duration_mul", since = "1.31.0")] impl Mul<Duration> for u32 { type Output = Duration; fn mul(self, rhs: Duration) -> Duration { rhs * self } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl MulAssign<u32> for Duration { fn mul_assign(&mut self, rhs: u32) { *self = *self * rhs; } } #[stable(feature = "duration", since = "1.3.0")] impl Div<u32> for Duration { type Output = Duration; fn div(self, rhs: u32) -> Duration { self.checked_div(rhs).expect("divide by zero error when dividing duration by 
scalar") } } #[stable(feature = "time_augmented_assignment", since = "1.9.0")] impl DivAssign<u32> for Duration { fn div_assign(&mut self, rhs: u32) { *self = *self / rhs; } } macro_rules! sum_durations { ($iter:expr) => {{ let mut total_secs: u64 = 0; let mut total_nanos: u64 = 0; for entry in $iter { total_secs = total_secs.checked_add(entry.secs).expect("overflow in iter::sum over durations"); total_nanos = match total_nanos.checked_add(entry.nanos as u64) { Some(n) => n, None => { total_secs = total_secs .checked_add(total_nanos / NANOS_PER_SEC as u64) .expect("overflow in iter::sum over durations"); (total_nanos % NANOS_PER_SEC as u64) + entry.nanos as u64 } }; } total_secs = total_secs .checked_add(total_nanos / NANOS_PER_SEC as u64) .expect("overflow in iter::sum over durations"); total_nanos = total_nanos % NANOS_PER_SEC as u64; Duration { secs: total_secs, nanos: total_nanos as u32 } }}; } #[stable(feature = "duration_sum", since = "1.16.0")] impl Sum for Duration { fn sum<I: Iterator<Item = Duration>>(iter: I) -> Duration { sum_durations!(iter) } } #[stable(feature = "duration_sum", since = "1.16.0")] impl<'a> Sum<&'a Duration> for Duration { fn sum<I: Iterator<Item = &'a Duration>>(iter: I) -> Duration { sum_durations!(iter) } } #[stable(feature = "duration_debug_impl", since = "1.27.0")] impl fmt::Debug for Duration { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { /// Formats a floating point number in decimal notation. /// /// The number is given as the `integer_part` and a fractional part. /// The value of the fractional part is `fractional_part / divisor`. So /// `integer_part` = 3, `fractional_part` = 12 and `divisor` = 100 /// represents the number `3.012`. Trailing zeros are omitted. /// /// `divisor` must not be above 100_000_000. It also should be a power /// of 10, everything else doesn't make sense. `fractional_part` has /// to be less than `10 * divisor`! 
fn fmt_decimal( f: &mut fmt::Formatter<'_>, mut integer_part: u64, mut fractional_part: u32, mut divisor: u32, ) -> fmt::Result { // Encode the fractional part into a temporary buffer. The buffer // only need to hold 9 elements, because `fractional_part` has to // be smaller than 10^9. The buffer is prefilled with '0' digits // to simplify the code below. let mut buf = [b'0'; 9]; // The next digit is written at this position let mut pos = 0; // We keep writing digits into the buffer while there are non-zero // digits left and we haven't written enough digits yet. while fractional_part > 0 && pos < f.precision().unwrap_or(9) { // Write new digit into the buffer buf[pos] = b'0' + (fractional_part / divisor) as u8; fractional_part %= divisor; divisor /= 10; pos += 1; } // If a precision < 9 was specified, there may be some non-zero // digits left that weren't written into the buffer. In that case we // need to perform rounding to match the semantics of printing // normal floating point numbers. However, we only need to do work // when rounding up. This happens if the first digit of the // remaining ones is >= 5. if fractional_part > 0 && fractional_part >= divisor * 5 { // Round up the number contained in the buffer. We go through // the buffer backwards and keep track of the carry. let mut rev_pos = pos; let mut carry = true; while carry && rev_pos > 0 { rev_pos -= 1; // If the digit in the buffer is not '9', we just need to // increment it and can stop then (since we don't have a // carry anymore). Otherwise, we set it to '0' (overflow) // and continue. if buf[rev_pos] < b'9' { buf[rev_pos] += 1; carry = false; } else { buf[rev_pos] = b'0'; } } // If we still have the carry bit set, that means that we set // the whole buffer to '0's and need to increment the integer // part. if carry { integer_part += 1; } } // Determine the end of the buffer: if precision is set, we just // use as many digits from the buffer (capped to 9). 
If it isn't // set, we only use all digits up to the last non-zero one. let end = f.precision().map(|p| crate::cmp::min(p, 9)).unwrap_or(pos); // If we haven't emitted a single fractional digit and the precision // wasn't set to a non-zero value, we don't print the decimal point. if end == 0 { write!(f, "{}", integer_part) } else { // SAFETY: We are only writing ASCII digits into the buffer and it was // initialized with '0's, so it contains valid UTF8. let s = unsafe { crate::str::from_utf8_unchecked(&buf[..end]) }; // If the user request a precision > 9, we pad '0's at the end. let w = f.precision().unwrap_or(pos); write!(f, "{}.{:0<width$}", integer_part, s, width = w) } } // Print leading '+' sign if requested if f.sign_plus() { write!(f, "+")?; } if self.secs > 0 { fmt_decimal(f, self.secs, self.nanos, NANOS_PER_SEC / 10)?; f.write_str("s") } else if self.nanos >= NANOS_PER_MILLI { fmt_decimal( f, (self.nanos / NANOS_PER_MILLI) as u64, self.nanos % NANOS_PER_MILLI, NANOS_PER_MILLI / 10, )?; f.write_str("ms") } else if self.nanos >= NANOS_PER_MICRO { fmt_decimal( f, (self.nanos / NANOS_PER_MICRO) as u64, self.nanos % NANOS_PER_MICRO, NANOS_PER_MICRO / 10, )?; f.write_str("µs") } else { fmt_decimal(f, self.nanos as u64, 0, 1)?; f.write_str("ns") } } }
34.661525
100
0.564652
90881ccf819007f0df05343d12fb533214d2e563
29,662
use ash::vk; use std::num::NonZeroU32; impl super::PrivateCapabilities { pub fn map_texture_format(&self, format: wgt::TextureFormat) -> vk::Format { use ash::vk::Format as F; use wgt::TextureFormat as Tf; match format { Tf::R8Unorm => F::R8_UNORM, Tf::R8Snorm => F::R8_SNORM, Tf::R8Uint => F::R8_UINT, Tf::R8Sint => F::R8_SINT, Tf::R16Uint => F::R16_UINT, Tf::R16Sint => F::R16_SINT, Tf::R16Float => F::R16_SFLOAT, Tf::Rg8Unorm => F::R8G8_UNORM, Tf::Rg8Snorm => F::R8G8_SNORM, Tf::Rg8Uint => F::R8G8_UINT, Tf::Rg8Sint => F::R8G8_SINT, Tf::R32Uint => F::R32_UINT, Tf::R32Sint => F::R32_SINT, Tf::R32Float => F::R32_SFLOAT, Tf::Rg16Uint => F::R16G16_UINT, Tf::Rg16Sint => F::R16G16_SINT, Tf::Rg16Float => F::R16G16_SFLOAT, Tf::Rgba8Unorm => F::R8G8B8A8_UNORM, Tf::Rgba8UnormSrgb => F::R8G8B8A8_SRGB, Tf::Bgra8UnormSrgb => F::B8G8R8A8_SRGB, Tf::Rgba8Snorm => F::R8G8B8A8_SNORM, Tf::Bgra8Unorm => F::B8G8R8A8_UNORM, Tf::Rgba8Uint => F::R8G8B8A8_UINT, Tf::Rgba8Sint => F::R8G8B8A8_SINT, Tf::Rgb10a2Unorm => F::A2B10G10R10_UNORM_PACK32, Tf::Rg11b10Float => F::B10G11R11_UFLOAT_PACK32, Tf::Rg32Uint => F::R32G32_UINT, Tf::Rg32Sint => F::R32G32_SINT, Tf::Rg32Float => F::R32G32_SFLOAT, Tf::Rgba16Uint => F::R16G16B16A16_UINT, Tf::Rgba16Sint => F::R16G16B16A16_SINT, Tf::Rgba16Float => F::R16G16B16A16_SFLOAT, Tf::Rgba32Uint => F::R32G32B32A32_UINT, Tf::Rgba32Sint => F::R32G32B32A32_SINT, Tf::Rgba32Float => F::R32G32B32A32_SFLOAT, Tf::Depth32Float => F::D32_SFLOAT, Tf::Depth24Plus => { if self.texture_d24 { F::X8_D24_UNORM_PACK32 } else { F::D32_SFLOAT } } Tf::Depth24PlusStencil8 => { if self.texture_d24_s8 { F::D24_UNORM_S8_UINT } else { F::D32_SFLOAT_S8_UINT } } Tf::Rgb9e5Ufloat => F::E5B9G9R9_UFLOAT_PACK32, Tf::Bc1RgbaUnorm => F::BC1_RGBA_UNORM_BLOCK, Tf::Bc1RgbaUnormSrgb => F::BC1_RGBA_SRGB_BLOCK, Tf::Bc2RgbaUnorm => F::BC2_UNORM_BLOCK, Tf::Bc2RgbaUnormSrgb => F::BC2_SRGB_BLOCK, Tf::Bc3RgbaUnorm => F::BC3_UNORM_BLOCK, Tf::Bc3RgbaUnormSrgb => F::BC3_SRGB_BLOCK, Tf::Bc4RUnorm => 
F::BC4_UNORM_BLOCK,
            Tf::Bc4RSnorm => F::BC4_SNORM_BLOCK,
            Tf::Bc5RgUnorm => F::BC5_UNORM_BLOCK,
            Tf::Bc5RgSnorm => F::BC5_SNORM_BLOCK,
            Tf::Bc6hRgbUfloat => F::BC6H_UFLOAT_BLOCK,
            Tf::Bc6hRgbSfloat => F::BC6H_SFLOAT_BLOCK,
            Tf::Bc7RgbaUnorm => F::BC7_UNORM_BLOCK,
            Tf::Bc7RgbaUnormSrgb => F::BC7_SRGB_BLOCK,
            Tf::Etc2RgbUnorm => F::ETC2_R8G8B8_UNORM_BLOCK,
            Tf::Etc2RgbUnormSrgb => F::ETC2_R8G8B8_SRGB_BLOCK,
            Tf::Etc2RgbA1Unorm => F::ETC2_R8G8B8A1_UNORM_BLOCK,
            Tf::Etc2RgbA1UnormSrgb => F::ETC2_R8G8B8A1_SRGB_BLOCK,
            Tf::EacRUnorm => F::EAC_R11_UNORM_BLOCK,
            Tf::EacRSnorm => F::EAC_R11_SNORM_BLOCK,
            Tf::EacRgUnorm => F::EAC_R11G11_UNORM_BLOCK,
            Tf::EacRgSnorm => F::EAC_R11G11_SNORM_BLOCK,
            // Each ASTC footprint (WxH texel block) must map to the Vulkan
            // block format of the *same* dimensions.
            Tf::Astc4x4RgbaUnorm => F::ASTC_4X4_UNORM_BLOCK,
            Tf::Astc4x4RgbaUnormSrgb => F::ASTC_4X4_SRGB_BLOCK,
            Tf::Astc5x4RgbaUnorm => F::ASTC_5X4_UNORM_BLOCK,
            Tf::Astc5x4RgbaUnormSrgb => F::ASTC_5X4_SRGB_BLOCK,
            Tf::Astc5x5RgbaUnorm => F::ASTC_5X5_UNORM_BLOCK,
            Tf::Astc5x5RgbaUnormSrgb => F::ASTC_5X5_SRGB_BLOCK,
            Tf::Astc6x5RgbaUnorm => F::ASTC_6X5_UNORM_BLOCK,
            Tf::Astc6x5RgbaUnormSrgb => F::ASTC_6X5_SRGB_BLOCK,
            Tf::Astc6x6RgbaUnorm => F::ASTC_6X6_UNORM_BLOCK,
            Tf::Astc6x6RgbaUnormSrgb => F::ASTC_6X6_SRGB_BLOCK,
            Tf::Astc8x5RgbaUnorm => F::ASTC_8X5_UNORM_BLOCK,
            Tf::Astc8x5RgbaUnormSrgb => F::ASTC_8X5_SRGB_BLOCK,
            Tf::Astc8x6RgbaUnorm => F::ASTC_8X6_UNORM_BLOCK,
            Tf::Astc8x6RgbaUnormSrgb => F::ASTC_8X6_SRGB_BLOCK,
            // FIX: the 10x5 / 10x6 / 8x8 variants were previously cross-wired
            // (10x5 -> ASTC_8X8_*, 10x6 -> ASTC_10X5_*, 8x8 -> ASTC_10X6_*),
            // producing textures with the wrong compressed-block geometry.
            Tf::Astc10x5RgbaUnorm => F::ASTC_10X5_UNORM_BLOCK,
            Tf::Astc10x5RgbaUnormSrgb => F::ASTC_10X5_SRGB_BLOCK,
            Tf::Astc10x6RgbaUnorm => F::ASTC_10X6_UNORM_BLOCK,
            Tf::Astc10x6RgbaUnormSrgb => F::ASTC_10X6_SRGB_BLOCK,
            Tf::Astc8x8RgbaUnorm => F::ASTC_8X8_UNORM_BLOCK,
            Tf::Astc8x8RgbaUnormSrgb => F::ASTC_8X8_SRGB_BLOCK,
            Tf::Astc10x8RgbaUnorm => F::ASTC_10X8_UNORM_BLOCK,
            Tf::Astc10x8RgbaUnormSrgb => F::ASTC_10X8_SRGB_BLOCK,
            Tf::Astc10x10RgbaUnorm => F::ASTC_10X10_UNORM_BLOCK,
            Tf::Astc10x10RgbaUnormSrgb => F::ASTC_10X10_SRGB_BLOCK,
            Tf::Astc12x10RgbaUnorm => F::ASTC_12X10_UNORM_BLOCK,
            Tf::Astc12x10RgbaUnormSrgb =>
F::ASTC_12X10_SRGB_BLOCK, Tf::Astc12x12RgbaUnorm => F::ASTC_12X12_UNORM_BLOCK, Tf::Astc12x12RgbaUnormSrgb => F::ASTC_12X12_SRGB_BLOCK, } } } impl crate::Attachment<'_, super::Api> { pub(super) fn make_attachment_key( &self, ops: crate::AttachmentOps, caps: &super::PrivateCapabilities, ) -> super::AttachmentKey { let aspects = self.view.aspects(); super::AttachmentKey { format: caps.map_texture_format(self.view.attachment.view_format), layout: derive_image_layout(self.usage, aspects), ops, } } } impl crate::ColorAttachment<'_, super::Api> { pub(super) unsafe fn make_vk_clear_color(&self) -> vk::ClearColorValue { let cv = &self.clear_value; match self .target .view .attachment .view_format .describe() .sample_type { wgt::TextureSampleType::Float { .. } | wgt::TextureSampleType::Depth => { vk::ClearColorValue { float32: [cv.r as f32, cv.g as f32, cv.b as f32, cv.a as f32], } } wgt::TextureSampleType::Sint => vk::ClearColorValue { int32: [cv.r as i32, cv.g as i32, cv.b as i32, cv.a as i32], }, wgt::TextureSampleType::Uint => vk::ClearColorValue { uint32: [cv.r as u32, cv.g as u32, cv.b as u32, cv.a as u32], }, } } } pub fn derive_image_layout( usage: crate::TextureUses, aspects: crate::FormatAspects, ) -> vk::ImageLayout { //Note: depth textures are always sampled with RODS layout let is_color = aspects.contains(crate::FormatAspects::COLOR); match usage { crate::TextureUses::UNINITIALIZED => vk::ImageLayout::UNDEFINED, crate::TextureUses::COPY_SRC => vk::ImageLayout::TRANSFER_SRC_OPTIMAL, crate::TextureUses::COPY_DST => vk::ImageLayout::TRANSFER_DST_OPTIMAL, crate::TextureUses::RESOURCE if is_color => vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL, crate::TextureUses::COLOR_TARGET => vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL, crate::TextureUses::DEPTH_STENCIL_WRITE => { vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL } _ => { if usage.is_empty() { vk::ImageLayout::PRESENT_SRC_KHR } else if is_color { vk::ImageLayout::GENERAL } else { 
vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL } } } } pub fn map_texture_usage(usage: crate::TextureUses) -> vk::ImageUsageFlags { let mut flags = vk::ImageUsageFlags::empty(); if usage.contains(crate::TextureUses::COPY_SRC) { flags |= vk::ImageUsageFlags::TRANSFER_SRC; } if usage.contains(crate::TextureUses::COPY_DST) { flags |= vk::ImageUsageFlags::TRANSFER_DST; } if usage.contains(crate::TextureUses::RESOURCE) { flags |= vk::ImageUsageFlags::SAMPLED; } if usage.contains(crate::TextureUses::COLOR_TARGET) { flags |= vk::ImageUsageFlags::COLOR_ATTACHMENT; } if usage.intersects( crate::TextureUses::DEPTH_STENCIL_READ | crate::TextureUses::DEPTH_STENCIL_WRITE, ) { flags |= vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT; } if usage.intersects(crate::TextureUses::STORAGE_READ | crate::TextureUses::STORAGE_WRITE) { flags |= vk::ImageUsageFlags::STORAGE; } flags } pub fn map_texture_usage_to_barrier( usage: crate::TextureUses, ) -> (vk::PipelineStageFlags, vk::AccessFlags) { let mut stages = vk::PipelineStageFlags::empty(); let mut access = vk::AccessFlags::empty(); let shader_stages = vk::PipelineStageFlags::VERTEX_SHADER | vk::PipelineStageFlags::FRAGMENT_SHADER | vk::PipelineStageFlags::COMPUTE_SHADER; if usage.contains(crate::TextureUses::COPY_SRC) { stages |= vk::PipelineStageFlags::TRANSFER; access |= vk::AccessFlags::TRANSFER_READ; } if usage.contains(crate::TextureUses::COPY_DST) { stages |= vk::PipelineStageFlags::TRANSFER; access |= vk::AccessFlags::TRANSFER_WRITE; } if usage.contains(crate::TextureUses::RESOURCE) { stages |= shader_stages; access |= vk::AccessFlags::SHADER_READ; } if usage.contains(crate::TextureUses::COLOR_TARGET) { stages |= vk::PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT; access |= vk::AccessFlags::COLOR_ATTACHMENT_READ | vk::AccessFlags::COLOR_ATTACHMENT_WRITE; } if usage.intersects(crate::TextureUses::DEPTH_STENCIL_READ) { stages |= vk::PipelineStageFlags::EARLY_FRAGMENT_TESTS | vk::PipelineStageFlags::LATE_FRAGMENT_TESTS; access |= 
vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ; } if usage.intersects(crate::TextureUses::DEPTH_STENCIL_WRITE) { stages |= vk::PipelineStageFlags::EARLY_FRAGMENT_TESTS | vk::PipelineStageFlags::LATE_FRAGMENT_TESTS; access |= vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ | vk::AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE; } if usage.contains(crate::TextureUses::STORAGE_READ) { stages |= shader_stages; access |= vk::AccessFlags::SHADER_READ; } if usage.contains(crate::TextureUses::STORAGE_WRITE) { stages |= shader_stages; access |= vk::AccessFlags::SHADER_WRITE; } if usage == crate::TextureUses::UNINITIALIZED || usage.is_empty() { ( vk::PipelineStageFlags::TOP_OF_PIPE, vk::AccessFlags::empty(), ) } else { (stages, access) } } pub fn map_vk_image_usage(usage: vk::ImageUsageFlags) -> crate::TextureUses { let mut bits = crate::TextureUses::empty(); if usage.contains(vk::ImageUsageFlags::TRANSFER_SRC) { bits |= crate::TextureUses::COPY_SRC; } if usage.contains(vk::ImageUsageFlags::TRANSFER_DST) { bits |= crate::TextureUses::COPY_DST; } if usage.contains(vk::ImageUsageFlags::SAMPLED) { bits |= crate::TextureUses::RESOURCE; } if usage.contains(vk::ImageUsageFlags::COLOR_ATTACHMENT) { bits |= crate::TextureUses::COLOR_TARGET; } if usage.contains(vk::ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT) { bits |= crate::TextureUses::DEPTH_STENCIL_READ | crate::TextureUses::DEPTH_STENCIL_WRITE; } if usage.contains(vk::ImageUsageFlags::STORAGE) { bits |= crate::TextureUses::STORAGE_READ | crate::TextureUses::STORAGE_WRITE; } bits } pub fn map_texture_dimension(dim: wgt::TextureDimension) -> vk::ImageType { match dim { wgt::TextureDimension::D1 => vk::ImageType::TYPE_1D, wgt::TextureDimension::D2 => vk::ImageType::TYPE_2D, wgt::TextureDimension::D3 => vk::ImageType::TYPE_3D, } } pub fn map_index_format(index_format: wgt::IndexFormat) -> vk::IndexType { match index_format { wgt::IndexFormat::Uint16 => vk::IndexType::UINT16, wgt::IndexFormat::Uint32 => vk::IndexType::UINT32, } } pub fn 
map_vertex_format(vertex_format: wgt::VertexFormat) -> vk::Format { use wgt::VertexFormat as Vf; match vertex_format { Vf::Uint8x2 => vk::Format::R8G8_UINT, Vf::Uint8x4 => vk::Format::R8G8B8A8_UINT, Vf::Sint8x2 => vk::Format::R8G8_SINT, Vf::Sint8x4 => vk::Format::R8G8B8A8_SINT, Vf::Unorm8x2 => vk::Format::R8G8_UNORM, Vf::Unorm8x4 => vk::Format::R8G8B8A8_UNORM, Vf::Snorm8x2 => vk::Format::R8G8_SNORM, Vf::Snorm8x4 => vk::Format::R8G8B8A8_SNORM, Vf::Uint16x2 => vk::Format::R16G16_UINT, Vf::Uint16x4 => vk::Format::R16G16B16A16_UINT, Vf::Sint16x2 => vk::Format::R16G16_SINT, Vf::Sint16x4 => vk::Format::R16G16B16A16_SINT, Vf::Unorm16x2 => vk::Format::R16G16_UNORM, Vf::Unorm16x4 => vk::Format::R16G16B16A16_UNORM, Vf::Snorm16x2 => vk::Format::R16G16_SNORM, Vf::Snorm16x4 => vk::Format::R16G16B16A16_SNORM, Vf::Float16x2 => vk::Format::R16G16_SFLOAT, Vf::Float16x4 => vk::Format::R16G16B16A16_SFLOAT, Vf::Float32 => vk::Format::R32_SFLOAT, Vf::Float32x2 => vk::Format::R32G32_SFLOAT, Vf::Float32x3 => vk::Format::R32G32B32_SFLOAT, Vf::Float32x4 => vk::Format::R32G32B32A32_SFLOAT, Vf::Uint32 => vk::Format::R32_UINT, Vf::Uint32x2 => vk::Format::R32G32_UINT, Vf::Uint32x3 => vk::Format::R32G32B32_UINT, Vf::Uint32x4 => vk::Format::R32G32B32A32_UINT, Vf::Sint32 => vk::Format::R32_SINT, Vf::Sint32x2 => vk::Format::R32G32_SINT, Vf::Sint32x3 => vk::Format::R32G32B32_SINT, Vf::Sint32x4 => vk::Format::R32G32B32A32_SINT, Vf::Float64 => vk::Format::R64_SFLOAT, Vf::Float64x2 => vk::Format::R64G64_SFLOAT, Vf::Float64x3 => vk::Format::R64G64B64_SFLOAT, Vf::Float64x4 => vk::Format::R64G64B64A64_SFLOAT, } } pub fn map_aspects(aspects: crate::FormatAspects) -> vk::ImageAspectFlags { let mut flags = vk::ImageAspectFlags::empty(); if aspects.contains(crate::FormatAspects::COLOR) { flags |= vk::ImageAspectFlags::COLOR; } if aspects.contains(crate::FormatAspects::DEPTH) { flags |= vk::ImageAspectFlags::DEPTH; } if aspects.contains(crate::FormatAspects::STENCIL) { flags |= vk::ImageAspectFlags::STENCIL; 
} flags } pub fn map_attachment_ops( op: crate::AttachmentOps, ) -> (vk::AttachmentLoadOp, vk::AttachmentStoreOp) { let load_op = if op.contains(crate::AttachmentOps::LOAD) { vk::AttachmentLoadOp::LOAD } else { vk::AttachmentLoadOp::CLEAR }; let store_op = if op.contains(crate::AttachmentOps::STORE) { vk::AttachmentStoreOp::STORE } else { vk::AttachmentStoreOp::DONT_CARE }; (load_op, store_op) } pub fn map_present_mode(mode: wgt::PresentMode) -> vk::PresentModeKHR { match mode { wgt::PresentMode::Immediate => vk::PresentModeKHR::IMMEDIATE, wgt::PresentMode::Mailbox => vk::PresentModeKHR::MAILBOX, wgt::PresentMode::Fifo => vk::PresentModeKHR::FIFO, //wgt::PresentMode::Relaxed => vk::PresentModeKHR::FIFO_RELAXED, } } pub fn map_vk_present_mode(mode: vk::PresentModeKHR) -> Option<wgt::PresentMode> { if mode == vk::PresentModeKHR::IMMEDIATE { Some(wgt::PresentMode::Immediate) } else if mode == vk::PresentModeKHR::MAILBOX { Some(wgt::PresentMode::Mailbox) } else if mode == vk::PresentModeKHR::FIFO { Some(wgt::PresentMode::Fifo) } else if mode == vk::PresentModeKHR::FIFO_RELAXED { //Some(wgt::PresentMode::Relaxed) None } else { log::warn!("Unrecognized present mode {:?}", mode); None } } pub fn map_composite_alpha_mode(mode: crate::CompositeAlphaMode) -> vk::CompositeAlphaFlagsKHR { match mode { crate::CompositeAlphaMode::Opaque => vk::CompositeAlphaFlagsKHR::OPAQUE, crate::CompositeAlphaMode::PostMultiplied => vk::CompositeAlphaFlagsKHR::POST_MULTIPLIED, crate::CompositeAlphaMode::PreMultiplied => vk::CompositeAlphaFlagsKHR::PRE_MULTIPLIED, } } pub fn map_vk_composite_alpha(flags: vk::CompositeAlphaFlagsKHR) -> Vec<crate::CompositeAlphaMode> { let mut modes = Vec::new(); if flags.contains(vk::CompositeAlphaFlagsKHR::OPAQUE) { modes.push(crate::CompositeAlphaMode::Opaque); } if flags.contains(vk::CompositeAlphaFlagsKHR::POST_MULTIPLIED) { modes.push(crate::CompositeAlphaMode::PostMultiplied); } if flags.contains(vk::CompositeAlphaFlagsKHR::PRE_MULTIPLIED) { 
modes.push(crate::CompositeAlphaMode::PreMultiplied); } modes } pub fn map_buffer_usage(usage: crate::BufferUses) -> vk::BufferUsageFlags { let mut flags = vk::BufferUsageFlags::empty(); if usage.contains(crate::BufferUses::COPY_SRC) { flags |= vk::BufferUsageFlags::TRANSFER_SRC; } if usage.contains(crate::BufferUses::COPY_DST) { flags |= vk::BufferUsageFlags::TRANSFER_DST; } if usage.contains(crate::BufferUses::UNIFORM) { flags |= vk::BufferUsageFlags::UNIFORM_BUFFER; } if usage.intersects(crate::BufferUses::STORAGE_READ | crate::BufferUses::STORAGE_WRITE) { flags |= vk::BufferUsageFlags::STORAGE_BUFFER; } if usage.contains(crate::BufferUses::INDEX) { flags |= vk::BufferUsageFlags::INDEX_BUFFER; } if usage.contains(crate::BufferUses::VERTEX) { flags |= vk::BufferUsageFlags::VERTEX_BUFFER; } if usage.contains(crate::BufferUses::INDIRECT) { flags |= vk::BufferUsageFlags::INDIRECT_BUFFER; } flags } pub fn map_buffer_usage_to_barrier( usage: crate::BufferUses, ) -> (vk::PipelineStageFlags, vk::AccessFlags) { let mut stages = vk::PipelineStageFlags::empty(); let mut access = vk::AccessFlags::empty(); let shader_stages = vk::PipelineStageFlags::VERTEX_SHADER | vk::PipelineStageFlags::FRAGMENT_SHADER | vk::PipelineStageFlags::COMPUTE_SHADER; if usage.contains(crate::BufferUses::MAP_READ) { stages |= vk::PipelineStageFlags::HOST; access |= vk::AccessFlags::HOST_READ; } if usage.contains(crate::BufferUses::MAP_WRITE) { stages |= vk::PipelineStageFlags::HOST; access |= vk::AccessFlags::HOST_WRITE; } if usage.contains(crate::BufferUses::COPY_SRC) { stages |= vk::PipelineStageFlags::TRANSFER; access |= vk::AccessFlags::TRANSFER_READ; } if usage.contains(crate::BufferUses::COPY_DST) { stages |= vk::PipelineStageFlags::TRANSFER; access |= vk::AccessFlags::TRANSFER_WRITE; } if usage.contains(crate::BufferUses::UNIFORM) { stages |= shader_stages; access |= vk::AccessFlags::UNIFORM_READ; } if usage.intersects(crate::BufferUses::STORAGE_READ) { stages |= shader_stages; access |= 
vk::AccessFlags::SHADER_READ; } if usage.intersects(crate::BufferUses::STORAGE_WRITE) { stages |= shader_stages; access |= vk::AccessFlags::SHADER_WRITE; } if usage.contains(crate::BufferUses::INDEX) { stages |= vk::PipelineStageFlags::VERTEX_INPUT; access |= vk::AccessFlags::INDEX_READ; } if usage.contains(crate::BufferUses::VERTEX) { stages |= vk::PipelineStageFlags::VERTEX_INPUT; access |= vk::AccessFlags::VERTEX_ATTRIBUTE_READ; } if usage.contains(crate::BufferUses::INDIRECT) { stages |= vk::PipelineStageFlags::DRAW_INDIRECT; access |= vk::AccessFlags::INDIRECT_COMMAND_READ; } (stages, access) } pub fn map_view_dimension(dim: wgt::TextureViewDimension) -> vk::ImageViewType { match dim { wgt::TextureViewDimension::D1 => vk::ImageViewType::TYPE_1D, wgt::TextureViewDimension::D2 => vk::ImageViewType::TYPE_2D, wgt::TextureViewDimension::D2Array => vk::ImageViewType::TYPE_2D_ARRAY, wgt::TextureViewDimension::Cube => vk::ImageViewType::CUBE, wgt::TextureViewDimension::CubeArray => vk::ImageViewType::CUBE_ARRAY, wgt::TextureViewDimension::D3 => vk::ImageViewType::TYPE_3D, } } pub fn map_copy_extent(extent: &crate::CopyExtent) -> vk::Extent3D { vk::Extent3D { width: extent.width, height: extent.height, depth: extent.depth, } } pub fn map_subresource_range( range: &wgt::ImageSubresourceRange, texture_aspect: crate::FormatAspects, ) -> vk::ImageSubresourceRange { vk::ImageSubresourceRange { aspect_mask: map_aspects(crate::FormatAspects::from(range.aspect) & texture_aspect), base_mip_level: range.base_mip_level, level_count: range .mip_level_count .map_or(vk::REMAINING_MIP_LEVELS, NonZeroU32::get), base_array_layer: range.base_array_layer, layer_count: range .array_layer_count .map_or(vk::REMAINING_ARRAY_LAYERS, NonZeroU32::get), } } pub fn map_subresource_layers( base: &crate::TextureCopyBase, texture_aspect: crate::FormatAspects, ) -> (vk::ImageSubresourceLayers, vk::Offset3D) { let offset = vk::Offset3D { x: base.origin.x as i32, y: base.origin.y as i32, z: 
base.origin.z as i32, }; let subresource = vk::ImageSubresourceLayers { aspect_mask: map_aspects(base.aspect & texture_aspect), mip_level: base.mip_level, base_array_layer: base.array_layer, layer_count: 1, }; (subresource, offset) } pub fn map_filter_mode(mode: wgt::FilterMode) -> vk::Filter { match mode { wgt::FilterMode::Nearest => vk::Filter::NEAREST, wgt::FilterMode::Linear => vk::Filter::LINEAR, } } pub fn map_mip_filter_mode(mode: wgt::FilterMode) -> vk::SamplerMipmapMode { match mode { wgt::FilterMode::Nearest => vk::SamplerMipmapMode::NEAREST, wgt::FilterMode::Linear => vk::SamplerMipmapMode::LINEAR, } } pub fn map_address_mode(mode: wgt::AddressMode) -> vk::SamplerAddressMode { match mode { wgt::AddressMode::ClampToEdge => vk::SamplerAddressMode::CLAMP_TO_EDGE, wgt::AddressMode::Repeat => vk::SamplerAddressMode::REPEAT, wgt::AddressMode::MirrorRepeat => vk::SamplerAddressMode::MIRRORED_REPEAT, wgt::AddressMode::ClampToBorder => vk::SamplerAddressMode::CLAMP_TO_BORDER, //wgt::AddressMode::MirrorClamp => vk::SamplerAddressMode::MIRROR_CLAMP_TO_EDGE, } } pub fn map_border_color(border_color: wgt::SamplerBorderColor) -> vk::BorderColor { match border_color { wgt::SamplerBorderColor::TransparentBlack => vk::BorderColor::FLOAT_TRANSPARENT_BLACK, wgt::SamplerBorderColor::OpaqueBlack => vk::BorderColor::FLOAT_OPAQUE_BLACK, wgt::SamplerBorderColor::OpaqueWhite => vk::BorderColor::FLOAT_OPAQUE_WHITE, } } pub fn map_comparison(fun: wgt::CompareFunction) -> vk::CompareOp { use wgt::CompareFunction as Cf; match fun { Cf::Never => vk::CompareOp::NEVER, Cf::Less => vk::CompareOp::LESS, Cf::LessEqual => vk::CompareOp::LESS_OR_EQUAL, Cf::Equal => vk::CompareOp::EQUAL, Cf::GreaterEqual => vk::CompareOp::GREATER_OR_EQUAL, Cf::Greater => vk::CompareOp::GREATER, Cf::NotEqual => vk::CompareOp::NOT_EQUAL, Cf::Always => vk::CompareOp::ALWAYS, } } pub fn map_shader_stage(stage: wgt::ShaderStages) -> vk::ShaderStageFlags { let mut flags = vk::ShaderStageFlags::empty(); if 
stage.contains(wgt::ShaderStages::VERTEX) { flags |= vk::ShaderStageFlags::VERTEX; } if stage.contains(wgt::ShaderStages::FRAGMENT) { flags |= vk::ShaderStageFlags::FRAGMENT; } if stage.contains(wgt::ShaderStages::COMPUTE) { flags |= vk::ShaderStageFlags::COMPUTE; } flags } pub fn map_binding_type(ty: wgt::BindingType) -> vk::DescriptorType { match ty { wgt::BindingType::Buffer { ty, has_dynamic_offset, .. } => match ty { wgt::BufferBindingType::Storage { .. } => match has_dynamic_offset { true => vk::DescriptorType::STORAGE_BUFFER_DYNAMIC, false => vk::DescriptorType::STORAGE_BUFFER, }, wgt::BufferBindingType::Uniform => match has_dynamic_offset { true => vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC, false => vk::DescriptorType::UNIFORM_BUFFER, }, }, wgt::BindingType::Sampler { .. } => vk::DescriptorType::SAMPLER, wgt::BindingType::Texture { .. } => vk::DescriptorType::SAMPLED_IMAGE, wgt::BindingType::StorageTexture { .. } => vk::DescriptorType::STORAGE_IMAGE, } } pub fn map_topology(topology: wgt::PrimitiveTopology) -> vk::PrimitiveTopology { use wgt::PrimitiveTopology as Pt; match topology { Pt::PointList => vk::PrimitiveTopology::POINT_LIST, Pt::LineList => vk::PrimitiveTopology::LINE_LIST, Pt::LineStrip => vk::PrimitiveTopology::LINE_STRIP, Pt::TriangleList => vk::PrimitiveTopology::TRIANGLE_LIST, Pt::TriangleStrip => vk::PrimitiveTopology::TRIANGLE_STRIP, } } pub fn map_polygon_mode(mode: wgt::PolygonMode) -> vk::PolygonMode { match mode { wgt::PolygonMode::Fill => vk::PolygonMode::FILL, wgt::PolygonMode::Line => vk::PolygonMode::LINE, wgt::PolygonMode::Point => vk::PolygonMode::POINT, } } pub fn map_front_face(front_face: wgt::FrontFace) -> vk::FrontFace { match front_face { wgt::FrontFace::Cw => vk::FrontFace::CLOCKWISE, wgt::FrontFace::Ccw => vk::FrontFace::COUNTER_CLOCKWISE, } } pub fn map_cull_face(face: wgt::Face) -> vk::CullModeFlags { match face { wgt::Face::Front => vk::CullModeFlags::FRONT, wgt::Face::Back => vk::CullModeFlags::BACK, } } pub fn 
map_stencil_op(op: wgt::StencilOperation) -> vk::StencilOp { use wgt::StencilOperation as So; match op { So::Keep => vk::StencilOp::KEEP, So::Zero => vk::StencilOp::ZERO, So::Replace => vk::StencilOp::REPLACE, So::Invert => vk::StencilOp::INVERT, So::IncrementClamp => vk::StencilOp::INCREMENT_AND_CLAMP, So::IncrementWrap => vk::StencilOp::INCREMENT_AND_WRAP, So::DecrementClamp => vk::StencilOp::DECREMENT_AND_CLAMP, So::DecrementWrap => vk::StencilOp::DECREMENT_AND_WRAP, } } pub fn map_stencil_face(face: &wgt::StencilFaceState) -> vk::StencilOpState { vk::StencilOpState { fail_op: map_stencil_op(face.fail_op), pass_op: map_stencil_op(face.pass_op), depth_fail_op: map_stencil_op(face.depth_fail_op), compare_op: map_comparison(face.compare), compare_mask: !0, write_mask: !0, reference: 0, } } fn map_blend_factor(factor: wgt::BlendFactor) -> vk::BlendFactor { use wgt::BlendFactor as Bf; match factor { Bf::Zero => vk::BlendFactor::ZERO, Bf::One => vk::BlendFactor::ONE, Bf::Src => vk::BlendFactor::SRC_COLOR, Bf::OneMinusSrc => vk::BlendFactor::ONE_MINUS_SRC_COLOR, Bf::SrcAlpha => vk::BlendFactor::SRC_ALPHA, Bf::OneMinusSrcAlpha => vk::BlendFactor::ONE_MINUS_SRC_ALPHA, Bf::Dst => vk::BlendFactor::DST_COLOR, Bf::OneMinusDst => vk::BlendFactor::ONE_MINUS_DST_COLOR, Bf::DstAlpha => vk::BlendFactor::DST_ALPHA, Bf::OneMinusDstAlpha => vk::BlendFactor::ONE_MINUS_DST_ALPHA, Bf::SrcAlphaSaturated => vk::BlendFactor::SRC_ALPHA_SATURATE, Bf::Constant => vk::BlendFactor::CONSTANT_COLOR, Bf::OneMinusConstant => vk::BlendFactor::ONE_MINUS_CONSTANT_COLOR, } } fn map_blend_op(operation: wgt::BlendOperation) -> vk::BlendOp { use wgt::BlendOperation as Bo; match operation { Bo::Add => vk::BlendOp::ADD, Bo::Subtract => vk::BlendOp::SUBTRACT, Bo::ReverseSubtract => vk::BlendOp::REVERSE_SUBTRACT, Bo::Min => vk::BlendOp::MIN, Bo::Max => vk::BlendOp::MAX, } } pub fn map_blend_component( component: &wgt::BlendComponent, ) -> (vk::BlendOp, vk::BlendFactor, vk::BlendFactor) { let op = 
map_blend_op(component.operation); let src = map_blend_factor(component.src_factor); let dst = map_blend_factor(component.dst_factor); (op, src, dst) } pub fn map_pipeline_statistics( types: wgt::PipelineStatisticsTypes, ) -> vk::QueryPipelineStatisticFlags { use wgt::PipelineStatisticsTypes as Pst; let mut flags = vk::QueryPipelineStatisticFlags::empty(); if types.contains(Pst::VERTEX_SHADER_INVOCATIONS) { flags |= vk::QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS; } if types.contains(Pst::CLIPPER_INVOCATIONS) { flags |= vk::QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS; } if types.contains(Pst::CLIPPER_PRIMITIVES_OUT) { flags |= vk::QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES; } if types.contains(Pst::FRAGMENT_SHADER_INVOCATIONS) { flags |= vk::QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS; } if types.contains(Pst::COMPUTE_SHADER_INVOCATIONS) { flags |= vk::QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS; } flags }
39.444149
100
0.635325