hexsha
stringlengths
40
40
size
int64
4
1.05M
content
stringlengths
4
1.05M
avg_line_length
float64
1.33
100
max_line_length
int64
1
1k
alphanum_fraction
float64
0.25
1
39cfda23a5ec2fada86fe1048206eb5e78043004
613
//! @ Conversely, a one-word node is recycled by calling |free_avail|. //! This routine is part of \TeX's ``inner loop,'' so we want it to be fast. //! @^inner loop@> // // @d free_avail(#)== {single-word node liberation} /// single-word node liberation pub(crate) macro free_avail($globals:expr, $ptr:expr) {{ // begin link(#):=avail; avail:=#; link!($globals, $ptr) = $globals.avail; $globals.avail = $ptr; // @!stat decr(dyn_used);@+tats@/ crate::region_stat! { decr!($globals.dyn_used); use crate::section_0016::decr; } // end use crate::section_0118::link; }}
32.263158
76
0.613377
1da7238ddfb7bd06c4ad482a0130bf00a52672e1
543
#![feature(plugin)] #![plugin(stateful)] #![allow(unused_mut)] #[generator] fn gen<'a, T>(items: &'a [T]) -> Box<Iterator<Item=&'a T> + 'a> { let mut iter = items.iter(); loop { match iter.next() { Some(item) => { yield_!(item); } None => { break; let x = y; //~ ERROR unresolved value `y` [E0425] } }; }; } fn main() { let items = &[1, 2, 3]; for value in gen(items) { println!("{}", value); } }
20.111111
65
0.412523
e947ace5169e5ad09c9f4e7b676e4680aec8bc81
631
#![deny(missing_docs)] //! # Rpc //! //! RPC 相关,包括注册中心、服务端各模块间 RPC 调用的报文定义 /// 配置 pub mod config; mod protocol; /// 注册中心 pub mod registry; mod status; pub use status::*; #[allow(missing_docs)] pub mod domain { pub mod message { tonic::include_proto!("domain.message"); } } #[allow(missing_docs)] pub mod comet { tonic::include_proto!("comet"); } #[allow(missing_docs)] pub mod receiver { tonic::include_proto!("receiver"); } #[allow(missing_docs)] pub mod authorizer { tonic::include_proto!("authorizer"); } #[allow(missing_docs)] #[cfg(test)] pub mod test { tonic::include_proto!("test"); }
15.02381
48
0.648177
bb211ba001173f079e70f2c4040dc367a5603f6c
1,841
use super::digest::{sha256, SHA256_TYPE}; use mbedtls::x509::Certificate; use mbedtls::alloc::{Box, List}; use std::fs::File; use std::io::Read; use std::path::Path; #[derive(Debug)] pub struct X509Cert { inner: Box<Certificate>, } impl X509Cert { pub fn new_from_der(x509_der: &[u8]) -> super::Result<Self> { let inner = Certificate::from_der(x509_der)?; Ok(Self { inner }) } /// Input must be NULL-terminated pub fn new_from_pem(x509_pem: &[u8]) -> super::Result<Self> { let inner = Certificate::from_pem(x509_pem)?; Ok(Self { inner }) } pub fn new_from_der_file(x509_der_path: &Path) -> super::Result<Self> { let mut file = File::open(x509_der_path)?; let mut buf = Vec::new(); file.read_to_end(&mut buf)?; Self::new_from_pem(&buf[..]) } pub fn new_from_pem_file(x509_pem_path: &Path) -> super::Result<Self> { let mut file = File::open(x509_pem_path)?; let mut buf = Vec::new(); file.read_to_end(&mut buf)?; buf.push(0); Self::new_from_pem(&buf[..]) } pub fn verify_this_certificate(&mut self, trust_ca: &mut Self) -> super::Result<()> { let mut list_1 = List::new(); let mut list_2 = List::new(); list_1.push((&self.inner).clone()); list_2.push((&trust_ca.inner).clone()); Certificate::verify(&mut list_1, &mut list_2, None)?; Ok(()) } pub fn verify_signature(&mut self, message: &[u8], signature: &[u8]) -> super::Result<()> { let hash = sha256(message)?; self.inner .public_key_mut() .verify(SHA256_TYPE, &hash[..], signature)?; Ok(()) } } impl PartialEq for X509Cert { fn eq(&self, other: &Self) -> bool { self.inner.as_der() == other.inner.as_der() } }
29.222222
95
0.579033
918dbc84e6d0449f3662fd1404138c56c47f1969
2,189
use num::integer::gcd; fn main() { // Compute the totient of the first 25 natural integers println!("N\t phi(n)\t Prime"); for n in 1..26 { let phi_n = phi(n); println!("{}\t {}\t {:?}", n, phi_n, phi_n == n - 1); } // Compute the number of prime numbers for various steps [1, 100, 1000, 10000, 100000] .windows(2) .scan(0, |acc, window| { let (lower, upper) = (window[0], window[1]); *acc += (lower..=upper).filter(is_prime).count(); Some((upper, *acc)) }) .for_each(|x| println!("Until {}: {} prime numbers", x.0, x.1)); } fn is_prime(n: &usize) -> bool { phi(*n) == *n - 1 } fn phi(n: usize) -> usize { (1..=n).filter(|&x| gcd(n, x) == 1).count() } #[cfg(test)] mod tests { use super::{is_prime, phi}; #[test] fn test_totient_primes() { let results = vec![ (1, 1, false), (2, 1, true), (3, 2, true), (4, 2, false), (5, 4, true), (6, 2, false), (7, 6, true), (8, 4, false), (9, 6, false), (10, 4, false), (11, 10, true), (12, 4, false), (13, 12, true), (14, 6, false), (15, 8, false), (16, 8, false), (17, 16, true), (18, 6, false), (19, 18, true), (20, 8, false), (21, 12, false), (22, 10, false), (23, 22, true), (24, 8, false), (25, 20, false), ]; for n in 1..26 { let phi_n = phi(n); assert_eq!((n, phi_n, phi_n == n - 1), results[n - 1]); } } #[test] #[ignore] fn test_totient_prime_list() { let results: Vec<usize> = [1, 100, 1000, 10000, 100000] .windows(2) .scan(0, |acc, window| { let (lower, upper) = (window[0], window[1]); *acc += (lower..=upper).filter(is_prime).count(); Some(*acc) }) .collect(); assert_eq!(results, vec![25, 168, 1229, 9592]); } }
26.059524
72
0.409776
87fef4c308df7f81124d654cf5fcc46e8045a2ea
1,169
/* * Hurl (https://hurl.dev) * Copyright (C) 2022 Orange * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ pub use self::client::{Client, HttpError}; pub use self::core::{Cookie, Header, Param, RequestCookie}; pub use self::options::ClientOptions; pub use self::request::Request; #[cfg(test)] pub use self::request_spec::tests::*; pub use self::request_spec::{Body, FileParam, Method, MultipartParam, RequestSpec}; #[cfg(test)] pub use self::response::tests::*; pub use self::response::{Response, Version}; pub use self::version::libcurl_version_info; mod client; mod core; mod options; mod request; mod request_spec; mod response; mod version;
30.763158
83
0.729683
26e317424dd4e234decfc019bc86151a3f006963
2,370
mod lexer; lalrpop_mod!( #[allow(unused_imports)] parser ); use thiserror::Error; use lalrpop_util::{lalrpop_mod, ParseError}; use pijama_ast::node::Block; use pijama_common::location::{LocatedError, Location}; use lexer::{LexError, Lexer}; use parser::ProgParser; pub type ParsingResult<T> = Result<T, ParsingError>; pub type ParsingError = LocatedError<ParsingErrorKind>; #[derive(Error, Debug, Eq, PartialEq)] pub enum ParsingErrorKind { #[error("Unexpected token \"{found}\", expected {}, ...", expected[..5.min(expected.len())].join(", "))] UnexpectedToken { found: String, expected: Vec<String>, }, #[error("Invalid token")] InvalidToken, #[error("Extra token")] ExtraToken, #[error("{0}")] Custom(&'static str), } pub fn parse(input: &str) -> ParsingResult<Block> { let lexer = Lexer::from_input(input); let result = ProgParser::new().parse(input, lexer); match result { Ok(block) => Ok(block), Err(err) => Err(match err { ParseError::InvalidToken { location } => ParsingError::new( ParsingErrorKind::InvalidToken, Location::new(location, location), ), ParseError::UnrecognizedEOF { location, expected } => ParsingError::new( ParsingErrorKind::UnexpectedToken { found: "EOF".to_string(), expected, }, Location::new(location, location), ), ParseError::UnrecognizedToken { token: (start, token, end), expected, } => ParsingError::new( ParsingErrorKind::UnexpectedToken { found: token.to_string(), expected, }, Location::new(start, end), ), ParseError::ExtraToken { token: (start, _, end), } => ParsingError::new(ParsingErrorKind::ExtraToken, Location::new(start, end)), ParseError::User { error } => { let msg = match error.content { LexError::Internal => "Unrecognized token", LexError::Custom(msg) => msg, }; ParsingError::new(ParsingErrorKind::Custom(msg), error.loc) } }), } }
31.184211
108
0.540928
2fbd33d8f4db0ccf922e5a1732300ddef6b86996
6,349
#![cfg(not(feature = "luau"))] use std::cell::RefCell; use std::ops::Deref; use std::str; use std::sync::atomic::{AtomicI64, Ordering}; use std::sync::{Arc, Mutex}; use mlua::{DebugEvent, Error, HookTriggers, Lua, Result, Value}; #[test] fn test_hook_triggers_bitor() { let trigger = HookTriggers::on_calls() | HookTriggers::on_returns() | HookTriggers::every_line() | HookTriggers::every_nth_instruction(5); assert!(trigger.on_calls); assert!(trigger.on_returns); assert!(trigger.every_line); assert_eq!(trigger.every_nth_instruction, Some(5)); } #[test] fn test_line_counts() -> Result<()> { let output = Arc::new(Mutex::new(Vec::new())); let hook_output = output.clone(); let lua = Lua::new(); lua.set_hook(HookTriggers::every_line(), move |_lua, debug| { assert_eq!(debug.event(), DebugEvent::Line); hook_output.lock().unwrap().push(debug.curr_line()); Ok(()) })?; lua.load( r#" local x = 2 + 3 local y = x * 63 local z = string.len(x..", "..y) "#, ) .exec()?; lua.remove_hook(); let output = output.lock().unwrap(); if cfg!(feature = "luajit") && lua.load("jit.version_num").eval::<i64>()? >= 20100 { assert_eq!(*output, vec![2, 3, 4, 0, 4]); } else { assert_eq!(*output, vec![2, 3, 4]); } Ok(()) } #[test] fn test_function_calls() -> Result<()> { let output = Arc::new(Mutex::new(Vec::new())); let hook_output = output.clone(); let lua = Lua::new(); lua.set_hook(HookTriggers::on_calls(), move |_lua, debug| { assert_eq!(debug.event(), DebugEvent::Call); let names = debug.names(); let source = debug.source(); let name = names.name.map(|s| str::from_utf8(s).unwrap().to_owned()); let what = source.what.map(|s| str::from_utf8(s).unwrap().to_owned()); hook_output.lock().unwrap().push((name, what)); Ok(()) })?; lua.load( r#" local v = string.len("Hello World") "#, ) .exec()?; lua.remove_hook(); let output = output.lock().unwrap(); if cfg!(feature = "luajit") && lua.load("jit.version_num").eval::<i64>()? 
>= 20100 { assert_eq!( *output, vec![ (None, Some("main".to_string())), (Some("len".to_string()), Some("Lua".to_string())) ] ); } else { assert_eq!( *output, vec![ (None, Some("main".to_string())), (Some("len".to_string()), Some("C".to_string())) ] ); } Ok(()) } #[test] fn test_error_within_hook() -> Result<()> { let lua = Lua::new(); lua.set_hook(HookTriggers::every_line(), |_lua, _debug| { Err(Error::RuntimeError( "Something happened in there!".to_string(), )) })?; let err = lua .load("x = 1") .exec() .expect_err("panic didn't propagate"); match err { Error::CallbackError { cause, .. } => match cause.deref() { Error::RuntimeError(s) => assert_eq!(s, "Something happened in there!"), _ => panic!("wrong callback error kind caught"), }, _ => panic!("wrong error kind caught"), }; Ok(()) } #[test] fn test_limit_execution_instructions() -> Result<()> { let lua = Lua::new(); // For LuaJIT disable JIT, as compiled code does not trigger hooks #[cfg(feature = "luajit")] lua.load("jit.off()").exec()?; let max_instructions = AtomicI64::new(10000); lua.set_hook( HookTriggers::every_nth_instruction(30), move |_lua, debug| { assert_eq!(debug.event(), DebugEvent::Count); if max_instructions.fetch_sub(30, Ordering::Relaxed) <= 30 { Err(Error::RuntimeError("time's up".to_string())) } else { Ok(()) } }, )?; lua.globals().set("x", Value::Integer(0))?; let _ = lua .load( r#" for i = 1, 10000 do x = x + 1 end "#, ) .exec() .expect_err("instruction limit didn't occur"); Ok(()) } #[test] fn test_hook_removal() -> Result<()> { let lua = Lua::new(); lua.set_hook(HookTriggers::every_nth_instruction(1), |_lua, _debug| { Err(Error::RuntimeError( "this hook should've been removed by this time".to_string(), )) })?; assert!(lua.load("local x = 1").exec().is_err()); lua.remove_hook(); assert!(lua.load("local x = 1").exec().is_ok()); Ok(()) } #[test] fn test_hook_swap_within_hook() -> Result<()> { thread_local! 
{ static TL_LUA: RefCell<Option<Lua>> = RefCell::new(None); } TL_LUA.with(|tl| { *tl.borrow_mut() = Some(Lua::new()); }); TL_LUA.with(|tl| { tl.borrow() .as_ref() .unwrap() .set_hook(HookTriggers::every_line(), move |lua, _debug| { lua.globals().set("ok", 1i64)?; TL_LUA.with(|tl| { tl.borrow().as_ref().unwrap().set_hook( HookTriggers::every_line(), move |lua, _debug| { lua.load( r#" if ok ~= nil then ok = ok + 1 end "#, ) .exec() .expect("exec failure within hook"); TL_LUA.with(|tl| { tl.borrow().as_ref().unwrap().remove_hook(); }); Ok(()) }, ) }) }) })?; TL_LUA.with(|tl| { let tl = tl.borrow(); let lua = tl.as_ref().unwrap(); lua.load( r#" local x = 1 x = 2 local y = 3 "#, ) .exec()?; assert_eq!(lua.globals().get::<_, i64>("ok")?, 2); Ok(()) }) }
26.902542
88
0.467003
2fb50179b3f52a0c416f549c07e2cc7e9ce7efce
12,491
#![crate_name = "uu_nl"] /* * This file is part of the uutils coreutils package. * * (c) Tobias Bohumir Schottdorf <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. * */ extern crate aho_corasick; extern crate getopts; extern crate memchr; extern crate regex; extern crate regex_syntax; #[macro_use] extern crate uucore; use std::fs::File; use std::io::{stdin, BufRead, BufReader, Read}; use std::iter::repeat; use std::path::Path; mod helper; static NAME: &'static str = "nl"; static VERSION: &'static str = env!("CARGO_PKG_VERSION"); static USAGE: &'static str = "nl [OPTION]... [FILE]..."; // A regular expression matching everything. // Settings store options used by nl to produce its output. pub struct Settings { // The variables corresponding to the options -h, -b, and -f. header_numbering: NumberingStyle, body_numbering: NumberingStyle, footer_numbering: NumberingStyle, // The variable corresponding to -d section_delimiter: [char; 2], // The variables corresponding to the options -v, -i, -l, -w. starting_line_number: u64, line_increment: u64, join_blank_lines: u64, number_width: usize, // Used with String::from_char, hence usize. // The format of the number and the (default value for) // renumbering each page. number_format: NumberFormat, renumber: bool, // The string appended to each line number output. number_separator: String, } // NumberingStyle stores which lines are to be numbered. // The possible options are: // 1. Number all lines // 2. Number only nonempty lines // 3. Don't number any lines at all // 4. Number all lines that match a basic regular expression. enum NumberingStyle { NumberForAll, NumberForNonEmpty, NumberForNone, NumberForRegularExpression(regex::Regex), } // NumberFormat specifies how line numbers are output within their allocated // space. 
They are justified to the left or right, in the latter case with // the option of having all unused space to its left turned into leading zeroes. enum NumberFormat { Left, Right, RightZero, } pub fn uumain(args: Vec<String>) -> i32 { let mut opts = getopts::Options::new(); opts.optopt( "b", "body-numbering", "use STYLE for numbering body lines", "STYLE", ); opts.optopt( "d", "section-delimiter", "use CC for separating logical pages", "CC", ); opts.optopt( "f", "footer-numbering", "use STYLE for numbering footer lines", "STYLE", ); opts.optopt( "h", "header-numbering", "use STYLE for numbering header lines", "STYLE", ); opts.optopt( "i", "line-increment", "line number increment at each line", "", ); opts.optopt( "l", "join-blank-lines", "group of NUMBER empty lines counted as one", "NUMBER", ); opts.optopt( "n", "number-format", "insert line numbers according to FORMAT", "FORMAT", ); opts.optflag( "p", "no-renumber", "do not reset line numbers at logical pages", ); opts.optopt( "s", "number-separator", "add STRING after (possible) line number", "STRING", ); opts.optopt( "v", "starting-line-number", "first line number on each logical page", "NUMBER", ); opts.optopt( "w", "number-width", "use NUMBER columns for line numbers", "NUMBER", ); opts.optflag("", "help", "display this help and exit"); opts.optflag("V", "version", "version"); // A mutable settings object, initialized with the defaults. 
let mut settings = Settings { header_numbering: NumberingStyle::NumberForNone, body_numbering: NumberingStyle::NumberForAll, footer_numbering: NumberingStyle::NumberForNone, section_delimiter: ['\\', ':'], starting_line_number: 1, line_increment: 1, join_blank_lines: 1, number_width: 6, number_format: NumberFormat::Right, renumber: true, number_separator: String::from("\t"), }; let given_options = match opts.parse(&args[1..]) { Ok(m) => m, Err(f) => { show_error!("{}", f); print_usage(&opts); return 1; } }; if given_options.opt_present("help") { print_usage(&opts); return 0; } if given_options.opt_present("version") { version(); return 0; } // Update the settings from the command line options, and terminate the // program if some options could not successfully be parsed. let parse_errors = helper::parse_options(&mut settings, &given_options); if !parse_errors.is_empty() { show_error!("Invalid arguments supplied."); for message in &parse_errors { println!("{}", message); } return 1; } let files = given_options.free; let mut read_stdin = files.is_empty(); for file in &files { if file == "-" { // If both file names and '-' are specified, we choose to treat first all // regular files, and then read from stdin last. read_stdin = true; continue; } let path = Path::new(file); let reader = File::open(path).unwrap(); let mut buffer = BufReader::new(reader); nl(&mut buffer, &settings); } if read_stdin { let mut buffer = BufReader::new(stdin()); nl(&mut buffer, &settings); } 0 } // nl implements the main functionality for an individual buffer. fn nl<T: Read>(reader: &mut BufReader<T>, settings: &Settings) { let regexp: regex::Regex = regex::Regex::new(r".?").unwrap(); let mut line_no = settings.starting_line_number; // The current line number's width as a string. Using to_string is inefficient // but since we only do it once, it should not hurt. 
let mut line_no_width = line_no.to_string().len(); let line_no_width_initial = line_no_width; // Stores the smallest integer with one more digit than line_no, so that // when line_no >= line_no_threshold, we need to use one more digit. let mut line_no_threshold = 10u64.pow(line_no_width as u32); let mut empty_line_count: u64 = 0; let fill_char = match settings.number_format { NumberFormat::RightZero => '0', _ => ' ', }; // Initially, we use the body's line counting settings let mut regex_filter = match settings.body_numbering { NumberingStyle::NumberForRegularExpression(ref re) => re, _ => &regexp, }; let mut line_filter: fn(&str, &regex::Regex) -> bool = pass_regex; for mut l in reader.lines().map(|r| r.unwrap()) { // Sanitize the string. We want to print the newline ourselves. if !l.is_empty() && l.chars().rev().next().unwrap() == '\n' { l.pop(); } // Next we iterate through the individual chars to see if this // is one of the special lines starting a new "section" in the // document. let line = l; let mut odd = false; // matched_group counts how many copies of section_delimiter // this string consists of (0 if there's anything else) let mut matched_groups = 0u8; for c in line.chars() { // If this is a newline character, the loop should end. if c == '\n' { break; } // If we have already seen three groups (corresponding to // a header) or the current char does not form part of // a new group, then this line is not a segment indicator. if matched_groups >= 3 || settings.section_delimiter[if odd { 1 } else { 0 }] != c { matched_groups = 0; break; } if odd { // We have seen a new group and count it. matched_groups += 1; } odd = !odd; } // See how many groups we matched. That will tell us if this is // a line starting a new segment, and the number of groups // indicates what type of segment. if matched_groups > 0 { // The current line is a section delimiter, so we output // a blank line. 
println!(""); // However the line does not count as a blank line, so we // reset the counter used for --join-blank-lines. empty_line_count = 0; match *match matched_groups { 3 => { // This is a header, so we may need to reset the // line number and the line width if settings.renumber { line_no = settings.starting_line_number; line_no_width = line_no_width_initial; line_no_threshold = 10u64.pow(line_no_width as u32); } &settings.header_numbering } 1 => &settings.footer_numbering, // The only option left is 2, but rust wants // a catch-all here. _ => &settings.body_numbering, } { NumberingStyle::NumberForAll => { line_filter = pass_all; } NumberingStyle::NumberForNonEmpty => { line_filter = pass_nonempty; } NumberingStyle::NumberForNone => { line_filter = pass_none; } NumberingStyle::NumberForRegularExpression(ref re) => { line_filter = pass_regex; regex_filter = re; } } continue; } // From this point on we format and print a "regular" line. if line == "" { // The line is empty, which means that we have to care // about the --join-blank-lines parameter. empty_line_count += 1; } else { // This saves us from having to check for an empty string // in the next selector. empty_line_count = 0; } if !line_filter(&line, regex_filter) || (empty_line_count > 0 && empty_line_count < settings.join_blank_lines) { // No number is printed for this line. Either we did not // want to print one in the first place, or it is a blank // line but we are still collecting more blank lines via // the option --join-blank-lines. println!("{}", line); continue; } // If we make it here, then either we are printing a non-empty // line or assigning a line number to an empty line. Either // way, start counting empties from zero once more. empty_line_count = 0; // A line number is to be printed. 
let mut w: usize = 0; if settings.number_width > line_no_width { w = settings.number_width - line_no_width; } let fill: String = repeat(fill_char).take(w).collect(); match settings.number_format { NumberFormat::Left => println!( "{1}{0}{2}{3}", fill, line_no, settings.number_separator, line ), _ => println!( "{0}{1}{2}{3}", fill, line_no, settings.number_separator, line ), } // Now update the variables for the (potential) next // line. line_no += settings.line_increment; while line_no >= line_no_threshold { // The line number just got longer. line_no_threshold *= 10; line_no_width += 1; } } } fn pass_regex(line: &str, re: &regex::Regex) -> bool { re.is_match(line) } fn pass_nonempty(line: &str, _: &regex::Regex) -> bool { line.len() > 0 } fn pass_none(_: &str, _: &regex::Regex) -> bool { false } fn pass_all(_: &str, _: &regex::Regex) -> bool { true } fn print_usage(opts: &getopts::Options) { println!("{}", opts.usage(USAGE)); } fn version() { println!("{} {}", NAME, VERSION); }
32.444156
96
0.57009
236a284b86728e3b06da07c213c0cf17990447b1
100
pub mod models; pub mod operations; #[allow(dead_code)] pub const API_VERSION: &str = "2020-06-25";
20
43
0.72
386a22383031d7edd4950a9057b6805516116f51
1,066
#[doc = "Reader of register DAP[%s]"] pub type R = crate::R<u32, super::DAP>; #[doc = "Writer for register DAP[%s]"] pub type W = crate::W<u32, super::DAP>; #[doc = "Register DAP[%s] `reset()`'s with value 0"] impl crate::ResetValue for super::DAP { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `DAP`"] pub type DAP_R = crate::R<u16, u16>; #[doc = "Write proxy for field `DAP`"] pub struct DAP_W<'a> { w: &'a mut W, } impl<'a> DAP_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff); self.w } } impl R { #[doc = "Bits 0:15 - Device address prefix n"] #[inline(always)] pub fn dap(&self) -> DAP_R { DAP_R::new((self.bits & 0xffff) as u16) } } impl W { #[doc = "Bits 0:15 - Device address prefix n"] #[inline(always)] pub fn dap(&mut self) -> DAP_W { DAP_W { w: self } } }
25.380952
74
0.549719
7a49c61ac1c656e74bf18a21952574e2e6f90a00
555
impl Solution { pub fn is_palindrome(x: i32) -> bool { if x.is_negative() { return false; } let mut y = x; let mut z = 0; while y != 0 { z *= 10; z += y % 10; y /= 10; } x == z } } pub struct Solution; #[cfg(test)] mod test { use super::Solution; #[test] fn test() { assert_eq!(Solution::is_palindrome(121), true); assert_eq!(Solution::is_palindrome(-121), false); assert_eq!(Solution::is_palindrome(10), false); } }
17.903226
57
0.481081
ebc7d8ff94e9b36a6385942d778dee75e2f472ee
6,548
//! BIP39 and BIP44 mnemonic seed phrase. use crate::encoding::to_hex; use crate::Error; use crate::Private; use bip39::Mnemonic; pub use bip39::MnemonicType; use ed25519_dalek_bip32::{DerivationPath, ExtendedSecretKey}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::convert::TryFrom; use std::fmt::{Display, Formatter}; use std::str::FromStr; static LANGUAGES: &str = "en, zh-hans, zh-hant, fr, it, ja, ko, es"; /// The language the phrase is in. /// /// This is copied from [bip39::Language] because I need it to be Serialize/Deserialize. It should /// act like the [crate::bip39] implementation. #[derive(Clone, Debug, Serialize, Deserialize)] pub enum Language { English, ChineseSimplified, ChineseTraditional, French, Italian, Japanese, Korean, Spanish, } impl Language { pub fn from_language_code(language_code: &str) -> Option<Self> { bip39::Language::from_language_code(language_code).map(|x| x.into()) } } impl FromStr for Language { type Err = Error; fn from_str(s: &str) -> Result<Self, Self::Err> { let language = Language::from_language_code(s).ok_or(Error::LanguageError(LANGUAGES.to_string()))?; Ok(language) } } impl From<bip39::Language> for Language { fn from(lang: bip39::Language) -> Self { match lang { bip39::Language::English => Language::English, bip39::Language::ChineseSimplified => Language::ChineseSimplified, bip39::Language::ChineseTraditional => Language::ChineseTraditional, bip39::Language::French => Language::French, bip39::Language::Italian => Language::Italian, bip39::Language::Japanese => Language::Japanese, bip39::Language::Korean => Language::Korean, bip39::Language::Spanish => Language::Spanish, } } } impl Into<bip39::Language> for Language { fn into(self) -> bip39::Language { match self { Language::English => bip39::Language::English, Language::ChineseSimplified => bip39::Language::ChineseSimplified, Language::ChineseTraditional => bip39::Language::ChineseTraditional, Language::French => bip39::Language::French, 
Language::Italian => bip39::Language::Italian, Language::Japanese => bip39::Language::Japanese, Language::Korean => bip39::Language::Korean, Language::Spanish => bip39::Language::Spanish, } } } /// A wrapper for Entropy so it can be serialized as hex, and have its own type instead of Vec<u8>. // TODO: This should probably "act" more like the other [u8] structs. #[derive(Debug, Clone)] struct Entropy(Vec<u8>); impl Serialize for Entropy { fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error> where S: Serializer, { serializer.serialize_str(to_hex(&self.0).as_str()) } } impl<'de> Deserialize<'de> for Entropy { fn deserialize<D>(deserializer: D) -> Result<Self, <D as Deserializer<'de>>::Error> where D: Deserializer<'de>, { let s: String = Deserialize::deserialize(deserializer)?; Ok(Self( hex::decode(s.as_bytes()).map_err(serde::de::Error::custom)?, )) } } /// BIP39 and BIP44 mnemonic seed phrase that can generate keys. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Phrase { language: Language, entropy: Entropy, } impl Phrase { pub fn random(len: MnemonicType, language: Language) -> Self { let m = Mnemonic::new(len, language.to_owned().into()); Self { entropy: Entropy(m.entropy().to_vec()), language: language.into(), } } pub fn to_mnemonic(&self) -> Result<Mnemonic, Error> { Ok(Mnemonic::from_entropy( &self.entropy.0, self.language.to_owned().into(), )?) 
} pub fn to_bip39_seed(&self, passphrase: &str) -> Result<bip39::Seed, Error> { Ok(bip39::Seed::new(&self.to_mnemonic()?, passphrase)) } pub fn to_bip32_ext_key( &self, account: u32, passphrase: &str, ) -> Result<ExtendedSecretKey, Error> { let bip39_seed = self.to_bip39_seed(passphrase)?; let key = ExtendedSecretKey::from_seed(bip39_seed.as_bytes())?; let path = format!("m/44'/165'/{}'", account); let path: DerivationPath = path.parse().unwrap(); let derived = key.derive(&path)?; Ok(derived) } pub fn to_private(&self, account: u32, passphrase: &str) -> Result<Private, Error> { let ext_key = self.to_bip32_ext_key(account, passphrase)?; let bip39_seed = ext_key.secret_key.as_ref(); Ok(Private::try_from(bip39_seed)?) } pub fn from_words(language: Language, words: &str) -> Result<Self, Error> { let m = Mnemonic::from_phrase(words, language.to_owned().into())?; Ok(Self { language, entropy: Entropy(m.entropy().to_vec()), }) } } impl Display for Phrase { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { // TODO: remove unwrap let mnemonic = self.to_mnemonic().unwrap(); let p = mnemonic.phrase(); write!(f, "{}", &p) } } #[cfg(test)] mod tests { use super::*; #[test] fn conversions() { // Example taken from: // https://docs.nano.org/integration-guides/key-management/#mnemonic-seed let phrase = Phrase::from_words( Language::English, "edge defense waste choose enrich upon flee junk siren film clown finish \ luggage leader kid quick brick print evidence swap drill paddle truly occur", ) .unwrap(); let bip39_seed = phrase.to_bip39_seed("some password").unwrap(); assert_eq!( format!("{:X}", bip39_seed), "0DC285FDE768F7FF29B66CE7252D56ED92FE003B605907F7A4F683C3DC8586D3\ 4A914D3C71FC099BB38EE4A59E5B081A3497B7A323E90CC68F67B5837690310C" ); let private = phrase.to_private(0, "some password").unwrap(); assert_eq!( format!("{:0X}", private), "3BE4FC2EF3F3B7374E6FC4FB6E7BB153F8A2998B3B3DAB50853EABE128024143" ); let address = private.to_public().unwrap().to_address(); 
assert_eq!( address.to_string(), "nano_1pu7p5n3ghq1i1p4rhmek41f5add1uh34xpb94nkbxe8g4a6x1p69emk8y1d" ); } }
32.256158
100
0.618815
bfb9c078889d8f116dfc89d2ad49da88b597922a
13,440
use crate::distribution::{self, poisson, Discrete, DiscreteCDF}; use crate::function::{beta, gamma}; use crate::statistics::*; use crate::{Result, StatsError}; use rand::Rng; use std::f64; /// Implements the /// [NegativeBinomial](http://en.wikipedia.org/wiki/Negative_binomial_distribution) /// distribution /// /// # Examples /// /// ``` /// use statrs::distribution::{NegativeBinomial, Discrete}; /// use statrs::statistics::DiscreteDistribution; /// use statrs::prec::almost_eq; /// /// let r = NegativeBinomial::new(4.0, 0.5).unwrap(); /// assert_eq!(r.mean().unwrap(), 4.0); /// assert!(almost_eq(r.pmf(0), 0.0625, 1e-8)); /// assert!(almost_eq(r.pmf(3), 0.15625, 1e-8)); /// ``` #[derive(Debug, Copy, Clone, PartialEq)] pub struct NegativeBinomial { r: f64, p: f64, } impl NegativeBinomial { /// Constructs a new negative binomial distribution /// with a given `p` probability of the number of successes `r` /// /// # Errors /// /// Returns an error if `p` is `NaN`, less than `0.0`, /// greater than `1.0`, or if `r` is `NaN` or less than `0` /// /// # Examples /// /// ``` /// use statrs::distribution::NegativeBinomial; /// /// let mut result = NegativeBinomial::new(4.0, 0.5); /// assert!(result.is_ok()); /// /// result = NegativeBinomial::new(-0.5, 5.0); /// assert!(result.is_err()); /// ``` pub fn new(r: f64, p: f64) -> Result<NegativeBinomial> { if p.is_nan() || p < 0.0 || p > 1.0 || r.is_nan() || r < 0.0 { Err(StatsError::BadParams) } else { Ok(NegativeBinomial { r, p }) } } /// Returns the probability of success `p` of /// the negative binomial distribution. 
/// /// # Examples /// /// ``` /// use statrs::distribution::NegativeBinomial; /// /// let r = NegativeBinomial::new(5.0, 0.5).unwrap(); /// assert_eq!(r.p(), 0.5); /// ``` pub fn p(&self) -> f64 { self.p } /// Returns the number `r` of success of this negative /// binomial distribution /// /// # Examples /// /// ``` /// use statrs::distribution::NegativeBinomial; /// /// let r = NegativeBinomial::new(5.0, 0.5).unwrap(); /// assert_eq!(r.r(), 5.0); /// ``` pub fn r(&self) -> f64 { self.r } } impl ::rand::distributions::Distribution<u64> for NegativeBinomial { fn sample<R: Rng + ?Sized>(&self, r: &mut R) -> u64 { let lambda = distribution::gamma::sample_unchecked(r, self.r, (1.0 - self.p) / self.p); poisson::sample_unchecked(r, lambda).floor() as u64 } } impl DiscreteCDF<u64, f64> for NegativeBinomial { /// Calculates the cumulative distribution function for the /// negative binomial distribution at `x` /// /// Note that due to extending the distribution to the reals /// (allowing positive real values for `r`), while still technically /// a discrete distribution the CDF behaves more like that of a /// continuous distribution rather than a discrete distribution /// (i.e. 
a smooth graph rather than a step-ladder) /// /// # Formula /// /// ```ignore /// 1 - I_(1 - p)(x + 1, r) /// ``` /// /// where `I_(x)(a, b)` is the regularized incomplete beta function fn cdf(&self, x: u64) -> f64 { 1.0 - beta::beta_reg(x as f64 + 1.0, self.r, 1.0 - self.p) } } impl Min<u64> for NegativeBinomial { /// Returns the minimum value in the domain of the /// negative binomial distribution representable by a 64-bit /// integer /// /// # Formula /// /// ```ignore /// 0 /// ``` fn min(&self) -> u64 { 0 } } impl Max<u64> for NegativeBinomial { /// Returns the maximum value in the domain of the /// negative binomial distribution representable by a 64-bit /// integer /// /// # Formula /// /// ```ignore /// u64::MAX /// ``` fn max(&self) -> u64 { std::u64::MAX } } impl DiscreteDistribution<f64> for NegativeBinomial { /// Returns the mean of the negative binomial distribution /// /// # Formula /// /// ```ignore /// r * (1-p) / p /// ``` fn mean(&self) -> Option<f64> { Some(self.r * (1.0 - self.p) / self.p) } /// Returns the variance of the negative binomial distribution /// /// # Formula /// /// ```ignore /// r * (1-p) / p^2 /// ``` fn variance(&self) -> Option<f64> { Some(self.r * (1.0 - self.p) / (self.p * self.p)) } /// Returns the skewness of the negative binomial distribution /// /// # Formula /// /// ```ignore /// (2-p) / sqrt(r * (1-p)) /// ``` fn skewness(&self) -> Option<f64> { Some((2.0 - self.p) / f64::sqrt(self.r * (1.0 - self.p))) } } impl Mode<Option<f64>> for NegativeBinomial { /// Returns the mode for the negative binomial distribution /// /// # Formula /// /// ```ignore /// if r > 1 then /// floor((r - 1) * (1-p / p)) /// else /// 0 /// ``` fn mode(&self) -> Option<f64> { let mode = if self.r > 1.0 { f64::floor((self.r - 1.0) * (1.0 - self.p) / self.p) } else { 0.0 }; Some(mode) } } impl Discrete<u64, f64> for NegativeBinomial { /// Calculates the probability mass function for the negative binomial /// distribution at `x` /// /// # Formula /// 
/// ```ignore /// (x + r - 1 choose k) * (1 - p)^x * p^r /// ``` fn pmf(&self, x: u64) -> f64 { self.ln_pmf(x).exp() } /// Calculates the log probability mass function for the negative binomial /// distribution at `x` /// /// # Formula /// /// ```ignore /// ln(x + r - 1 choose k) * (1 - p)^x * p^r)) /// ``` fn ln_pmf(&self, x: u64) -> f64 { let k = x as f64; gamma::ln_gamma(self.r + k) - gamma::ln_gamma(self.r) - gamma::ln_gamma(k + 1.0) + (self.r * self.p.ln()) + (k * (1.0 - self.p).ln()) } } #[rustfmt::skip] #[cfg(all(test, feature = "nightly"))] mod tests { use std::fmt::Debug; use crate::statistics::*; use crate::distribution::{DiscreteCDF, Discrete, NegativeBinomial}; use crate::consts::ACC; fn try_create(r: f64, p: f64) -> NegativeBinomial { let r = NegativeBinomial::new(r, p); assert!(r.is_ok()); r.unwrap() } fn create_case(r: f64, p: f64) { let dist = try_create(r, p); assert_eq!(p, dist.p()); assert_eq!(r, dist.r()); } fn bad_create_case(r: f64, p: f64) { let r = NegativeBinomial::new(r, p); assert!(r.is_err()); } fn get_value<T, F>(r: f64, p: f64, eval: F) -> T where T: PartialEq + Debug, F: Fn(NegativeBinomial) -> T { let r = try_create(r, p); eval(r) } fn test_case<T, F>(r: f64, p: f64, expected: T, eval: F) where T: PartialEq + Debug, F: Fn(NegativeBinomial) -> T { let x = get_value(r, p, eval); assert_eq!(expected, x); } fn test_case_or_nan<F>(r: f64, p: f64, expected: f64, eval: F) where F: Fn(NegativeBinomial) -> f64 { let x = get_value(r, p, eval); if expected.is_nan() { assert!(x.is_nan()) } else { assert_eq!(expected, x); } } fn test_almost<F>(r: f64, p: f64, expected: f64, acc: f64, eval: F) where F: Fn(NegativeBinomial) -> f64 { let x = get_value(r, p, eval); assert_almost_eq!(expected, x, acc); } #[test] fn test_create() { create_case(0.0, 0.0); create_case(0.3, 0.4); create_case(1.0, 0.3); } #[test] fn test_bad_create() { bad_create_case(f64::NAN, 1.0); bad_create_case(0.0, f64::NAN); bad_create_case(-1.0, 1.0); bad_create_case(2.0, 2.0); } 
#[test] fn test_mean() { let mean = |x: NegativeBinomial| x.mean().unwrap(); test_case(4.0, 0.0, f64::INFINITY, mean); test_almost(3.0, 0.3, 7.0, 1e-15 , mean); test_case(2.0, 1.0, 0.0, mean); } #[test] fn test_variance() { let variance = |x: NegativeBinomial| x.variance().unwrap(); test_case(4.0, 0.0, f64::INFINITY, variance); test_almost(3.0, 0.3, 23.333333333333, 1e-12, variance); test_case(2.0, 1.0, 0.0, variance); } #[test] fn test_skewness() { let skewness = |x: NegativeBinomial| x.skewness().unwrap(); test_case(0.0, 0.0, f64::INFINITY, skewness); test_almost(0.1, 0.3, 6.425396041, 1e-09, skewness); test_case(1.0, 1.0, f64::INFINITY, skewness); } #[test] fn test_mode() { let mode = |x: NegativeBinomial| x.mode().unwrap(); test_case(0.0, 0.0, 0.0, mode); test_case(0.3, 0.0, 0.0, mode); test_case(1.0, 1.0, 0.0, mode); test_case(10.0, 0.01, 891.0, mode); } #[test] fn test_min_max() { let min = |x: NegativeBinomial| x.min(); let max = |x: NegativeBinomial| x.max(); test_case(1.0, 0.5, 0, min); test_case(1.0, 0.3, std::u64::MAX, max); } #[test] fn test_pmf() { let pmf = |arg: u64| move |x: NegativeBinomial| x.pmf(arg); test_almost(4.0, 0.5, 0.0625, 1e-8, pmf(0)); test_almost(4.0, 0.5, 0.15625, 1e-8, pmf(3)); test_case(1.0, 0.0, 0.0, pmf(0)); test_case(1.0, 0.0, 0.0, pmf(1)); test_almost(3.0, 0.2, 0.008, 1e-15, pmf(0)); test_almost(3.0, 0.2, 0.0192, 1e-15, pmf(1)); test_almost(3.0, 0.2, 0.04096, 1e-15, pmf(3)); test_almost(10.0, 0.2, 1.024e-07, 1e-07, pmf(0)); test_almost(10.0, 0.2, 8.192e-07, 1e-07, pmf(1)); test_almost(10.0, 0.2, 0.001015706852, 1e-07, pmf(10)); test_almost(1.0, 0.3, 0.3, 1e-15, pmf(0)); test_almost(1.0, 0.3, 0.21, 1e-15, pmf(1)); test_almost(3.0, 0.3, 0.027, 1e-15, pmf(0)); test_case(0.3, 1.0, 0.0, pmf(1)); test_case(0.3, 1.0, 0.0, pmf(3)); test_case_or_nan(0.3, 1.0, f64::NAN, pmf(0)); test_case(0.3, 1.0, 0.0, pmf(1)); test_case(0.3, 1.0, 0.0, pmf(10)); test_case_or_nan(1.0, 1.0, f64::NAN, pmf(0)); test_case(1.0, 1.0, 0.0, pmf(1)); 
test_case_or_nan(3.0, 1.0, f64::NAN, pmf(0)); test_case(3.0, 1.0, 0.0, pmf(1)); test_case(3.0, 1.0, 0.0, pmf(3)); test_case_or_nan(10.0, 1.0, f64::NAN, pmf(0)); test_case(10.0, 1.0, 0.0, pmf(1)); test_case(10.0, 1.0, 0.0, pmf(10)); } #[test] fn test_ln_pmf() { let ln_pmf = |arg: u64| move |x: NegativeBinomial| x.ln_pmf(arg); test_case(1.0, 0.0, f64::NEG_INFINITY, ln_pmf(0)); test_case(1.0, 0.0, f64::NEG_INFINITY, ln_pmf(1)); test_almost(3.0, 0.2, -4.828313737, 1e-08, ln_pmf(0)); test_almost(3.0, 0.2, -3.952845, 1e-08, ln_pmf(1)); test_almost(3.0, 0.2, -3.195159298, 1e-08, ln_pmf(3)); test_almost(10.0, 0.2, -16.09437912, 1e-08, ln_pmf(0)); test_almost(10.0, 0.2, -14.01493758, 1e-08, ln_pmf(1)); test_almost(10.0, 0.2, -6.892170503, 1e-08, ln_pmf(10)); test_almost(1.0, 0.3, -1.203972804, 1e-08, ln_pmf(0)); test_almost(1.0, 0.3, -1.560647748, 1e-08, ln_pmf(1)); test_almost(3.0, 0.3, -3.611918413, 1e-08, ln_pmf(0)); test_case(0.3, 1.0, f64::NEG_INFINITY, ln_pmf(1)); test_case(0.3, 1.0, f64::NEG_INFINITY, ln_pmf(3)); test_case_or_nan(0.3, 1.0, f64::NAN, ln_pmf(0)); test_case(0.3, 1.0, f64::NEG_INFINITY, ln_pmf(1)); test_case(0.3, 1.0, f64::NEG_INFINITY, ln_pmf(10)); test_case_or_nan(1.0, 1.0, f64::NAN, ln_pmf(0)); test_case(1.0, 1.0, f64::NEG_INFINITY, ln_pmf(1)); test_case_or_nan(3.0, 1.0, f64::NAN, ln_pmf(0)); test_case(3.0, 1.0, f64::NEG_INFINITY, ln_pmf(1)); test_case(3.0, 1.0, f64::NEG_INFINITY, ln_pmf(3)); test_case_or_nan(10.0, 1.0, f64::NAN, ln_pmf(0)); test_case(10.0, 1.0, f64::NEG_INFINITY, ln_pmf(1)); test_case(10.0, 1.0, f64::NEG_INFINITY, ln_pmf(10)); } #[test] fn test_cdf() { let cdf = |arg: u64| move |x: NegativeBinomial| x.cdf(arg); test_almost(1.0, 0.3, 0.3, 1e-08, cdf(0)); test_almost(1.0, 0.3, 0.51, 1e-08, cdf(1)); test_almost(1.0, 0.3, 0.83193, 1e-08, cdf(4)); test_almost(1.0, 0.3, 0.9802267326, 1e-08, cdf(10)); test_case(1.0, 1.0, 1.0, cdf(0)); test_case(1.0, 1.0, 1.0, cdf(1)); test_almost(10.0, 0.75, 0.05631351471, 1e-08, cdf(0)); test_almost(10.0, 
0.75, 0.1970973015, 1e-08, cdf(1)); test_almost(10.0, 0.75, 0.9960578583, 1e-08, cdf(10)); } #[test] fn test_cdf_upper_bound() { let cdf = |arg: u64| move |x: NegativeBinomial| x.cdf(arg); test_case(3.0, 0.5, 1.0, cdf(100)); } // TODO: figure out the best way to re-implement this test. We currently // do not have a good way to characterize a discrete distribution with a // CDF that is continuous // // #[test] // fn test_discrete() { // test::check_discrete_distribution(&try_create(5.0, 0.3), 35); // test::check_discrete_distribution(&try_create(10.0, 0.7), 21); // } }
30.755149
95
0.533185
5bcb9d8badc6597354c05555679d0f9867134b1e
6,420
use std::{ fs::{create_dir_all, File}, io::{Read, Result, Seek, SeekFrom, Write}, path::{Component, Path, PathBuf}, }; use byteorder::{LittleEndian, ReadBytesExt}; const ATG_HEADER: &[u8] = "ATG CORE CEMENT LIBRARY\0\0\0\0\0\0\0\0\0".as_bytes(); const BUFFER_SIZE: usize = 1024 * 1024 * 4; type LE = LittleEndian; fn next_multiple(n: i32, target: i32) -> i32 { if n % target == 0 { return n; } let prev = n / target; target * (prev + 1) } fn nm_to_skip(current: i32, target: i32) -> i32 { next_multiple(current, target) - current } fn skip_to_multiple(infile: &mut File, target: i32) -> Result<usize> { let current = infile.stream_position().unwrap(); let to_skip = nm_to_skip(current as i32, target); infile.seek(SeekFrom::Current(to_skip as i64))?; Ok(to_skip as usize) } struct ArchiveHeader { pub header: [u8; 32], _unknown: u32, pub directory_offset: u32, pub directory_length: u32, pub offset_to_filename_directory: u32, pub filename_directory_length: u32, _null: u32, pub number_of_files: u32, } struct RCFile { pub offset: u32, pub length: u32, } impl ArchiveHeader { fn new() -> ArchiveHeader { ArchiveHeader { header: ['\0' as u8; 32], _unknown: 0, directory_offset: 0, directory_length: 0, offset_to_filename_directory: 0, filename_directory_length: 0, _null: 0, number_of_files: 0, } } } fn read_archive_header(infile: &mut File) -> Result<ArchiveHeader> { let mut a_head = ArchiveHeader::new(); infile.read_exact(&mut a_head.header)?; if a_head.header == ATG_HEADER { println!("ATG header detected!"); } infile.seek(SeekFrom::Current(4))?; a_head.directory_offset = infile.read_u32::<LE>()?; a_head.directory_length = infile.read_u32::<LE>()?; a_head.offset_to_filename_directory = infile.read_u32::<LE>()?; a_head.filename_directory_length = infile.read_u32::<LE>()?; infile.seek(SeekFrom::Current(4))?; a_head.number_of_files = infile.read_u32::<LE>()?; println!("Number of files: {}", a_head.number_of_files); Ok(a_head) } fn read_number_of_files(infile: &mut File, number: usize) -> 
Result<Vec<RCFile>> { let mut files = Vec::new(); for _ in 0..number { infile.seek(SeekFrom::Current(4))?; let file = RCFile { offset: infile.read_u32::<LE>()?, length: infile.read_u32::<LE>()?, }; println!("{}, length: {}", file.offset, file.length); files.push(file); } files.sort_by(|a, b| a.offset.cmp(&b.offset)); Ok(files) } fn read_filenames(infile: &mut File, number: usize) -> Result<Vec<String>> { let mut filenames = Vec::new(); println!("num {}", number); for _ in 0..number { infile.seek(SeekFrom::Current(4 * 3))?; let fn_length = infile.read_u32::<LE>()?; // println!("Filename length: {}", fn_length); let mut filename = vec![0u8; fn_length as usize - 1]; // exclude null terminator infile.read_exact(&mut filename)?; // let fpos = infile.stream_position().unwrap(); // let to_skip = nm_to_skip(fpos as i32, 4) - 1; let to_skip = 4; // println!("Current: {}. To skip: {}.", fpos, to_skip); infile.seek(SeekFrom::Current(to_skip as i64))?; match String::from_utf8(filename) { Ok(v) => { println!("{}", &v); filenames.push(v); } Err(e) => panic!("Malformed filename in filenames directory! {}", e), } } Ok(filenames) } fn file_from_path(winpath: &String) -> Result<File> { let wpath = winpath.replace("\\", "/"); let wpath = Path::new(wpath.as_str()); let mut wpath: Vec<Component> = wpath.components().collect(); let result = wpath .pop() .expect(format!("Filename empty! 
Filename provided: {}", winpath).as_str()) .as_os_str(); let mut newpath = PathBuf::new(); for dirs in wpath { match dirs { Component::Prefix(_) => (), Component::RootDir => (), Component::CurDir => (), Component::ParentDir => (), Component::Normal(v) => newpath.push(v), } } let newpath = newpath.as_path(); create_dir_all(&newpath)?; let newpath = newpath.join(result); let newfile = File::create(newpath.as_path())?; Ok(newfile) } fn main() -> Result<()> { let input_file = std::env::args().nth(1); let input_file = input_file.expect("Missing input file!"); let mut handle = File::open(&input_file).expect(format!("Unable to open file: {}", &input_file).as_str()); let a_head = read_archive_header(&mut handle).unwrap(); let num_files = a_head.number_of_files as usize; let files = read_number_of_files(&mut handle, num_files)?; skip_to_multiple(&mut handle, 2048)?; handle.seek(SeekFrom::Current(4 + 4))?; let filenames = read_filenames(&mut handle, num_files).unwrap(); skip_to_multiple(&mut handle, 2048)?; let mut in_buffer = [0u8; BUFFER_SIZE]; for (file, filename) in files.into_iter().zip(filenames.into_iter()) { // let filename = filename.replace("\\", "_"); let filesize: usize = file.length as usize; let mut to_read = filesize; // let mut fhandle = File::create(&filename).unwrap(); let mut fhandle = file_from_path(&filename).unwrap(); loop { if to_read <= BUFFER_SIZE { println!("File {} remaining bytes to read {}", filename, to_read); let mut remainder = vec![0u8; to_read]; let read_bytes = handle.read(&mut remainder).unwrap(); if read_bytes != to_read { panic!("Incorrect amount of bytes read at file {}", filename); } fhandle.write_all(remainder.as_slice()).unwrap(); break; } else { println!("File {} remaining bytes to read {}", filename, to_read); let read = handle.read(&mut in_buffer).unwrap(); fhandle.write_all(&in_buffer).unwrap(); to_read = to_read - read; } } skip_to_multiple(&mut handle, 2048)?; } Ok(()) }
31.470588
97
0.581464
87de1d7ac0193872bb50a0095bc213c1ea87ea83
30,462
// Copyright 2016 Joe Wilm, The Alacritty Project Contributors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! A specialized 2d grid implementation optimized for use in a terminal. use std::cmp::{max, min, Ordering}; use std::ops::{Deref, Index, IndexMut, Range, RangeFrom, RangeFull, RangeTo}; use serde::{Deserialize, Serialize}; use crate::index::{self, Column, IndexRange, Line, Point}; use crate::selection::Selection; mod row; pub use self::row::Row; #[cfg(test)] mod tests; mod storage; use self::storage::Storage; const MIN_INIT_SIZE: usize = 1_000; /// Bidirection iterator pub trait BidirectionalIterator: Iterator { fn prev(&mut self) -> Option<Self::Item>; } /// An item in the grid along with its Line and Column. pub struct Indexed<T> { pub inner: T, pub line: Line, pub column: Column, } impl<T> Deref for Indexed<T> { type Target = T; #[inline] fn deref(&self) -> &T { &self.inner } } impl<T: PartialEq> ::std::cmp::PartialEq for Grid<T> { fn eq(&self, other: &Self) -> bool { // Compare struct fields and check result of grid comparison self.raw.eq(&other.raw) && self.cols.eq(&other.cols) && self.lines.eq(&other.lines) && self.display_offset.eq(&other.display_offset) && self.scroll_limit.eq(&other.scroll_limit) && self.selection.eq(&other.selection) } } pub trait GridCell { fn is_empty(&self) -> bool; fn is_wrap(&self) -> bool; fn set_wrap(&mut self, wrap: bool); /// Fast equality approximation. 
/// /// This is a faster alternative to [`PartialEq`], /// but might report inequal cells as equal. fn fast_eq(&self, other: Self) -> bool; } /// Represents the terminal display contents /// /// ```notrust /// ┌─────────────────────────┐ <-- max_scroll_limit + lines /// │ │ /// │ UNINITIALIZED │ /// │ │ /// ├─────────────────────────┤ <-- raw.len() /// │ │ /// │ RESIZE BUFFER │ /// │ │ /// ├─────────────────────────┤ <-- scroll_limit + lines /// │ │ /// │ SCROLLUP REGION │ /// │ │ /// ├─────────────────────────┤v lines /// │ │| /// │ VISIBLE REGION │| /// │ │| /// ├─────────────────────────┤^ <-- display_offset /// │ │ /// │ SCROLLDOWN REGION │ /// │ │ /// └─────────────────────────┘ <-- zero /// ^ /// cols /// ``` #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Grid<T> { /// Lines in the grid. Each row holds a list of cells corresponding to the /// columns in that row. raw: Storage<T>, /// Number of columns cols: index::Column, /// Number of visible lines. lines: index::Line, /// Offset of displayed area /// /// If the displayed region isn't at the bottom of the screen, it stays /// stationary while more text is emitted. The scrolling implementation /// updates this offset accordingly. 
display_offset: usize, /// An limit on how far back it's possible to scroll scroll_limit: usize, /// Selected region #[serde(skip)] pub selection: Option<Selection>, max_scroll_limit: usize, } #[derive(Copy, Clone)] pub enum Scroll { Lines(isize), PageUp, PageDown, Top, Bottom, } impl<T: GridCell + PartialEq + Copy> Grid<T> { pub fn new(lines: index::Line, cols: index::Column, scrollback: usize, template: T) -> Grid<T> { let raw = Storage::with_capacity(lines, Row::new(cols, &template)); Grid { raw, cols, lines, display_offset: 0, scroll_limit: 0, selection: None, max_scroll_limit: scrollback, } } pub fn buffer_to_visible(&self, point: impl Into<Point<usize>>) -> Point<usize> { let mut point = point.into(); let offset = point.line.saturating_sub(self.display_offset); if point.line < self.display_offset { point.col = self.num_cols(); point.line = self.num_lines().0 - 1; } else if offset >= *self.num_lines() { point.col = Column(0); point.line = 0; } else { point.line = self.lines.0 - offset - 1; } point } pub fn visible_to_buffer(&self, point: Point) -> Point<usize> { Point { line: self.visible_line_to_buffer(point.line), col: point.col } } fn visible_line_to_buffer(&self, line: Line) -> usize { self.line_to_offset(line) + self.display_offset } /// Update the size of the scrollback history pub fn update_history(&mut self, history_size: usize, template: &T) { self.raw.update_history(history_size, Row::new(self.cols, &template)); self.max_scroll_limit = history_size; self.scroll_limit = min(self.scroll_limit, history_size); self.display_offset = min(self.display_offset, self.scroll_limit); } pub fn scroll_display(&mut self, scroll: Scroll) { match scroll { Scroll::Lines(count) => { self.display_offset = min( max((self.display_offset as isize) + count, 0isize) as usize, self.scroll_limit, ); }, Scroll::PageUp => { self.display_offset = min(self.display_offset + self.lines.0, self.scroll_limit); }, Scroll::PageDown => { self.display_offset -= min(self.display_offset, 
self.lines.0); }, Scroll::Top => self.display_offset = self.scroll_limit, Scroll::Bottom => self.display_offset = 0, } } pub fn resize( &mut self, reflow: bool, lines: index::Line, cols: index::Column, cursor_pos: &mut Point, template: &T, ) { // Check that there's actually work to do and return early if not if lines == self.lines && cols == self.cols { return; } match self.lines.cmp(&lines) { Ordering::Less => self.grow_lines(lines, template), Ordering::Greater => self.shrink_lines(lines), Ordering::Equal => (), } match self.cols.cmp(&cols) { Ordering::Less => self.grow_cols(reflow, cols, cursor_pos, template), Ordering::Greater => self.shrink_cols(reflow, cols, template), Ordering::Equal => (), } } fn increase_scroll_limit(&mut self, count: usize, template: &T) { self.scroll_limit = min(self.scroll_limit + count, self.max_scroll_limit); // Initialize new lines when the history buffer is smaller than the scroll limit let history_size = self.raw.len().saturating_sub(*self.lines); if history_size < self.scroll_limit { let new = min( max(self.scroll_limit - history_size, MIN_INIT_SIZE), self.max_scroll_limit - history_size, ); self.raw.initialize(new, Row::new(self.cols, template)); } } fn decrease_scroll_limit(&mut self, count: usize) { self.scroll_limit = self.scroll_limit.saturating_sub(count); } /// Add lines to the visible area /// /// Alacritty keeps the cursor at the bottom of the terminal as long as there /// is scrollback available. Once scrollback is exhausted, new lines are /// simply added to the bottom of the screen. 
fn grow_lines(&mut self, new_line_count: index::Line, template: &T) { let lines_added = new_line_count - self.lines; // Need to "resize" before updating buffer self.raw.grow_visible_lines(new_line_count, Row::new(self.cols, template)); self.lines = new_line_count; // Move existing lines up if there is no scrollback to fill new lines if lines_added.0 > self.scroll_limit { let scroll_lines = lines_added - self.scroll_limit; self.scroll_up(&(Line(0)..new_line_count), scroll_lines, template); } self.scroll_limit = self.scroll_limit.saturating_sub(*lines_added); self.display_offset = self.display_offset.saturating_sub(*lines_added); } fn grow_cols( &mut self, reflow: bool, cols: index::Column, cursor_pos: &mut Point, template: &T, ) { let mut new_empty_lines = 0; let mut new_raw: Vec<Row<T>> = Vec::with_capacity(self.raw.len()); for (i, mut row) in self.raw.drain().enumerate().rev() { if let Some(last_row) = new_raw.last_mut() { // Grow the current line if there's wrapped content available if reflow && last_row.len() < cols.0 && last_row.last().map(GridCell::is_wrap) == Some(true) { // Remove wrap flag before appending additional cells if let Some(cell) = last_row.last_mut() { cell.set_wrap(false); } // Append as many cells from the next line as possible let len = min(row.len(), cols.0 - last_row.len()); let mut cells = row.front_split_off(len); last_row.append(&mut cells); if row.is_empty() { let raw_len = i + 1 + new_raw.len(); if raw_len < self.lines.0 || self.scroll_limit == 0 { // Add new line and move lines up if we can't pull from history cursor_pos.line = Line(cursor_pos.line.saturating_sub(1)); new_empty_lines += 1; } else { // Make sure viewport doesn't move if line is outside of the visible // area if i < self.display_offset { self.display_offset = self.display_offset.saturating_sub(1); } // Remove one line from scrollback, since we just moved it to the // viewport self.scroll_limit = self.scroll_limit.saturating_sub(1); self.display_offset = 
min(self.display_offset, self.scroll_limit); } // Don't push line into the new buffer continue; } else if let Some(cell) = last_row.last_mut() { // Set wrap flag if next line still has cells cell.set_wrap(true); } } } new_raw.push(row); } // Add padding lines new_raw.append(&mut vec![Row::new(cols, template); new_empty_lines]); // Fill remaining cells and reverse iterator let mut reversed = Vec::with_capacity(new_raw.len()); for mut row in new_raw.drain(..).rev() { if row.len() < cols.0 { row.grow(cols, template); } reversed.push(row); } self.raw.replace_inner(reversed); self.cols = cols; } fn shrink_cols(&mut self, reflow: bool, cols: index::Column, template: &T) { let mut new_raw = Vec::with_capacity(self.raw.len()); let mut buffered = None; for (i, mut row) in self.raw.drain().enumerate().rev() { if let Some(buffered) = buffered.take() { row.append_front(buffered); } let mut wrapped = row.shrink(cols); new_raw.push(row); while let (Some(mut wrapped_cells), true) = (wrapped.take(), reflow) { // Set line as wrapped if cells got removed if let Some(cell) = new_raw.last_mut().and_then(|r| r.last_mut()) { cell.set_wrap(true); } if Some(true) == wrapped_cells.last().map(|c| c.is_wrap() && i >= 1) && wrapped_cells.len() < cols.0 { // Make sure previous wrap flag doesn't linger around if let Some(cell) = wrapped_cells.last_mut() { cell.set_wrap(false); } // Add removed cells to start of next row buffered = Some(wrapped_cells); } else { // Make sure viewport doesn't move if line is outside of the visible area if i < self.display_offset { self.display_offset = min(self.display_offset + 1, self.max_scroll_limit); } // Make sure new row is at least as long as new width let occ = wrapped_cells.len(); if occ < cols.0 { wrapped_cells.append(&mut vec![*template; cols.0 - occ]); } let mut row = Row::from_vec(wrapped_cells, occ); // Since inserted might exceed cols, we need to check it again wrapped = row.shrink(cols); // Add new row with all removed cells new_raw.push(row); // 
Increase scrollback history self.scroll_limit = min(self.scroll_limit + 1, self.max_scroll_limit); } } } let mut reversed: Vec<Row<T>> = new_raw.drain(..).rev().collect(); reversed.truncate(self.max_scroll_limit + self.lines.0); self.raw.replace_inner(reversed); self.cols = cols; } /// Remove lines from the visible area /// /// The behavior in Terminal.app and iTerm.app is to keep the cursor at the /// bottom of the screen. This is achieved by pushing history "out the top" /// of the terminal window. /// /// Alacritty takes the same approach. fn shrink_lines(&mut self, target: index::Line) { let prev = self.lines; self.selection = None; self.raw.rotate(*prev as isize - *target as isize); self.raw.shrink_visible_lines(target); self.lines = target; } /// Convert a Line index (active region) to a buffer offset /// /// # Panics /// /// This method will panic if `Line` is larger than the grid dimensions pub fn line_to_offset(&self, line: index::Line) -> usize { assert!(line < self.num_lines()); *(self.num_lines() - line - 1) } #[inline] pub fn scroll_down( &mut self, region: &Range<index::Line>, positions: index::Line, template: &T, ) { // Whether or not there is a scrolling region active, as long as it // starts at the top, we can do a full rotation which just involves // changing the start index. // // To accommodate scroll regions, rows are reordered at the end. if region.start == Line(0) { // Rotate the entire line buffer. If there's a scrolling region // active, the bottom lines are restored in the next step. 
self.raw.rotate_up(*positions); if let Some(ref mut selection) = self.selection { selection.rotate(-(*positions as isize)); } self.decrease_scroll_limit(*positions); // Now, restore any scroll region lines let lines = self.lines; for i in IndexRange(region.end..lines) { self.raw.swap_lines(i, i + positions); } // Finally, reset recycled lines for i in IndexRange(Line(0)..positions) { self.raw[i].reset(&template); } } else { // Subregion rotation for line in IndexRange((region.start + positions)..region.end).rev() { self.raw.swap_lines(line, line - positions); } for line in IndexRange(region.start..(region.start + positions)) { self.raw[line].reset(&template); } } } /// scroll_up moves lines at the bottom towards the top /// /// This is the performance-sensitive part of scrolling. pub fn scroll_up(&mut self, region: &Range<index::Line>, positions: index::Line, template: &T) { if region.start == Line(0) { // Update display offset when not pinned to active area if self.display_offset != 0 { self.display_offset = min(self.display_offset + *positions, self.len() - self.num_lines().0); } self.increase_scroll_limit(*positions, template); // Rotate the entire line buffer. If there's a scrolling region // active, the bottom lines are restored in the next step. self.raw.rotate(-(*positions as isize)); if let Some(ref mut selection) = self.selection { selection.rotate(*positions as isize); } // This next loop swaps "fixed" lines outside of a scroll region // back into place after the rotation. The work is done in buffer- // space rather than terminal-space to avoid redundant // transformations. let fixed_lines = *self.num_lines() - *region.end; for i in 0..fixed_lines { self.raw.swap(i, i + *positions); } // Finally, reset recycled lines // // Recycled lines are just above the end of the scrolling region. 
for i in 0..*positions { self.raw[i + fixed_lines].reset(&template); } } else { // Subregion rotation for line in IndexRange(region.start..(region.end - positions)) { self.raw.swap_lines(line, line + positions); } // Clear reused lines for line in IndexRange((region.end - positions)..region.end) { self.raw[line].reset(&template); } } } pub fn clear_viewport(&mut self, template: &T) { // Determine how many lines to scroll up by. let end = Point { line: 0, col: self.num_cols() }; let mut iter = self.iter_from(end); while let Some(cell) = iter.prev() { if !cell.is_empty() || iter.cur.line >= *self.lines { break; } } debug_assert!(iter.cur.line <= *self.lines); let positions = self.lines - iter.cur.line; let region = Line(0)..self.num_lines(); // Reset display offset self.display_offset = 0; // Clear the viewport self.scroll_up(&region, positions, template); // Reset rotated lines for i in positions.0..self.lines.0 { self.raw[i].reset(&template); } } // Completely reset the grid state pub fn reset(&mut self, template: &T) { self.clear_history(); // Reset all visible lines for row in 0..self.raw.len() { self.raw[row].reset(template); } self.display_offset = 0; self.selection = None; } } #[allow(clippy::len_without_is_empty)] impl<T> Grid<T> { #[inline] pub fn num_lines(&self) -> index::Line { self.lines } pub fn display_iter(&self) -> DisplayIter<'_, T> { DisplayIter::new(self) } #[inline] pub fn num_cols(&self) -> index::Column { self.cols } pub fn clear_history(&mut self) { // Explicitly purge all lines from history let shrinkage = self.raw.len() - self.lines.0; self.raw.shrink_lines(shrinkage); self.scroll_limit = 0; } #[inline] pub fn scroll_limit(&self) -> usize { self.scroll_limit } /// Total number of lines in the buffer, this includes scrollback + visible lines #[inline] pub fn len(&self) -> usize { self.raw.len() } #[inline] pub fn history_size(&self) -> usize { self.raw.len().saturating_sub(*self.lines) } /// This is used only for initializing after loading 
ref-tests pub fn initialize_all(&mut self, template: &T) where T: Copy + GridCell, { let history_size = self.raw.len().saturating_sub(*self.lines); self.raw.initialize(self.max_scroll_limit - history_size, Row::new(self.cols, template)); } /// This is used only for truncating before saving ref-tests pub fn truncate(&mut self) { self.raw.truncate(); } pub fn iter_from(&self, point: Point<usize>) -> GridIterator<'_, T> { GridIterator { grid: self, cur: point } } #[inline] pub fn contains(&self, point: &Point) -> bool { self.lines > point.line && self.cols > point.col } #[inline] pub fn display_offset(&self) -> usize { self.display_offset } } pub struct GridIterator<'a, T> { /// Immutable grid reference grid: &'a Grid<T>, /// Current position of the iterator within the grid. cur: Point<usize>, } impl<'a, T> GridIterator<'a, T> { pub fn point(&self) -> Point<usize> { self.cur } pub fn cell(&self) -> &'a T { &self.grid[self.cur.line][self.cur.col] } } impl<'a, T> Iterator for GridIterator<'a, T> { type Item = &'a T; fn next(&mut self) -> Option<Self::Item> { let last_col = self.grid.num_cols() - Column(1); match self.cur { Point { line, col } if line == 0 && col == last_col => None, Point { col, .. } if (col == last_col) => { self.cur.line -= 1; self.cur.col = Column(0); Some(&self.grid[self.cur.line][self.cur.col]) }, _ => { self.cur.col += Column(1); Some(&self.grid[self.cur.line][self.cur.col]) }, } } } impl<'a, T> BidirectionalIterator for GridIterator<'a, T> { fn prev(&mut self) -> Option<Self::Item> { let num_cols = self.grid.num_cols(); match self.cur { Point { line, col: Column(0) } if line == self.grid.len() - 1 => None, Point { col: Column(0), .. 
} => { self.cur.line += 1; self.cur.col = num_cols - Column(1); Some(&self.grid[self.cur.line][self.cur.col]) }, _ => { self.cur.col -= Column(1); Some(&self.grid[self.cur.line][self.cur.col]) }, } } } /// Index active region by line impl<T> Index<index::Line> for Grid<T> { type Output = Row<T>; #[inline] fn index(&self, index: index::Line) -> &Row<T> { &self.raw[index] } } /// Index with buffer offset impl<T> Index<usize> for Grid<T> { type Output = Row<T>; #[inline] fn index(&self, index: usize) -> &Row<T> { &self.raw[index] } } impl<T> IndexMut<index::Line> for Grid<T> { #[inline] fn index_mut(&mut self, index: index::Line) -> &mut Row<T> { &mut self.raw[index] } } impl<T> IndexMut<usize> for Grid<T> { #[inline] fn index_mut(&mut self, index: usize) -> &mut Row<T> { &mut self.raw[index] } } impl<'point, T> Index<&'point Point> for Grid<T> { type Output = T; #[inline] fn index<'a>(&'a self, point: &Point) -> &'a T { &self[point.line][point.col] } } impl<'point, T> IndexMut<&'point Point> for Grid<T> { #[inline] fn index_mut<'a, 'b>(&'a mut self, point: &'b Point) -> &'a mut T { &mut self[point.line][point.col] } } // ------------------------------------------------------------------------------------------------- // REGIONS // ------------------------------------------------------------------------------------------------- /// A subset of lines in the grid /// /// May be constructed using Grid::region(..) pub struct Region<'a, T> { start: Line, end: Line, raw: &'a Storage<T>, } /// A mutable subset of lines in the grid /// /// May be constructed using Grid::region_mut(..) 
pub struct RegionMut<'a, T> { start: Line, end: Line, raw: &'a mut Storage<T>, } impl<'a, T> RegionMut<'a, T> { /// Call the provided function for every item in this region pub fn each<F: Fn(&mut T)>(self, func: F) { for row in self { for item in row { func(item) } } } } pub trait IndexRegion<I, T> { /// Get an immutable region of Self fn region(&self, _: I) -> Region<'_, T>; /// Get a mutable region of Self fn region_mut(&mut self, _: I) -> RegionMut<'_, T>; } impl<T> IndexRegion<Range<Line>, T> for Grid<T> { fn region(&self, index: Range<Line>) -> Region<'_, T> { assert!(index.start < self.num_lines()); assert!(index.end <= self.num_lines()); assert!(index.start <= index.end); Region { start: index.start, end: index.end, raw: &self.raw } } fn region_mut(&mut self, index: Range<Line>) -> RegionMut<'_, T> { assert!(index.start < self.num_lines()); assert!(index.end <= self.num_lines()); assert!(index.start <= index.end); RegionMut { start: index.start, end: index.end, raw: &mut self.raw } } } impl<T> IndexRegion<RangeTo<Line>, T> for Grid<T> { fn region(&self, index: RangeTo<Line>) -> Region<'_, T> { assert!(index.end <= self.num_lines()); Region { start: Line(0), end: index.end, raw: &self.raw } } fn region_mut(&mut self, index: RangeTo<Line>) -> RegionMut<'_, T> { assert!(index.end <= self.num_lines()); RegionMut { start: Line(0), end: index.end, raw: &mut self.raw } } } impl<T> IndexRegion<RangeFrom<Line>, T> for Grid<T> { fn region(&self, index: RangeFrom<Line>) -> Region<'_, T> { assert!(index.start < self.num_lines()); Region { start: index.start, end: self.num_lines(), raw: &self.raw } } fn region_mut(&mut self, index: RangeFrom<Line>) -> RegionMut<'_, T> { assert!(index.start < self.num_lines()); RegionMut { start: index.start, end: self.num_lines(), raw: &mut self.raw } } } impl<T> IndexRegion<RangeFull, T> for Grid<T> { fn region(&self, _: RangeFull) -> Region<'_, T> { Region { start: Line(0), end: self.num_lines(), raw: &self.raw } } fn region_mut(&mut 
self, _: RangeFull) -> RegionMut<'_, T> { RegionMut { start: Line(0), end: self.num_lines(), raw: &mut self.raw } } } pub struct RegionIter<'a, T> { end: Line, cur: Line, raw: &'a Storage<T>, } pub struct RegionIterMut<'a, T> { end: Line, cur: Line, raw: &'a mut Storage<T>, } impl<'a, T> IntoIterator for Region<'a, T> { type IntoIter = RegionIter<'a, T>; type Item = &'a Row<T>; fn into_iter(self) -> Self::IntoIter { RegionIter { end: self.end, cur: self.start, raw: self.raw } } } impl<'a, T> IntoIterator for RegionMut<'a, T> { type IntoIter = RegionIterMut<'a, T>; type Item = &'a mut Row<T>; fn into_iter(self) -> Self::IntoIter { RegionIterMut { end: self.end, cur: self.start, raw: self.raw } } } impl<'a, T> Iterator for RegionIter<'a, T> { type Item = &'a Row<T>; fn next(&mut self) -> Option<Self::Item> { if self.cur < self.end { let index = self.cur; self.cur += 1; Some(&self.raw[index]) } else { None } } } impl<'a, T> Iterator for RegionIterMut<'a, T> { type Item = &'a mut Row<T>; fn next(&mut self) -> Option<Self::Item> { if self.cur < self.end { let index = self.cur; self.cur += 1; unsafe { Some(&mut *(&mut self.raw[index] as *mut _)) } } else { None } } } // ------------------------------------------------------------------------------------------------- // DISPLAY ITERATOR // ------------------------------------------------------------------------------------------------- /// Iterates over the visible area accounting for buffer transform pub struct DisplayIter<'a, T> { grid: &'a Grid<T>, offset: usize, limit: usize, col: Column, line: Line, } impl<'a, T: 'a> DisplayIter<'a, T> { pub fn new(grid: &'a Grid<T>) -> DisplayIter<'a, T> { let offset = grid.display_offset + *grid.num_lines() - 1; let limit = grid.display_offset; let col = Column(0); let line = Line(0); DisplayIter { grid, offset, col, limit, line } } pub fn offset(&self) -> usize { self.offset } pub fn column(&self) -> Column { self.col } pub fn line(&self) -> Line { self.line } } impl<'a, T: Copy + 
'a> Iterator for DisplayIter<'a, T> { type Item = Indexed<T>; #[inline] fn next(&mut self) -> Option<Self::Item> { // Return None if we've reached the end. if self.offset == self.limit && self.grid.num_cols() == self.col { return None; } // Get the next item. let item = Some(Indexed { inner: self.grid.raw[self.offset][self.col], line: self.line, column: self.col, }); // Update line/col to point to next item self.col += 1; if self.col == self.grid.num_cols() && self.offset != self.limit { self.offset -= 1; self.col = Column(0); self.line = Line(*self.grid.lines - 1 - (self.offset - self.limit)); } item } }
31.764338
100
0.532237
67ba4a3c7ebc662d1b4cc34992593724dc24b4c3
1,158
#[doc = "Reader of register SPI_MEM_W8"] pub type R = crate::R<u32, super::SPI_MEM_W8>; #[doc = "Writer for register SPI_MEM_W8"] pub type W = crate::W<u32, super::SPI_MEM_W8>; #[doc = "Register SPI_MEM_W8 `reset()`'s with value 0"] impl crate::ResetValue for super::SPI_MEM_W8 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SPI_MEM_BUF8`"] pub type SPI_MEM_BUF8_R = crate::R<u32, u32>; #[doc = "Write proxy for field `SPI_MEM_BUF8`"] pub struct SPI_MEM_BUF8_W<'a> { w: &'a mut W, } impl<'a> SPI_MEM_BUF8_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff); self.w } } impl R { #[doc = "Bits 0:31"] #[inline(always)] pub fn spi_mem_buf8(&self) -> SPI_MEM_BUF8_R { SPI_MEM_BUF8_R::new((self.bits & 0xffff_ffff) as u32) } } impl W { #[doc = "Bits 0:31"] #[inline(always)] pub fn spi_mem_buf8(&mut self) -> SPI_MEM_BUF8_W { SPI_MEM_BUF8_W { w: self } } }
28.243902
84
0.603627
bb34ef74549b00f3e9b08549dd9a3655a5679dab
657
// This powerful wrapper provides the ability to store a positive integer value. // Rewrite it using generics so that it supports wrapping ANY type. struct Wrapper<T> { value: T, } impl<T> Wrapper<T> { pub fn new(value: T) -> Self { Wrapper { value } } } #[cfg(test)] mod tests { use super::*; #[test] fn store_u32_in_wrapper() { assert_eq!(Wrapper::new(42).value, 42); } #[test] fn store_str_in_wrapper() { // TODO: Delete this assert and uncomment the one below once you have finished the exercise. // assert!(false); assert_eq!(Wrapper::new("Foo").value, "Foo"); } }
21.9
102
0.605784
61abff1a21efb1aea499267d7944229bd4e0265f
4,037
#![cfg(test)] #![feature(async_await)] // ANCHOR: imports use { hyper::{ // Miscellaneous types from Hyper for working with HTTP. Body, Client, Request, Response, Server, Uri, // This function turns a closure which returns a future into an // implementation of the the Hyper `Service` trait, which is an // asynchronous function from a generic `Request` to a `Response`. service::service_fn, // A function which runs a future to completion using the Hyper runtime. rt::run, }, futures::{ // Extension trait for futures 0.1 futures, adding the `.compat()` method // which allows us to use `.await` on 0.1 futures. compat::Future01CompatExt, // Extension traits providing additional methods on futures. // `FutureExt` adds methods that work for all futures, whereas // `TryFutureExt` adds methods to futures that return `Result` types. future::{FutureExt, TryFutureExt}, }, std::net::SocketAddr, }; // ANCHOR_END: imports // ANCHOR: boilerplate async fn serve_req(_req: Request<Body>) -> Result<Response<Body>, hyper::Error> { // Always return successfully with a response containing a body with // a friendly greeting ;) Ok(Response::new(Body::from("hello, world!"))) } async fn run_server(addr: SocketAddr) { println!("Listening on http://{}", addr); // Create a server bound on the provided address let serve_future = Server::bind(&addr) // Serve requests using our `async serve_req` function. // `serve` takes a closure which returns a type implementing the // `Service` trait. `service_fn` returns a value implementing the // `Service` trait, and accepts a closure which goes from request // to a future of the response. To use our `serve_req` function with // Hyper, we have to box it and put it in a compatability // wrapper to go from a futures 0.3 future (the kind returned by // `async fn`) to a futures 0.1 future (the kind used by Hyper). .serve(|| service_fn(|req| serve_req(req).boxed().compat())); // Wait for the server to complete serving or exit with an error. 
// If an error occurred, print it to stderr. if let Err(e) = serve_future.compat().await { eprintln!("server error: {}", e); } } fn main() { // Set the address to run our socket on. let addr = SocketAddr::from(([127, 0, 0, 1], 3000)); // Call our `run_server` function, which returns a future. // As with every `async fn`, for `run_server` to do anything, // the returned future needs to be run. Additionally, // we need to convert the returned future from a futures 0.3 future into a // futures 0.1 future. let futures_03_future = run_server(addr); let futures_01_future = futures_03_future.unit_error().boxed().compat(); // Finally, we can run the future to completion using the `run` function // provided by Hyper. run(futures_01_future); } // ANCHOR_END: boilerplate #[test] fn run_main_and_query_http() -> Result<(), failure::Error> { std::thread::spawn(|| main()); // Unfortunately, there's no good way for us to detect when the server // has come up, so we sleep for an amount that should hopefully be // sufficient :( std::thread::sleep(std::time::Duration::from_secs(5)); let response = reqwest::get("http://localhost:3000")?.text()?; assert_eq!(response, "hello, world!"); Ok(()) } mod proxy { use super::*; #[allow(unused)] async fn serve_req(_req: Request<Body>) -> Result<Response<Body>, hyper::Error> { // ANCHOR: parse_url let url_str = "http://www.rust-lang.org/en-US/"; let url = url_str.parse::<Uri>().expect("failed to parse URL"); // ANCHOR_END: parse_url // ANCHOR: get_request let res = Client::new().get(url).compat().await; // Return the result of the request directly to the user println!("request finished-- returning response"); res // ANCHOR_END: get_request } }
37.728972
85
0.655437
67fd61635759730fde4265747b6f992159c7e0a6
671
pub type PlatformTelemetryRegistrationResult = *mut ::core::ffi::c_void; pub type PlatformTelemetryRegistrationSettings = *mut ::core::ffi::c_void; #[doc = "*Required features: `\"System_Diagnostics_Telemetry\"`*"] #[repr(transparent)] pub struct PlatformTelemetryRegistrationStatus(pub i32); impl PlatformTelemetryRegistrationStatus { pub const Success: Self = Self(0i32); pub const SettingsOutOfRange: Self = Self(1i32); pub const UnknownFailure: Self = Self(2i32); } impl ::core::marker::Copy for PlatformTelemetryRegistrationStatus {} impl ::core::clone::Clone for PlatformTelemetryRegistrationStatus { fn clone(&self) -> Self { *self } }
39.470588
74
0.743666
f97309acbe3b6b371a5bd2f8df6d409bcbf460f9
632
use std::io::{Read, BufReader, BufRead}; use types::VSProject; use regex::Regex; pub fn parse_sln<T>(source: T) -> Vec<VSProject> where T: Read { lazy_static! { static ref PROJECT_RE: Regex = Regex::new(r#"Project\([^)]+\)[ ]*=[ ]*"([^"]+)"[ ]*,[ ]*"([^"]+)"[ ]*,[ ]"\{([a-zA-Z0-9\-]+)\}""#).unwrap(); } let reader = BufReader::new(source); let mut projects = Vec::new(); for line in reader.lines().map(|l| l.unwrap()) { match PROJECT_RE.captures(&line) { Some(caps) => if caps.len() == 4 { projects.push(VSProject::new(&caps[3], &caps[1], &caps[2])); }, _ => () } } projects }
28.727273
144
0.531646
4a61c776e580fb549ea426f18c3112225068adc0
3,695
use cache::Cache; use gen::legal_moves; use mv_list::{MoveCounter, MoveVec}; use num_cpus; use position::Position; use std::sync::mpsc::channel; use threadpool::ThreadPool; pub fn perft( position: &mut Position, depth: usize, multi_threading_enabled: bool, cache_bytes_per_thread: usize, ) -> usize { if depth == 0 { return 1; } if depth <= 3 { return perft_inner(position, depth); } if !multi_threading_enabled { if cache_bytes_per_thread > 0 { let mut cache = Cache::new(cache_bytes_per_thread).unwrap(); return perft_with_cache_inner(position, depth, &mut cache); } else { return perft_inner(position, depth); } } let pool = ThreadPool::new(num_cpus::get()); let (tx, rx) = channel(); let mut moves = MoveVec::new(); legal_moves(&position, &mut moves); let moves_len = moves.len(); for &mv in moves.iter() { let tx = tx.clone(); let mut position_local = position.clone(); pool.execute(move || { position_local.make(mv); let count: usize; if cache_bytes_per_thread > 0 { let mut cache = Cache::new(cache_bytes_per_thread).unwrap(); count = perft_with_cache_inner(&mut position_local, depth - 1, &mut cache); } else { count = perft_inner(&mut position_local, depth - 1); } tx.send(count).unwrap(); }); } return rx.iter().take(moves_len).sum(); } pub fn perft_inner(position: &mut Position, depth: usize) -> usize { if depth == 0 { return 1; } let mut moves = MoveVec::new(); legal_moves(&position, &mut moves); let state = position.state().clone(); let key = position.hash_key(); let mut count = 0; for &mv in moves.iter() { let capture = position.make(mv); count += perft_inner(position, depth - 1); position.unmake(mv, capture, &state, key); } count } fn perft_with_cache_inner(position: &mut Position, depth: usize, cache: &mut Cache) -> usize { let key = position.hash_key(); let ret = cache.probe(key, depth); if ret.is_some() { return ret.unwrap(); } let mut count = 0; if depth == 1 { let mut counter = MoveCounter::new(); legal_moves(&position, &mut counter); count = counter.moves as 
usize; } else { let mut moves = MoveVec::new(); legal_moves(&position, &mut moves); let state = position.state().clone(); let key = position.hash_key(); for &mv in moves.iter() { let capture = position.make(mv); count += perft_with_cache_inner(position, depth - 1, cache); position.unmake(mv, capture, &state, key); } } cache.save(key, count, depth as i16); count } #[cfg(test)] mod test { use super::*; use position::{Position, STARTING_POSITION_FEN}; use test; #[test] fn perft_test_3() { let mut position = Position::from_fen(STARTING_POSITION_FEN).unwrap(); assert_eq!(perft(&mut position, 3, false, 0), 8902); } #[test] fn perft_test_4() { let mut position = Position::from_fen(STARTING_POSITION_FEN).unwrap(); assert_eq!(perft(&mut position, 4, false, 0), 197281); } #[test] fn perft_with_cache_test_3() { let mut position = Position::from_fen(STARTING_POSITION_FEN).unwrap(); assert_eq!(perft(&mut position, 3, false, 1024 * 1024), 8902); } #[test] fn perft_with_cache_test_4() { let mut position = Position::from_fen(STARTING_POSITION_FEN).unwrap(); assert_eq!(perft(&mut position, 4, false, 1024 * 1024), 197281); } #[bench] fn perft_bench_starting_position(b: &mut test::Bencher) { let mut position = Position::from_fen(STARTING_POSITION_FEN).unwrap(); b.iter(|| -> usize { perft(&mut position, 2, false, 0) }); } }
23.535032
94
0.641137
6933accc6cc01df48da031e5f524ce90c37b9e9f
749
#![feature(test)] extern crate vterm_sys; extern crate test; use vterm_sys::*; use std::io::prelude::*; use test::Bencher; // This seems pretty fast! 17,000ns per write. #[bench] fn bench_get_screen_damage_event(b: &mut Bencher) { let mut vterm: VTerm = VTerm::new(&Size { height: 24, width: 80, }); vterm.screen_receive_events(&ScreenCallbacksConfig::all()); vterm.screen_set_damage_merge(ffi::VTermDamageSize::VTermDamageRow); let rx = vterm.screen_event_rx.take().unwrap(); b.iter(|| { println!("\n"); vterm.write(b"\x1b[Hhi there").unwrap(); vterm.screen_flush_damage(); while let Some(msg) = rx.try_recv().ok() { println!("{:?}", msg); } }); }
24.966667
72
0.611482
160380a67aadece10826dd5ed9a0dafce00c379d
1,003
use std::{ any::Any, collections::{HashMap, HashSet}, }; use crate::{channel::ChannelType, Id}; #[derive(Default)] pub struct TemplateRegistry<I: Id> { notifications: HashMap<I, HashSet<ChannelType>>, templates: HashMap<(I, ChannelType), Box<dyn Any>>, } impl<I: Id> TemplateRegistry<I> { pub fn new() -> Self { Self { notifications: HashMap::new(), templates: HashMap::new(), } } pub fn register( &mut self, notification_id: I, channel_type: ChannelType, template: Box<dyn Any>, ) { let entry = self.notifications.entry(notification_id).or_default(); entry.insert(channel_type); self.templates .insert((notification_id, channel_type), template); } pub fn get_template( &self, notification_id: I, channel_type: ChannelType, ) -> Option<&Box<dyn Any>> { self.templates.get(&(notification_id, channel_type)) } }
23.880952
75
0.588235
ff3f8438e4c072dc06168b3335a688fdf8282d04
548
use std::ffi::OsString; /// \[[docs.microsoft.com](https://docs.microsoft.com/en-us/windows/win32/api/xinput/nf-xinput-xinputgetaudiodeviceids)\] /// Audio device ids retrieved with [get_audio_device_ids](crate::xinput::get_audio_device_ids) #[derive(Clone, Debug)] #[derive(Default)] pub struct AudioDeviceIds { /// Windows Core Audio device ID string for render (speakers). pub render_device_id: Option<OsString>, /// Windows Core Audio device ID string for capture (microphone). pub capture_device_id: Option<OsString>, }
34.25
121
0.737226
f9a138f8fd7af603ef9c1f441cc49d87ee1b71a6
7,376
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. pub use self::MaybeTyped::*; use rustc_lint; use rustc_driver::{driver, target_features}; use rustc::session::{self, config}; use rustc::middle::def_id::DefId; use rustc::middle::ty; use rustc::front::map as hir_map; use rustc::lint; use rustc::util::nodemap::DefIdSet; use rustc_trans::back::link; use rustc_resolve as resolve; use rustc_front::lowering::{lower_crate, LoweringContext}; use syntax::{ast, codemap, diagnostic}; use syntax::feature_gate::UnstableFeatures; use std::cell::{RefCell, Cell}; use std::collections::{HashMap, HashSet}; use visit_ast::RustdocVisitor; use clean; use clean::Clean; pub use rustc::session::config::Input; pub use rustc::session::search_paths::SearchPaths; /// Are we generating documentation (`Typed`) or tests (`NotTyped`)? 
pub enum MaybeTyped<'a, 'tcx: 'a> { Typed(&'a ty::ctxt<'tcx>), NotTyped(&'a session::Session) } pub type ExternalPaths = RefCell<Option<HashMap<DefId, (Vec<String>, clean::TypeKind)>>>; pub struct DocContext<'a, 'tcx: 'a> { pub map: &'a hir_map::Map<'tcx>, pub maybe_typed: MaybeTyped<'a, 'tcx>, pub input: Input, pub external_paths: ExternalPaths, pub external_traits: RefCell<Option<HashMap<DefId, clean::Trait>>>, pub external_typarams: RefCell<Option<HashMap<DefId, String>>>, pub inlined: RefCell<Option<HashSet<DefId>>>, pub populated_crate_impls: RefCell<HashSet<ast::CrateNum>>, pub deref_trait_did: Cell<Option<DefId>>, } impl<'b, 'tcx> DocContext<'b, 'tcx> { pub fn sess<'a>(&'a self) -> &'a session::Session { match self.maybe_typed { Typed(tcx) => &tcx.sess, NotTyped(ref sess) => sess } } pub fn tcx_opt<'a>(&'a self) -> Option<&'a ty::ctxt<'tcx>> { match self.maybe_typed { Typed(tcx) => Some(tcx), NotTyped(_) => None } } pub fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { let tcx_opt = self.tcx_opt(); tcx_opt.expect("tcx not present") } } pub struct CrateAnalysis { pub exported_items: DefIdSet, pub public_items: DefIdSet, pub external_paths: ExternalPaths, pub external_typarams: RefCell<Option<HashMap<DefId, String>>>, pub inlined: RefCell<Option<HashSet<DefId>>>, pub deref_trait_did: Option<DefId>, } pub type Externs = HashMap<String, Vec<String>>; pub fn run_core(search_paths: SearchPaths, cfgs: Vec<String>, externs: Externs, input: Input, triple: Option<String>) -> (clean::Crate, CrateAnalysis) { // Parse, resolve, and typecheck the given crate. 
let cpath = match input { Input::File(ref p) => Some(p.clone()), _ => None }; let warning_lint = lint::builtin::WARNINGS.name_lower(); let sessopts = config::Options { maybe_sysroot: None, search_paths: search_paths, crate_types: vec!(config::CrateTypeRlib), lint_opts: vec!((warning_lint, lint::Allow)), lint_cap: Some(lint::Allow), externs: externs, target_triple: triple.unwrap_or(config::host_triple().to_string()), cfg: config::parse_cfgspecs(cfgs), // Ensure that rustdoc works even if rustc is feature-staged unstable_features: UnstableFeatures::Allow, ..config::basic_options().clone() }; let codemap = codemap::CodeMap::new(); let diagnostic_handler = diagnostic::Handler::new(diagnostic::Auto, None, true); let span_diagnostic_handler = diagnostic::SpanHandler::new(diagnostic_handler, codemap); let sess = session::build_session_(sessopts, cpath, span_diagnostic_handler); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess); target_features::add_configuration(&mut cfg, &sess); let krate = driver::phase_1_parse_input(&sess, cfg, &input); let name = link::find_crate_name(Some(&sess), &krate.attrs, &input); let krate = driver::phase_2_configure_and_expand(&sess, krate, &name, None) .expect("phase_2_configure_and_expand aborted in rustdoc!"); let krate = driver::assign_node_ids(&sess, krate); // Lower ast -> hir. let lcx = LoweringContext::new(&sess, Some(&krate)); let mut hir_forest = hir_map::Forest::new(lower_crate(&lcx, &krate)); let arenas = ty::CtxtArenas::new(); let hir_map = driver::make_map(&sess, &mut hir_forest); driver::phase_3_run_analysis_passes(&sess, hir_map, &arenas, &name, resolve::MakeGlobMap::No, |tcx, _, analysis| { let ty::CrateAnalysis { exported_items, public_items, .. 
} = analysis; // Convert from a NodeId set to a DefId set since we don't always have easy access // to the map from defid -> nodeid let exported_items: DefIdSet = exported_items.into_iter() .map(|n| tcx.map.local_def_id(n)) .collect(); let public_items: DefIdSet = public_items.into_iter() .map(|n| tcx.map.local_def_id(n)) .collect(); let ctxt = DocContext { map: &tcx.map, maybe_typed: Typed(tcx), input: input, external_traits: RefCell::new(Some(HashMap::new())), external_typarams: RefCell::new(Some(HashMap::new())), external_paths: RefCell::new(Some(HashMap::new())), inlined: RefCell::new(Some(HashSet::new())), populated_crate_impls: RefCell::new(HashSet::new()), deref_trait_did: Cell::new(None), }; debug!("crate: {:?}", ctxt.map.krate()); let mut analysis = CrateAnalysis { exported_items: exported_items, public_items: public_items, external_paths: RefCell::new(None), external_typarams: RefCell::new(None), inlined: RefCell::new(None), deref_trait_did: None, }; let krate = { let mut v = RustdocVisitor::new(&ctxt, Some(&analysis)); v.visit(ctxt.map.krate()); v.clean(&ctxt) }; let external_paths = ctxt.external_paths.borrow_mut().take(); *analysis.external_paths.borrow_mut() = external_paths; let map = ctxt.external_typarams.borrow_mut().take(); *analysis.external_typarams.borrow_mut() = map; let map = ctxt.inlined.borrow_mut().take(); *analysis.inlined.borrow_mut() = map; analysis.deref_trait_did = ctxt.deref_trait_did.get(); (krate, analysis) }) }
36.696517
90
0.608731
d7be2b4029cce2ec188c67fbc9c8b03fa48b7d41
1,688
// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use move_core_types::vm_status::sub_status::NFE_LCS_SERIALIZATION_FAILURE; use move_vm_types::{ gas_schedule::NativeCostIndex, loaded_data::runtime_types::Type, natives::function::{native_gas, NativeContext, NativeResult}, values::{values_impl::Reference, Value}, }; use std::collections::VecDeque; use vm::errors::PartialVMResult; /// Rust implementation of Move's `native public fun to_bytes<T>(&T): vector<u8>` pub fn native_to_bytes( context: &mut impl NativeContext, mut ty_args: Vec<Type>, mut args: VecDeque<Value>, ) -> PartialVMResult<NativeResult> { debug_assert!(ty_args.len() == 1); debug_assert!(args.len() == 1); let ref_to_val = pop_arg!(args, Reference); let arg_type = ty_args.pop().unwrap(); // delegate to the LCS serialization for `Value` let serialized_value_opt = match context.type_to_type_layout(&arg_type)? { None => None, Some(layout) => ref_to_val.read_ref()?.simple_serialize(&layout), }; let serialized_value = match serialized_value_opt { None => { let cost = native_gas(context.cost_table(), NativeCostIndex::LCS_TO_BYTES, 1); return Ok(NativeResult::err(cost, NFE_LCS_SERIALIZATION_FAILURE)); } Some(serialized_value) => serialized_value, }; // cost is proportional to the size of the serialized value let cost = native_gas( context.cost_table(), NativeCostIndex::LCS_TO_BYTES, serialized_value.len(), ); Ok(NativeResult::ok( cost, vec![Value::vector_u8(serialized_value)], )) }
33.098039
90
0.67654
e9e956481203b47ae6d9e41d25efecdb04c8d011
5,726
// Generated from definition io.k8s.api.apps.v1beta1.StatefulSetCondition /// StatefulSetCondition describes the state of a statefulset at a certain point. #[derive(Clone, Debug, Default, PartialEq)] pub struct StatefulSetCondition { /// Last time the condition transitioned from one status to another. pub last_transition_time: Option<crate::apimachinery::pkg::apis::meta::v1::Time>, /// A human readable message indicating details about the transition. pub message: Option<String>, /// The reason for the condition's last transition. pub reason: Option<String>, /// Status of the condition, one of True, False, Unknown. pub status: String, /// Type of statefulset condition. pub type_: String, } impl<'de> serde::Deserialize<'de> for StatefulSetCondition { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { #[allow(non_camel_case_types)] enum Field { Key_last_transition_time, Key_message, Key_reason, Key_status, Key_type_, Other, } impl<'de> serde::Deserialize<'de> for Field { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> { struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = Field; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("field identifier") } fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error { Ok(match v { "lastTransitionTime" => Field::Key_last_transition_time, "message" => Field::Key_message, "reason" => Field::Key_reason, "status" => Field::Key_status, "type" => Field::Key_type_, _ => Field::Other, }) } } deserializer.deserialize_identifier(Visitor) } } struct Visitor; impl<'de> serde::de::Visitor<'de> for Visitor { type Value = StatefulSetCondition; fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("StatefulSetCondition") } fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> { let mut 
value_last_transition_time: Option<crate::apimachinery::pkg::apis::meta::v1::Time> = None; let mut value_message: Option<String> = None; let mut value_reason: Option<String> = None; let mut value_status: Option<String> = None; let mut value_type_: Option<String> = None; while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? { match key { Field::Key_last_transition_time => value_last_transition_time = serde::de::MapAccess::next_value(&mut map)?, Field::Key_message => value_message = serde::de::MapAccess::next_value(&mut map)?, Field::Key_reason => value_reason = serde::de::MapAccess::next_value(&mut map)?, Field::Key_status => value_status = Some(serde::de::MapAccess::next_value(&mut map)?), Field::Key_type_ => value_type_ = Some(serde::de::MapAccess::next_value(&mut map)?), Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; }, } } Ok(StatefulSetCondition { last_transition_time: value_last_transition_time, message: value_message, reason: value_reason, status: value_status.ok_or_else(|| serde::de::Error::missing_field("status"))?, type_: value_type_.ok_or_else(|| serde::de::Error::missing_field("type"))?, }) } } deserializer.deserialize_struct( "StatefulSetCondition", &[ "lastTransitionTime", "message", "reason", "status", "type", ], Visitor, ) } } impl serde::Serialize for StatefulSetCondition { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer { let mut state = serializer.serialize_struct( "StatefulSetCondition", 2 + self.last_transition_time.as_ref().map_or(0, |_| 1) + self.message.as_ref().map_or(0, |_| 1) + self.reason.as_ref().map_or(0, |_| 1), )?; if let Some(value) = &self.last_transition_time { serde::ser::SerializeStruct::serialize_field(&mut state, "lastTransitionTime", value)?; } if let Some(value) = &self.message { serde::ser::SerializeStruct::serialize_field(&mut state, "message", value)?; } if let Some(value) = &self.reason { 
serde::ser::SerializeStruct::serialize_field(&mut state, "reason", value)?; } serde::ser::SerializeStruct::serialize_field(&mut state, "status", &self.status)?; serde::ser::SerializeStruct::serialize_field(&mut state, "type", &self.type_)?; serde::ser::SerializeStruct::end(state) } }
42.414815
132
0.542962
ed853f9a3691e303503f53468b07e0b4f9ab8668
3,260
use crate::format::problem::*; use crate::format::solution::UnassignedJobReason; use crate::helpers::*; #[test] fn can_separate_jobs_based_on_compatibility() { let problem = Problem { plan: Plan { jobs: vec![ create_delivery_job_with_compatibility("food", (1., 0.), "food"), create_delivery_job("job2", (8., 0.)), create_delivery_job_with_compatibility("junk", (2., 0.), "junk"), create_delivery_job("job4", (9., 0.)), ], ..create_empty_plan() }, fleet: Fleet { vehicles: vec![ VehicleType { type_id: "type1".to_string(), vehicle_ids: vec!["type1_1".to_string()], shifts: vec![create_default_vehicle_shift_with_locations((0., 0.), (0., 0.))], capacity: vec![2], ..create_default_vehicle_type() }, VehicleType { type_id: "type2".to_string(), vehicle_ids: vec!["type2_1".to_string()], shifts: vec![create_default_vehicle_shift_with_locations((10., 0.), (10., 0.))], capacity: vec![2], ..create_default_vehicle_type() }, ], profiles: create_default_matrix_profiles(), }, ..create_empty_problem() }; let matrix = create_matrix_from_problem(&problem); let solution = solve_with_metaheuristic(problem, Some(vec![matrix])); assert_eq!(solution.tours.len(), 2); assert!(solution.unassigned.is_none()); let junk_tour = solution.tours.iter().find(|tour| tour.vehicle_id == "type2_1").unwrap(); let food_tour = solution.tours.iter().find(|tour| tour.vehicle_id == "type1_1").unwrap(); assert_eq!(get_ids_from_tour(junk_tour).iter().flatten().filter(|id| *id == "junk" || *id == "job4").count(), 2); assert_eq!(get_ids_from_tour(food_tour).iter().flatten().filter(|id| *id == "food" || *id == "job2").count(), 2); } #[test] fn can_unassign_job_due_to_compatibility() { let problem = Problem { plan: Plan { jobs: vec![ create_delivery_job_with_compatibility("food", (1., 0.), "food"), create_delivery_job_with_compatibility("junk", (2., 0.), "junk"), ], ..create_empty_plan() }, fleet: Fleet { vehicles: vec![VehicleType { capacity: vec![2], ..create_default_vehicle_type() }], profiles: 
create_default_matrix_profiles(), }, ..create_empty_problem() }; let matrix = create_matrix_from_problem(&problem); let solution = solve_with_metaheuristic(problem, Some(vec![matrix])); assert_eq!(solution.tours.len(), 1); assert_eq!(solution.unassigned.as_ref().map_or(0, |u| u.len()), 1); let reasons = solution.unassigned.iter().flatten().flat_map(|u| u.reasons.iter().cloned()).collect::<Vec<_>>(); assert_eq!( reasons, vec![UnassignedJobReason { code: "COMPATIBILITY_CONSTRAINT".to_string(), description: "cannot be assigned due to compatibility constraint".to_string() }] ); }
40.246914
117
0.564724
eda0d52d9b45824cbd5c7d96735eeeea43db9c51
26,472
extern crate serde; use rltk::{GameState, Rltk, Point}; use specs::prelude::*; use specs::saveload::{SimpleMarker, SimpleMarkerAllocator}; mod components; pub use components::*; mod map; pub use map::*; mod player; use player::*; mod rect; pub use rect::Rect; mod visibility_system; use visibility_system::VisibilitySystem; mod map_indexing_system; use map_indexing_system::MapIndexingSystem; mod melee_combat_system; use melee_combat_system::MeleeCombatSystem; mod ranged_combat_system; use ranged_combat_system::RangedCombatSystem; mod damage_system; mod gui; mod gamelog; mod spawner; mod inventory_system; use inventory_system::{ ItemCollectionSystem, ItemUseSystem, ItemDropSystem, ItemRemoveSystem, SpellUseSystem }; pub mod saveload_system; pub mod random_table; pub mod particle_system; pub mod hunger_system; pub mod rex_assets; pub mod trigger_system; pub mod map_builders; pub mod camera; pub mod raws; mod gamesystem; pub use gamesystem::*; mod lighting_system; mod ai; mod movement_system; pub mod effects; #[macro_use] extern crate lazy_static; const SHOW_MAPGEN_VISUALIZER : bool = false; #[derive(PartialEq, Copy, Clone)] pub enum VendorMode { Buy, Sell } #[derive(PartialEq, Copy, Clone)] pub enum RunState { AwaitingInput, PreRun, Ticking, ShowInventory, ShowDropItem, ShowTargeting { range : i32, item : Entity}, MainMenu { menu_selection : gui::MainMenuSelection }, SaveGame, NextLevel, PreviousLevel, TownPortal, ShowRemoveItem, GameOver, MagicMapReveal { row : i32 }, MapGeneration, ShowCheatMenu, ShowVendor { vendor: Entity, mode : VendorMode }, TeleportingToOtherLevel { x: i32, y: i32, depth: i32 }, ShowRemoveCurse, ShowIdentify } pub struct State { pub ecs: World, mapgen_next_state : Option<RunState>, mapgen_history : Vec<Map>, mapgen_index : usize, mapgen_timer : f32 } impl State { fn run_systems(&mut self) { let mut mapindex = MapIndexingSystem{}; mapindex.run_now(&self.ecs); let mut vis = VisibilitySystem{}; vis.run_now(&self.ecs); let mut encumbrance = 
ai::EncumbranceSystem{}; encumbrance.run_now(&self.ecs); let mut initiative = ai::InitiativeSystem{}; initiative.run_now(&self.ecs); let mut turnstatus = ai::TurnStatusSystem{}; turnstatus.run_now(&self.ecs); let mut quipper = ai::QuipSystem{}; quipper.run_now(&self.ecs); let mut adjacent = ai::AdjacentAI{}; adjacent.run_now(&self.ecs); let mut visible = ai::VisibleAI{}; visible.run_now(&self.ecs); let mut approach = ai::ApproachAI{}; approach.run_now(&self.ecs); let mut flee = ai::FleeAI{}; flee.run_now(&self.ecs); let mut chase = ai::ChaseAI{}; chase.run_now(&self.ecs); let mut defaultmove = ai::DefaultMoveAI{}; defaultmove.run_now(&self.ecs); let mut moving = movement_system::MovementSystem{}; moving.run_now(&self.ecs); let mut triggers = trigger_system::TriggerSystem{}; triggers.run_now(&self.ecs); let mut melee = MeleeCombatSystem{}; melee.run_now(&self.ecs); let mut ranged = RangedCombatSystem{}; ranged.run_now(&self.ecs); let mut pickup = ItemCollectionSystem{}; pickup.run_now(&self.ecs); let mut itemequip = inventory_system::ItemEquipOnUse{}; itemequip.run_now(&self.ecs); let mut itemuse = ItemUseSystem{}; itemuse.run_now(&self.ecs); let mut spelluse = SpellUseSystem{}; spelluse.run_now(&self.ecs); let mut item_id = inventory_system::ItemIdentificationSystem{}; item_id.run_now(&self.ecs); let mut drop_items = ItemDropSystem{}; drop_items.run_now(&self.ecs); let mut item_remove = ItemRemoveSystem{}; item_remove.run_now(&self.ecs); let mut hunger = hunger_system::HungerSystem{}; hunger.run_now(&self.ecs); effects::run_effects_queue(&mut self.ecs); let mut particles = particle_system::ParticleSpawnSystem{}; particles.run_now(&self.ecs); let mut lighting = lighting_system::LightingSystem{}; lighting.run_now(&self.ecs); self.ecs.maintain(); } } impl GameState for State { #[allow(clippy::cognitive_complexity)] fn tick(&mut self, ctx : &mut Rltk) { let mut newrunstate; { let runstate = self.ecs.fetch::<RunState>(); newrunstate = *runstate; } ctx.cls(); 
particle_system::update_particles(&mut self.ecs, ctx); match newrunstate { RunState::MainMenu{..} => {} RunState::GameOver{..} => {} _ => { camera::render_camera(&self.ecs, ctx); gui::draw_ui(&self.ecs, ctx); } } match newrunstate { RunState::MapGeneration => { if !SHOW_MAPGEN_VISUALIZER { newrunstate = self.mapgen_next_state.unwrap(); } else { ctx.cls(); if self.mapgen_index < self.mapgen_history.len() && self.mapgen_index < self.mapgen_history.len() { camera::render_debug_map(&self.mapgen_history[self.mapgen_index], ctx); } self.mapgen_timer += ctx.frame_time_ms; if self.mapgen_timer > 250.0 { self.mapgen_timer = 0.0; self.mapgen_index += 1; if self.mapgen_index >= self.mapgen_history.len() { //self.mapgen_index -= 1; newrunstate = self.mapgen_next_state.unwrap(); } } } } RunState::PreRun => { self.run_systems(); self.ecs.maintain(); newrunstate = RunState::AwaitingInput; } RunState::AwaitingInput => { newrunstate = player_input(self, ctx); if newrunstate != RunState::AwaitingInput { crate::gamelog::record_event("Turn", 1); } } RunState::Ticking => { let mut should_change_target = false; while newrunstate == RunState::Ticking { self.run_systems(); self.ecs.maintain(); match *self.ecs.fetch::<RunState>() { RunState::AwaitingInput => { newrunstate = RunState::AwaitingInput; should_change_target = true; } RunState::MagicMapReveal{ .. 
} => newrunstate = RunState::MagicMapReveal{ row: 0 }, RunState::TownPortal => newrunstate = RunState::TownPortal, RunState::TeleportingToOtherLevel{ x, y, depth } => newrunstate = RunState::TeleportingToOtherLevel{ x, y, depth }, RunState::ShowRemoveCurse => newrunstate = RunState::ShowRemoveCurse, RunState::ShowIdentify => newrunstate = RunState::ShowIdentify, _ => newrunstate = RunState::Ticking } } if should_change_target { player::end_turn_targeting(&mut self.ecs); } } RunState::ShowInventory => { let result = gui::show_inventory(self, ctx); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { let item_entity = result.1.unwrap(); let is_ranged = self.ecs.read_storage::<Ranged>(); let is_item_ranged = is_ranged.get(item_entity); if let Some(is_item_ranged) = is_item_ranged { newrunstate = RunState::ShowTargeting{ range: is_item_ranged.range, item: item_entity }; } else { let mut intent = self.ecs.write_storage::<WantsToUseItem>(); intent.insert(*self.ecs.fetch::<Entity>(), WantsToUseItem{ item: item_entity, target: None }).expect("Unable to insert intent"); newrunstate = RunState::Ticking; } } } } RunState::ShowCheatMenu => { let result = gui::show_cheat_mode(self, ctx); match result { gui::CheatMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::CheatMenuResult::NoResponse => {} gui::CheatMenuResult::TeleportToExit => { self.goto_level(1); self.mapgen_next_state = Some(RunState::PreRun); newrunstate = RunState::MapGeneration; } gui::CheatMenuResult::Heal => { let player = self.ecs.fetch::<Entity>(); let mut pools = self.ecs.write_storage::<Pools>(); let mut player_pools = pools.get_mut(*player).unwrap(); player_pools.hit_points.current = player_pools.hit_points.max; newrunstate = RunState::AwaitingInput; } gui::CheatMenuResult::Reveal => { let mut map = self.ecs.fetch_mut::<Map>(); for v in map.revealed_tiles.iter_mut() { *v = true; } 
newrunstate = RunState::AwaitingInput; } gui::CheatMenuResult::GodMode => { let player = self.ecs.fetch::<Entity>(); let mut pools = self.ecs.write_storage::<Pools>(); let mut player_pools = pools.get_mut(*player).unwrap(); player_pools.god_mode = true; newrunstate = RunState::AwaitingInput; } } } RunState::ShowDropItem => { let result = gui::drop_item_menu(self, ctx); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { let item_entity = result.1.unwrap(); let mut intent = self.ecs.write_storage::<WantsToDropItem>(); intent.insert(*self.ecs.fetch::<Entity>(), WantsToDropItem{ item: item_entity }).expect("Unable to insert intent"); newrunstate = RunState::Ticking; } } } RunState::ShowRemoveItem => { let result = gui::remove_item_menu(self, ctx); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { let item_entity = result.1.unwrap(); let mut intent = self.ecs.write_storage::<WantsToRemoveItem>(); intent.insert(*self.ecs.fetch::<Entity>(), WantsToRemoveItem{ item: item_entity }).expect("Unable to insert intent"); newrunstate = RunState::Ticking; } } } RunState::ShowRemoveCurse => { let result = gui::remove_curse_menu(self, ctx); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { let item_entity = result.1.unwrap(); self.ecs.write_storage::<CursedItem>().remove(item_entity); newrunstate = RunState::Ticking; } } } RunState::ShowIdentify => { let result = gui::identify_menu(self, ctx); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { let item_entity = result.1.unwrap(); if let Some(name) = self.ecs.read_storage::<Name>().get(item_entity) { let 
mut dm = self.ecs.fetch_mut::<MasterDungeonMap>(); dm.identified_items.insert(name.name.clone()); } newrunstate = RunState::Ticking; } } } RunState::ShowTargeting{range, item} => { let result = gui::ranged_target(self, ctx, range); match result.0 { gui::ItemMenuResult::Cancel => newrunstate = RunState::AwaitingInput, gui::ItemMenuResult::NoResponse => {} gui::ItemMenuResult::Selected => { if self.ecs.read_storage::<SpellTemplate>().get(item).is_some() { let mut intent = self.ecs.write_storage::<WantsToCastSpell>(); intent.insert(*self.ecs.fetch::<Entity>(), WantsToCastSpell{ spell: item, target: result.1 }).expect("Unable to insert intent"); newrunstate = RunState::Ticking; } else { let mut intent = self.ecs.write_storage::<WantsToUseItem>(); intent.insert(*self.ecs.fetch::<Entity>(), WantsToUseItem{ item, target: result.1 }).expect("Unable to insert intent"); newrunstate = RunState::Ticking; } } } } RunState::ShowVendor{vendor, mode} => { use crate::raws::*; let result = gui::show_vendor_menu(self, ctx, vendor, mode); match result.0 { gui::VendorResult::Cancel => newrunstate = RunState::AwaitingInput, gui::VendorResult::NoResponse => {} gui::VendorResult::Sell => { let price = self.ecs.read_storage::<Item>().get(result.1.unwrap()).unwrap().base_value * 0.8; self.ecs.write_storage::<Pools>().get_mut(*self.ecs.fetch::<Entity>()).unwrap().gold += price; self.ecs.delete_entity(result.1.unwrap()).expect("Unable to delete"); } gui::VendorResult::Buy => { let tag = result.2.unwrap(); let price = result.3.unwrap(); let mut pools = self.ecs.write_storage::<Pools>(); let player_entity = self.ecs.fetch::<Entity>(); let mut identified = self.ecs.write_storage::<IdentifiedItem>(); identified.insert(*player_entity, IdentifiedItem{ name : tag.clone() }).expect("Unable to insert"); std::mem::drop(identified); let player_pools = pools.get_mut(*player_entity).unwrap(); std::mem::drop(player_entity); if player_pools.gold >= price { player_pools.gold -= price; std::mem::drop(pools); 
let player_entity = *self.ecs.fetch::<Entity>(); crate::raws::spawn_named_item(&RAWS.lock().unwrap(), &mut self.ecs, &tag, SpawnType::Carried{ by: player_entity }); } } gui::VendorResult::BuyMode => newrunstate = RunState::ShowVendor{ vendor, mode: VendorMode::Buy }, gui::VendorResult::SellMode => newrunstate = RunState::ShowVendor{ vendor, mode: VendorMode::Sell } } } RunState::MainMenu{ .. } => { let result = gui::main_menu(self, ctx); match result { gui::MainMenuResult::NoSelection{ selected } => newrunstate = RunState::MainMenu{ menu_selection: selected }, gui::MainMenuResult::Selected{ selected } => { match selected { gui::MainMenuSelection::NewGame => newrunstate = RunState::PreRun, gui::MainMenuSelection::LoadGame => { saveload_system::load_game(&mut self.ecs); newrunstate = RunState::AwaitingInput; saveload_system::delete_save(); } gui::MainMenuSelection::Quit => { ::std::process::exit(0); } } } } } RunState::GameOver => { let result = gui::game_over(ctx); match result { gui::GameOverResult::NoSelection => {} gui::GameOverResult::QuitToMenu => { self.game_over_cleanup(); newrunstate = RunState::MapGeneration; self.mapgen_next_state = Some(RunState::MainMenu{ menu_selection: gui::MainMenuSelection::NewGame }); } } } RunState::SaveGame => { saveload_system::save_game(&mut self.ecs); newrunstate = RunState::MainMenu{ menu_selection : gui::MainMenuSelection::LoadGame }; } RunState::NextLevel => { self.goto_level(1); self.mapgen_next_state = Some(RunState::PreRun); newrunstate = RunState::MapGeneration; } RunState::PreviousLevel => { self.goto_level(-1); self.mapgen_next_state = Some(RunState::PreRun); newrunstate = RunState::MapGeneration; } RunState::TownPortal => { // Spawn the portal spawner::spawn_town_portal(&mut self.ecs); // Transition let map_depth = self.ecs.fetch::<Map>().depth; let destination_offset = 0 - (map_depth-1); self.goto_level(destination_offset); self.mapgen_next_state = Some(RunState::PreRun); newrunstate = RunState::MapGeneration; } 
RunState::TeleportingToOtherLevel{x, y, depth} => { self.goto_level(depth-1); let player_entity = self.ecs.fetch::<Entity>(); if let Some(pos) = self.ecs.write_storage::<Position>().get_mut(*player_entity) { pos.x = x; pos.y = y; } let mut ppos = self.ecs.fetch_mut::<rltk::Point>(); ppos.x = x; ppos.y = y; self.mapgen_next_state = Some(RunState::PreRun); newrunstate = RunState::MapGeneration; } RunState::MagicMapReveal{row} => { let mut map = self.ecs.fetch_mut::<Map>(); for x in 0..map.width { let idx = map.xy_idx(x as i32,row); map.revealed_tiles[idx] = true; } if row == map.height-1 { newrunstate = RunState::Ticking; } else { newrunstate = RunState::MagicMapReveal{ row: row+1 }; } } } { let mut runwriter = self.ecs.write_resource::<RunState>(); *runwriter = newrunstate; } damage_system::delete_the_dead(&mut self.ecs); } } impl State { fn goto_level(&mut self, offset: i32) { freeze_level_entities(&mut self.ecs); // Build a new map and place the player let current_depth = self.ecs.fetch::<Map>().depth; self.generate_world_map(current_depth + offset, offset); // Notify the player gamelog::Logger::new().append("You change level.").log(); } fn game_over_cleanup(&mut self) { // Delete everything let mut to_delete = Vec::new(); for e in self.ecs.entities().join() { to_delete.push(e); } for del in to_delete.iter() { self.ecs.delete_entity(*del).expect("Deletion failed"); } // Spawn a new player { let player_entity = spawner::player(&mut self.ecs, 0, 0); let mut player_entity_writer = self.ecs.write_resource::<Entity>(); *player_entity_writer = player_entity; } // Replace the world maps self.ecs.insert(map::MasterDungeonMap::new()); // Build a new map and place the player self.generate_world_map(1, 0); } fn generate_world_map(&mut self, new_depth : i32, offset: i32) { self.mapgen_index = 0; self.mapgen_timer = 0.0; self.mapgen_history.clear(); let map_building_info = map::level_transition(&mut self.ecs, new_depth, offset); if let Some(history) = map_building_info { 
self.mapgen_history = history; } else { map::thaw_level_entities(&mut self.ecs); } gamelog::clear_log(); gamelog::Logger::new() .append("Welcome to") .color(rltk::CYAN) .append("Rusty Roguelike") .log(); gamelog::clear_events(); } } fn main() -> rltk::BError { use rltk::RltkBuilder; let mut context = RltkBuilder::simple(80, 60) .unwrap() .with_title("Roguelike Tutorial") .build()?; context.with_post_scanlines(true); let mut gs = State { ecs: World::new(), mapgen_next_state : Some(RunState::MainMenu{ menu_selection: gui::MainMenuSelection::NewGame }), mapgen_index : 0, mapgen_history: Vec::new(), mapgen_timer: 0.0 }; gs.ecs.register::<Position>(); gs.ecs.register::<Renderable>(); gs.ecs.register::<Player>(); gs.ecs.register::<Viewshed>(); gs.ecs.register::<Name>(); gs.ecs.register::<BlocksTile>(); gs.ecs.register::<WantsToMelee>(); gs.ecs.register::<Item>(); gs.ecs.register::<ProvidesHealing>(); gs.ecs.register::<InflictsDamage>(); gs.ecs.register::<AreaOfEffect>(); gs.ecs.register::<Consumable>(); gs.ecs.register::<Ranged>(); gs.ecs.register::<InBackpack>(); gs.ecs.register::<WantsToPickupItem>(); gs.ecs.register::<WantsToUseItem>(); gs.ecs.register::<WantsToDropItem>(); gs.ecs.register::<Confusion>(); gs.ecs.register::<SimpleMarker<SerializeMe>>(); gs.ecs.register::<SerializationHelper>(); gs.ecs.register::<DMSerializationHelper>(); gs.ecs.register::<Equippable>(); gs.ecs.register::<Equipped>(); gs.ecs.register::<Weapon>(); gs.ecs.register::<Wearable>(); gs.ecs.register::<WantsToRemoveItem>(); gs.ecs.register::<ParticleLifetime>(); gs.ecs.register::<HungerClock>(); gs.ecs.register::<ProvidesFood>(); gs.ecs.register::<MagicMapper>(); gs.ecs.register::<Hidden>(); gs.ecs.register::<EntryTrigger>(); gs.ecs.register::<EntityMoved>(); gs.ecs.register::<SingleActivation>(); gs.ecs.register::<BlocksVisibility>(); gs.ecs.register::<Door>(); gs.ecs.register::<Quips>(); gs.ecs.register::<Attributes>(); gs.ecs.register::<Skills>(); gs.ecs.register::<Pools>(); 
gs.ecs.register::<NaturalAttackDefense>(); gs.ecs.register::<LootTable>(); gs.ecs.register::<OtherLevelPosition>(); gs.ecs.register::<LightSource>(); gs.ecs.register::<Initiative>(); gs.ecs.register::<MyTurn>(); gs.ecs.register::<Faction>(); gs.ecs.register::<WantsToApproach>(); gs.ecs.register::<WantsToFlee>(); gs.ecs.register::<MoveMode>(); gs.ecs.register::<Chasing>(); gs.ecs.register::<EquipmentChanged>(); gs.ecs.register::<Vendor>(); gs.ecs.register::<TownPortal>(); gs.ecs.register::<TeleportTo>(); gs.ecs.register::<ApplyMove>(); gs.ecs.register::<ApplyTeleport>(); gs.ecs.register::<MagicItem>(); gs.ecs.register::<ObfuscatedName>(); gs.ecs.register::<IdentifiedItem>(); gs.ecs.register::<SpawnParticleBurst>(); gs.ecs.register::<SpawnParticleLine>(); gs.ecs.register::<CursedItem>(); gs.ecs.register::<ProvidesRemoveCurse>(); gs.ecs.register::<ProvidesIdentification>(); gs.ecs.register::<AttributeBonus>(); gs.ecs.register::<Duration>(); gs.ecs.register::<StatusEffect>(); gs.ecs.register::<KnownSpells>(); gs.ecs.register::<SpellTemplate>(); gs.ecs.register::<WantsToCastSpell>(); gs.ecs.register::<TeachesSpell>(); gs.ecs.register::<ProvidesMana>(); gs.ecs.register::<Slow>(); gs.ecs.register::<DamageOverTime>(); gs.ecs.register::<SpecialAbilities>(); gs.ecs.register::<TileSize>(); gs.ecs.register::<OnDeath>(); gs.ecs.register::<AlwaysTargetsSelf>(); gs.ecs.register::<Target>(); gs.ecs.register::<WantsToShoot>(); gs.ecs.insert(SimpleMarkerAllocator::<SerializeMe>::new()); raws::load_raws(); gs.ecs.insert(map::MasterDungeonMap::new()); gs.ecs.insert(Map::new(1, 64, 64, "New Map")); gs.ecs.insert(Point::new(0, 0)); gs.ecs.insert(rltk::RandomNumberGenerator::new()); let player_entity = spawner::player(&mut gs.ecs, 0, 0); gs.ecs.insert(player_entity); gs.ecs.insert(RunState::MapGeneration{} ); gs.ecs.insert(particle_system::ParticleBuilder::new()); gs.ecs.insert(rex_assets::RexAssets::new()); gs.generate_world_map(1, 0); rltk::main_loop(context, gs) }
41.886076
193
0.535811
e95487064be72b951c58211189bce88d6ebf8131
7,405
/* * Copyright 2020 Draphar * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /*! Support for [Imgur](https://imgur.com/) downloads. # Domains - `i.imgur.com` - `imgur.com` */ use std::{io::BufRead, path::Path}; use bytes::buf::BufExt; use http::Uri; use serde::Deserialize; use serde_json::Value; use tokio::fs; use crate::prelude::*; /// Fetches an image from `i.imgur.com`. pub async fn fetch(client: &Client, url: &Uri, output: &Path) -> Result<()> { trace!("fetch({:?}, {:?})", url, output); let response = client.request(Builder::new().uri(url.clone())).await?; let status = response.status(); if status.is_success() { debug!("Received {} from {:?}", status, url); } else if status.as_u16() == 302 { // Imgur redirects to `imgur.com/*` instead of a normal 404. return Err(Error::new("File not found")); } else { return Err(Error::new(format!("Unexpected response code {}", status))); }; to_disk(response, output).await?; Ok(()) } /// An image on Imgur. #[derive(Deserialize, Debug, Eq, PartialEq)] struct Image { hash: String, ext: String, } /// Fetches Imgur albums and galleries. 
pub async fn fetch_album(client: &Client, url: &Uri, output: &Path) -> Result<()> { if url.path().starts_with("/a/") { download_images(client, album(client, url).await?, output).await } else if url.path().starts_with("/gallery/") { let mut id = url.path(); // Remove trailing `/` if id.ends_with('/') { id = &id[..id.len() - 1]; }; download_images(client, gallery(client, &id[9..]).await?, output).await } else { // Just assume that a direct link was used without the // `i.` prefix. An `imgur.com/*` link redirects to // `i.imgur.com/*`, so directly download from there. debug!("Trying to directly download image {}", url); fetch( client, &format!("https://i.imgur.com{}", url.path()) .parse() .unwrap(), output, ) .await } } /// Fetches an album using a HTML scraper. async fn album(client: &Client, url: &Uri) -> Result<Vec<Image>> { trace!("album({:?})", url); let slash = &url.path()[3..].find('/').map(|n| n + 3); let id = &url.path()[3..slash.unwrap_or_else(|| url.path().len())]; let url = format!("https://imgur.com/a/{}/embed", id); let response = client .request(Builder::new().method(Method::GET).uri(&url)) .await?; let status = response.status(); if status.is_success() { debug!("Received {} from {:?}", status, url); } else if status.as_u16() == 404 { return Err(Error::new("File not found")); } else { return Err(Error::new(format!("Unexpected response code {}", status))); }; let lines = hyper::body::aggregate(response).await?.reader().lines(); for i in lines { let i = i.unwrap(); // Because the contents of `impl Buf` are in memory, this operation is infallible (see `bytes` documentation) if i.trim_start().starts_with("album") { // This line contains the JSON. 
let colon = i .find(':') .ok_or_else(|| Error::new("Imgur parser error"))?; let end = i.trim_end().len(); let mut json: Value = serde_json::from_str(&i[(colon + 1)..(end - 1)])?; let images = serde_json::from_value(json["album_images"]["images"].take())?; return Ok(images); }; } Err(Error::new("Imgur parser error")) } /// Extracts the images from a gallery using a JSON API. async fn gallery(client: &Client, id: &str) -> Result<Vec<Image>> { trace!("gallery({:?})", id); let url = format!("https://imgur.com/gallery/{}.json", id); let response = client .request( Builder::new() .method(Method::GET) .uri(&url) .header("Accept", "application/json"), ) .await?; let status = response.status(); if status.is_success() { debug!("Received {} from {:?}", status, url); } else if status.as_u16() == 404 { return Err(Error::new("File not found")); } else { return Err(Error::new(format!("Unexpected response code {}", status))); }; let mut json: Value = to_json(response).await?; let images = serde_json::from_value(json["data"]["image"]["album_images"]["images"].take())?; Ok(images) } /// Downloads the set of images. async fn download_images(client: &Client, images: Vec<Image>, output: &Path) -> Result<()> { trace!("download_images({:?}, {:?})", images, output); debug!("Found Imgur gallery containing {} entries", images.len()); fs::create_dir_all(output).await?; let mut path = output.to_path_buf(); path.push("index"); // later overwritten for (i, image) in images.into_iter().enumerate() { let path = path.with_file_name(format!("{}{}", i, image.ext)); debug!("Saving individual image \"{}{}\"", image.hash, image.ext); download( client, &format!("https://i.imgur.com/{}{}", image.hash, image.ext).parse()?, &path, ) .await; // ignore individual errors } // Todo: A future join could be of use here. 
Ok(()) } #[tokio::test] #[cfg_attr(not(feature = "__tests-network"), ignore)] async fn imgur_album() { let client = Client::new(); let images = album(&client, &"https://imgur.com/a/dFz23".parse().unwrap()) .await .unwrap(); assert_eq!( vec![ Image { hash: "bxv008g".to_string(), ext: ".gif".to_string() }, Image { hash: "oXx9m52".to_string(), ext: ".gif".to_string() }, Image { hash: "s3XOVHt".to_string(), ext: ".png".to_string() }, Image { hash: "EanxY6r".to_string(), ext: ".gif".to_string() } ], images ); } #[tokio::test] #[cfg_attr(not(feature = "__tests-network"), ignore)] async fn imgur_gallery() { let client = Client::new(); let images = gallery(&client, "dFz23").await.unwrap(); assert_eq!( vec![ Image { hash: "bxv008g".to_string(), ext: ".gif".to_string() }, Image { hash: "oXx9m52".to_string(), ext: ".gif".to_string() }, Image { hash: "s3XOVHt".to_string(), ext: ".png".to_string() }, Image { hash: "EanxY6r".to_string(), ext: ".gif".to_string() } ], images ); }
30.348361
137
0.543957
9b93d789f5b509fc1c54f46ec38cc49a65972aec
1,417
use ring::hmac; use ring::constant_time::verify_slices_are_equal; use crate::raw::*; use crate::error::{Error, ErrorDetails}; use crate::crypto::SecretOrKey; use crate::crypto::algorithm::AlgorithmID; impl From<AlgorithmID> for hmac::Algorithm { fn from(alg: AlgorithmID) -> Self { match alg { AlgorithmID::HS256 => ring::hmac::HMAC_SHA256, AlgorithmID::HS384 => ring::hmac::HMAC_SHA384, AlgorithmID::HS512 => ring::hmac::HMAC_SHA512, _ => unreachable!("Tried to map HMAC type for a non-HMAC algorithm"), } } } pub(crate) fn sign(alg: AlgorithmID, secret_or_key: &SecretOrKey, message: &str) -> Result<String, Error> { match secret_or_key { SecretOrKey::Secret(key) => { let ring_alg = alg.into(); let digest = hmac::sign(&hmac::Key::new(ring_alg, &key), message.as_bytes()); Ok(b64_encode(digest.as_ref())) }, _ => Err(Error::InvalidInput(ErrorDetails::new("Missing secret for HMAC signing"))) } } pub fn verify(algorithm: AlgorithmID, secret_or_key: &SecretOrKey, message: &str, signature: &str) -> Result<(), Error> { // we just re-sign the message with the key and compare if they are equal let signed = sign(algorithm, secret_or_key, message)?; verify_slices_are_equal(signature.as_bytes(), signed.as_ref()) .map_err(|_| Error::InvalidSignature()) }
40.485714
121
0.647848
0ebac3f626c72e0222f951f275386fe401e1b814
22,696
// Copyright 2020, The Tari Project // // Redistribution and use in source and binary forms, with or without modification, are permitted provided that the // following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following // disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the // following disclaimer in the documentation and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote // products derived from this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, // INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE // USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
use std::{ cmp, convert::TryFrom, sync::{Arc, Weak}, }; use log::*; use tari_comms::{ peer_manager::NodeId, protocol::rpc::{Request, Response, RpcStatus, Streaming}, utils, }; use tari_crypto::tari_utilities::hex::Hex; use tari_utilities::Hashable; use tokio::{ sync::{mpsc, RwLock}, task, }; use tracing::{instrument, span, Instrument, Level}; use crate::{ base_node::{ comms_interface::BlockEvent, metrics, sync::rpc::{sync_utxos_task::SyncUtxosTask, BaseNodeSyncService}, LocalNodeCommsInterface, }, chain_storage::{async_db::AsyncBlockchainDb, BlockAddResult, BlockchainBackend}, iterators::NonOverlappingIntegerPairIter, proto, proto::base_node::{ FindChainSplitRequest, FindChainSplitResponse, SyncBlocksRequest, SyncHeadersRequest, SyncKernelsRequest, SyncUtxosRequest, SyncUtxosResponse, }, }; const LOG_TARGET: &str = "c::base_node::sync_rpc"; pub struct BaseNodeSyncRpcService<B> { db: AsyncBlockchainDb<B>, active_sessions: RwLock<Vec<Weak<NodeId>>>, base_node_service: LocalNodeCommsInterface, } impl<B: BlockchainBackend + 'static> BaseNodeSyncRpcService<B> { pub fn new(db: AsyncBlockchainDb<B>, base_node_service: LocalNodeCommsInterface) -> Self { Self { db, active_sessions: RwLock::new(Vec::new()), base_node_service, } } #[inline] fn db(&self) -> AsyncBlockchainDb<B> { self.db.clone() } pub async fn try_add_exclusive_session(&self, peer: NodeId) -> Result<Arc<NodeId>, RpcStatus> { let mut lock = self.active_sessions.write().await; *lock = lock.drain(..).filter(|l| l.strong_count() > 0).collect(); debug!(target: LOG_TARGET, "Number of active sync sessions: {}", lock.len()); if lock.iter().any(|p| p.upgrade().filter(|p| **p == peer).is_some()) { return Err(RpcStatus::forbidden( "Existing sync session found for this client. 
Only a single session is permitted", )); } let token = Arc::new(peer); lock.push(Arc::downgrade(&token)); metrics::active_sync_peers().set(lock.len() as i64); Ok(token) } } #[tari_comms::async_trait] impl<B: BlockchainBackend + 'static> BaseNodeSyncService for BaseNodeSyncRpcService<B> { #[instrument(level = "trace", name = "sync_rpc::sync_blocks", skip(self), err)] async fn sync_blocks( &self, request: Request<SyncBlocksRequest>, ) -> Result<Streaming<proto::base_node::BlockBodyResponse>, RpcStatus> { let peer_node_id = request.context().peer_node_id().clone(); let message = request.into_message(); let mut block_event_stream = self.base_node_service.get_block_event_stream(); let db = self.db(); let start_header = db .fetch_header_by_block_hash(message.start_hash) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))? .ok_or_else(|| RpcStatus::not_found("Header not found with given hash"))?; let metadata = db .get_chain_metadata() .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))?; let start_height = start_header.height + 1; if start_height < metadata.pruned_height() { return Err(RpcStatus::bad_request(format!( "Requested full block body at height {}, however this node has an effective pruned height of {}", start_height, metadata.pruned_height() ))); } if start_height > metadata.height_of_longest_chain() { return Ok(Streaming::empty()); } let end_header = db .fetch_header_by_block_hash(message.end_hash) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))? 
.ok_or_else(|| RpcStatus::not_found("Requested end block sync hash was not found"))?; let end_height = end_header.height; if start_height > end_height { return Err(RpcStatus::bad_request(format!( "Start block #{} is higher than end block #{}", start_height, end_height ))); } debug!( target: LOG_TARGET, "Initiating block sync with peer `{}` from height {} to {}", peer_node_id, start_height, end_height, ); let session_token = self.try_add_exclusive_session(peer_node_id).await?; // Number of blocks to load and push to the stream before loading the next batch const BATCH_SIZE: usize = 2; let (tx, rx) = mpsc::channel(BATCH_SIZE); let span = span!(Level::TRACE, "sync_rpc::block_sync::inner_worker"); task::spawn( async move { // Move token into this task let peer_node_id = session_token; let iter = NonOverlappingIntegerPairIter::new(start_height, end_height + 1, BATCH_SIZE); for (start, end) in iter { if tx.is_closed() { break; } // Check for reorgs during sync while let Ok(block_event) = block_event_stream.try_recv() { if let BlockEvent::ValidBlockAdded(_, BlockAddResult::ChainReorg { removed, .. 
}) = &*block_event { if let Some(reorg_block) = removed .iter() // If the reorg happens before the end height of sync we let the peer know that the chain they are syncing with has changed .find(|block| block.height() <= end_height) { warn!( target: LOG_TARGET, "Block reorg detected at height {} during sync, letting the sync peer {} know.", reorg_block.height(), peer_node_id ); let _ = tx.send(Err(RpcStatus::conflict(format!( "Reorg at height {} detected", reorg_block.height() )))); return; } } } debug!( target: LOG_TARGET, "Sending blocks #{} - #{} to '{}'", start, end, peer_node_id ); let blocks = db .fetch_blocks(start..=end) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET)); match blocks { Ok(blocks) if blocks.is_empty() => { break; }, Ok(blocks) => { let blocks = blocks .into_iter() .map(|hb| { match hb.try_into_block().map_err(RpcStatus::log_internal_error(LOG_TARGET)) { Ok(b) => Ok(b.to_compact()), Err(e) => Err(e), } }) .map(|block| match block { Ok(b) => proto::base_node::BlockBodyResponse::try_from(b).map_err(|e| { log::error!(target: LOG_TARGET, "Internal error: {}", e); RpcStatus::general_default() }), Err(err) => Err(err), }); // Ensure task stops if the peer prematurely stops their RPC session if utils::mpsc::send_all(&tx, blocks).await.is_err() { break; } }, Err(err) => { let _ = tx.send(Err(err)).await; break; }, } } metrics::active_sync_peers().dec(); debug!( target: LOG_TARGET, "Block sync round complete for peer `{}`.", peer_node_id, ); } .instrument(span), ); Ok(Streaming::new(rx)) } #[instrument(level = "trace", name = "sync_rpc::sync_headers", skip(self), err)] async fn sync_headers( &self, request: Request<SyncHeadersRequest>, ) -> Result<Streaming<proto::core::BlockHeader>, RpcStatus> { let db = self.db(); let peer_node_id = request.context().peer_node_id().clone(); let message = request.into_message(); let start_header = db .fetch_header_by_block_hash(message.start_hash) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))? 
.ok_or_else(|| RpcStatus::not_found("Header not found with given hash"))?; let mut count = message.count; if count == 0 { let tip_header = db .fetch_tip_header() .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))?; count = tip_header.height().saturating_sub(start_header.height); } if count == 0 { return Ok(Streaming::empty()); } let chunk_size = cmp::min(100, count) as usize; debug!( target: LOG_TARGET, "Initiating header sync with peer `{}` from height {} to {} (chunk_size={})", peer_node_id, start_header.height, count, chunk_size ); let session_token = self.try_add_exclusive_session(peer_node_id.clone()).await?; let (tx, rx) = mpsc::channel(chunk_size); let span = span!(Level::TRACE, "sync_rpc::sync_headers::inner_worker"); task::spawn( async move { // Move token into this task let session_token = session_token; let iter = NonOverlappingIntegerPairIter::new( start_header.height + 1, start_header.height.saturating_add(count).saturating_add(1), chunk_size, ); for (start, end) in iter { if tx.is_closed() { break; } debug!(target: LOG_TARGET, "Sending headers #{} - #{}", start, end); let headers = db .fetch_headers(start..=end) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET)); match headers { Ok(headers) if headers.is_empty() => { break; }, Ok(headers) => { let headers = headers.into_iter().map(proto::core::BlockHeader::from).map(Ok); // Ensure task stops if the peer prematurely stops their RPC session if utils::mpsc::send_all(&tx, headers).await.is_err() { break; } }, Err(err) => { let _ = tx.send(Err(err)).await; break; }, } } metrics::active_sync_peers().dec(); debug!( target: LOG_TARGET, "Header sync round complete for peer `{}`.", session_token, ); } .instrument(span), ); Ok(Streaming::new(rx)) } #[instrument(skip(self), err)] async fn get_header_by_height( &self, request: Request<u64>, ) -> Result<Response<proto::core::BlockHeader>, RpcStatus> { let height = request.into_message(); let header = self .db() .fetch_header(height) .await 
.map_err(RpcStatus::log_internal_error(LOG_TARGET))? .ok_or_else(|| RpcStatus::not_found(format!("Header not found at height {}", height)))?; Ok(Response::new(header.into())) } #[instrument(level = "debug", skip(self), err)] async fn find_chain_split( &self, request: Request<FindChainSplitRequest>, ) -> Result<Response<FindChainSplitResponse>, RpcStatus> { const MAX_ALLOWED_BLOCK_HASHES: usize = 1000; const MAX_ALLOWED_HEADER_COUNT: u64 = 1000; let peer = request.context().peer_node_id().clone(); let message = request.into_message(); if message.block_hashes.is_empty() { return Err(RpcStatus::bad_request( "Cannot find chain split because no hashes were sent", )); } if message.block_hashes.len() > MAX_ALLOWED_BLOCK_HASHES { return Err(RpcStatus::bad_request(format!( "Cannot query more than {} block hashes", MAX_ALLOWED_BLOCK_HASHES, ))); } if message.header_count > MAX_ALLOWED_HEADER_COUNT { return Err(RpcStatus::bad_request(format!( "Cannot ask for more than {} headers", MAX_ALLOWED_HEADER_COUNT, ))); } let db = self.db(); let maybe_headers = db .find_headers_after_hash(message.block_hashes, message.header_count) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))?; match maybe_headers { Some((idx, headers)) => { debug!( target: LOG_TARGET, "Sending forked index {} and {} header(s) to peer `{}`", idx, headers.len(), peer ); let metadata = db .get_chain_metadata() .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))?; Ok(Response::new(FindChainSplitResponse { fork_hash_index: idx as u64, headers: headers.into_iter().map(Into::into).collect(), tip_height: metadata.height_of_longest_chain(), })) }, None => { debug!( target: LOG_TARGET, "Unable to find link to main chain from peer `{}`", peer ); Err(RpcStatus::not_found("No link found to main chain")) }, } } #[instrument(skip(self), err)] async fn get_chain_metadata(&self, _: Request<()>) -> Result<Response<proto::base_node::ChainMetadata>, RpcStatus> { let chain_metadata = self .db() .get_chain_metadata() 
.await .map_err(RpcStatus::log_internal_error(LOG_TARGET))?; Ok(Response::new(chain_metadata.into())) } #[instrument(skip(self), err)] async fn sync_kernels( &self, request: Request<SyncKernelsRequest>, ) -> Result<Streaming<proto::types::TransactionKernel>, RpcStatus> { let peer_node_id = request.context().peer_node_id().clone(); let req = request.into_message(); let (tx, rx) = mpsc::channel(100); let db = self.db(); let start_header = db .fetch_header_containing_kernel_mmr(req.start) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))? .into_header(); let end_header = db .fetch_header_by_block_hash(req.end_header_hash.clone()) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET))? .ok_or_else(|| RpcStatus::not_found("Unknown end header"))?; let mut current_height = start_header.height; let end_height = end_header.height; let mut current_mmr_position = start_header.kernel_mmr_size; let mut current_header_hash = start_header.hash(); if current_height > end_height { return Err(RpcStatus::bad_request("start header height is after end header")); } let session_token = self.try_add_exclusive_session(peer_node_id).await?; task::spawn(async move { while current_height <= end_height { if tx.is_closed() { break; } let res = db .fetch_kernels_in_block(current_header_hash.clone()) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET)); match res { Ok(kernels) if kernels.is_empty() => { let _ = tx .send(Err(RpcStatus::general(format!( "No kernels in block {}", current_header_hash.to_hex() )))) .await; break; }, Ok(kernels) => { debug!( target: LOG_TARGET, "Streaming kernels {} to {}", current_mmr_position, current_mmr_position + kernels.len() as u64 ); current_mmr_position += kernels.len() as u64; let kernels = kernels.into_iter().map(proto::types::TransactionKernel::from).map(Ok); // Ensure task stops if the peer prematurely stops their RPC session if utils::mpsc::send_all(&tx, kernels).await.is_err() { break; } }, Err(err) => { let _ = 
tx.send(Err(err)).await; break; }, } current_height += 1; if current_height <= end_height { let res = db .fetch_header(current_height) .await .map_err(RpcStatus::log_internal_error(LOG_TARGET)); match res { Ok(Some(header)) => { current_header_hash = header.hash(); }, Ok(None) => { let _ = tx .send(Err(RpcStatus::not_found(format!( "Could not find header #{} while streaming UTXOs after position {}", current_height, current_mmr_position )))) .await; break; }, Err(err) => { error!(target: LOG_TARGET, "DB error while streaming kernels: {}", err); let _ = tx .send(Err(RpcStatus::general("DB error while streaming kernels"))) .await; break; }, } } } metrics::active_sync_peers().dec(); debug!( target: LOG_TARGET, "Kernel sync round complete for peer `{}`.", session_token, ); }); Ok(Streaming::new(rx)) } #[instrument(skip(self), err)] async fn sync_utxos(&self, request: Request<SyncUtxosRequest>) -> Result<Streaming<SyncUtxosResponse>, RpcStatus> { let req = request.message(); let peer_node_id = request.context().peer_node_id(); debug!( target: LOG_TARGET, "Received sync_utxos request from header {} to {} (start = {}, include_pruned_utxos = {}, \ include_deleted_bitmaps = {})", peer_node_id, req.start, req.end_header_hash.to_hex(), req.include_pruned_utxos, req.include_deleted_bitmaps ); let _session_token = self.try_add_exclusive_session(peer_node_id.clone()).await?; let (tx, rx) = mpsc::channel(200); let task = SyncUtxosTask::new(self.db()); task.run(request, tx).await?; Ok(Streaming::new(rx)) } }
40.09894
155
0.50304
eba30fa9bca423f69c843352ae9202311087e92d
908
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[feature(managed_boxes)]; // ignore-fast type compare<T> = 'static |T, T| -> bool; fn test_generic<T:Clone>(expected: T, not_expected: T, eq: compare<T>) { let actual: T = if true { expected.clone() } else { not_expected }; assert!((eq(expected, actual))); } fn test_vec() { fn compare_box(v1: @int, v2: @int) -> bool { return v1 == v2; } test_generic::<@int>(@1, @2, compare_box); } pub fn main() { test_vec(); }
32.428571
72
0.679515
dba08a32e58b33256425842153cfd94ea0b7036f
1,598
use alloc::{Vec, BTreeMap}; use core::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering}; use core::mem; type ChunkId = usize; enum Slot<T> { /// Vacant slot, containing index to the next vacant slot Vacant(AtomicUsize), /// Occupied slot, containing value Occupied(T), } struct Chunk<T> { slots: Vec<Slot<T>>, num_vacant: AtomicUsize, } pub struct Entry { chunk_id: ChunkId, index: usize, } struct ChunkMap<T> { chunks: BTreeMap<ChunkId, Chunk<T>>, next_id: AtomicUsize, } /// A lock-free arena. pub struct Arena<T> { /// Map of chunk id to } impl<T> Arena<T> { /// Create an Arena with space for `capacity` objects. pub fn new(capacity: usize) -> Arena<T> { Arena { slots: Vec::with_capacity(size), len: ATOMIC_USIZE_INIT, head: !ATOMIC_USIZE_INIT, } } pub fn alloc(&mut self, object: T) -> Option<&mut T> { let old_len = self.len.fetch_add(1, Ordering::Relaxed); let head = self.head.load(Ordering::Relaxed); if head == !0 { self.slots.push(Slot::Occupied(object)); old_len } else { let index = head; match self.slots[index] { Slot::Vacant(next) => { self.head.store(next, Ordering::Relaxed); self.slots[index] = Slot::Occupied(object); }, Slot::Occupied(_) => unreachable!(), }; index } } }
24.96875
68
0.524406
fcea8e447e198ed47900c165ababb57a4f27a9cb
1,316
use std::fmt; use std::io; use yaml_rust::scanner::ScanError; #[derive(Debug)] pub enum AutoTeXErr { IOErr(io::Error), ScanErr(ScanError), CommandErr(clap::Error), NoneError, NoFilenameInputErr, ParsePdfErr, TakeFilesErr, InvalidOptionErr, } impl fmt::Display for AutoTeXErr { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use AutoTeXErr::*; match *self { IOErr(ref e) => e.fmt(f), ScanErr(ref e) => e.fmt(f), CommandErr(ref e) => e.fmt(f), NoneError => write!(f, "NoneError"), NoFilenameInputErr => write!(f, "There is no filename to compile!"), ParsePdfErr => write!(f, "Cannot take a pdf reader name!"), TakeFilesErr => write!(f, "Cannot make a list of tex relative files!"), InvalidOptionErr => write!(f, "No tex option is used!"), } } } impl From<io::Error> for AutoTeXErr { fn from(err: io::Error) -> Self { Self::IOErr(err) } } impl From<ScanError> for AutoTeXErr { fn from(err: ScanError) -> Self { Self::ScanErr(err) } } impl From<clap::Error> for AutoTeXErr { fn from(err: clap::Error) -> Self { Self::CommandErr(err) } } pub type Result<T> = std::result::Result<T, AutoTeXErr>;
25.307692
83
0.576748
330e10ba11cf7cde897e125dd51606e32f686d17
80
pub mod id_types; pub mod modules; mod noise; pub mod resources; pub mod world;
13.333333
18
0.7625
1d1ab1bdcc435544b06eb6f2a9031482b01a7e0f
1,401
pub struct IconReportProblem { props: crate::Props, } impl yew::Component for IconReportProblem { type Properties = crate::Props; type Message = (); fn create(props: Self::Properties, _: yew::prelude::ComponentLink<Self>) -> Self { Self { props } } fn update(&mut self, _: Self::Message) -> yew::prelude::ShouldRender { true } fn change(&mut self, _: Self::Properties) -> yew::prelude::ShouldRender { false } fn view(&self) -> yew::prelude::Html { yew::prelude::html! { <svg class=self.props.class.unwrap_or("") width=self.props.size.unwrap_or(24).to_string() height=self.props.size.unwrap_or(24).to_string() viewBox="0 0 24 24" fill=self.props.fill.unwrap_or("none") stroke=self.props.color.unwrap_or("currentColor") stroke-width=self.props.stroke_width.unwrap_or(2).to_string() stroke-linecap=self.props.stroke_linecap.unwrap_or("round") stroke-linejoin=self.props.stroke_linejoin.unwrap_or("round") > <svg xmlns="http://www.w3.org/2000/svg" height="24" viewBox="0 0 24 24" width="24"><path d="M0 0h24v24H0z" fill="none"/><path d="M1 21h22L12 2 1 21zm12-3h-2v-2h2v2zm0-4h-2v-4h2v4z"/></svg> </svg> } } }
30.456522
200
0.573876
1402f3e67ad687a5469f56901354905ff62acb18
5,991
use regex::Regex; use std::collections::HashMap; pub mod converter { use std::iter::FromIterator; fn convert_oldish_char(c: char) -> Option<char> { match c { '零' => Some('〇'), '壱' => Some('一'), '壹' => Some('一'), '弐' => Some('二'), '貳' => Some('二'), '貮' => Some('二'), '参' => Some('三'), '參' => Some('三'), '肆' => Some('四'), '伍' => Some('五'), '陸' => Some('六'), '漆' => Some('七'), '捌' => Some('八'), '玖' => Some('九'), '拾' => Some('十'), '陌' => Some('百'), '佰' => Some('百'), '阡' => Some('千'), '仟' => Some('千'), '萬' => Some('万'), _ => None, } } pub fn normalize_oldish_string(input: String) -> String { let mut vec_dst: Vec<char> = vec![]; for c in input.chars().collect::<Vec<_>>() { match convert_oldish_char(c) { Some(v) => vec_dst.push(v), None => match c { '廿' => { vec_dst.push('二'); vec_dst.push('十'); } _ => vec_dst.push(c), }, } } return String::from_iter(vec_dst); } pub fn normalize_kanjinum_string(input: String) -> String { let mut vec_dst: Vec<char> = vec![]; for c in input.chars().collect::<Vec<_>>() { let filtered = match c { '〇' => '0', '一' => '1', '二' => '2', '三' => '3', '四' => '4', '五' => '5', '六' => '6', '七' => '7', '八' => '8', '九' => '9', _ => c, }; vec_dst.push(filtered); } return String::from_iter(vec_dst); } pub fn normalize_zenkakunum_string(input: String) -> String { let mut vec_dst: Vec<char> = vec![]; for c in input.chars().collect::<Vec<_>>() { let filtered = match c { '0' => '0', '1' => '1', '2' => '2', '3' => '3', '4' => '4', '5' => '5', '6' => '6', '7' => '7', '8' => '8', '9' => '9', _ => c, }; vec_dst.push(filtered); } return String::from_iter(vec_dst); } #[test] fn test_normalize_oldish_string() { assert_eq!( normalize_oldish_string(String::from("伍伍カレー")), String::from("五五カレー") ); assert_eq!( normalize_oldish_string(String::from("CoCo壱番屋廿日市駅前店")), String::from("CoCo一番屋二十日市駅前店") ); } #[test] fn test_normalize_kanjinum_string() { assert_eq!( normalize_kanjinum_string(String::from("一九九九年一〇月")), String::from("1999年10月") ); } #[test] fn 
test_normalize_zenkakunum_string() { assert_eq!( normalize_zenkakunum_string(String::from("2021年")), String::from("2021年") ); } } /** * 漢数字を単位ごとに分割する */ pub fn split_large_number(japanese: String) -> HashMap<&'static str, i32> { let mut input = japanese; let mut parsed_num: HashMap<&str, i32> = HashMap::new(); // 京、兆、億、万の順でそれぞれの桁についてパースしていく for unit in vec!["京", "兆", "億", "万"] { // 正規表現を使い、◯◯万のような文字列をキャプチャする let expression = "(?P<number>.+)".to_string() + unit; let reg = Regex::new(&expression).unwrap(); let mut remove_part = String::new(); let result = match reg.captures(&input) { Some(v) => { remove_part = format!("{}{}", &v["number"], unit); kanjinum_to_int(&v["number"]) } None => 0, }; // キャプチャが存在した場合はもとの文字列からその部分を削除する if remove_part.as_str() != "" { input = input.replace(&remove_part, ""); } // 抜き出したものはHashMapに保存 parsed_num.insert(unit, result); } // 一万未満の桁がある場合処理を続ける if input.as_str() != "" { parsed_num.insert("千", kanjinum_to_int(&input)); } else { parsed_num.insert("千", 0); } return parsed_num; } fn kanjinum_to_int(input: &str) -> i32 { // 入力がすべてアラビア数字で構成されている場合 if Regex::new(r"^[0-9]+$").unwrap().is_match(input) { let num: i32 = input.parse().unwrap(); return num; } let mut number = 0; for (unit, base) in vec![("千", 1000), ("百", 100), ("十", 10)] { let expression = "(?P<number>.*)".to_string() + unit; let reg = Regex::new(&expression).unwrap(); let capture = reg.captures(input); let n = match capture { Some(v) => { if Regex::new("^[0-9]+$").unwrap().is_match(&v["number"]) { let num: i32 = v["number"].parse().unwrap(); return num; } else { let num: i32 = converter::normalize_kanjinum_string(v["number"].to_string()) .parse() .unwrap(); return num; } } None => 0, }; number = number + n * base; } return number; } #[test] fn test_kanjinum_to_int() { assert_eq!(kanjinum_to_int("三千"), 3000); assert_eq!(kanjinum_to_int("千九百一"), 1901); assert_eq!(kanjinum_to_int("九九"), 99); } /* const match = kanji.match(reg) if (match) { numbers[key] = kan2n(match[1]) kanji = 
kanji.replace(match[0], '') } else { numbers[key] = 0 } */ /* let unit_large: HashMap<&str, i64> = vec![ ("万", 1_0000), ("億", 1_0000_0000), ("兆", 1_0000_0000_0000), ("京", 1_0000_0000_0000_0000) ].into_iter().collect(); */
28.393365
96
0.427808
7540e66d4ffa0e7d4c2eefffaa50fbbe6ce33530
171
/*input this is a pen. */ fn main() { let mut s = format!(""); std::io::stdin().read_line(&mut s).expect(""); s.pop(); println!("{}", s.to_uppercase()); }
17.1
50
0.508772
d9011e6d9e5af690f6bb5f4e4c7fdee274f40c2f
4,216
//! Elasticsearch driver. use std::io::Write; use hyper::Url; use hyper::client::{Client, Body}; use hyper::header::ContentType; use serde_json::{to_vec, to_writer, from_reader, Value}; use error::{ErrorKind, Result, ResultExt}; /// Elasticsearch client. pub struct Es<'a> { client: Client, base: Url, index: &'a str, type_: &'a str, } impl<'a> Es<'a> { /// Constructs a new Elasticsearch client. pub fn new(base: Url, index: &'a str, type_: &'a str) -> Es<'a> { Es { client: Client::new(), base: base, index: index, type_: type_, } } /// Creates the "localization" index. pub fn create_index(&self, shards: u32, replicas: u32) -> Result<()> { let body = to_vec(&json!({ "settings": { "number_of_shards": shards, "number_of_replicas": replicas, }, "mappings": { self.type_: { "_all": {"enabled": false}, "dynamic": true, }, }, })).unwrap(); let resp = self.client .put(self.base.join(self.index).unwrap()) .header(ContentType::json()) .body(Body::BufBody(&body, body.len())) .send().chain_err(|| ErrorKind::CreateIndex)?; let content: Value = from_reader(resp).chain_err(|| ErrorKind::CreateIndex)?; if content["acknowledged"].as_bool() == Some(true) { Ok(()) } else if content["error"]["type"].as_str() == Some("index_already_exists_exception") { let error_reason = content["error"]["reason"].as_str().unwrap_or("localization index already exists"); eprintln!("warning: {}", error_reason); Ok(()) } else { bail!(ErrorKind::CreateIndexUnexpectedReply(content)) } } /// Inserts an iterator of JSON values into the localization index. /// /// Returns the number of entries successfully added. 
pub fn add_translations<I>(&self, translations: I) -> Result<usize> where I: Iterator<Item=Value> { let index = to_vec(&json!({ "index": { "_index": self.index, "_type": self.type_, }, })).unwrap(); let mut bulk = Vec::new(); for translation in translations { bulk.extend(&index); bulk.push(b'\n'); to_writer(&mut bulk, &translation)?; bulk.push(b'\n'); } if bulk.is_empty() { // we need to special-case this, otherwise we will cause Elasticsearch to go NPE (HTTP 500). return Ok(0); } let result = self.client .post(self.base.join("_bulk").unwrap()) .header(ContentType(mime!(Application/("x-ndjson")))) .body(Body::BufBody(&bulk, bulk.len())) .send()?; let result: Value = from_reader(result)?; let items = result["items"].as_array(); Ok(items.map(|a| a.iter().filter(|r| r["index"]["status"].as_i64() == Some(201)).count()).unwrap_or(0)) } } /* Copyright 2017 kennytm Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
36.034188
118
0.594165
e901ece3be5bece07bd808602d3f4b4320509c95
12,707
mod os_related; mod tests { use predicates::prelude::*; use std::path::Path; // Used for writing assertions fn tedge_command<I, S>(args: I) -> Result<assert_cmd::Command, Box<dyn std::error::Error>> where I: IntoIterator<Item = S>, S: AsRef<std::ffi::OsStr>, { let path: &str = "tedge"; let mut cmd = assert_cmd::Command::cargo_bin(path)?; cmd.args(args); Ok(cmd) } #[test] fn run_help() -> Result<(), Box<dyn std::error::Error>> { let mut cmd = tedge_command(&["--help"])?; cmd.assert() .success() .stdout(predicate::str::contains("USAGE")); Ok(()) } #[test] fn run_version() -> Result<(), Box<dyn std::error::Error>> { let mut cmd = tedge_command(&["-V"])?; let version_string = format!("tedge {}", env!("CARGO_PKG_VERSION")); cmd.assert() .success() .stdout(predicate::str::starts_with(version_string)); Ok(()) } #[test] fn run_create_certificate() -> Result<(), Box<dyn std::error::Error>> { let tempdir = tempfile::tempdir()?; let device_id = "test"; let cert_path = temp_path(&tempdir, "test-cert.pem"); let key_path = temp_path(&tempdir, "test-key.pem"); let home_dir = tempdir.path().to_str().unwrap(); let mut get_device_id_cmd = tedge_command_with_test_home(home_dir, &["config", "get", "device.id"])?; let mut set_cert_path_cmd = tedge_command_with_test_home( home_dir, &["config", "set", "device.cert.path", &cert_path], )?; let mut set_key_path_cmd = tedge_command_with_test_home( home_dir, &["config", "set", "device.key.path", &key_path], )?; let mut create_cmd = tedge_command_with_test_home(home_dir, &["cert", "create", "--device-id", device_id])?; let mut show_cmd = tedge_command_with_test_home(home_dir, &["cert", "show"])?; let mut remove_cmd = tedge_command_with_test_home(home_dir, &["cert", "remove"])?; // Configure tedge to use specific paths for the private key and the certificate set_cert_path_cmd.assert().success(); set_key_path_cmd.assert().success(); // The remove command can be run when there is no certificate remove_cmd.assert().success(); // We start we no 
certificate, hence no device id get_device_id_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'device.id' is not set", )); // The create command created a certificate create_cmd.assert().success(); // The certificate use the device id as CN show_cmd .assert() .success() .stdout(predicate::str::contains(format!("CN={},", device_id))); // The create command updated the config with the device.id get_device_id_cmd .assert() .success() .stdout(predicate::str::contains(device_id)); // When a certificate exists, it is not over-written by the create command create_cmd .assert() .failure() .stderr(predicate::str::contains("A certificate already exists")); // The remove command removed the certificate remove_cmd.assert().success(); // which can no more be displayed show_cmd .assert() .failure() .stderr(predicate::str::contains("Missing file")); // The remove command also removed the device id from the config get_device_id_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'device.id' is not set", )); // The a new certificate can then be created. 
create_cmd.assert().success(); Ok(()) } #[test] fn run_config_set_get_unset_read_only_key() -> Result<(), Box<dyn std::error::Error>> { let temp_dir = tempfile::tempdir().unwrap(); let temp_dir_path = temp_dir.path(); let test_home_str = temp_dir_path.to_str().unwrap(); let device_id = "test"; // allowed to get read-only key by CLI let mut get_device_id_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "device.id"])?; get_device_id_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'device.id' is not set", )); // forbidden to set read-only key by CLI let mut set_device_id_cmd = tedge_command_with_test_home( test_home_str, &["config", "set", "device.id", device_id], )?; set_device_id_cmd.assert().failure(); // forbidden to unset read-only key by CLI let mut unset_device_id_cmd = tedge_command_with_test_home(test_home_str, &["config", "unset", "device.id"])?; unset_device_id_cmd.assert().failure(); Ok(()) } #[test] fn run_config_set_get_unset_read_write_key() -> Result<(), Box<dyn std::error::Error>> { let temp_dir = tempfile::tempdir().unwrap(); let temp_dir_path = temp_dir.path(); let test_home_str = temp_dir_path.to_str().unwrap(); let c8y_url = "mytenant.cumulocity.com"; let mut get_c8y_url_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "c8y.url"])?; get_c8y_url_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'c8y.url' is not set", )); let mut set_c8y_url_cmd = tedge_command_with_test_home(test_home_str, &["config", "set", "c8y.url", c8y_url])?; set_c8y_url_cmd.assert().success(); get_c8y_url_cmd .assert() .success() .stdout(predicate::str::contains(c8y_url)); let mut unset_c8y_url_cmd = tedge_command_with_test_home(test_home_str, &["config", "unset", "c8y.url"])?; unset_c8y_url_cmd.assert().success(); get_c8y_url_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'c8y.url' is not set", )); Ok(()) } #[test] fn 
run_config_defaults() -> Result<(), Box<dyn std::error::Error>> { let temp_dir = tempfile::tempdir().unwrap(); let temp_dir_path = temp_dir.path(); let test_home_str = temp_dir_path.to_str().unwrap(); let cert_path = temp_path( &temp_dir, &join_paths(".tedge", "device-certs/tedge-certificate.pem"), ); let key_path = temp_path( &temp_dir, &join_paths(".tedge", "device-certs/tedge-private-key.pem"), ); let mut get_device_id_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "device.id"])?; get_device_id_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'device.id' is not set", )); let mut get_cert_path_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "device.cert.path"])?; get_cert_path_cmd .assert() .success() .stdout(predicate::str::contains(&cert_path)); let mut get_key_path_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "device.key.path"])?; get_key_path_cmd .assert() .success() .stdout(predicate::str::contains(&key_path)); let mut get_c8y_url_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "c8y.url"])?; get_c8y_url_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'c8y.url' is not set", )); let mut get_c8y_root_cert_path_cmd = tedge_command_with_test_home(test_home_str, &["config", "get", "c8y.root.cert.path"])?; get_c8y_root_cert_path_cmd .assert() .success() .stdout(predicate::str::contains( "The provided config key: 'c8y.root.cert.path' is not set", )); Ok(()) } #[test] fn run_config_list_default() { let temp_dir = tempfile::tempdir().unwrap(); let test_home_str = temp_dir.path().to_str().unwrap(); let mut list_cmd = tedge_command_with_test_home(test_home_str, &["config", "list"]).unwrap(); let assert = list_cmd.assert().success(); let output = assert.get_output().clone(); let output_str = String::from_utf8(output.stdout).unwrap(); let key_path = extract_config_value(&output_str, "device.key.path"); 
assert!(key_path.ends_with("tedge-private-key.pem")); assert!(key_path.contains(".tedge")); let cert_path = extract_config_value(&output_str, "device.cert.path"); assert!(cert_path.ends_with("tedge-certificate.pem")); assert!(cert_path.contains(".tedge")); } fn extract_config_value(output: &String, key: &str) -> String { output .lines() .map(|line| line.splitn(2, "=").collect::<Vec<_>>()) .find(|pair| pair[0] == key) .unwrap()[1] .into() } #[test] fn tedge_disconnect_c8y_no_bridge_config() { let temp_dir = tempfile::tempdir().unwrap(); let temp_dir_path = temp_dir.path(); let test_home_str = temp_dir_path.to_str().unwrap(); // If file doesn't exist exit code will be 0. tedge_command_with_test_home(test_home_str, &["disconnect", "c8y"]) .unwrap() .assert() .success(); } #[test] fn run_config_list_all() { let temp_dir = tempfile::tempdir().unwrap(); let test_home_str = temp_dir.path().to_str().unwrap(); let mut list_cmd = tedge_command_with_test_home(test_home_str, &["config", "list", "--all"]).unwrap(); let assert = list_cmd.assert().success(); let output = assert.get_output(); let output_str = String::from_utf8(output.clone().stdout).unwrap(); let key_path = extract_config_value(&output_str, "device.key.path"); assert!(key_path.ends_with("tedge-private-key.pem")); assert!(key_path.contains(".tedge")); let cert_path = extract_config_value(&output_str, "device.cert.path"); assert!(cert_path.ends_with("tedge-certificate.pem")); assert!(cert_path.contains(".tedge")); for key in get_tedge_config_keys() { assert_eq!(true, output_str.contains(key)); } } #[test] fn run_config_list_doc() { let temp_dir = tempfile::tempdir().unwrap(); let test_home_str = temp_dir.path().to_str().unwrap(); let mut list_cmd = tedge_command_with_test_home(test_home_str, &["config", "list", "--doc"]).unwrap(); let assert = list_cmd.assert().success(); let output = assert.get_output().clone(); let output_str = String::from_utf8(output.clone().stdout).unwrap(); for key in get_tedge_config_keys() { 
assert_eq!(true, output_str.contains(key)); } assert_eq!(true, output_str.contains("Example")); } fn tedge_command_with_test_home<I, S>( test_home: &str, args: I, ) -> Result<assert_cmd::Command, Box<dyn std::error::Error>> where I: IntoIterator<Item = S>, S: AsRef<std::ffi::OsStr>, { let mut command = tedge_command(args)?; command.env("HOME", test_home); Ok(command) } fn join_paths(prefix_path: &str, trailer_path: &str) -> String { Path::new(prefix_path) .join(trailer_path) .to_str() .unwrap() .into() } fn temp_path(dir: &tempfile::TempDir, filename: &str) -> String { String::from(dir.path().join(filename).to_str().unwrap()) } fn get_tedge_config_keys() -> Vec<&'static str> { let vec = vec![ "device.id", "device.key.path", "device.cert.path", "c8y.url", "c8y.root.cert.path", ]; return vec; } }
32.834625
99
0.565358
482751460e4c3202d21aa2ac14865c3ef5f31379
1,634
/// Enables contract runtime to panic with the given type. Any error type used in conjunction /// with `#[handle_result]` has to implement this trait. /// /// ``` /// use near_sdk::FunctionError; /// /// enum Error { /// NotFound, /// Unexpected { message: String }, /// } /// /// impl FunctionError for Error { /// fn panic(&self) -> ! { /// match self { /// Error::NotFound => /// near_sdk::env::panic_str("not found"), /// Error::Unexpected { message } => /// near_sdk::env::panic_str(&format!("unexpected error: {}", message)) /// } /// } /// } /// ``` pub trait FunctionError { fn panic(&self) -> !; } impl<T> FunctionError for T where T: AsRef<str>, { fn panic(&self) -> ! { crate::env::panic_str(self.as_ref()) } } /// A simple type used in conjunction with [FunctionError] representing that the function should /// abort without a custom message. /// /// ``` /// use near_sdk::borsh::{BorshDeserialize, BorshSerialize}; /// use near_sdk::{Abort, near_bindgen}; /// /// #[near_bindgen] /// #[derive(Default, BorshDeserialize, BorshSerialize)] /// pub struct Contract; /// /// #[near_bindgen] /// impl Contract { /// #[handle_result] /// pub fn foo(&self, text: &str) -> Result<String, Abort> { /// if text == "success" { /// Ok("success".to_string()) /// } else { /// Err(Abort) /// } /// } /// } /// ``` #[derive(Debug, Clone, PartialEq)] pub struct Abort; impl FunctionError for Abort { fn panic(&self) -> ! { crate::env::abort() } }
24.38806
96
0.547124
0830a2aa0f540073828c34c754951b4fa00f2462
60,120
// Copyright (c) 2016 The vulkano developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or // https://www.apache.org/licenses/LICENSE-2.0> or the MIT // license <LICENSE-MIT or https://opensource.org/licenses/MIT>, // at your option. All files in the project carrying such // notice may not be copied, modified, or distributed except // according to those terms. use crate::buffer::BufferAccess; use crate::check_errors; use crate::command_buffer::submit::SubmitAnyBuilder; use crate::command_buffer::submit::SubmitPresentBuilder; use crate::command_buffer::submit::SubmitPresentError; use crate::command_buffer::submit::SubmitSemaphoresWaitBuilder; use crate::device::Device; use crate::device::DeviceOwned; use crate::device::Queue; use crate::format::Format; use crate::image::swapchain::SwapchainImage; use crate::image::sys::UnsafeImage; use crate::image::ImageAccess; use crate::image::ImageCreateFlags; use crate::image::ImageDimensions; use crate::image::ImageInner; use crate::image::ImageLayout; use crate::image::ImageTiling; use crate::image::ImageType; use crate::image::ImageUsage; use crate::image::SampleCount; use crate::swapchain::CapabilitiesError; use crate::swapchain::ColorSpace; use crate::swapchain::CompositeAlpha; use crate::swapchain::PresentMode; use crate::swapchain::PresentRegion; use crate::swapchain::Surface; use crate::swapchain::SurfaceSwapchainLock; use crate::swapchain::SurfaceTransform; use crate::sync::semaphore::SemaphoreError; use crate::sync::AccessCheckError; use crate::sync::AccessError; use crate::sync::AccessFlags; use crate::sync::Fence; use crate::sync::FlushError; use crate::sync::GpuFuture; use crate::sync::PipelineStages; use crate::sync::Semaphore; use crate::sync::SharingMode; use crate::Error; use crate::OomError; use crate::Success; use crate::VulkanObject; use std::error; use std::fmt; use std::mem; use std::mem::MaybeUninit; use std::ptr; use std::sync::atomic::AtomicBool; use 
std::sync::atomic::Ordering; use std::sync::Arc; use std::sync::Mutex; use std::time::Duration; /// The way fullscreen exclusivity is handled. #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(i32)] pub enum FullscreenExclusive { /// Indicates that the driver should determine the appropriate full-screen method /// by whatever means it deems appropriate. Default = ash::vk::FullScreenExclusiveEXT::DEFAULT.as_raw(), /// Indicates that the driver may use full-screen exclusive mechanisms when available. /// Such mechanisms may result in better performance and/or the availability of /// different presentation capabilities, but may require a more disruptive transition // during swapchain initialization, first presentation and/or destruction. Allowed = ash::vk::FullScreenExclusiveEXT::ALLOWED.as_raw(), /// Indicates that the driver should avoid using full-screen mechanisms which rely /// on disruptive transitions. Disallowed = ash::vk::FullScreenExclusiveEXT::DISALLOWED.as_raw(), /// Indicates the application will manage full-screen exclusive mode by using /// `Swapchain::acquire_fullscreen_exclusive()` and /// `Swapchain::release_fullscreen_exclusive()` functions. AppControlled = ash::vk::FullScreenExclusiveEXT::APPLICATION_CONTROLLED.as_raw(), } impl From<FullscreenExclusive> for ash::vk::FullScreenExclusiveEXT { #[inline] fn from(val: FullscreenExclusive) -> Self { Self::from_raw(val as i32) } } /// Tries to take ownership of an image in order to draw on it. /// /// The function returns the index of the image in the array of images that was returned /// when creating the swapchain, plus a future that represents the moment when the image will /// become available from the GPU (which may not be *immediately*). /// /// If you try to draw on an image without acquiring it first, the execution will block. (TODO /// behavior may change). /// /// The second field in the tuple in the Ok result is a bool represent if the acquisition was /// suboptimal. 
In this case the acquired image is still usable, but the swapchain should be /// recreated as the Surface's properties no longer match the swapchain. pub fn acquire_next_image<W>( swapchain: Arc<Swapchain<W>>, timeout: Option<Duration>, ) -> Result<(usize, bool, SwapchainAcquireFuture<W>), AcquireError> { let semaphore = Semaphore::from_pool(swapchain.device.clone())?; let fence = Fence::from_pool(swapchain.device.clone())?; let AcquiredImage { id, suboptimal } = { // Check that this is not an old swapchain. From specs: // > swapchain must not have been replaced by being passed as the // > VkSwapchainCreateInfoKHR::oldSwapchain value to vkCreateSwapchainKHR let stale = swapchain.stale.lock().unwrap(); if *stale { return Err(AcquireError::OutOfDate); } let acquire_result = unsafe { acquire_next_image_raw(&swapchain, timeout, Some(&semaphore), Some(&fence)) }; if let &Err(AcquireError::FullscreenExclusiveLost) = &acquire_result { swapchain .fullscreen_exclusive_held .store(false, Ordering::SeqCst); } acquire_result? }; Ok(( id, suboptimal, SwapchainAcquireFuture { swapchain, semaphore: Some(semaphore), fence: Some(fence), image_id: id, finished: AtomicBool::new(false), }, )) } /// Presents an image on the screen. /// /// The parameter is the same index as what `acquire_next_image` returned. The image must /// have been acquired first. /// /// The actual behavior depends on the present mode that you passed when creating the /// swapchain. pub fn present<F, W>( swapchain: Arc<Swapchain<W>>, before: F, queue: Arc<Queue>, index: usize, ) -> PresentFuture<F, W> where F: GpuFuture, { assert!(index < swapchain.images.len()); // TODO: restore this check with a dummy ImageAccess implementation /*let swapchain_image = me.images.lock().unwrap().get(index).unwrap().0.upgrade().unwrap(); // TODO: return error instead // Normally if `check_image_access` returns false we're supposed to call the `gpu_access` // function on the image instead. 
But since we know that this method on `SwapchainImage` // always returns false anyway (by design), we don't need to do it. assert!(before.check_image_access(&swapchain_image, ImageLayout::PresentSrc, true, &queue).is_ok()); // TODO: return error instead*/ PresentFuture { previous: before, queue, swapchain, image_id: index, present_region: None, flushed: AtomicBool::new(false), finished: AtomicBool::new(false), } } /// Same as `swapchain::present`, except it allows specifying a present region. /// Areas outside the present region may be ignored by Vulkan in order to optimize presentation. /// /// This is just an optimization hint, as the Vulkan driver is free to ignore the given present region. /// /// If `VK_KHR_incremental_present` is not enabled on the device, the parameter will be ignored. pub fn present_incremental<F, W>( swapchain: Arc<Swapchain<W>>, before: F, queue: Arc<Queue>, index: usize, present_region: PresentRegion, ) -> PresentFuture<F, W> where F: GpuFuture, { assert!(index < swapchain.images.len()); // TODO: restore this check with a dummy ImageAccess implementation /*let swapchain_image = me.images.lock().unwrap().get(index).unwrap().0.upgrade().unwrap(); // TODO: return error instead // Normally if `check_image_access` returns false we're supposed to call the `gpu_access` // function on the image instead. But since we know that this method on `SwapchainImage` // always returns false anyway (by design), we don't need to do it. assert!(before.check_image_access(&swapchain_image, ImageLayout::PresentSrc, true, &queue).is_ok()); // TODO: return error instead*/ PresentFuture { previous: before, queue, swapchain, image_id: index, present_region: Some(present_region), flushed: AtomicBool::new(false), finished: AtomicBool::new(false), } } /// Contains the swapping system and the images that can be shown on a surface. pub struct Swapchain<W> { // The Vulkan device this swapchain was created with. 
device: Arc<Device>, // The surface, which we need to keep alive. surface: Arc<Surface<W>>, // The swapchain object. swapchain: ash::vk::SwapchainKHR, // The images of this swapchain. images: Vec<ImageEntry>, // If true, that means we have tried to use this swapchain to recreate a new swapchain. The current // swapchain can no longer be used for anything except presenting already-acquired images. // // We use a `Mutex` instead of an `AtomicBool` because we want to keep that locked while // we acquire the image. stale: Mutex<bool>, // Parameters passed to the constructor. num_images: u32, format: Format, color_space: ColorSpace, dimensions: [u32; 2], layers: u32, usage: ImageUsage, sharing_mode: SharingMode, transform: SurfaceTransform, composite_alpha: CompositeAlpha, present_mode: PresentMode, fullscreen_exclusive: FullscreenExclusive, fullscreen_exclusive_held: AtomicBool, clipped: bool, } struct ImageEntry { // The actual image. image: UnsafeImage, // If true, then the image is still in the undefined layout and must be transitioned. undefined_layout: AtomicBool, } impl<W> Swapchain<W> { /// Starts the process of building a new swapchain, using default values for the parameters. #[inline] pub fn start(device: Arc<Device>, surface: Arc<Surface<W>>) -> SwapchainBuilder<W> { SwapchainBuilder { device, surface, num_images: 2, format: None, color_space: ColorSpace::SrgbNonLinear, dimensions: None, layers: 1, usage: ImageUsage::none(), sharing_mode: SharingMode::Exclusive, transform: Default::default(), composite_alpha: CompositeAlpha::Opaque, present_mode: PresentMode::Fifo, fullscreen_exclusive: FullscreenExclusive::Default, clipped: true, old_swapchain: None, } } /// Starts building a new swapchain from an existing swapchain. /// /// Use this when a swapchain has become invalidated, such as due to window resizes. /// The builder is pre-filled with the parameters of the old one, except for `dimensions`, /// which is set to `None`. 
#[inline] pub fn recreate(self: &Arc<Self>) -> SwapchainBuilder<W> { SwapchainBuilder { device: self.device().clone(), surface: self.surface().clone(), num_images: self.images.len() as u32, format: Some(self.format), color_space: self.color_space, dimensions: None, layers: self.layers, usage: self.usage, sharing_mode: self.sharing_mode.clone(), transform: self.transform, composite_alpha: self.composite_alpha, present_mode: self.present_mode, fullscreen_exclusive: self.fullscreen_exclusive, clipped: self.clipped, old_swapchain: Some(self.clone()), } } /// Returns the saved Surface, from the Swapchain creation. #[inline] pub fn surface(&self) -> &Arc<Surface<W>> { &self.surface } /// Returns of the images that belong to this swapchain. #[inline] pub fn raw_image(&self, offset: usize) -> Option<ImageInner> { self.images.get(offset).map(|i| ImageInner { image: &i.image, first_layer: 0, num_layers: self.layers as usize, first_mipmap_level: 0, num_mipmap_levels: 1, }) } /// Returns the number of images of the swapchain. #[inline] pub fn num_images(&self) -> u32 { self.images.len() as u32 } /// Returns the format of the images of the swapchain. #[inline] pub fn format(&self) -> Format { self.format } /// Returns the dimensions of the images of the swapchain. #[inline] pub fn dimensions(&self) -> [u32; 2] { self.dimensions } /// Returns the number of layers of the images of the swapchain. #[inline] pub fn layers(&self) -> u32 { self.layers } /// Returns the transform that was passed when creating the swapchain. #[inline] pub fn transform(&self) -> SurfaceTransform { self.transform } /// Returns the alpha mode that was passed when creating the swapchain. #[inline] pub fn composite_alpha(&self) -> CompositeAlpha { self.composite_alpha } /// Returns the present mode that was passed when creating the swapchain. #[inline] pub fn present_mode(&self) -> PresentMode { self.present_mode } /// Returns the value of `clipped` that was passed when creating the swapchain. 
#[inline] pub fn clipped(&self) -> bool { self.clipped } /// Returns the value of 'fullscreen_exclusive` that was passed when creating the swapchain. #[inline] pub fn fullscreen_exclusive(&self) -> FullscreenExclusive { self.fullscreen_exclusive } /// `FullscreenExclusive::AppControlled` must be the active fullscreen exclusivity mode. /// Acquire fullscreen exclusivity until either the `release_fullscreen_exclusive` is /// called, or if any of the the other `Swapchain` functions return `FullscreenExclusiveLost`. /// Requires: `FullscreenExclusive::AppControlled` pub fn acquire_fullscreen_exclusive(&self) -> Result<(), FullscreenExclusiveError> { if self.fullscreen_exclusive != FullscreenExclusive::AppControlled { return Err(FullscreenExclusiveError::NotAppControlled); } if self.fullscreen_exclusive_held.swap(true, Ordering::SeqCst) { return Err(FullscreenExclusiveError::DoubleAcquire); } unsafe { check_errors( self.device .fns() .ext_full_screen_exclusive .acquire_full_screen_exclusive_mode_ext( self.device.internal_object(), self.swapchain, ), )?; } Ok(()) } /// `FullscreenExclusive::AppControlled` must be the active fullscreen exclusivity mode. /// Release fullscreen exclusivity. pub fn release_fullscreen_exclusive(&self) -> Result<(), FullscreenExclusiveError> { if self.fullscreen_exclusive != FullscreenExclusive::AppControlled { return Err(FullscreenExclusiveError::NotAppControlled); } if !self.fullscreen_exclusive_held.swap(false, Ordering::SeqCst) { return Err(FullscreenExclusiveError::DoubleRelease); } unsafe { check_errors( self.device .fns() .ext_full_screen_exclusive .release_full_screen_exclusive_mode_ext( self.device.internal_object(), self.swapchain, ), )?; } Ok(()) } /// `FullscreenExclusive::AppControlled` is not the active fullscreen exclusivity mode, /// then this function will always return false. If true is returned the swapchain /// is in `FullscreenExclusive::AppControlled` fullscreen exclusivity mode and exclusivity /// is currently acquired. 
pub fn is_fullscreen_exclusive(&self) -> bool { if self.fullscreen_exclusive != FullscreenExclusive::AppControlled { false } else { self.fullscreen_exclusive_held.load(Ordering::SeqCst) } } // This method is necessary to allow `SwapchainImage`s to signal when they have been // transitioned out of their initial `undefined` image layout. // // See the `ImageAccess::layout_initialized` method documentation for more details. pub(crate) fn image_layout_initialized(&self, image_offset: usize) { let image_entry = self.images.get(image_offset); if let Some(ref image_entry) = image_entry { image_entry.undefined_layout.store(false, Ordering::SeqCst); } } pub(crate) fn is_image_layout_initialized(&self, image_offset: usize) -> bool { let image_entry = self.images.get(image_offset); if let Some(ref image_entry) = image_entry { !image_entry.undefined_layout.load(Ordering::SeqCst) } else { false } } } unsafe impl<W> VulkanObject for Swapchain<W> { type Object = ash::vk::SwapchainKHR; #[inline] fn internal_object(&self) -> ash::vk::SwapchainKHR { self.swapchain } } unsafe impl<W> DeviceOwned for Swapchain<W> { fn device(&self) -> &Arc<Device> { &self.device } } impl<W> fmt::Debug for Swapchain<W> { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!(fmt, "<Vulkan swapchain {:?}>", self.swapchain) } } impl<W> Drop for Swapchain<W> { #[inline] fn drop(&mut self) { unsafe { let fns = self.device.fns(); fns.khr_swapchain.destroy_swapchain_khr( self.device.internal_object(), self.swapchain, ptr::null(), ); self.surface.flag().store(false, Ordering::Release); } } } /// Builder for a [`Swapchain`]. 
#[derive(Debug)] pub struct SwapchainBuilder<W> { device: Arc<Device>, surface: Arc<Surface<W>>, old_swapchain: Option<Arc<Swapchain<W>>>, num_images: u32, format: Option<Format>, // None = use a default color_space: ColorSpace, dimensions: Option<[u32; 2]>, layers: u32, usage: ImageUsage, sharing_mode: SharingMode, transform: SurfaceTransform, composite_alpha: CompositeAlpha, present_mode: PresentMode, fullscreen_exclusive: FullscreenExclusive, clipped: bool, } impl<W> SwapchainBuilder<W> { /// Builds a new swapchain. Allocates images who content can be made visible on a surface. /// /// See also the `Surface::get_capabilities` function which returns the values that are /// supported by the implementation. All the parameters that you pass to the builder /// must be supported. /// /// This function returns the swapchain plus a list of the images that belong to the /// swapchain. The order in which the images are returned is important for the /// `acquire_next_image` and `present` functions. /// /// # Panic /// /// - Panics if the device and the surface don't belong to the same instance. /// - Panics if `usage` is empty. /// // TODO: isn't it unsafe to take the surface through an Arc when it comes to vulkano-win? pub fn build( self, ) -> Result<(Arc<Swapchain<W>>, Vec<Arc<SwapchainImage<W>>>), SwapchainCreationError> { let SwapchainBuilder { device, surface, old_swapchain, num_images, format, color_space, dimensions, layers, usage, sharing_mode, transform, composite_alpha, present_mode, fullscreen_exclusive, clipped, } = self; assert_eq!( device.instance().internal_object(), surface.instance().internal_object() ); // Checking that the requested parameters match the capabilities. 
let capabilities = surface.capabilities(device.physical_device())?; if num_images < capabilities.min_image_count { return Err(SwapchainCreationError::UnsupportedMinImagesCount); } if let Some(c) = capabilities.max_image_count { if num_images > c { return Err(SwapchainCreationError::UnsupportedMaxImagesCount); } } let format = { if let Some(format) = format { if !capabilities .supported_formats .iter() .any(|&(f, c)| f == format && c == color_space) { return Err(SwapchainCreationError::UnsupportedFormat); } format } else { if let Some(format) = [Format::R8G8B8A8_UNORM, Format::B8G8R8A8_UNORM] .iter() .copied() .find(|&format| { capabilities .supported_formats .iter() .any(|&(f, c)| f == format && c == color_space) }) { format } else { return Err(SwapchainCreationError::UnsupportedFormat); } } }; let dimensions = if let Some(dimensions) = dimensions { if dimensions[0] < capabilities.min_image_extent[0] { return Err(SwapchainCreationError::UnsupportedDimensions); } if dimensions[1] < capabilities.min_image_extent[1] { return Err(SwapchainCreationError::UnsupportedDimensions); } if dimensions[0] > capabilities.max_image_extent[0] { return Err(SwapchainCreationError::UnsupportedDimensions); } if dimensions[1] > capabilities.max_image_extent[1] { return Err(SwapchainCreationError::UnsupportedDimensions); } dimensions } else { capabilities.current_extent.unwrap() }; if layers < 1 || layers > capabilities.max_image_array_layers { return Err(SwapchainCreationError::UnsupportedArrayLayers); } if (ash::vk::ImageUsageFlags::from(usage) & ash::vk::ImageUsageFlags::from(capabilities.supported_usage_flags)) != ash::vk::ImageUsageFlags::from(usage) { return Err(SwapchainCreationError::UnsupportedUsageFlags); } if !capabilities.supported_transforms.supports(transform) { return Err(SwapchainCreationError::UnsupportedSurfaceTransform); } if !capabilities .supported_composite_alpha .supports(composite_alpha) { return Err(SwapchainCreationError::UnsupportedCompositeAlpha); } if 
!capabilities.present_modes.supports(present_mode) { return Err(SwapchainCreationError::UnsupportedPresentMode); } let flags = ImageCreateFlags::none(); // check that the physical device supports the swapchain image configuration if device .physical_device() .image_format_properties( format, ImageType::Dim2d, ImageTiling::Optimal, usage, flags, None, None, )? .is_none() { return Err(SwapchainCreationError::UnsupportedImageConfiguration); } // If we recreate a swapchain, make sure that the surface is the same. if let Some(ref sc) = old_swapchain { if surface.internal_object() != sc.surface.internal_object() { return Err(SwapchainCreationError::OldSwapchainSurfaceMismatch); } } else { // Checking that the surface doesn't already have a swapchain. let has_already = surface.flag().swap(true, Ordering::AcqRel); if has_already { return Err(SwapchainCreationError::SurfaceInUse); } } if !device.enabled_extensions().khr_swapchain { return Err(SwapchainCreationError::MissingExtensionKHRSwapchain); } let mut surface_full_screen_exclusive_info = None; // TODO: VK_EXT_FULL_SCREEN_EXCLUSIVE requires these extensions, so they should always // be enabled if it is. A separate check here is unnecessary; this should be checked at // device creation. if device.enabled_extensions().ext_full_screen_exclusive && surface .instance() .enabled_extensions() .khr_get_physical_device_properties2 && surface .instance() .enabled_extensions() .khr_get_surface_capabilities2 { surface_full_screen_exclusive_info = Some(ash::vk::SurfaceFullScreenExclusiveInfoEXT { full_screen_exclusive: fullscreen_exclusive.into(), ..Default::default() }); } let p_next = match surface_full_screen_exclusive_info.as_ref() { Some(some) => unsafe { mem::transmute(some as *const _) }, None => ptr::null(), }; // Required by the specs. 
assert_ne!(usage, ImageUsage::none()); if let Some(ref old_swapchain) = old_swapchain { let mut stale = old_swapchain.stale.lock().unwrap(); // The swapchain has already been used to create a new one. if *stale { return Err(SwapchainCreationError::OldSwapchainAlreadyUsed); } else { // According to the documentation of VkSwapchainCreateInfoKHR: // // > Upon calling vkCreateSwapchainKHR with a oldSwapchain that is not VK_NULL_HANDLE, // > any images not acquired by the application may be freed by the implementation, // > which may occur even if creation of the new swapchain fails. // // Therefore, we set stale to true and keep it to true even if the call to `vkCreateSwapchainKHR` below fails. *stale = true; } } let fns = device.fns(); let swapchain = unsafe { let (sh_mode, sh_count, sh_indices) = match sharing_mode { SharingMode::Exclusive => (ash::vk::SharingMode::EXCLUSIVE, 0, ptr::null()), SharingMode::Concurrent(ref ids) => ( ash::vk::SharingMode::CONCURRENT, ids.len() as u32, ids.as_ptr(), ), }; let infos = ash::vk::SwapchainCreateInfoKHR { p_next, flags: ash::vk::SwapchainCreateFlagsKHR::empty(), surface: surface.internal_object(), min_image_count: num_images, image_format: format.into(), image_color_space: color_space.into(), image_extent: ash::vk::Extent2D { width: dimensions[0], height: dimensions[1], }, image_array_layers: layers, image_usage: usage.into(), image_sharing_mode: sh_mode, queue_family_index_count: sh_count, p_queue_family_indices: sh_indices, pre_transform: transform.into(), composite_alpha: composite_alpha.into(), present_mode: present_mode.into(), clipped: if clipped { ash::vk::TRUE } else { ash::vk::FALSE }, old_swapchain: if let Some(ref old_swapchain) = old_swapchain { old_swapchain.swapchain } else { ash::vk::SwapchainKHR::null() }, ..Default::default() }; let mut output = MaybeUninit::uninit(); check_errors(fns.khr_swapchain.create_swapchain_khr( device.internal_object(), &infos, ptr::null(), output.as_mut_ptr(), ))?; 
output.assume_init() }; let image_handles = unsafe { let mut num = 0; check_errors(fns.khr_swapchain.get_swapchain_images_khr( device.internal_object(), swapchain, &mut num, ptr::null_mut(), ))?; let mut images = Vec::with_capacity(num as usize); check_errors(fns.khr_swapchain.get_swapchain_images_khr( device.internal_object(), swapchain, &mut num, images.as_mut_ptr(), ))?; images.set_len(num as usize); images }; let images = image_handles .into_iter() .map(|image| unsafe { let dims = ImageDimensions::Dim2d { width: dimensions[0], height: dimensions[1], array_layers: layers, }; let img = UnsafeImage::from_raw( device.clone(), image, usage, format, flags, dims, SampleCount::Sample1, 1, ); ImageEntry { image: img, undefined_layout: AtomicBool::new(true), } }) .collect::<Vec<_>>(); let fullscreen_exclusive_held = old_swapchain .as_ref() .map(|old_swapchain| { if old_swapchain.fullscreen_exclusive != FullscreenExclusive::AppControlled { false } else { old_swapchain .fullscreen_exclusive_held .load(Ordering::SeqCst) } }) .unwrap_or(false); let swapchain = Arc::new(Swapchain { device: device.clone(), surface: surface.clone(), swapchain, images, stale: Mutex::new(false), num_images, format, color_space, dimensions, layers, usage: usage.clone(), sharing_mode, transform, composite_alpha, present_mode, fullscreen_exclusive, fullscreen_exclusive_held: AtomicBool::new(fullscreen_exclusive_held), clipped, }); let swapchain_images = unsafe { let mut swapchain_images = Vec::with_capacity(swapchain.images.len()); for n in 0..swapchain.images.len() { swapchain_images.push(SwapchainImage::from_raw(swapchain.clone(), n)?); } swapchain_images }; Ok((swapchain, swapchain_images)) } /// Sets the number of images that will be created. /// /// The default is 2. #[inline] pub fn num_images(mut self, num_images: u32) -> Self { self.num_images = num_images; self } /// Sets the pixel format that will be used for the images. 
/// /// The default is either `R8G8B8A8Unorm` or `B8G8R8A8Unorm`, whichever is supported. #[inline] pub fn format(mut self, format: Format) -> Self { self.format = Some(format); self } /// Sets the color space that will be used for the images. /// /// The default is `SrgbNonLinear`. #[inline] pub fn color_space(mut self, color_space: ColorSpace) -> Self { self.color_space = color_space; self } /// Sets the dimensions of the images. /// /// The default is `None`, which means the value of /// [`Capabilities::current_extent`](crate::swapchain::Capabilities::current_extent) will be /// used. Setting this will override it with a custom `Some` value. #[inline] pub fn dimensions(mut self, dimensions: [u32; 2]) -> Self { self.dimensions = Some(dimensions); self } /// Sets the number of layers for each image. /// /// The default is 1. #[inline] pub fn layers(mut self, layers: u32) -> Self { self.layers = layers; self } /// Sets how the images will be used. /// /// The default is `ImageUsage::none()`. #[inline] pub fn usage(mut self, usage: ImageUsage) -> Self { self.usage = usage; self } /// Sets the sharing mode of the images. /// /// The default is `Exclusive`. #[inline] pub fn sharing_mode<S>(mut self, sharing_mode: S) -> Self where S: Into<SharingMode>, { self.sharing_mode = sharing_mode.into(); self } /// Sets the transform that is to be applied to the surface. /// /// The default is `Identity`. #[inline] pub fn transform(mut self, transform: SurfaceTransform) -> Self { self.transform = transform; self } /// Sets how alpha values of the pixels in the image are to be treated. /// /// The default is `Opaque`. #[inline] pub fn composite_alpha(mut self, composite_alpha: CompositeAlpha) -> Self { self.composite_alpha = composite_alpha; self } /// Sets the present mode for the swapchain. /// /// The default is `Fifo`. 
#[inline] pub fn present_mode(mut self, present_mode: PresentMode) -> Self { self.present_mode = present_mode; self } /// Sets how fullscreen exclusivity is to be handled. /// /// The default is `Default`. #[inline] pub fn fullscreen_exclusive(mut self, fullscreen_exclusive: FullscreenExclusive) -> Self { self.fullscreen_exclusive = fullscreen_exclusive; self } /// Sets whether the implementation is allowed to discard rendering operations that affect /// regions of the surface which aren't visible. This is important to take into account if /// your fragment shader has side-effects or if you want to read back the content of the image /// afterwards. /// /// The default is `true`. #[inline] pub fn clipped(mut self, clipped: bool) -> Self { self.clipped = clipped; self } } /// Error that can happen when creation a swapchain. #[derive(Clone, Debug, PartialEq, Eq)] pub enum SwapchainCreationError { /// Not enough memory. OomError(OomError), /// The device was lost. DeviceLost, /// The surface was lost. SurfaceLost, /// The surface is already used by another swapchain. SurfaceInUse, /// The window is already in use by another API. NativeWindowInUse, /// The `VK_KHR_swapchain` extension was not enabled. MissingExtensionKHRSwapchain, /// The `VK_EXT_full_screen_exclusive` extension was not enabled. MissingExtensionExtFullScreenExclusive, /// Surface mismatch between old and new swapchain. OldSwapchainSurfaceMismatch, /// The old swapchain has already been used to recreate another one. OldSwapchainAlreadyUsed, /// The requested number of swapchain images is not supported by the surface. UnsupportedMinImagesCount, /// The requested number of swapchain images is not supported by the surface. UnsupportedMaxImagesCount, /// The requested image format is not supported by the surface. UnsupportedFormat, /// The requested dimensions are not supported by the surface. UnsupportedDimensions, /// The requested array layers count is not supported by the surface. 
UnsupportedArrayLayers, /// The requested image usage is not supported by the surface. UnsupportedUsageFlags, /// The requested surface transform is not supported by the surface. UnsupportedSurfaceTransform, /// The requested composite alpha is not supported by the surface. UnsupportedCompositeAlpha, /// The requested present mode is not supported by the surface. UnsupportedPresentMode, /// The image configuration is not supported by the physical device. UnsupportedImageConfiguration, } impl error::Error for SwapchainCreationError { #[inline] fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { SwapchainCreationError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for SwapchainCreationError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!( fmt, "{}", match *self { SwapchainCreationError::OomError(_) => "not enough memory available", SwapchainCreationError::DeviceLost => "the device was lost", SwapchainCreationError::SurfaceLost => "the surface was lost", SwapchainCreationError::SurfaceInUse => { "the surface is already used by another swapchain" } SwapchainCreationError::NativeWindowInUse => { "the window is already in use by another API" } SwapchainCreationError::MissingExtensionKHRSwapchain => { "the `VK_KHR_swapchain` extension was not enabled" } SwapchainCreationError::MissingExtensionExtFullScreenExclusive => { "the `VK_EXT_full_screen_exclusive` extension was not enabled" } SwapchainCreationError::OldSwapchainSurfaceMismatch => { "surface mismatch between old and new swapchain" } SwapchainCreationError::OldSwapchainAlreadyUsed => { "old swapchain has already been used to recreate a new one" } SwapchainCreationError::UnsupportedMinImagesCount => { "the requested number of swapchain images is not supported by the surface" } SwapchainCreationError::UnsupportedMaxImagesCount => { "the requested number of swapchain images is not supported by the surface" } 
SwapchainCreationError::UnsupportedFormat => { "the requested image format is not supported by the surface" } SwapchainCreationError::UnsupportedDimensions => { "the requested dimensions are not supported by the surface" } SwapchainCreationError::UnsupportedArrayLayers => { "the requested array layers count is not supported by the surface" } SwapchainCreationError::UnsupportedUsageFlags => { "the requested image usage is not supported by the surface" } SwapchainCreationError::UnsupportedSurfaceTransform => { "the requested surface transform is not supported by the surface" } SwapchainCreationError::UnsupportedCompositeAlpha => { "the requested composite alpha is not supported by the surface" } SwapchainCreationError::UnsupportedPresentMode => { "the requested present mode is not supported by the surface" } SwapchainCreationError::UnsupportedImageConfiguration => { "the requested image configuration is not supported by the physical device" } } ) } } impl From<Error> for SwapchainCreationError { #[inline] fn from(err: Error) -> SwapchainCreationError { match err { err @ Error::OutOfHostMemory => SwapchainCreationError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => SwapchainCreationError::OomError(OomError::from(err)), Error::DeviceLost => SwapchainCreationError::DeviceLost, Error::SurfaceLost => SwapchainCreationError::SurfaceLost, Error::NativeWindowInUse => SwapchainCreationError::NativeWindowInUse, _ => panic!("unexpected error: {:?}", err), } } } impl From<OomError> for SwapchainCreationError { #[inline] fn from(err: OomError) -> SwapchainCreationError { SwapchainCreationError::OomError(err) } } impl From<CapabilitiesError> for SwapchainCreationError { #[inline] fn from(err: CapabilitiesError) -> SwapchainCreationError { match err { CapabilitiesError::OomError(err) => SwapchainCreationError::OomError(err), CapabilitiesError::SurfaceLost => SwapchainCreationError::SurfaceLost, } } } /// Represents the moment when the GPU will have access to a 
swapchain image. #[must_use] pub struct SwapchainAcquireFuture<W> { swapchain: Arc<Swapchain<W>>, image_id: usize, // Semaphore that is signalled when the acquire is complete. Empty if the acquire has already // happened. semaphore: Option<Semaphore>, // Fence that is signalled when the acquire is complete. Empty if the acquire has already // happened. fence: Option<Fence>, finished: AtomicBool, } impl<W> SwapchainAcquireFuture<W> { /// Returns the index of the image in the list of images returned when creating the swapchain. #[inline] pub fn image_id(&self) -> usize { self.image_id } /// Returns the corresponding swapchain. #[inline] pub fn swapchain(&self) -> &Arc<Swapchain<W>> { &self.swapchain } } unsafe impl<W> GpuFuture for SwapchainAcquireFuture<W> { #[inline] fn cleanup_finished(&mut self) {} #[inline] unsafe fn build_submission(&self) -> Result<SubmitAnyBuilder, FlushError> { if let Some(ref semaphore) = self.semaphore { let mut sem = SubmitSemaphoresWaitBuilder::new(); sem.add_wait_semaphore(&semaphore); Ok(SubmitAnyBuilder::SemaphoresWait(sem)) } else { Ok(SubmitAnyBuilder::Empty) } } #[inline] fn flush(&self) -> Result<(), FlushError> { Ok(()) } #[inline] unsafe fn signal_finished(&self) { self.finished.store(true, Ordering::SeqCst); } #[inline] fn queue_change_allowed(&self) -> bool { true } #[inline] fn queue(&self) -> Option<Arc<Queue>> { None } #[inline] fn check_buffer_access( &self, _: &dyn BufferAccess, _: bool, _: &Queue, ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> { Err(AccessCheckError::Unknown) } #[inline] fn check_image_access( &self, image: &dyn ImageAccess, layout: ImageLayout, _: bool, _: &Queue, ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> { let swapchain_image = self.swapchain.raw_image(self.image_id).unwrap(); if swapchain_image.image.internal_object() != image.inner().image.internal_object() { return Err(AccessCheckError::Unknown); } if self.swapchain.images[self.image_id] 
.undefined_layout .load(Ordering::Relaxed) && layout != ImageLayout::Undefined { return Err(AccessCheckError::Denied(AccessError::ImageNotInitialized { requested: layout, })); } if layout != ImageLayout::Undefined && layout != ImageLayout::PresentSrc { return Err(AccessCheckError::Denied( AccessError::UnexpectedImageLayout { allowed: ImageLayout::PresentSrc, requested: layout, }, )); } Ok(None) } } unsafe impl<W> DeviceOwned for SwapchainAcquireFuture<W> { #[inline] fn device(&self) -> &Arc<Device> { &self.swapchain.device } } impl<W> Drop for SwapchainAcquireFuture<W> { fn drop(&mut self) { if let Some(ref fence) = self.fence { fence.wait(None).unwrap(); // TODO: handle error? self.semaphore = None; } // TODO: if this future is destroyed without being presented, then eventually acquiring // a new image will block forever ; difficulty: hard } } /// Error that can happen when calling `Swapchain::acquire_fullscreen_exclusive` or `Swapchain::release_fullscreen_exclusive` #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(u32)] pub enum FullscreenExclusiveError { /// Not enough memory. OomError(OomError), /// Operation could not be completed for driver specific reasons. InitializationFailed, /// The surface is no longer accessible and must be recreated. SurfaceLost, /// Fullscreen exclusivity is already acquired. DoubleAcquire, /// Fullscreen exclusivity is not current acquired. 
DoubleRelease, /// Swapchain is not in fullscreen exclusive app controlled mode NotAppControlled, } impl From<Error> for FullscreenExclusiveError { #[inline] fn from(err: Error) -> FullscreenExclusiveError { match err { err @ Error::OutOfHostMemory => FullscreenExclusiveError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => { FullscreenExclusiveError::OomError(OomError::from(err)) } Error::SurfaceLost => FullscreenExclusiveError::SurfaceLost, Error::InitializationFailed => FullscreenExclusiveError::InitializationFailed, _ => panic!("unexpected error: {:?}", err), } } } impl From<OomError> for FullscreenExclusiveError { #[inline] fn from(err: OomError) -> FullscreenExclusiveError { FullscreenExclusiveError::OomError(err) } } impl error::Error for FullscreenExclusiveError { #[inline] fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { FullscreenExclusiveError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for FullscreenExclusiveError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!( fmt, "{}", match *self { FullscreenExclusiveError::OomError(_) => "not enough memory", FullscreenExclusiveError::SurfaceLost => { "the surface of this swapchain is no longer valid" } FullscreenExclusiveError::InitializationFailed => { "operation could not be completed for driver specific reasons" } FullscreenExclusiveError::DoubleAcquire => "fullscreen exclusivity is already acquired", FullscreenExclusiveError::DoubleRelease => "fullscreen exclusivity is not acquired", FullscreenExclusiveError::NotAppControlled => { "swapchain is not in fullscreen exclusive app controlled mode" } } ) } } /// Error that can happen when calling `acquire_next_image`. #[derive(Copy, Clone, Debug, PartialEq, Eq)] #[repr(u32)] pub enum AcquireError { /// Not enough memory. OomError(OomError), /// The connection to the device has been lost. 
DeviceLost, /// The timeout of the function has been reached before an image was available. Timeout, /// The surface is no longer accessible and must be recreated. SurfaceLost, /// The swapchain has lost or doesn't have fullscreen exclusivity possibly for /// implementation-specific reasons outside of the application’s control. FullscreenExclusiveLost, /// The surface has changed in a way that makes the swapchain unusable. You must query the /// surface's new properties and recreate a new swapchain if you want to continue drawing. OutOfDate, /// Error during semaphore creation SemaphoreError(SemaphoreError), } impl error::Error for AcquireError { #[inline] fn source(&self) -> Option<&(dyn error::Error + 'static)> { match *self { AcquireError::OomError(ref err) => Some(err), _ => None, } } } impl fmt::Display for AcquireError { #[inline] fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> { write!( fmt, "{}", match *self { AcquireError::OomError(_) => "not enough memory", AcquireError::DeviceLost => "the connection to the device has been lost", AcquireError::Timeout => "no image is available for acquiring yet", AcquireError::SurfaceLost => "the surface of this swapchain is no longer valid", AcquireError::OutOfDate => "the swapchain needs to be recreated", AcquireError::FullscreenExclusiveLost => { "the swapchain no longer has fullscreen exclusivity" } AcquireError::SemaphoreError(_) => "error creating semaphore", } ) } } impl From<SemaphoreError> for AcquireError { fn from(err: SemaphoreError) -> Self { AcquireError::SemaphoreError(err) } } impl From<OomError> for AcquireError { #[inline] fn from(err: OomError) -> AcquireError { AcquireError::OomError(err) } } impl From<Error> for AcquireError { #[inline] fn from(err: Error) -> AcquireError { match err { err @ Error::OutOfHostMemory => AcquireError::OomError(OomError::from(err)), err @ Error::OutOfDeviceMemory => AcquireError::OomError(OomError::from(err)), Error::DeviceLost => 
AcquireError::DeviceLost, Error::SurfaceLost => AcquireError::SurfaceLost, Error::OutOfDate => AcquireError::OutOfDate, Error::FullscreenExclusiveLost => AcquireError::FullscreenExclusiveLost, _ => panic!("unexpected error: {:?}", err), } } } /// Represents a swapchain image being presented on the screen. #[must_use = "Dropping this object will immediately block the thread until the GPU has finished processing the submission"] pub struct PresentFuture<P, W> where P: GpuFuture, { previous: P, queue: Arc<Queue>, swapchain: Arc<Swapchain<W>>, image_id: usize, present_region: Option<PresentRegion>, // True if `flush()` has been called on the future, which means that the present command has // been submitted. flushed: AtomicBool, // True if `signal_finished()` has been called on the future, which means that the future has // been submitted and has already been processed by the GPU. finished: AtomicBool, } impl<P, W> PresentFuture<P, W> where P: GpuFuture, { /// Returns the index of the image in the list of images returned when creating the swapchain. #[inline] pub fn image_id(&self) -> usize { self.image_id } /// Returns the corresponding swapchain. #[inline] pub fn swapchain(&self) -> &Arc<Swapchain<W>> { &self.swapchain } } unsafe impl<P, W> GpuFuture for PresentFuture<P, W> where P: GpuFuture, { #[inline] fn cleanup_finished(&mut self) { self.previous.cleanup_finished(); } #[inline] unsafe fn build_submission(&self) -> Result<SubmitAnyBuilder, FlushError> { if self.flushed.load(Ordering::SeqCst) { return Ok(SubmitAnyBuilder::Empty); } let queue = self.previous.queue().map(|q| q.clone()); // TODO: if the swapchain image layout is not PRESENT, should add a transition command // buffer Ok(match self.previous.build_submission()? 
{ SubmitAnyBuilder::Empty => { let mut builder = SubmitPresentBuilder::new(); builder.add_swapchain( &self.swapchain, self.image_id as u32, self.present_region.as_ref(), ); SubmitAnyBuilder::QueuePresent(builder) } SubmitAnyBuilder::SemaphoresWait(sem) => { let mut builder: SubmitPresentBuilder = sem.into(); builder.add_swapchain( &self.swapchain, self.image_id as u32, self.present_region.as_ref(), ); SubmitAnyBuilder::QueuePresent(builder) } SubmitAnyBuilder::CommandBuffer(cb) => { // submit the command buffer by flushing previous. // Since the implementation should remember being flushed it's safe to call build_submission multiple times self.previous.flush()?; let mut builder = SubmitPresentBuilder::new(); builder.add_swapchain( &self.swapchain, self.image_id as u32, self.present_region.as_ref(), ); SubmitAnyBuilder::QueuePresent(builder) } SubmitAnyBuilder::BindSparse(cb) => { // submit the command buffer by flushing previous. // Since the implementation should remember being flushed it's safe to call build_submission multiple times self.previous.flush()?; let mut builder = SubmitPresentBuilder::new(); builder.add_swapchain( &self.swapchain, self.image_id as u32, self.present_region.as_ref(), ); SubmitAnyBuilder::QueuePresent(builder) } SubmitAnyBuilder::QueuePresent(present) => { unimplemented!() // TODO: /*present.submit(); let mut builder = SubmitPresentBuilder::new(); builder.add_swapchain(self.command_buffer.inner(), self.image_id); SubmitAnyBuilder::CommandBuffer(builder)*/ } }) } #[inline] fn flush(&self) -> Result<(), FlushError> { unsafe { // If `flushed` already contains `true`, then `build_submission` will return `Empty`. let build_submission_result = self.build_submission(); if let &Err(FlushError::FullscreenExclusiveLost) = &build_submission_result { self.swapchain .fullscreen_exclusive_held .store(false, Ordering::SeqCst); } match build_submission_result? 
{ SubmitAnyBuilder::Empty => {} SubmitAnyBuilder::QueuePresent(present) => { let present_result = present.submit(&self.queue); if let &Err(SubmitPresentError::FullscreenExclusiveLost) = &present_result { self.swapchain .fullscreen_exclusive_held .store(false, Ordering::SeqCst); } present_result?; } _ => unreachable!(), } self.flushed.store(true, Ordering::SeqCst); Ok(()) } } #[inline] unsafe fn signal_finished(&self) { self.flushed.store(true, Ordering::SeqCst); self.finished.store(true, Ordering::SeqCst); self.previous.signal_finished(); } #[inline] fn queue_change_allowed(&self) -> bool { false } #[inline] fn queue(&self) -> Option<Arc<Queue>> { debug_assert!(match self.previous.queue() { None => true, Some(q) => q.is_same(&self.queue), }); Some(self.queue.clone()) } #[inline] fn check_buffer_access( &self, buffer: &dyn BufferAccess, exclusive: bool, queue: &Queue, ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> { self.previous.check_buffer_access(buffer, exclusive, queue) } #[inline] fn check_image_access( &self, image: &dyn ImageAccess, layout: ImageLayout, exclusive: bool, queue: &Queue, ) -> Result<Option<(PipelineStages, AccessFlags)>, AccessCheckError> { let swapchain_image = self.swapchain.raw_image(self.image_id).unwrap(); if swapchain_image.image.internal_object() == image.inner().image.internal_object() { // This future presents the swapchain image, which "unlocks" it. Therefore any attempt // to use this swapchain image afterwards shouldn't get granted automatic access. // Instead any attempt to access the image afterwards should get an authorization from // a later swapchain acquire future. Hence why we return `Unknown` here. 
Err(AccessCheckError::Unknown) } else { self.previous .check_image_access(image, layout, exclusive, queue) } } } unsafe impl<P, W> DeviceOwned for PresentFuture<P, W> where P: GpuFuture, { #[inline] fn device(&self) -> &Arc<Device> { self.queue.device() } } impl<P, W> Drop for PresentFuture<P, W> where P: GpuFuture, { fn drop(&mut self) { unsafe { if !*self.finished.get_mut() { match self.flush() { Ok(()) => { // Block until the queue finished. self.queue().unwrap().wait().unwrap(); self.previous.signal_finished(); } Err(_) => { // In case of error we simply do nothing, as there's nothing to do // anyway. } } } } } } pub struct AcquiredImage { pub id: usize, pub suboptimal: bool, } /// Unsafe variant of `acquire_next_image`. /// /// # Safety /// /// - The semaphore and/or the fence must be kept alive until it is signaled. /// - The swapchain must not have been replaced by being passed as the old swapchain when creating /// a new one. pub unsafe fn acquire_next_image_raw<W>( swapchain: &Swapchain<W>, timeout: Option<Duration>, semaphore: Option<&Semaphore>, fence: Option<&Fence>, ) -> Result<AcquiredImage, AcquireError> { let fns = swapchain.device.fns(); let timeout_ns = if let Some(timeout) = timeout { timeout .as_secs() .saturating_mul(1_000_000_000) .saturating_add(timeout.subsec_nanos() as u64) } else { u64::MAX }; let mut out = MaybeUninit::uninit(); let r = check_errors( fns.khr_swapchain.acquire_next_image_khr( swapchain.device.internal_object(), swapchain.swapchain, timeout_ns, semaphore .map(|s| s.internal_object()) .unwrap_or(ash::vk::Semaphore::null()), fence .map(|f| f.internal_object()) .unwrap_or(ash::vk::Fence::null()), out.as_mut_ptr(), ), )?; let out = out.assume_init(); let (id, suboptimal) = match r { Success::Success => (out as usize, false), Success::Suboptimal => (out as usize, true), Success::NotReady => return Err(AcquireError::Timeout), Success::Timeout => return Err(AcquireError::Timeout), s => panic!("unexpected success value: {:?}", s), 
}; Ok(AcquiredImage { id, suboptimal }) }
34.811812
144
0.597156
d636935c5926297e6610ad78806cf68ba588fec0
1,416
/*
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 */

use common::SourceLocationKey;
use fixture_tests::Fixture;
use graphql_ir::{build, Program};
use graphql_syntax::parse_executable;
use graphql_text_printer::{print_fragment, print_operation};
use relay_test_schema::get_test_schema_with_extensions;
use relay_transforms::flatten;
use std::sync::Arc;

/// Runs the `flatten` transform over a GraphQL fixture and returns the
/// printed fragments and operations, sorted and separated by blank lines.
pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> {
    // Parse the fixture text into an executable document.
    let location = SourceLocationKey::standalone(fixture.file_name);
    let document = parse_executable(fixture.content, location).unwrap();

    // Test schema extended with the directive these fixtures exercise.
    let schema = get_test_schema_with_extensions(
        r#" directive @serverInlineDirective on INLINE_FRAGMENT"#,
    );

    let ir = build(&schema, &document.definitions).unwrap();
    let mut program = Program::from_definitions(Arc::clone(&schema), ir);

    // Fixtures containing the "%for_printing%" marker run `flatten` with the
    // flag disabled; all others run it with the flag enabled.
    flatten(&mut program, !fixture.content.contains("%for_printing%")).unwrap();

    // Print every fragment and operation, then sort for deterministic output.
    let mut printed: Vec<String> = program
        .fragments()
        .map(|fragment| print_fragment(&schema, fragment))
        .collect();
    printed.extend(
        program
            .operations()
            .map(|operation| print_operation(&schema, operation)),
    );
    printed.sort();
    Ok(printed.join("\n\n"))
}
34.536585
80
0.701977
e801438caf4b2ab0a10ec8080f30eec4cefd6695
792
use crate::{shell::shell_operation::ShellOperation, utils::intersperse}; pub struct CombinedOperation { operations: Vec<Box<dyn ShellOperation>>, } impl CombinedOperation { pub fn new(operations: Vec<Box<dyn ShellOperation>>) -> Self { CombinedOperation { operations, } } } impl ShellOperation for CombinedOperation { fn bash_eval(&self) -> String { let evals = self.operations.iter().map(|e| e.bash_eval()); intersperse(evals, '\n') } fn fish_eval(&self) -> String { let evals = self.operations.iter().map(|e| e.fish_eval()); intersperse(evals, '\n') } fn zsh_eval(&self) -> String { let evals = self.operations.iter().map(|e| e.zsh_eval()); intersperse(evals, '\n') } }
23.294118
72
0.606061
64694d4fd458022e6276b155c5ff6af1a20a98db
1,517
//! Module that contains the selected version of Mutex/RwLock. #[cfg(feature = "parking_lot")] pub use parking_lot::{Mutex, RwLock}; #[cfg(not(feature = "parking_lot"))] pub use self::wrapper::{Mutex, RwLock}; #[cfg(not(feature = "parking_lot"))] #[allow(dead_code)] mod wrapper { use std::sync::{MutexGuard, RwLockReadGuard, RwLockWriteGuard}; #[derive(Debug, Default)] pub struct Mutex<T>(std::sync::Mutex<T>); impl<T> Mutex<T> { #[inline] pub fn new(val: T) -> Self { Mutex(std::sync::Mutex::new(val)) } #[inline] pub fn lock(&self) -> MutexGuard<T> { self.0.lock().unwrap() } #[inline] pub fn try_lock(&self) -> Option<MutexGuard<T>> { self.0.try_lock().ok() } } #[derive(Debug, Default)] pub struct RwLock<T>(std::sync::RwLock<T>); impl<T> RwLock<T> { #[inline] pub fn new(val: T) -> Self { RwLock(std::sync::RwLock::new(val)) } #[inline] pub fn read(&self) -> RwLockReadGuard<T> { self.0.read().unwrap() } #[inline] pub fn write(&self) -> RwLockWriteGuard<T> { self.0.write().unwrap() } #[inline] pub fn try_read(&self) -> Option<RwLockReadGuard<T>> { self.0.try_read().ok() } #[inline] pub fn try_write(&self) -> Option<RwLockWriteGuard<T>> { self.0.try_write().ok() } } }
23.703125
67
0.516809
4a4da80ceac0339451d19d117c3871036e331046
3,971
use super::{interpret::eval_expression, interpreter_env::*, types::*}; use crate::{ ast::expr::{FunctionValue, GetAccessor}, env::*, token::*, }; use std::{cell::RefMut, rc::Rc}; // A shorthand way to extract identifier's name pub fn assume_identifier(t: &Token) -> &str { match &t.token_type { TokenType::Identifier(i) => i, TokenType::Super => "super", TokenType::This => "this", _ => unreachable!("Couldn't extract identifier."), } } pub fn guard_function( ibv: StmtResult<InterpreterValue>, ) -> Result<InterpreterValue, RuntimeError> { match ibv { StmtResult::Break(token) => Err(RuntimeError { message: "Cannot use `break` outside of a loop".into(), token, }), StmtResult::Continue(token) => Err(RuntimeError { message: "Cannot use `continue` outside of a loop".into(), token, }), StmtResult::Return { value, .. } => Ok(value), StmtResult::Noop => Ok(InterpreterValue::Nil), } } #[inline(always)] pub fn confirm_arity( target: usize, value: usize, blame: &Token, ) -> Result<(), RuntimeError> { if target != value { Err(RuntimeError { message: format!( "{} arguments", if value > target { "Too many" } else { "Not enough" } ), token: blame.clone(), }) } else { Ok(()) } } #[inline(always)] pub fn map_arguments( parameters: &[Token], arguments: &[InterpreterValue], fun_env: &InterpreterEnvironment, ) { parameters.iter().zip(arguments).for_each(|(param, arg)| { let name = assume_identifier(param); fun_env.declare( name.to_string(), DeclaredValue { mutable: true, value: arg.clone(), }, ); }) } #[inline(always)] pub fn construct_lox_defined_function( fv: &FunctionValue, env: &InterpreterEnvironment, ) -> InterpreterValue { InterpreterValue::Function { enclosing_env: env.clone(), fun: Rc::new(InterpreterFunction::LoxDefined(FunctionValue { body: fv.body.as_ref().map(|b| Rc::clone(b)), keyword: fv.keyword.clone(), name: fv.name.clone(), params: fv.params.as_ref().map(|p| Rc::clone(p)), })), } } pub fn bind_function( fun: &InterpreterValue, instance: InterpreterValue, ) -> 
InterpreterValue { let (fun, new_env) = if let InterpreterValue::Function { fun, enclosing_env } = fun { (fun.clone(), enclosing_env.fork()) } else { unreachable!("CHuju kurwa panie") }; new_env.declare( "this".into(), DeclaredValue { mutable: false, value: instance, }, ); InterpreterValue::Function { fun, enclosing_env: new_env, } } #[inline(always)] pub fn unwrap_list<'a>( value: &'a InterpreterValue, blame: &Token, arg_index: usize, override_msg: Option<String>, ) -> Result<RefMut<'a, Vec<InterpreterValue>>, RuntimeError> { if let InterpreterValue::List(l) = &value { Ok(l.borrow_mut()) } else { Err(RuntimeError { message: override_msg.unwrap_or_else(|| { format!("Argument {} must be of type list", arg_index) }), token: blame.clone(), }) } } pub fn extract_subscription_index( accessor: &GetAccessor, blame: &Token, max_len: usize, env: &InterpreterEnvironment, ) -> Result<usize, RuntimeError> { let extracted_n = match &accessor { GetAccessor::SubscriptionNumber(n) => Ok(*n), GetAccessor::SubscriptionEval(expr) => { let eval = eval_expression(expr, env)?; if let InterpreterValue::Number(n) = eval { Ok(n) } else { Err(RuntimeError { message: format!( "Cannot use {} for indexing", eval.human_type() ), token: blame.clone(), }) } } _ => unreachable!("Wrong accessor in subscription"), }?; if extracted_n.fract() != 0.0 || extracted_n < 0.0 { return Err(RuntimeError { message: format!("Cannot access element on index {}", extracted_n), token: blame.clone(), }); } let index = extracted_n as usize; if index >= max_len { Err(RuntimeError { message: format!("Index {} out of bounds", extracted_n), token: blame.clone(), }) } else { Ok(extracted_n as usize) } }
21.581522
70
0.645933
f78225adef3bd86928c83e30fe35b3b101ba5d88
2,111
use std::collections::{VecDeque}; use ::channel::{SinglePoll}; use ::proxy::{self, Proxy, Control, Eid}; use ::proxy_handle::{self, ProxyWrapper, Handle, UserProxy, UserHandle}; pub use proxy_handle::{Tx, Rx}; pub struct DummyProxy {} impl DummyProxy { fn new() -> Self { Self {} } } impl Proxy for DummyProxy { fn attach(&mut self, _ctrl: &Control) -> ::Result<()> { Ok(()) } fn detach(&mut self, _ctrl: &Control) -> ::Result<()> { Ok(()) } fn process(&mut self, _ctrl: &mut Control, _readiness: mio::Ready, _eid: Eid) -> ::Result<()> { Ok(()) } } impl UserProxy<Tx, Rx> for DummyProxy { fn process_channel(&mut self, _ctrl: &mut Control, _msg: Tx) -> ::Result<()> { Ok(()) } } pub struct DummyHandle { pub msgs: VecDeque<Rx>, } impl DummyHandle { fn new() -> Self { Self { msgs: VecDeque::new(), } } } impl UserHandle<Tx, Rx> for DummyHandle { fn process_channel(&mut self, msg: Rx) -> ::Result<()> { self.msgs.push_back(msg); Ok(()) } } pub fn create() -> ::Result<(ProxyWrapper<DummyProxy, Tx, Rx>, Handle<DummyHandle, Tx, Rx>)> { proxy_handle::create(DummyProxy::new(), DummyHandle::new()) } pub fn wait_msgs(h: &mut Handle<DummyHandle, Tx, Rx>, sp: &mut SinglePoll, n: usize) -> ::Result<()> { let ns = h.user.msgs.len(); loop { if let Err(e) = sp.wait(None) { break Err(::Error::Channel(e.into())); } if let Err(e) = h.process() { break Err(e); } if h.user.msgs.len() - ns >= n { break Ok(()); } } } pub fn wait_close(h: &mut Handle<DummyHandle, Tx, Rx>, sp: &mut SinglePoll) -> ::Result<()> { loop { if let Err(e) = sp.wait(None) { break Err(::Error::Channel(e.into())); } match h.process() { Ok(()) => continue, Err(err) => match err { ::Error::Proxy(proxy::Error::Closed) => break Ok(()), other => break Err(other), } } } }
23.988636
102
0.516817
9137eed439e3420146dce767ed02a51d31c90b9c
1,506
// Copyright (c) 2019, Facebook, Inc. // All rights reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the "hack" directory of this source tree. use std::convert::From; use oxidized::full_fidelity_parser_env::FullFidelityParserEnv; #[derive(Clone, Debug, Default)] pub struct ParserEnv { pub codegen: bool, pub hhvm_compat_mode: bool, pub php5_compat_mode: bool, pub allow_new_attribute_syntax: bool, pub enable_xhp_class_modifier: bool, pub disable_xhp_element_mangling: bool, pub disable_xhp_children_declarations: bool, pub disable_modes: bool, pub disallow_hash_comments: bool, pub disallow_fun_and_cls_meth_pseudo_funcs: bool, } impl From<FullFidelityParserEnv> for ParserEnv { fn from(env: FullFidelityParserEnv) -> Self { Self { hhvm_compat_mode: env.hhvm_compat_mode, php5_compat_mode: env.php5_compat_mode, codegen: env.codegen, allow_new_attribute_syntax: env.allow_new_attribute_syntax, enable_xhp_class_modifier: env.enable_xhp_class_modifier, disable_xhp_element_mangling: env.disable_xhp_element_mangling, disable_xhp_children_declarations: env.disable_xhp_children_declarations, disable_modes: env.disable_modes, disallow_hash_comments: env.disallow_hash_comments, disallow_fun_and_cls_meth_pseudo_funcs: env.disallow_fun_and_cls_meth_pseudo_funcs, } } }
36.731707
95
0.733068
26694d2e9b12df851562b425077ea84eaa57df00
3,068
extern crate pom; use pom::char_class::hex_digit; use pom::parser::*; use std::char::{decode_utf16, REPLACEMENT_CHARACTER}; use std::collections::HashMap; use std::str::{self, FromStr}; #[derive(Debug, PartialEq)] pub enum JsonValue { Null, Bool(bool), Str(String), Num(f64), Array(Vec<JsonValue>), Object(HashMap<String, JsonValue>), } fn space<'a>() -> Parser<'a, u8, ()> { one_of(b" \t\r\n").repeat(0..).discard() } fn number<'a>() -> Parser<'a, u8, f64> { let integer = one_of(b"123456789") - one_of(b"0123456789").repeat(0..) | sym(b'0'); let frac = sym(b'.') + one_of(b"0123456789").repeat(1..); let exp = one_of(b"eE") + one_of(b"+-").opt() + one_of(b"0123456789").repeat(1..); let number = sym(b'-').opt() + integer + frac.opt() + exp.opt(); number.collect().convert(str::from_utf8).convert(f64::from_str) } fn string<'a>() -> Parser<'a, u8, String> { let special_char = sym(b'\\') | sym(b'/') | sym(b'"') | sym(b'b').map(|_|b'\x08') | sym(b'f').map(|_|b'\x0C') | sym(b'n').map(|_|b'\n') | sym(b'r').map(|_|b'\r') | sym(b't').map(|_|b'\t'); let escape_sequence = sym(b'\\') * special_char; let char_string = (none_of(b"\\\"") | escape_sequence).repeat(1..).convert(String::from_utf8); let utf16_char = seq(b"\\u") * is_a(hex_digit).repeat(4).convert(String::from_utf8).convert(|digits|u16::from_str_radix(&digits, 16)); let utf16_string = utf16_char.repeat(1..).map(|chars|decode_utf16(chars).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect::<String>()); let string = sym(b'"') * (char_string | utf16_string).repeat(0..) 
- sym(b'"'); string.map(|strings| strings.concat()) } fn array<'a>() -> Parser<'a, u8, Vec<JsonValue>> { let elems = list(call(value), sym(b',') * space()); sym(b'[') * space() * elems - sym(b']') } fn object<'a>() -> Parser<'a, u8, HashMap<String, JsonValue>> { let member = string() - space() - sym(b':') - space() + call(value); let members = list(member, sym(b',') * space()); let obj = sym(b'{') * space() * members - sym(b'}'); obj.map(|members| members.into_iter().collect::<HashMap<_, _>>()) } fn value<'a>() -> Parser<'a, u8, JsonValue> { ( seq(b"null").map(|_|JsonValue::Null) | seq(b"true").map(|_|JsonValue::Bool(true)) | seq(b"false").map(|_|JsonValue::Bool(false)) | number().map(|num|JsonValue::Num(num)) | string().map(|text|JsonValue::Str(text)) | array().map(|arr|JsonValue::Array(arr)) | object().map(|obj|JsonValue::Object(obj)) ) - space() } pub fn json<'a>() -> Parser<'a, u8, JsonValue> { space() * value() - end() } #[allow(dead_code)] fn main() { let input = br#" { "Image": { "Width": 800, "Height": 600, "Title": "View from 15th Floor", "Thumbnail": { "Url": "http://www.example.com/image/481989943", "Height": 125, "Width": 100 }, "Animated" : false, "IDs": [116, 943, 234, 38793] }, "escaped characters": "\u2192\uD83D\uDE00\"\t\uD834\uDD1E" }"#; println!("{:?}", json().parse(input)); }
33.714286
139
0.578879
3368bd4dc35e2752cd736ae19cc83b1a80a077a0
102
pub trait IIntersect<Other, Result> { fn intersect(&mut self, other: &Other) -> Option<Result>; }
25.5
61
0.676471
e2f8f7c8ebf0a6348b4fc267cefa6bb00e59ce81
922
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. type compare<T> = |Box<T>, Box<T>|: 'static -> bool; fn test_generic<T:Clone>(expected: Box<T>, eq: compare<T>) { let actual: Box<T> = match true { true => { expected.clone() }, _ => fail!("wat") }; assert!((eq(expected, actual))); } fn test_box() { fn compare_box(b1: Box<bool>, b2: Box<bool>) -> bool { return *b1 == *b2; } test_generic::<bool>(box true, compare_box); } pub fn main() { test_box(); }
30.733333
68
0.645336
56351f0b681bb626271ab046bfa14b872f142d8a
2,125
// Copyright (c) 2020 Ant Financial // // SPDX-License-Identifier: Apache-2.0 // use std::fs::File; use std::io::{Read, Write}; use ttrpc_codegen::Codegen; use ttrpc_codegen::Customize; fn main() { let protos = vec![ "protocols/protos/github.com/kata-containers/agent/pkg/types/types.proto", "protocols/protos/agent.proto", "protocols/protos/health.proto", "protocols/protos/google/protobuf/empty.proto", "protocols/protos/oci.proto", ]; // Tell Cargo that if the .proto files changed, to rerun this build script. protos .iter() .for_each(|p| println!("cargo:rerun-if-changed={}", &p)); Codegen::new() .out_dir("protocols/sync") .inputs(&protos) .include("protocols/protos") .rust_protobuf() .customize(Customize { ..Default::default() }) .run() .expect("Gen sync code failed."); Codegen::new() .out_dir("protocols/asynchronous") .inputs(&protos) .include("protocols/protos") .rust_protobuf() .customize(Customize { async_all: true, ..Default::default() }) .run() .expect("Gen async code failed."); // There is a message named 'Box' in oci.proto // so there is a struct named 'Box', we should replace Box<Self> to ::std::boxed::Box<Self> // to avoid the conflict. replace_text_in_file( "protocols/sync/oci.rs", "self: Box<Self>", "self: ::std::boxed::Box<Self>", ) .unwrap(); replace_text_in_file( "protocols/asynchronous/oci.rs", "self: Box<Self>", "self: ::std::boxed::Box<Self>", ) .unwrap(); } fn replace_text_in_file(file_name: &str, from: &str, to: &str) -> Result<(), std::io::Error> { let mut src = File::open(file_name)?; let mut contents = String::new(); src.read_to_string(&mut contents).unwrap(); drop(src); let new_contents = contents.replace(from, to); let mut dst = File::create(&file_name)?; dst.write(new_contents.as_bytes())?; Ok(()) }
26.898734
95
0.581176
1d12a567d9668de9a9c06a7bebc241c7ef48ac0c
4,628
use super::OUI; use core::convert::TryInto; use core::str::FromStr; #[repr(transparent)] #[derive(Default, PartialEq, Eq, Hash, Copy, Clone)] pub struct EtherAddr(pub [u8; EtherAddr::SIZE]); impl EtherAddr { pub const SIZE: usize = 6; pub const NULL: EtherAddr = EtherAddr([0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); pub const BROADCAST: EtherAddr = EtherAddr([0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); pub const QUALCOMM_LOCALCAST: EtherAddr = EtherAddr([0x00, 0xb0, 0x52, 0x00, 0x00, 0x01]); pub const IEEE1905_MULTICAST: EtherAddr = EtherAddr([0x01, 0x80, 0xc2, 0x00, 0x00, 0x13]); pub fn from_slice(slice: &[u8]) -> EtherAddr { EtherAddr(slice.try_into().unwrap()) } pub fn as_bytes(&self) -> [u8; EtherAddr::SIZE] { self.0 } pub fn oui(&self) -> OUI { OUI([self[0], self[1], self[2]]) } pub fn padded(&self) -> [u8; 8] { [self[0], self[1], self[2], self[3], self[4], self[5], 0, 0] } pub fn is_unicast(&self) -> bool { !self.is_multicast() } pub fn is_multicast(&self) -> bool { self[0] & 1 == 1 } pub fn is_broadcast(&self) -> bool { *self == Self::BROADCAST } } impl core::ops::Deref for EtherAddr { type Target = [u8; 6]; fn deref(&self) -> &Self::Target { &self.0 } } impl core::fmt::Debug for EtherAddr { fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { write!( f, "{:02x}:{:02x}:{:02x}:{:02x}:{:02x}:{:02x}", self[0], self[1], self[2], self[3], self[4], self[5] ) } } impl FromStr for EtherAddr { type Err = (); fn from_str(input: &str) -> Result<Self, Self::Err> { let input = input.trim(); let mut addr = [0u8; EtherAddr::SIZE]; let mut i = 0; for part in input.split(|c| c == ':' || c == '-') { if i >= addr.len() { return Err(()); } let part = part.trim(); if part.len() != 2 { return Err(()); } let value = u8::from_str_radix(part, 16).map_err(|_| ())?; addr[i] = value; i += 1; } if i != EtherAddr::SIZE { return Err(()); } Ok(EtherAddr(addr)) } } #[cfg(test)] mod tests { use super::*; #[test] fn properties() { assert_eq!(EtherAddr::BROADCAST.is_unicast(), false); 
assert_eq!(EtherAddr::BROADCAST.is_multicast(), true); assert_eq!(EtherAddr::BROADCAST.is_broadcast(), true); assert_eq!(EtherAddr::BROADCAST.oui(), OUI([0xff, 0xff, 0xff])); assert_eq!(EtherAddr::QUALCOMM_LOCALCAST.is_unicast(), true); assert_eq!(EtherAddr::QUALCOMM_LOCALCAST.is_multicast(), false); assert_eq!(EtherAddr::QUALCOMM_LOCALCAST.is_broadcast(), false); assert_eq!(EtherAddr::QUALCOMM_LOCALCAST.oui(), OUI::QUALCOMM); assert_eq!(EtherAddr::IEEE1905_MULTICAST.is_unicast(), false); assert_eq!(EtherAddr::IEEE1905_MULTICAST.is_multicast(), true); assert_eq!(EtherAddr::IEEE1905_MULTICAST.is_broadcast(), false); assert_eq!(EtherAddr::IEEE1905_MULTICAST.oui(), OUI([0x01, 0x80, 0xc2])); } #[test] fn from_str() { // Test valid inputs for test in &[ "01:23:45:67:89:ab", "01-23-45-67-89-ab", "01-23-45-67-89-AB", "01-23-45-67-89-Ab", "\t01-23-45-67-89-ab ", " 01 : 23 : 45 : 67 : 89 : ab ", ] { let model = Ok(EtherAddr([0x01, 0x23, 0x45, 0x67, 0x89, 0xab])); assert_eq!(model, EtherAddr::from_str(test), "\"{}\"", test); } // Test invalid inputs // Various invalid addresses for test in &[ "", "0", "01", "01-", "01-2", "01-23", "01-23-", "01-23-4", "01-23-45", "01-23-45-", "01-23-45-6", "01-23-45-67", "01-23-45-67-", "01-23-45-67-8", "01-23-45-67-89", "01-23-45-67-89-", "01-23-45-67-89-a", "xx-xx-xx-xx-xx-xx", "01-23-45-67-89-xx", "01-23-45-67-89-ab-", "01-23-45-67-89-ab-cd", "-01-23-45-67-89-ab", "01--23-45-67-89-ab", "0 1-23-45-67-89-ab", "1-2-3-4-5-6", "-----", ":::::", ] { assert_eq!(Err(()), EtherAddr::from_str(test), "\"{}\"", test); } } }
30.447368
94
0.491789
39b8d15ab6f09b6c40296a39676927e9118ed423
1,494
#![no_std] #![no_main] #![feature(type_alias_impl_trait)] use defmt::{info, unwrap}; use defmt_rtt as _; use embassy::executor::Spawner; #[cfg(feature = "_nrf")] use embassy_nrf::Peripherals; #[cfg(feature = "_stm32")] use embassy_stm32::Peripherals; use panic_probe as _; mod boards; mod network; #[cfg(feature = "microbit-v2")] use crate::boards::microbit_v2 as bsp; #[cfg(feature = "nrf52840-dk")] use crate::boards::nrf52840_dk as bsp; #[cfg(feature = "nrf9160-dk-s")] use crate::boards::nrf9160_dk_s as bsp; #[cfg(feature = "stm32f767zi")] use crate::boards::stm32f767zi as bsp; #[cfg(feature = "stm32h743zi")] use crate::boards::stm32h743zi as bsp; #[embassy::main] async fn main(spawner: Spawner, p: Peripherals) { info!("Network starting"); // The general idea is to initialise the board // specific peripherals that we will be using. // This often ends up being an assignment to // a tuple of peripherals. let network_peripherals = bsp::init(p); // We generally create a task per component // that ends up owning a number of peripherals. // There are a number of tasks like this and // we use either signals or channels to // communicate with them. unwrap!(spawner.spawn(network::main_task(network_peripherals,))); // We end up here normally with a loop and something // "main-like" that executes for your application, // often with the ability to communicate to the other // tasks via signals and channels etc. }
29.88
69
0.700134
d6e84940291a575627377d7e88106d43afcb55b3
32,468
use std::fmt::{self, Display}; use crate::borrow_check::nll::region_infer::{ RegionInferenceContext, error_reporting::ErrorReportingCtx, }; use crate::borrow_check::nll::universal_regions::DefiningTy; use crate::borrow_check::nll::ToRegionVid; use crate::borrow_check::Upvar; use rustc::hir; use rustc::hir::def::{Res, DefKind}; use rustc::hir::def_id::DefId; use rustc::infer::InferCtxt; use rustc::mir::Body; use rustc::ty::subst::{SubstsRef, GenericArgKind}; use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt}; use rustc::ty::print::RegionHighlightMode; use rustc_errors::DiagnosticBuilder; use syntax::symbol::kw; use rustc_data_structures::fx::FxHashMap; use syntax_pos::{Span, symbol::Symbol}; /// A name for a particular region used in emitting diagnostics. This name could be a generated /// name like `'1`, a name used by the user like `'a`, or a name like `'static`. #[derive(Debug, Clone)] crate struct RegionName { /// The name of the region (interned). crate name: Symbol, /// Where the region comes from. crate source: RegionNameSource, } /// Denotes the source of a region that is named by a `RegionName`. For example, a free region that /// was named by the user would get `NamedFreeRegion` and `'static` lifetime would get `Static`. /// This helps to print the right kinds of diagnostics. #[derive(Debug, Clone)] crate enum RegionNameSource { /// A bound (not free) region that was substituted at the def site (not an HRTB). NamedEarlyBoundRegion(Span), /// A free region that the user has a name (`'a`) for. NamedFreeRegion(Span), /// The `'static` region. Static, /// The free region corresponding to the environment of a closure. SynthesizedFreeEnvRegion(Span, String), /// The region name corresponds to a region where the type annotation is completely missing /// from the code, e.g. in a closure arguments `|x| { ... }`, where `x` is a reference. 
CannotMatchHirTy(Span, String), /// The region name corresponds a reference that was found by traversing the type in the HIR. MatchedHirTy(Span), /// A region name from the generics list of a struct/enum/union. MatchedAdtAndSegment(Span), /// The region corresponding to a closure upvar. AnonRegionFromUpvar(Span, String), /// The region corresponding to the return type of a closure. AnonRegionFromOutput(Span, String, String), AnonRegionFromYieldTy(Span, String), } /// Records region names that have been assigned before so that we can use the same ones in later /// diagnostics. #[derive(Debug, Clone)] crate struct RegionErrorNamingCtx { /// Record the region names generated for each region in the given /// MIR def so that we can reuse them later in help/error messages. renctx: FxHashMap<RegionVid, RegionName>, /// The counter for generating new region names. counter: usize, } impl RegionErrorNamingCtx { crate fn new() -> Self { Self { counter: 1, renctx: FxHashMap::default(), } } crate fn get(&self, region: &RegionVid) -> Option<&RegionName> { self.renctx.get(region) } crate fn insert(&mut self, region: RegionVid, name: RegionName) { self.renctx.insert(region, name); } } impl RegionName { #[allow(dead_code)] crate fn was_named(&self) -> bool { match self.source { RegionNameSource::NamedEarlyBoundRegion(..) | RegionNameSource::NamedFreeRegion(..) | RegionNameSource::Static => true, RegionNameSource::SynthesizedFreeEnvRegion(..) | RegionNameSource::CannotMatchHirTy(..) | RegionNameSource::MatchedHirTy(..) | RegionNameSource::MatchedAdtAndSegment(..) | RegionNameSource::AnonRegionFromUpvar(..) | RegionNameSource::AnonRegionFromOutput(..) | RegionNameSource::AnonRegionFromYieldTy(..) 
=> false, } } #[allow(dead_code)] crate fn was_synthesized(&self) -> bool { !self.was_named() } #[allow(dead_code)] crate fn name(&self) -> Symbol { self.name } crate fn highlight_region_name(&self, diag: &mut DiagnosticBuilder<'_>) { match &self.source { RegionNameSource::NamedFreeRegion(span) | RegionNameSource::NamedEarlyBoundRegion(span) => { diag.span_label(*span, format!("lifetime `{}` defined here", self)); } RegionNameSource::SynthesizedFreeEnvRegion(span, note) => { diag.span_label( *span, format!("lifetime `{}` represents this closure's body", self), ); diag.note(&note); } RegionNameSource::CannotMatchHirTy(span, type_name) => { diag.span_label(*span, format!("has type `{}`", type_name)); } RegionNameSource::MatchedHirTy(span) => { diag.span_label( *span, format!("let's call the lifetime of this reference `{}`", self), ); } RegionNameSource::MatchedAdtAndSegment(span) => { diag.span_label(*span, format!("let's call this `{}`", self)); } RegionNameSource::AnonRegionFromUpvar(span, upvar_name) => { diag.span_label( *span, format!( "lifetime `{}` appears in the type of `{}`", self, upvar_name ), ); } RegionNameSource::AnonRegionFromOutput(span, mir_description, type_name) => { diag.span_label( *span, format!("return type{} is {}", mir_description, type_name), ); }, RegionNameSource::AnonRegionFromYieldTy(span, type_name) => { diag.span_label( *span, format!("yield type is {}", type_name), ); } RegionNameSource::Static => {}, } } } impl Display for RegionName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.name) } } impl<'tcx> RegionInferenceContext<'tcx> { /// Maps from an internal MIR region vid to something that we can /// report to the user. In some cases, the region vids will map /// directly to lifetimes that the user has a name for (e.g., /// `'static`). But frequently they will not, in which case we /// have to find some way to identify the lifetime to the user. 
To /// that end, this function takes a "diagnostic" so that it can /// create auxiliary notes as needed. /// /// Example (function arguments): /// /// Suppose we are trying to give a name to the lifetime of the /// reference `x`: /// /// ``` /// fn foo(x: &u32) { .. } /// ``` /// /// This function would create a label like this: /// /// ``` /// | fn foo(x: &u32) { .. } /// ------- fully elaborated type of `x` is `&'1 u32` /// ``` /// /// and then return the name `'1` for us to use. crate fn give_region_a_name( &self, errctx: &ErrorReportingCtx<'_, '_, 'tcx>, renctx: &mut RegionErrorNamingCtx, fr: RegionVid, ) -> Option<RegionName> { let ErrorReportingCtx { infcx, body, mir_def_id, upvars, .. } = errctx; debug!("give_region_a_name(fr={:?}, counter={:?})", fr, renctx.counter); assert!(self.universal_regions.is_universal_region(fr)); if let Some(value) = renctx.get(&fr) { return Some(value.clone()); } let value = self .give_name_from_error_region(infcx.tcx, *mir_def_id, fr, renctx) .or_else(|| { self.give_name_if_anonymous_region_appears_in_arguments( infcx, body, *mir_def_id, fr, renctx, ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_upvars( infcx.tcx, upvars, fr, renctx ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_output( infcx, body, *mir_def_id, fr, renctx, ) }) .or_else(|| { self.give_name_if_anonymous_region_appears_in_yield_ty( infcx, body, *mir_def_id, fr, renctx, ) }); if let Some(ref value) = value { renctx.insert(fr, value.clone()); } debug!("give_region_a_name: gave name {:?}", value); value } /// Checks for the case where `fr` maps to something that the /// *user* has a name for. In that case, we'll be able to map /// `fr` to a `Region<'tcx>`, and that region will be one of /// named variants. 
fn give_name_from_error_region( &self, tcx: TyCtxt<'tcx>, mir_def_id: DefId, fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let error_region = self.to_error_region(fr)?; debug!("give_region_a_name: error_region = {:?}", error_region); match error_region { ty::ReEarlyBound(ebr) => { if ebr.has_name() { let span = self.get_named_span(tcx, error_region, ebr.name); Some(RegionName { name: ebr.name, source: RegionNameSource::NamedEarlyBoundRegion(span), }) } else { None } } ty::ReStatic => Some(RegionName { name: kw::StaticLifetime, source: RegionNameSource::Static }), ty::ReFree(free_region) => match free_region.bound_region { ty::BoundRegion::BrNamed(_, name) => { let span = self.get_named_span(tcx, error_region, name); Some(RegionName { name, source: RegionNameSource::NamedFreeRegion(span), }) } ty::BoundRegion::BrEnv => { let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); let def_ty = self.universal_regions.defining_ty; if let DefiningTy::Closure(def_id, substs) = def_ty { let args_span = if let hir::ExprKind::Closure(_, _, _, span, _) = tcx.hir().expect_expr(mir_hir_id).kind { span } else { bug!("Closure is not defined by a closure expr"); }; let region_name = self.synthesize_region_name(renctx); let closure_kind_ty = substs.as_closure().kind_ty(def_id, tcx); let note = match closure_kind_ty.to_opt_closure_kind() { Some(ty::ClosureKind::Fn) => { "closure implements `Fn`, so references to captured variables \ can't escape the closure" } Some(ty::ClosureKind::FnMut) => { "closure implements `FnMut`, so references to captured variables \ can't escape the closure" } Some(ty::ClosureKind::FnOnce) => { bug!("BrEnv in a `FnOnce` closure"); } None => bug!("Closure kind not inferred in borrow check"), }; Some(RegionName { name: region_name, source: RegionNameSource::SynthesizedFreeEnvRegion( args_span, note.to_string(), ), }) } else { // Can't have BrEnv in functions, constants or generators. 
bug!("BrEnv outside of closure."); } } ty::BoundRegion::BrAnon(_) => None, }, ty::ReLateBound(..) | ty::ReScope(..) | ty::ReVar(..) | ty::RePlaceholder(..) | ty::ReEmpty | ty::ReErased | ty::ReClosureBound(..) => None, } } /// Gets a span of a named region to provide context for error messages that /// mention that span, for example: /// /// ``` /// | /// | fn two_regions<'a, 'b, T>(cell: Cell<&'a ()>, t: T) /// | -- -- lifetime `'b` defined here /// | | /// | lifetime `'a` defined here /// | /// | with_signature(cell, t, |cell, t| require(cell, t)); /// | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'b` must /// | outlive `'a` /// ``` fn get_named_span( &self, tcx: TyCtxt<'tcx>, error_region: &RegionKind, name: Symbol, ) -> Span { let scope = error_region.free_region_binding_scope(tcx); let node = tcx.hir().as_local_hir_id(scope).unwrap_or(hir::DUMMY_HIR_ID); let span = tcx.sess.source_map().def_span(tcx.hir().span(node)); if let Some(param) = tcx.hir() .get_generics(scope) .and_then(|generics| generics.get_named(name)) { param.span } else { span } } /// Finds an argument that contains `fr` and label it with a fully /// elaborated type, returning something like `'1`. Result looks /// like: /// /// ``` /// | fn foo(x: &u32) { .. 
} /// ------- fully elaborated type of `x` is `&'1 u32` /// ``` fn give_name_if_anonymous_region_appears_in_arguments( &self, infcx: &InferCtxt<'_, 'tcx>, body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let implicit_inputs = self.universal_regions.defining_ty.implicit_inputs(); let argument_index = self.get_argument_index_for_region(infcx.tcx, fr)?; let arg_ty = self.universal_regions.unnormalized_input_tys[implicit_inputs + argument_index]; if let Some(region_name) = self.give_name_if_we_can_match_hir_ty_from_argument( infcx, mir_def_id, fr, arg_ty, argument_index, renctx, ) { return Some(region_name); } self.give_name_if_we_cannot_match_hir_ty(infcx, body, fr, arg_ty, renctx) } fn give_name_if_we_can_match_hir_ty_from_argument( &self, infcx: &InferCtxt<'_, 'tcx>, mir_def_id: DefId, needle_fr: RegionVid, argument_ty: Ty<'tcx>, argument_index: usize, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let mir_hir_id = infcx.tcx.hir().as_local_hir_id(mir_def_id)?; let fn_decl = infcx.tcx.hir().fn_decl_by_hir_id(mir_hir_id)?; let argument_hir_ty: &hir::Ty = fn_decl.inputs.get(argument_index)?; match argument_hir_ty.kind { // This indicates a variable with no type annotation, like // `|x|`... in that case, we can't highlight the type but // must highlight the variable. // NOTE(eddyb) this is handled in/by the sole caller // (`give_name_if_anonymous_region_appears_in_arguments`). hir::TyKind::Infer => None, _ => self.give_name_if_we_can_match_hir_ty( infcx.tcx, needle_fr, argument_ty, argument_hir_ty, renctx, ), } } /// Attempts to highlight the specific part of a type in an argument /// that has no type annotation. 
/// For example, we might produce an annotation like this: /// /// ``` /// | foo(|a, b| b) /// | - - /// | | | /// | | has type `&'1 u32` /// | has type `&'2 u32` /// ``` fn give_name_if_we_cannot_match_hir_ty( &self, infcx: &InferCtxt<'_, 'tcx>, body: &Body<'tcx>, needle_fr: RegionVid, argument_ty: Ty<'tcx>, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let counter = renctx.counter; let mut highlight = RegionHighlightMode::default(); highlight.highlighting_region_vid(needle_fr, counter); let type_name = infcx.extract_type_name(&argument_ty, Some(highlight)).0; debug!( "give_name_if_we_cannot_match_hir_ty: type_name={:?} needle_fr={:?}", type_name, needle_fr ); let assigned_region_name = if type_name.find(&format!("'{}", counter)).is_some() { // Only add a label if we can confirm that a region was labelled. let argument_index = self.get_argument_index_for_region(infcx.tcx, needle_fr)?; let (_, span) = self.get_argument_name_and_span_for_region(body, argument_index); Some(RegionName { // This counter value will already have been used, so this function will increment // it so the next value will be used next and return the region name that would // have been used. name: self.synthesize_region_name(renctx), source: RegionNameSource::CannotMatchHirTy(span, type_name), }) } else { None }; assigned_region_name } /// Attempts to highlight the specific part of a type annotation /// that contains the anonymous reference we want to give a name /// to. For example, we might produce an annotation like this: /// /// ``` /// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item = &T>> { /// | - let's call the lifetime of this reference `'1` /// ``` /// /// the way this works is that we match up `argument_ty`, which is /// a `Ty<'tcx>` (the internal form of the type) with /// `argument_hir_ty`, a `hir::Ty` (the syntax of the type /// annotation). We are descending through the types stepwise, /// looking in to find the region `needle_fr` in the internal /// type. 
Once we find that, we can use the span of the `hir::Ty` /// to add the highlight. /// /// This is a somewhat imperfect process, so along the way we also /// keep track of the **closest** type we've found. If we fail to /// find the exact `&` or `'_` to highlight, then we may fall back /// to highlighting that closest type instead. fn give_name_if_we_can_match_hir_ty( &self, tcx: TyCtxt<'tcx>, needle_fr: RegionVid, argument_ty: Ty<'tcx>, argument_hir_ty: &hir::Ty, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let search_stack: &mut Vec<(Ty<'tcx>, &hir::Ty)> = &mut vec![(argument_ty, argument_hir_ty)]; while let Some((ty, hir_ty)) = search_stack.pop() { match (&ty.kind, &hir_ty.kind) { // Check if the `argument_ty` is `&'X ..` where `'X` // is the region we are looking for -- if so, and we have a `&T` // on the RHS, then we want to highlight the `&` like so: // // & // - let's call the lifetime of this reference `'1` ( ty::Ref(region, referent_ty, _), hir::TyKind::Rptr(_lifetime, referent_hir_ty), ) => { if region.to_region_vid() == needle_fr { let region_name = self.synthesize_region_name(renctx); // Just grab the first character, the `&`. let source_map = tcx.sess.source_map(); let ampersand_span = source_map.start_point(hir_ty.span); return Some(RegionName { name: region_name, source: RegionNameSource::MatchedHirTy(ampersand_span), }); } // Otherwise, let's descend into the referent types. search_stack.push((referent_ty, &referent_hir_ty.ty)); } // Match up something like `Foo<'1>` ( ty::Adt(_adt_def, substs), hir::TyKind::Path(hir::QPath::Resolved(None, path)), ) => { match path.res { // Type parameters of the type alias have no reason to // be the same as those of the ADT. // FIXME: We should be able to do something similar to // match_adt_and_segment in this case. 
Res::Def(DefKind::TyAlias, _) => (), _ => if let Some(last_segment) = path.segments.last() { if let Some(name) = self.match_adt_and_segment( substs, needle_fr, last_segment, renctx, search_stack, ) { return Some(name); } } } } // The following cases don't have lifetimes, so we // just worry about trying to match up the rustc type // with the HIR types: (ty::Tuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => { search_stack.extend(elem_tys.iter().map(|k| k.expect_ty()).zip(elem_hir_tys)); } (ty::Slice(elem_ty), hir::TyKind::Slice(elem_hir_ty)) | (ty::Array(elem_ty, _), hir::TyKind::Array(elem_hir_ty, _)) => { search_stack.push((elem_ty, elem_hir_ty)); } (ty::RawPtr(mut_ty), hir::TyKind::Ptr(mut_hir_ty)) => { search_stack.push((mut_ty.ty, &mut_hir_ty.ty)); } _ => { // FIXME there are other cases that we could trace } } } return None; } /// We've found an enum/struct/union type with the substitutions /// `substs` and -- in the HIR -- a path type with the final /// segment `last_segment`. Try to find a `'_` to highlight in /// the generic args (or, if not, to produce new zipped pairs of /// types+hir to search through). fn match_adt_and_segment<'hir>( &self, substs: SubstsRef<'tcx>, needle_fr: RegionVid, last_segment: &'hir hir::PathSegment, renctx: &mut RegionErrorNamingCtx, search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty)>, ) -> Option<RegionName> { // Did the user give explicit arguments? 
(e.g., `Foo<..>`) let args = last_segment.args.as_ref()?; let lifetime = self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?; match lifetime.name { hir::LifetimeName::Param(_) | hir::LifetimeName::Error | hir::LifetimeName::Static | hir::LifetimeName::Underscore => { let region_name = self.synthesize_region_name(renctx); let ampersand_span = lifetime.span; Some(RegionName { name: region_name, source: RegionNameSource::MatchedAdtAndSegment(ampersand_span), }) } hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Implicit => { // In this case, the user left off the lifetime; so // they wrote something like: // // ``` // x: Foo<T> // ``` // // where the fully elaborated form is `Foo<'_, '1, // T>`. We don't consider this a match; instead we let // the "fully elaborated" type fallback above handle // it. None } } } /// We've found an enum/struct/union type with the substitutions /// `substs` and -- in the HIR -- a path with the generic /// arguments `args`. If `needle_fr` appears in the args, return /// the `hir::Lifetime` that corresponds to it. If not, push onto /// `search_stack` the types+hir to search through. fn try_match_adt_and_generic_args<'hir>( &self, substs: SubstsRef<'tcx>, needle_fr: RegionVid, args: &'hir hir::GenericArgs, search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty)>, ) -> Option<&'hir hir::Lifetime> { for (kind, hir_arg) in substs.iter().zip(&args.args) { match (kind.unpack(), hir_arg) { (GenericArgKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => { if r.to_region_vid() == needle_fr { return Some(lt); } } (GenericArgKind::Type(ty), hir::GenericArg::Type(hir_ty)) => { search_stack.push((ty, hir_ty)); } (GenericArgKind::Const(_ct), hir::GenericArg::Const(_hir_ct)) => { // Lifetimes cannot be found in consts, so we don't need // to search anything here. 
} (GenericArgKind::Lifetime(_), _) | (GenericArgKind::Type(_), _) | (GenericArgKind::Const(_), _) => { // I *think* that HIR lowering should ensure this // doesn't happen, even in erroneous // programs. Else we should use delay-span-bug. span_bug!( hir_arg.span(), "unmatched subst and hir arg: found {:?} vs {:?}", kind, hir_arg, ); } } } None } /// Finds a closure upvar that contains `fr` and label it with a /// fully elaborated type, returning something like `'1`. Result /// looks like: /// /// ``` /// | let x = Some(&22); /// - fully elaborated type of `x` is `Option<&'1 u32>` /// ``` fn give_name_if_anonymous_region_appears_in_upvars( &self, tcx: TyCtxt<'tcx>, upvars: &[Upvar], fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let upvar_index = self.get_upvar_index_for_region(tcx, fr)?; let (upvar_name, upvar_span) = self.get_upvar_name_and_span_for_region(tcx, upvars, upvar_index); let region_name = self.synthesize_region_name(renctx); Some(RegionName { name: region_name, source: RegionNameSource::AnonRegionFromUpvar(upvar_span, upvar_name.to_string()), }) } /// Checks for arguments appearing in the (closure) return type. It /// must be a closure since, in a free fn, such an argument would /// have to either also appear in an argument (if using elision) /// or be early bound (named, not in argument). 
fn give_name_if_anonymous_region_appears_in_output( &self, infcx: &InferCtxt<'_, 'tcx>, body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { let tcx = infcx.tcx; let return_ty = self.universal_regions.unnormalized_output_ty; debug!( "give_name_if_anonymous_region_appears_in_output: return_ty = {:?}", return_ty ); if !tcx.any_free_region_meets(&return_ty, |r| r.to_region_vid() == fr) { return None; } let mut highlight = RegionHighlightMode::default(); highlight.highlighting_region_vid(fr, renctx.counter); let type_name = infcx.extract_type_name(&return_ty, Some(highlight)).0; let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); let (return_span, mir_description) = match tcx.hir().get(mir_hir_id) { hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(_, return_ty, _, span, gen_move), .. }) => ( match return_ty.output { hir::FunctionRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span), hir::FunctionRetTy::Return(_) => return_ty.output.span(), }, if gen_move.is_some() { " of generator" } else { " of closure" }, ), hir::Node::ImplItem(hir::ImplItem { kind: hir::ImplItemKind::Method(method_sig, _), .. }) => (method_sig.decl.output.span(), ""), _ => (body.span, ""), }; Some(RegionName { // This counter value will already have been used, so this function will increment it // so the next value will be used next and return the region name that would have been // used. name: self.synthesize_region_name(renctx), source: RegionNameSource::AnonRegionFromOutput( return_span, mir_description.to_string(), type_name, ), }) } fn give_name_if_anonymous_region_appears_in_yield_ty( &self, infcx: &InferCtxt<'_, 'tcx>, body: &Body<'tcx>, mir_def_id: DefId, fr: RegionVid, renctx: &mut RegionErrorNamingCtx, ) -> Option<RegionName> { // Note: generators from `async fn` yield `()`, so we don't have to // worry about them here. 
let yield_ty = self.universal_regions.yield_ty?; debug!( "give_name_if_anonymous_region_appears_in_yield_ty: yield_ty = {:?}", yield_ty, ); let tcx = infcx.tcx; if !tcx.any_free_region_meets(&yield_ty, |r| r.to_region_vid() == fr) { return None; } let mut highlight = RegionHighlightMode::default(); highlight.highlighting_region_vid(fr, renctx.counter); let type_name = infcx.extract_type_name(&yield_ty, Some(highlight)).0; let mir_hir_id = tcx.hir().as_local_hir_id(mir_def_id).expect("non-local mir"); let yield_span = match tcx.hir().get(mir_hir_id) { hir::Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(_, _, _, span, _), .. }) => ( tcx.sess.source_map().end_point(*span) ), _ => body.span, }; debug!( "give_name_if_anonymous_region_appears_in_yield_ty: \ type_name = {:?}, yield_span = {:?}", yield_span, type_name, ); Some(RegionName { name: self.synthesize_region_name(renctx), source: RegionNameSource::AnonRegionFromYieldTy(yield_span, type_name), }) } /// Creates a synthetic region named `'1`, incrementing the counter. fn synthesize_region_name(&self, renctx: &mut RegionErrorNamingCtx) -> Symbol { let c = renctx.counter; renctx.counter += 1; Symbol::intern(&format!("'{:?}", c)) } }
38.33294
99
0.525225
03927792bff7ebc29cb6ca5d262f480b0711709a
9,421
use std::fs; use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::sync::Mutex; use handlebars::{self as hb, Handlebars, HelperDef, JsonValue, handlebars_helper}; use regex::{Regex, Error as ReError}; use image::image_dimensions; use lazy_static::lazy_static; use serde::Serialize; use crate::book::Song; use crate::project::{Metadata, Output, Project}; use crate::util::PathBufExt; use crate::{PROGRAM_META, ProgramMeta}; use crate::error::*; use super::Render; type RegexCache = HashMap<String, Result<Regex, ReError>>; lazy_static! { static ref REGEX_CACHE: Mutex<RegexCache> = Mutex::new(RegexCache::new()); } pub trait DefaultTemaplate { const TPL_NAME: &'static str; const TPL_CONTENT: &'static str; } fn latex_escape(input: &str, pre_spaces: bool) -> String { let mut res = String::with_capacity(input.len()); for c in input.chars() { match c { ' ' if pre_spaces => res.push('~'), '&' | '%' | '$' | '#' | '_' | '{' | '}' => { res.push('\\'); res.push(c); } '[' => res.push_str("{\\lbrack}"), ']' => res.push_str("{\\rbrack}"), '~' => res.push_str("{\\textasciitilde}"), '^' => res.push_str("{\\textasciicircum}"), '\\' => res.push_str("{\\textbackslash}"), c => res.push(c), } } res } fn hb_latex_escape(input: &str) -> String { latex_escape(input, false) } handlebars_helper!(hb_eq: |v1: Json, v2: Json| { v1 == v2 }); handlebars_helper!(hb_contains: |obj: object, key: str| { obj.contains_key(key) }); handlebars_helper!(hb_default: |value: Json, def: Json| { match value { JsonValue::Null => def.clone(), other => other.clone(), } }); handlebars_helper!(hb_pre: |input: str| { latex_escape(input, true) }); handlebars_helper!(hb_matches: |value: str, regex: str| { let mut cache = REGEX_CACHE.lock().unwrap(); if !cache.contains_key(regex) { let res = Regex::new(regex); if res.is_err() { eprintln!("Warning: `matches` helper: Invalid regular expression: `{}`", regex); } cache.insert(regex.into(), res); } match cache.get(regex) { Some(Ok(re)) => re.is_match(value), _ 
=> false, } }); struct ImgHelper { out_dir: PathBuf, result_i: usize, name: &'static str, } impl ImgHelper { fn width(project: &Project) -> Box<Self> { let out_dir = project.settings.dir_output().to_owned(); Box::new(Self { out_dir, result_i: 0, name: "img_w", }) } fn height(project: &Project) -> Box<Self> { let out_dir = project.settings.dir_output().to_owned(); Box::new(Self { out_dir, result_i: 1, name: "img_h", }) } } impl HelperDef for ImgHelper { fn call_inner<'reg: 'rc, 'rc>( &self, h: &hb::Helper<'reg, 'rc>, _: &'reg Handlebars<'reg>, _: &'rc hb::Context, _: &mut hb::RenderContext<'reg, 'rc>, ) -> Result<Option<hb::ScopedJson<'reg, 'rc>>, hb::RenderError> { let path: &str = h .param(0) .map(|x| x.value()) .ok_or_else(|| hb::RenderError::new(format!("{}: Image path not supplied", self.name))) .and_then(|x| { x.as_str().ok_or_else(|| { hb::RenderError::new(&format!( "{}: Image path not a string, it's {:?} as JSON.", self.name, x, )) }) })?; let pathbuf = Path::new(&path).to_owned().resolved(&self.out_dir); let (w, h) = image_dimensions(&pathbuf).map_err(|e| { hb::RenderError::new(&format!( "{}: Couldn't read image at `{}`: {}", self.name, pathbuf.display(), e )) })?; let res = [w, h][self.result_i]; Ok(Some(hb::ScopedJson::Derived(JsonValue::from(res)))) } } struct DpiHelper { dpi: f64, } impl DpiHelper { const INCH_MM: f64 = 25.4; fn new(output: &Output) -> Box<Self> { Box::new(Self { dpi: output.dpi() }) } } impl HelperDef for DpiHelper { fn call_inner<'reg: 'rc, 'rc>( &self, h: &hb::Helper<'reg, 'rc>, _: &'reg Handlebars<'reg>, _: &'rc hb::Context, _: &mut hb::RenderContext<'reg, 'rc>, ) -> Result<Option<hb::ScopedJson<'reg, 'rc>>, hb::RenderError> { let value: f64 = h .param(0) .map(|x| x.value()) .ok_or_else(|| hb::RenderError::new("px2mm: Input value not supplied")) .and_then(|x| { x.as_f64().ok_or_else(|| { hb::RenderError::new(&format!( "px2mm: Input value not a number, it's {:?} as JSON.", x, )) }) })?; let res = (value / self.dpi) * Self::INCH_MM; 
Ok(Some(hb::ScopedJson::Derived(JsonValue::from(res)))) } } #[derive(Serialize, Debug)] struct HbContext<'a> { book: &'a Metadata, songs: &'a [Song], output: &'a Metadata, program: &'a ProgramMeta, } #[derive(Debug)] struct HbRender<'a> { hb: Handlebars<'static>, tpl_name: String, project: &'a Project, output: &'a Output, } impl<'a> HbRender<'a> { fn new<DT: DefaultTemaplate>(project: &'a Project, output: &'a Output) -> Result<Self> { let mut hb = Handlebars::new(); hb.register_helper("eq", Box::new(hb_eq)); hb.register_helper("contains", Box::new(hb_contains)); hb.register_helper("default", Box::new(hb_default)); hb.register_helper("matches", Box::new(hb_matches)); hb.register_helper("px2mm", DpiHelper::new(output)); hb.register_helper("img_w", ImgHelper::width(project)); hb.register_helper("img_h", ImgHelper::height(project)); let tpl_name = if let Some(template) = output.template.as_ref() { // NB: unwrap() should be ok, UTF-8 validity is checked while parsing // project settings TOML: let tpl_name = template.to_str().unwrap().to_string(); if template.exists() { hb.register_template_file(&tpl_name, &template) .with_context(|| format!("Error in template file `{}`", template.display()))?; } else { let parent = template.parent().unwrap(); // The temaplate should've been resolved as absolute in Project fs::create_dir_all(parent) .and_then(|_| fs::write(&template, DT::TPL_CONTENT.as_bytes())) .with_context(|| { format!( "Error writing default template to file: `{}`", template.display() ) })?; hb.register_template_string(&tpl_name, DT::TPL_CONTENT) .expect("Internal error: Could not load default template"); } tpl_name } else { hb.register_template_string(DT::TPL_NAME, DT::TPL_CONTENT) .expect("Internal error: Could not load default template"); DT::TPL_NAME.to_string() }; Ok(Self { hb, tpl_name, project, output, }) } fn render(&self) -> Result<&'a Output> { let context = HbContext { book: self.project.metadata(), songs: self.project.songs(), output: 
&self.output.metadata, program: &PROGRAM_META, }; let html = self.hb.render(&self.tpl_name, &context)?; fs::write(&self.output.file, html.as_bytes()).with_context(|| { format!( "Error writing output file: `{}`", self.output.file.display() ) })?; Ok(self.output) } } pub struct RHtml; impl DefaultTemaplate for RHtml { const TPL_NAME: &'static str = "html.hbs"; const TPL_CONTENT: &'static str = include_str!("../../default/templates/html.hbs"); } impl Render for RHtml { fn render<'a>(project: &'a Project, output: &'a Output) -> Result<&'a Output> { let render = HbRender::new::<Self>(project, output)?; render.render() } } pub struct RTex; impl DefaultTemaplate for RTex { const TPL_NAME: &'static str = "pdf.hbs"; const TPL_CONTENT: &'static str = include_str!("../../default/templates/pdf.hbs"); } impl Render for RTex { fn render<'a>(project: &'a Project, output: &'a Output) -> Result<&'a Output> { let mut render = HbRender::new::<Self>(project, output)?; // Setup Latex escaping render.hb.register_escape_fn(hb_latex_escape); render.hb.register_helper("pre", Box::new(hb_pre)); render.render() } } pub struct RHovorka; impl DefaultTemaplate for RHovorka { const TPL_NAME: &'static str = "hovorka.hbs"; const TPL_CONTENT: &'static str = include_str!("../../example/templates/hovorka.hbs"); } impl Render for RHovorka { fn render<'a>(project: &'a Project, output: &'a Output) -> Result<&'a Output> { let render = HbRender::new::<Self>(project, output)?; render.render() } }
29.625786
120
0.537523
de955056a08d2981876f015fe9ae22f3d6c381ea
10,236
use std::convert::TryFrom; use std::fs::OpenOptions; use std::io::Read; use std::path::{Path, PathBuf}; use anyhow::{anyhow, bail, Context}; use clap::ArgMatches; use encoding::DecoderTrap; use lief::Binary; use picky::hash::HashAlgorithm; use picky::x509::date::UTCDate; use picky::x509::pkcs7::authenticode::{AuthenticodeSignature, AuthenticodeValidator, ShaVariant}; use picky::x509::pkcs7::ctl::http_fetch::CtlHttpFetch; use picky::x509::pkcs7::ctl::CertificateTrustList; use picky::x509::wincert::WinCertificate; use crate::config::{ ARG_BINARY, ARG_PS_SCRIPT, ARG_VERIFY, ARG_VERIFY_BASIC, ARG_VERIFY_CA, ARG_VERIFY_CHAIN, ARG_VERIFY_SIGNING_CERTIFICATE, PS_AUTHENTICODE_FOOTER, PS_AUTHENTICODE_HEADER, PS_AUTHENTICODE_LINES_SPLITTER, }; use crate::get_utf8_file_name; use crate::sign::compute_ps_file_checksum_from_content; pub fn verify(matches: &ArgMatches, files: &[PathBuf]) -> anyhow::Result<()> { let mut at_least_one_error = false; let flags = matches .values_of(ARG_VERIFY) .unwrap() .map(ToString::to_string) .collect::<Vec<String>>(); let authenticode_signatures = match (matches.is_present(ARG_BINARY), matches.is_present(ARG_PS_SCRIPT)) { (true, false) => { let binary_path = files[0].clone(); let binary = Binary::new(binary_path.clone()).map_err(|err| anyhow!("Failed to load the executable: {}", err))?; let authenticode_signature = extract_authenticode_signature_from_binary(&binary)?; let binary_name = get_utf8_file_name(&binary_path)?; let file_hash = binary .get_file_hash_sha256() .map_err(|err| anyhow!("Failed to compute file hash for target binary: {}", err.to_string()))?; vec![(authenticode_signature, binary_name.to_owned(), file_hash)] } (false, true) => { let mut authenticode_signatures = Vec::with_capacity(files.len()); for file_path in files { let extract_signature = |file_path: &Path| -> anyhow::Result<(AuthenticodeSignature, String, Vec<u8>)> { let authenticode_signature = match authenticode_signature_ps_from_file(file_path) { Ok(authenticode_signature) 
=> authenticode_signature, Err(err) => bail!("{} -> {}\n", err.to_string(), err.root_cause()), }; let algorithm_identifier_oid = authenticode_signature .0 .digest_algorithms() .first() .expect("AlgorithmIdentifier should be present") .oid_asn1() .clone(); let sha_variant = ShaVariant::try_from(algorithm_identifier_oid) .with_context(|| format!("Failed compute checksum for {:?}", file_path))?; let hash = HashAlgorithm::try_from(sha_variant) .with_context(|| format!("Failed compute checksum for {:?}", file_path))?; let ps_file_name = get_utf8_file_name(file_path)?; let file_hash = compute_ps_file_checksum_from_content(file_path, hash) .with_context(|| format!("Failed to compute {:?} checksum for {:?}", hash, file_path))?; Ok((authenticode_signature, ps_file_name.to_owned(), file_hash)) }; match extract_signature(file_path) { Ok(extracted) => authenticode_signatures.push(extracted), Err(e) => { at_least_one_error = true; eprintln!("{:?}", e); } } } authenticode_signatures } (true, true) => bail!("Do not know what to verify exactly(`binary` and `script` both are specified)"), (false, false) => bail!("Do not know what to verify(`binary` or `script` is not specified)"), }; let flags = flags .iter() .filter(|flag| match flag.as_str() { ARG_VERIFY_BASIC | ARG_VERIFY_SIGNING_CERTIFICATE | ARG_VERIFY_CHAIN | ARG_VERIFY_CA => true, other => { eprintln!("Skipping unknown flag `{}`", other); false } }) .cloned() .collect::<Vec<String>>(); let ctl = if flags.iter().any(|flag| flag.as_str() == ARG_VERIFY_CHAIN) { let ctl = CertificateTrustList::fetch()?; Some(ctl) } else { None }; for (authenticode_signature, file_name, file_hash) in authenticode_signatures { let validator = authenticode_signature.authenticode_verifier(); let now = UTCDate::now(); let validator = apply_flags(&validator, &flags, &now, file_hash, ctl.as_ref()); match validator.verify() { Ok(()) => println!("{} has valid digital signature", file_name), Err(err) => { eprintln!("{} has invalid digital signature: {}", 
file_name, err.to_string()); at_least_one_error = true; } } } if at_least_one_error { bail!("Terminated with error(s)"); } Ok(()) } fn extract_authenticode_signature_from_binary(binary: &Binary) -> anyhow::Result<AuthenticodeSignature> { let authenticode_data = binary .get_authenticode_data() .map_err(|err| anyhow!("Failed to extract Authenticode signature from target binary: {}", err))?; let wincert = WinCertificate::decode(&authenticode_data) .map_err(|err| anyhow!("Failed to decode authenticode data: {}", err))?; let authenticode_signature = AuthenticodeSignature::from_der(wincert.get_certificate()) .context("Failed to deserialize Authenticode signature")?; Ok(authenticode_signature) } pub fn authenticode_signature_ps_from_file(file_path: &Path) -> anyhow::Result<AuthenticodeSignature> { let mut file = OpenOptions::new() .read(true) .open(file_path) .with_context(|| format!("Failed to open {:?} for reading", file_path))?; let mut buffer = Vec::new(); file.read_to_end(&mut buffer).unwrap(); let (decoded, _) = encoding::decode( &buffer, DecoderTrap::Strict, encoding::all::UTF_8 as encoding::EncodingRef, ); let buffer = decoded.map_err(|err| anyhow!("Failed to decoded {:?} ps file: {}", file_path, err))?; let signature = extract_ps_authenticode_signature(buffer) .with_context(|| format!("Failed to extract Authenticode signature from {:?}", file_path))?; let der_signature = base64::decode(signature).context("Failed to convert signature to DER")?; let authenticode_signature = AuthenticodeSignature::from_der(&der_signature) .with_context(|| format!("Failed to deserialize Authenticode signature for {:?}", file_path))?; Ok(authenticode_signature) } fn extract_ps_authenticode_signature(content: String) -> anyhow::Result<String> { let index = content .find(PS_AUTHENTICODE_HEADER) .ok_or_else(|| anyhow!("File is not digital signed"))?; let (_, signature) = content.split_at(index); let mut out = String::new(); for line in signature.lines() { if 
line.contains(PS_AUTHENTICODE_HEADER) || line.contains(PS_AUTHENTICODE_FOOTER) { continue; } if line.contains(PS_AUTHENTICODE_LINES_SPLITTER) { out.push_str(line.replace(PS_AUTHENTICODE_LINES_SPLITTER, "").trim_end_matches('\r')); } } Ok(out) } fn apply_flags<'a>( validator: &'a AuthenticodeValidator<'a>, flags: &[String], time: &'a UTCDate, file_hash: Vec<u8>, ctl: Option<&'a CertificateTrustList>, ) -> &'a AuthenticodeValidator<'a> { let validator = validator .ignore_basic_authenticode_validation() .ignore_signing_certificate_check() .ignore_chain_check() .ignore_ca_against_ctl_check() .ignore_not_before_check() .ignore_not_after_check() .ignore_excluded_cert_authorities(); let validator = if flags.iter().any(|flag| flag.as_str() == ARG_VERIFY_BASIC) { validator.require_basic_authenticode_validation(file_hash) } else { &validator }; let validator = if flags.iter().any(|flag| flag.as_str() == ARG_VERIFY_SIGNING_CERTIFICATE) { validator .require_signing_certificate_check() .require_not_after_check() .require_not_before_check() .require_chain_check() .exact_date(time) } else { &validator }; let validator = if flags.iter().any(|flag| flag.as_str() == ARG_VERIFY_CHAIN) { let validator = validator.require_chain_check(); if let Some(ctl) = ctl { validator.ctl(ctl) } else { validator } } else { &validator }; if flags.iter().any(|flag| flag.as_str() == ARG_VERIFY_CA) { validator.require_ca_against_ctl_check() } else { validator } } pub fn extract_signed_ps_file_content(raw_content: String) -> String { let end = match raw_content.find(PS_AUTHENTICODE_HEADER) { Some(index) => index - 2, // -2 to remove \r\n from the `# SIG # Begin signature block` line None => return raw_content, }; let (raw_content, _) = raw_content.split_at(end); raw_content.to_string() } #[cfg(test)] mod tests { use super::*; #[test] fn try_to_extract_ps_authenticode_signature() { let ps_authenticode_signature = " # SIG # Begin signature block\r\n# 
MIIFjAYJKoZIhvcNAQcCoIIFfTCCBXkCAQExDzANBglghkgBZQMEAgEFADB5Bgor\r\n# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG\r\n# SIG # End signature block\r\n"; let ps_authenticode_signature = extract_ps_authenticode_signature(ps_authenticode_signature.to_string()).unwrap(); assert_eq!(ps_authenticode_signature.as_str(), "MIIFjAYJKoZIhvcNAQcCoIIFfTCCBXkCAQExDzANBglghkgBZQMEAgEFADB5BgorBgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG"); } }
37.771218
268
0.623779
1ade77dc30cc260c9736b7d400bb74a3ab47b511
975
// Copyright 2020-2021 IOTA Stiftung // SPDX-License-Identifier: Apache-2.0 use core::fmt::Display; use core::fmt::Formatter; use core::fmt::Result; /// Supported types for the JSON Web Key `typ` property. /// /// [More Info](https://www.iana.org/assignments/jose/jose.xhtml#web-key-types) #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] pub enum JwkType { /// Elliptic Curve. #[serde(rename = "EC")] Ec, /// RSA. #[serde(rename = "RSA")] Rsa, /// Octet sequence. #[serde(rename = "oct")] Oct, /// Octet string key pairs. #[serde(rename = "OKP")] Okp, } impl JwkType { /// Returns the JWK "typ" as a `str` slice. pub const fn name(self) -> &'static str { match self { Self::Ec => "EC", Self::Rsa => "RSA", Self::Oct => "oct", Self::Okp => "OKP", } } } impl Display for JwkType { fn fmt(&self, f: &mut Formatter<'_>) -> Result { f.write_str(self.name()) } }
22.159091
91
0.6
f77de517db5864853a7264a29090f0a41b0b4ffa
336
pub mod error; pub use error::ApiError; pub mod v1; use ftl::*; use crate::ServerState; pub async fn api(mut route: Route<ServerState>) -> Response { match route.next().segment() { // ANY /api/v1 Exact("v1") => v1::api_v1(route).await.into_response(), _ => ApiError::not_found().into_response(), } }
19.764706
63
0.613095
28a4b821f1871cd4c965a43b520a434bf1964758
2,371
use std::io::{self, BufRead}; use std::str::FromStr; #[derive(Clone, Copy, Debug, Eq, PartialEq)] struct Seat { row: usize, column: usize, } impl FromStr for Seat { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { let row = s[0..7] .chars() .map(|c| match c { 'F' => 0, 'B' => 1, _ => panic!(), }) .fold(0, |r, c| 2 * r + c); let column = s[7..10] .chars() .map(|c| match c { 'R' => 1, 'L' => 0, _ => panic!(), }) .fold(0, |r, c| 2 * r + c); Ok(Seat { row, column }) } } impl Seat { fn seat_id(&self) -> usize { self.row * 8 + self.column } } #[test] fn test_seat_from_str() { assert_eq!( "FFFFFFFLLL".parse::<Seat>().unwrap(), Seat { row: 0, column: 0 } ); assert_eq!( "FFFFFFBLLR".parse::<Seat>().unwrap(), Seat { row: 1, column: 1 } ); assert_eq!( "BBBBBBBRRR".parse::<Seat>().unwrap(), Seat { row: 127, column: 7 } ); assert_eq!( "FBFBBFFRLR".parse::<Seat>().unwrap(), Seat { row: 44, column: 5 } ); assert_eq!( "BFFFBBFRRR".parse::<Seat>().unwrap(), Seat { row: 70, column: 7 } ); assert_eq!( "FFFBBBFRRR".parse::<Seat>().unwrap(), Seat { row: 14, column: 7 } ); assert_eq!( "BBFFBBFRLL".parse::<Seat>().unwrap(), Seat { row: 102, column: 4 } ); } fn main() { let seats = stdin_to_seats(); let mut seat_ids: Vec<_> = seats.iter().map(|n| n.seat_id()).collect(); seat_ids.sort(); println!("min is {}", seat_ids.first().unwrap()); println!("max is {}", seat_ids.last().unwrap()); let missing = seat_ids .iter() .zip(seat_ids.iter().skip(1)) .find(|(a, b)| **b - **a != 1) .unwrap(); println!("missing seat is {}", missing.0 + 1); } fn stdin_to_seats() -> Vec<Seat> { let mut seats = Vec::new(); let stdin = io::stdin(); for line in stdin.lock().lines() { let line = line.unwrap(); let seat = line.parse::<Seat>().unwrap(); seats.push(seat); } seats }
22.798077
75
0.444538
e9b9505280c87f13293fc92e4465329a91289e20
9,466
//! Elm code generator. // TODO: Fix lints and remove this. #![allow(clippy::write_literal)] use crate::{ast, Artifact, LibError, Spec}; use anyhow::Result; use inflector::cases::camelcase::to_camel_case; use std::io::{self, BufWriter}; use std::{ fs::{self, File}, io::Write, path::{Path, PathBuf}, }; const BACKEND_NAME: &str = "elm"; pub mod decoder_generation; pub mod encoder_generation; pub mod endpoint_generation; pub mod type_generation; pub(crate) struct IndentWriter { indent: usize, outstream: Box<dyn io::Write>, } impl IndentWriter { pub(crate) fn for_file(outdir: &Path, filename: &str) -> Result<Self, LibError> { let data_path = { let mut p = PathBuf::from(outdir); p.push(filename); p }; let outfile = File::create(&data_path).map_err(LibError::IoError)?; let outstream = BufWriter::new(outfile); Ok(Self { outstream: Box::new(outstream), indent: 0, }) } fn kill_indent(&mut self) { self.indent = 0; } fn increase_indent(&mut self) -> String { self.indent += 1; self.newline() } fn decrease_indent(&mut self) -> String { self.indent -= 1; self.newline() } fn tabs(&self) -> String { " ".repeat(self.indent) } fn newline(&self) -> String { format!("\n{}", self.tabs()) } fn start_line(&mut self) -> Result<&mut dyn io::Write, LibError> { write!(self.outstream, "\n{}", self.tabs())?; Ok(&mut self.outstream) } fn handle(&mut self) -> &mut dyn io::Write { &mut self.outstream } fn empty_lines(&mut self, num: usize) -> Result<(), LibError> { write!(self.outstream, "{}", "\n".repeat(num + 1))?; Ok(()) } } fn generate_doc_comment(_doc_comment: &Option<String>) -> String { // TODO: figure out escaping rules // match doc_comment { // Some(ref ds) => format!("{{-| {ds}\n-}}", ds = ds), // None => "".to_owned(), // } "".to_owned() } fn to_atom(s: String) -> String { if s.contains(' ') && !(s.starts_with('(') && s.ends_with(')')) { format!("({})", s) } else { s } } fn field_name(ident: &str) -> String { to_camel_case(ident) } pub struct Generator { module_prefix: String, 
_artifact: Artifact, } impl Generator { pub fn new(artifact: Artifact, module_prefix: String) -> Result<Self, LibError> { match artifact { Artifact::TypesOnly | Artifact::ClientEndpoints => Ok(Self { module_prefix, _artifact: artifact, }), Artifact::ServerEndpoints => Err(LibError::UnsupportedArtifact { artifact, backend: BACKEND_NAME, }), } } fn make_file(&self, _spec: &Spec, outdir: &Path, name: &str) -> Result<IndentWriter, LibError> { // TODO: populate mem filesystem or temp folder first, then make everything visible at once // to avoid partial write out on error let mut file = IndentWriter::for_file(outdir, &format!("{}.elm", name))?; // TODO: make module path prefix configurable write!( file.handle(), "module {}.{} exposing (..)", self.module_prefix, name.replace("/", ".") )?; file.empty_lines(2)?; // TODO: write timestamp and info that this file is generated Ok(file) } pub fn generate_user_defined_types(&self, spec: &Spec, outdir: &Path) -> Result<(), LibError> { { let mut builtin_dir = PathBuf::from(outdir); builtin_dir.push("BuiltIn"); fs::create_dir(builtin_dir)?; } { let mut file = self.make_file(spec, outdir, "BuiltIn/Bytes")?; write!( file.handle(), "{}", include_str!("./elm/builtin_type_bytes.elm"), )?; } { let mut file = self.make_file(spec, outdir, "BuiltIn/Uuid")?; write!( file.handle(), "{}", include_str!("./elm/builtin_type_uuid.elm"), )?; } let mut file = self.make_file(spec, outdir, "Data")?; write!( file.start_line()?, include_str!("./elm/preamble_types.elm"), module_prefix = self.module_prefix )?; file.empty_lines(2)?; for spec_item in spec.iter() { match spec_item { ast::SpecItem::StructDef(sdef) => { type_generation::generate_struct_def(sdef, &mut file)? } ast::SpecItem::EnumDef(edef) => { type_generation::generate_enum_def(edef, &mut file)? 
} ast::SpecItem::ServiceDef(_) => {} }; } Ok(()) } pub fn generate_decoders(&self, spec: &Spec, outdir: &Path) -> Result<(), LibError> { let mut file = self.make_file(spec, outdir, "Decode")?; write!( file.start_line()?, "import {}.Data exposing (..)", self.module_prefix )?; write!( file.start_line()?, include_str!("./elm/preamble_decoder.elm"), module_prefix = self.module_prefix )?; file.empty_lines(2)?; write!( file.handle(), "{}", decoder_generation::generate_type_decoders(spec) )?; Ok(()) } pub fn generate_encoders(&self, spec: &Spec, outdir: &Path) -> Result<(), LibError> { let mut file = self.make_file(spec, outdir, "Encode")?; write!( file.start_line()?, "import {}.Data exposing (..)", self.module_prefix )?; write!( file.start_line()?, include_str!("./elm/preamble_encoder.elm"), module_prefix = self.module_prefix )?; file.empty_lines(2)?; write!( file.handle(), "{}", encoder_generation::generate_struct_and_enum_encoders(spec) )?; Ok(()) } pub fn generate_endpoints(&self, spec: &Spec, outdir: &Path) -> Result<(), LibError> { { let mut service_dir = PathBuf::from(outdir); service_dir.push("Service"); fs::create_dir(service_dir)?; } { let mut file = self.make_file(spec, outdir, "ServiceBuiltIn")?; write!( file.handle(), "{}", include_str!("./elm/builtin_service.elm"), )?; } for spec_item in spec.iter() { match spec_item { ast::SpecItem::StructDef(..) | ast::SpecItem::EnumDef(..) 
=> {} ast::SpecItem::ServiceDef(service) => { let mut file = self.make_file(spec, outdir, &format!("Service/{}", service.name))?; write!( file.start_line()?, "import {}.Data as Ty", self.module_prefix )?; write!(file.start_line()?, "{}", "import Json.Decode as D")?; write!(file.start_line()?, "{}", "import Json.Encode as E")?; write!( file.start_line()?, "import {}.Encode as AE", self.module_prefix )?; write!( file.start_line()?, "import {}.Decode as AD", self.module_prefix )?; write!( file.start_line()?, "import {}.ServiceBuiltIn exposing (..)", self.module_prefix )?; write!(file.start_line()?, "import Url.Builder")?; write!(file.start_line()?, "{}", "import Http")?; write!( file.start_line()?, include_str!("./elm/preamble_service.elm"), module_prefix = self.module_prefix )?; file.empty_lines(2)?; endpoint_generation::generate(service, &mut file)?; } }; } Ok(()) } pub fn validate_output_dir(path: &Path) -> Result<(), LibError> { if !path.is_dir() { return Err(LibError::OutputMustBeFolder { backend: BACKEND_NAME, }); } let is_empty = path.read_dir().map_err(LibError::IoError)?.next().is_none(); if !is_empty { return Err(LibError::OutputFolderNotEmpty { backend: BACKEND_NAME, }); } Ok(()) } } impl crate::CodeGenerator for Generator { fn generate(&self, spec: &Spec, output: &Path) -> Result<(), LibError> { Self::validate_output_dir(&output)?; self.generate_user_defined_types(&spec, &output)?; self.generate_decoders(&spec, &output)?; self.generate_encoders(&spec, &output)?; self.generate_endpoints(&spec, &output)?; Ok(()) } }
29.397516
100
0.488696
cc9a9258fd703e401284bb25b3131a19c639cf0c
1,945
use std::marker::PhantomData; use super::FeatureIdentifier; use crate::feature::Feature; pub(super) mod transforms; pub(super) trait Transform<'a, TOutput> { fn transform(&self, input: &'a str) -> TOutput; } pub(super) struct Tokenize<'a, T: Transform<'a, TOutput>, TOutput> { transformer: T, _output: PhantomData<&'a TOutput>, } impl<'a> Tokenize<'a, transforms::feature::FeatureTransform, Feature<'a>> { pub fn new() -> Self { Self { transformer: transforms::feature::FeatureTransform {}, _output: PhantomData, } } } impl<'a> Tokenize<'a, transforms::identity::IdentityTransform, &'a str> { /// Return a Tokenize that does not return Features. Only useful for tests. fn identity() -> Self { Self { transformer: transforms::identity::IdentityTransform {}, _output: PhantomData, } } } impl<'a, T: Transform<'a, TOutput>, TOutput> Tokenize<'a, T, TOutput> { fn tokenize(&self, input: &'a str) -> Option<Vec<TOutput>> { let mut outputs = vec![]; for token in input.split_whitespace() { outputs.push(self.transformer.transform(token)); } Some(outputs) } } impl<'a, T: Transform<'a, Feature<'a>>> FeatureIdentifier<'a> for Tokenize<'a, T, Feature<'a>> { fn identify(&self, input: &'a str) -> Option<Vec<Feature<'a>>> { self.tokenize(input) } } #[cfg(test)] mod tests { use super::*; #[test] fn works_as_expected() { let tokens = Tokenize::identity() .tokenize(r#"12:42:53.546 INFO AppDelegate.loadSplashscreen():153 - Opening trackers"#); assert_eq!( tokens.unwrap(), vec![ "12:42:53.546", "INFO", "AppDelegate.loadSplashscreen():153", "-", "Opening", "trackers", ] ); } }
25.933333
100
0.559383
4b12eaceba14f49c7a0962f7ffd46ab579aaab51
8,634
//! Easy to use utilities for confirmations. use crate::api::{Eth, EthFilter, Namespace}; use crate::types::{Bytes, TransactionReceipt, TransactionRequest, H256, U64}; use crate::{error, Transport}; use futures::{Future, StreamExt}; use std::time::Duration; /// Checks whether an event has been confirmed. pub trait ConfirmationCheck { /// Future resolved when is known whether an event has been confirmed. type Check: Future<Output = error::Result<Option<U64>>>; /// Should be called to get future which resolves when confirmation state is known. fn check(&self) -> Self::Check; } impl<F, T> ConfirmationCheck for F where F: Fn() -> T, T: Future<Output = error::Result<Option<U64>>>, { type Check = T; fn check(&self) -> Self::Check { (*self)() } } /// Should be used to wait for confirmations pub async fn wait_for_confirmations<T, V, F>( eth: Eth<T>, eth_filter: EthFilter<T>, poll_interval: Duration, confirmations: usize, check: V, ) -> error::Result<()> where T: Transport, V: ConfirmationCheck<Check = F>, F: Future<Output = error::Result<Option<U64>>>, { let filter = eth_filter.create_blocks_filter().await?; // TODO #396: The stream should have additional checks. // * We should not continue calling next on a stream that has completed (has returned None). We expect this to never // happen for the blocks filter but to be safe we should handle this case for example by `fuse`ing the stream or // erroring when it does complete. // * We do not handle the case where the stream returns an error which means we are wrongly counting it as a // confirmation. let filter_stream = filter.stream(poll_interval).skip(confirmations); futures::pin_mut!(filter_stream); loop { let _ = filter_stream.next().await; if let Some(confirmation_block_number) = check.check().await? 
{ let block_number = eth.block_number().await?; if confirmation_block_number.low_u64() + confirmations as u64 <= block_number.low_u64() { return Ok(()); } } } } async fn transaction_receipt_block_number_check<T: Transport>(eth: &Eth<T>, hash: H256) -> error::Result<Option<U64>> { let receipt = eth.transaction_receipt(hash).await?; Ok(receipt.and_then(|receipt| receipt.block_number)) } async fn send_transaction_with_confirmation_<T: Transport>( hash: H256, transport: T, poll_interval: Duration, confirmations: usize, ) -> error::Result<TransactionReceipt> { let eth = Eth::new(transport.clone()); if confirmations > 0 { let confirmation_check = || transaction_receipt_block_number_check(&eth, hash); let eth_filter = EthFilter::new(transport.clone()); let eth = eth.clone(); wait_for_confirmations(eth, eth_filter, poll_interval, confirmations, confirmation_check).await?; } // TODO #397: We should remove this `expect`. No matter what happens inside the node, this shouldn't be a panic. let receipt = eth .transaction_receipt(hash) .await? 
.expect("receipt can't be null after wait for confirmations; qed"); Ok(receipt) } /// Sends transaction and returns future resolved after transaction is confirmed pub async fn send_transaction_with_confirmation<T>( transport: T, tx: TransactionRequest, poll_interval: Duration, confirmations: usize, ) -> error::Result<TransactionReceipt> where T: Transport, { let hash = Eth::new(&transport).send_transaction(tx).await?; send_transaction_with_confirmation_(hash, transport, poll_interval, confirmations).await } /// Sends raw transaction and returns future resolved after transaction is confirmed pub async fn send_raw_transaction_with_confirmation<T>( transport: T, tx: Bytes, poll_interval: Duration, confirmations: usize, ) -> error::Result<TransactionReceipt> where T: Transport, { let hash = Eth::new(&transport).send_raw_transaction(tx).await?; send_transaction_with_confirmation_(hash, transport, poll_interval, confirmations).await } #[cfg(test)] mod tests { use super::send_transaction_with_confirmation; use crate::rpc::Value; use crate::transports::test::TestTransport; use crate::types::{Address, TransactionReceipt, TransactionRequest, H256, U64}; use serde_json::json; use std::time::Duration; #[test] fn test_send_transaction_with_confirmation() { let mut transport = TestTransport::default(); let confirmations = 3; let transaction_request = TransactionRequest { from: Address::from_low_u64_be(0x123), to: Some(Address::from_low_u64_be(0x123)), gas: None, gas_price: Some(1.into()), value: Some(1.into()), data: None, nonce: None, condition: None, }; let transaction_receipt = TransactionReceipt { transaction_hash: H256::zero(), transaction_index: U64::zero(), block_hash: Some(H256::zero()), block_number: Some(2.into()), cumulative_gas_used: 0.into(), gas_used: Some(0.into()), contract_address: None, logs: vec![], status: Some(1.into()), root: Some(H256::zero()), logs_bloom: Default::default(), }; let poll_interval = Duration::from_secs(0); 
transport.add_response(Value::String( r#"0x0000000000000000000000000000000000000000000000000000000000000111"#.into(), )); transport.add_response(Value::String("0x123".into())); transport.add_response(Value::Array(vec![ Value::String(r#"0x0000000000000000000000000000000000000000000000000000000000000456"#.into()), Value::String(r#"0x0000000000000000000000000000000000000000000000000000000000000457"#.into()), ])); transport.add_response(Value::Array(vec![Value::String( r#"0x0000000000000000000000000000000000000000000000000000000000000458"#.into(), )])); transport.add_response(Value::Array(vec![Value::String( r#"0x0000000000000000000000000000000000000000000000000000000000000459"#.into(), )])); transport.add_response(Value::Null); transport.add_response(Value::Array(vec![ Value::String(r#"0x0000000000000000000000000000000000000000000000000000000000000460"#.into()), Value::String(r#"0x0000000000000000000000000000000000000000000000000000000000000461"#.into()), ])); transport.add_response(Value::Null); transport.add_response(json!(transaction_receipt)); transport.add_response(Value::String("0x6".into())); transport.add_response(json!(transaction_receipt)); transport.add_response(Value::Bool(true)); let confirmation = { let future = send_transaction_with_confirmation(&transport, transaction_request, poll_interval, confirmations); futures::executor::block_on(future) }; transport.assert_request("eth_sendTransaction", &[r#"{"from":"0x0000000000000000000000000000000000000123","gasPrice":"0x1","to":"0x0000000000000000000000000000000000000123","value":"0x1"}"#.into()]); transport.assert_request("eth_newBlockFilter", &[]); transport.assert_request("eth_getFilterChanges", &[r#""0x123""#.into()]); transport.assert_request("eth_getFilterChanges", &[r#""0x123""#.into()]); transport.assert_request("eth_getFilterChanges", &[r#""0x123""#.into()]); transport.assert_request( "eth_getTransactionReceipt", &[r#""0x0000000000000000000000000000000000000000000000000000000000000111""#.into()], ); 
transport.assert_request("eth_getFilterChanges", &[r#""0x123""#.into()]); transport.assert_request( "eth_getTransactionReceipt", &[r#""0x0000000000000000000000000000000000000000000000000000000000000111""#.into()], ); transport.assert_request( "eth_getTransactionReceipt", &[r#""0x0000000000000000000000000000000000000000000000000000000000000111""#.into()], ); transport.assert_request("eth_blockNumber", &[]); transport.assert_request( "eth_getTransactionReceipt", &[r#""0x0000000000000000000000000000000000000000000000000000000000000111""#.into()], ); transport.assert_no_more_requests(); assert_eq!(confirmation, Ok(transaction_receipt)); } }
40.345794
207
0.666551
4a8bfdf3570c4fa61c86bb6127aa12130d812994
5,842
use std::{ fmt::{self, Debug}, str::FromStr, }; use nom::{ bytes::complete::{take, take_while}, character::complete::char, combinator::{map, map_res, rest}, sequence::{preceded, tuple}, IResult, }; use nom_supreme::final_parser::{final_parser, Location}; use crate::md5::MD5Sum; /* fn from_hex(input: &str) -> Result<u8, std::num::ParseIntError> { u8::from_str_radix(input, 16) } fn is_hex_digit(c: char) -> bool { c.is_digit(16) } fn hex_primary(input: &str) -> IResult<&str, u8> { map_res(take_while_m_n(2, 2, is_hex_digit), from_hex)(input) } */ pub(crate) fn md5(input: &str) -> IResult<&str, MD5Sum> { map_res(take(32usize), MD5Sum::from_str)(input) } /// The line in the `[version]` section #[derive(Clone, Debug, PartialEq, Eq)] pub struct VersionLine { /// The version of this manifest pub version: u32, /// The hash of `version` as a string pub hash: MD5Sum, /// The name of this manifest /// /// This name in this field in `trunk.txt` is appended to the version on the loading screen. pub name: String, } impl fmt::Display for VersionLine { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{},{},{}", self.version, self.hash, self.name) } } impl VersionLine { /// Create a new version line pub fn new(version: u32, name: String) -> VersionLine { let hash = MD5Sum(md5::compute(version.to_string()).0); VersionLine { version, hash, name, } } /// Hash the version number pub fn hash_version(&self) -> MD5Sum { MD5Sum(md5::compute(self.version.to_string()).0) } /// Check whether the version and it's hash match pub fn verify(&self) -> bool { self.hash == self.hash_version() } } fn decimal(input: &str) -> IResult<&str, u32> { map_res(take_while(|c: char| c.is_ascii_digit()), str::parse)(input) } fn _version_line(input: &str) -> IResult<&str, VersionLine> { map( tuple(( decimal, preceded(char(','), md5), preceded(char(','), map(rest, String::from)), )), |(version, hash, name)| VersionLine { version, hash, name, }, )(input) } pub(crate) fn version_line(input: &str) 
-> Result<VersionLine, nom::error::Error<Location>> { final_parser(_version_line)(input) } /// Metadata for a single file pub struct FileMeta { /// Size of the file pub size: u32, /// md5sum of the file pub hash: MD5Sum, } /// One line in the `[files]` section /// /// This doesn't include the path, which is a key #[derive(Clone, Debug, PartialEq, Eq)] pub struct FileLine { /// Size of the file pub filesize: u32, /// md5sum of the file pub hash: MD5Sum, /// Size of the compressed file pub compressed_filesize: u32, /// md5sum of the compressed file pub compressed_hash: MD5Sum, /// Hash of the comma separated line pub line_hash: MD5Sum, } impl fmt::Display for FileLine { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "{},{},{},{},{}", self.filesize, self.hash, self.compressed_filesize, self.compressed_hash, self.line_hash ) } } impl FileLine { /// Get the (relative) patcher URL for this file pub fn to_path(&self) -> String { let hash = format!("{:?}", self.hash); let mut chars = hash.chars(); let c1 = chars.next().unwrap(); let c2 = chars.next().unwrap(); format!("{}/{}/{}.sd0", c1, c2, hash) } /// Create a new file line pub fn new(raw: FileMeta, compressed: FileMeta) -> Self { let text = format!( "{},{},{},{}", raw.size, raw.hash, compressed.size, compressed.hash ); Self { filesize: raw.size, hash: raw.hash, compressed_filesize: compressed.size, compressed_hash: compressed.hash, line_hash: MD5Sum(md5::compute(&text).0), } } } fn _file_line(input: &str) -> IResult<&str, (&str, FileLine)> { map( tuple(( take_while(|c: char| c != ','), preceded(char(','), decimal), preceded(char(','), md5), preceded(char(','), decimal), preceded(char(','), md5), preceded(char(','), md5), )), |(filename, filesize, hash, compressed_filesize, compressed_hash, line_hash)| { ( filename, FileLine { filesize, hash, compressed_filesize, compressed_hash, line_hash, }, ) }, )(input) } pub(crate) fn file_line(input: &str) -> Result<(&str, FileLine), 
nom::error::Error<Location>> { final_parser(_file_line)(input) } #[cfg(test)] mod tests { use super::{MD5Sum, VersionLine}; const BYTES: [u8; 16] = [ 0xe1, 0x77, 0x1d, 0x0f, 0x4c, 0x93, 0xe3, 0x27, 0xc6, 0x62, 0x1a, 0x0e, 0xf2, 0xe1, 0xbd, 0xce, ]; #[test] fn parse_md5() { assert_eq!( super::md5("e1771d0f4c93e327c6621a0ef2e1bdce"), Ok(("", MD5Sum(BYTES))) ); } #[test] fn parse_version_line() { let hash = MD5Sum([ 0x97, 0x78, 0xd5, 0xd2, 0x19, 0xc5, 0x08, 0x0b, 0x9a, 0x6a, 0x17, 0xbe, 0xf0, 0x29, 0x33, 0x1c, ]); assert_eq!( super::version_line("82,9778d5d219c5080b9a6a17bef029331c,0"), Ok(VersionLine { version: 82, hash, name: "0".into(), }) ); } }
25.849558
97
0.537145
56ed2eeceab11b2f78ccc8f5cfa2dc55e23b8e66
11,365
use std::fmt; use bytes::{Buf, IntoBuf}; use bytes::buf::{Chain, Take}; use iovec::IoVec; use crate::common::StaticBuf; use super::io::WriteBuf; /// Encoders to handle different Transfer-Encodings. #[derive(Debug, Clone, PartialEq)] pub struct Encoder { kind: Kind, is_last: bool, } #[derive(Debug)] pub struct EncodedBuf<B> { kind: BufKind<B>, } #[derive(Debug)] pub struct NotEof; #[derive(Debug, PartialEq, Clone)] enum Kind { /// An Encoder for when Transfer-Encoding includes `chunked`. Chunked, /// An Encoder for when Content-Length is set. /// /// Enforces that the body is not longer than the Content-Length header. Length(u64), /// An Encoder for when neither Content-Length nore Chunked encoding is set. /// /// This is mostly only used with HTTP/1.0 with a length. This kind requires /// the connection to be closed when the body is finished. CloseDelimited, } #[derive(Debug)] enum BufKind<B> { Exact(B), Limited(Take<B>), Chunked(Chain<Chain<ChunkSize, B>, StaticBuf>), ChunkedEnd(StaticBuf), } impl Encoder { fn new(kind: Kind) -> Encoder { Encoder { kind: kind, is_last: false, } } pub fn chunked() -> Encoder { Encoder::new(Kind::Chunked) } pub fn length(len: u64) -> Encoder { Encoder::new(Kind::Length(len)) } pub fn close_delimited() -> Encoder { Encoder::new(Kind::CloseDelimited) } pub fn is_eof(&self) -> bool { match self.kind { Kind::Length(0) => true, _ => false } } pub fn set_last(mut self, is_last: bool) -> Self { self.is_last = is_last; self } pub fn is_last(&self) -> bool { self.is_last } pub fn end<B>(&self) -> Result<Option<EncodedBuf<B>>, NotEof> { match self.kind { Kind::Length(0) => Ok(None), Kind::Chunked => Ok(Some(EncodedBuf { kind: BufKind::ChunkedEnd(StaticBuf(b"0\r\n\r\n")), })), _ => Err(NotEof), } } pub fn encode<B>(&mut self, msg: B) -> EncodedBuf<B::Buf> where B: IntoBuf, { let msg = msg.into_buf(); let len = msg.remaining(); debug_assert!(len > 0, "encode() called with empty buf"); let kind = match self.kind { Kind::Chunked => { 
trace!("encoding chunked {}B", len); let buf = ChunkSize::new(len) .chain(msg) .chain(StaticBuf(b"\r\n")); BufKind::Chunked(buf) }, Kind::Length(ref mut remaining) => { trace!("sized write, len = {}", len); if len as u64 > *remaining { let limit = *remaining as usize; *remaining = 0; BufKind::Limited(msg.take(limit)) } else { *remaining -= len as u64; BufKind::Exact(msg) } }, Kind::CloseDelimited => { trace!("close delimited write {}B", len); BufKind::Exact(msg) } }; EncodedBuf { kind, } } pub(super) fn encode_and_end<B>(&self, msg: B, dst: &mut WriteBuf<EncodedBuf<B::Buf>>) -> bool where B: IntoBuf, { let msg = msg.into_buf(); let len = msg.remaining(); debug_assert!(len > 0, "encode() called with empty buf"); match self.kind { Kind::Chunked => { trace!("encoding chunked {}B", len); let buf = ChunkSize::new(len) .chain(msg) .chain(StaticBuf(b"\r\n0\r\n\r\n")); dst.buffer(buf); !self.is_last }, Kind::Length(remaining) => { use std::cmp::Ordering; trace!("sized write, len = {}", len); match (len as u64).cmp(&remaining) { Ordering::Equal => { dst.buffer(msg); !self.is_last }, Ordering::Greater => { dst.buffer(msg.take(remaining as usize)); !self.is_last }, Ordering::Less => { dst.buffer(msg); false } } }, Kind::CloseDelimited => { trace!("close delimited write {}B", len); dst.buffer(msg); false } } } /// Encodes the full body, without verifying the remaining length matches. /// /// This is used in conjunction with Payload::__hyper_full_data(), which /// means we can trust that the buf has the correct size (the buf itself /// was checked to make the headers). 
pub(super) fn danger_full_buf<B>(self, msg: B, dst: &mut WriteBuf<EncodedBuf<B::Buf>>) where B: IntoBuf, { let msg = msg.into_buf(); debug_assert!(msg.remaining() > 0, "encode() called with empty buf"); debug_assert!(match self.kind { Kind::Length(len) => len == msg.remaining() as u64, _ => true, }, "danger_full_buf length mismatches"); match self.kind { Kind::Chunked => { let len = msg.remaining(); trace!("encoding chunked {}B", len); let buf = ChunkSize::new(len) .chain(msg) .chain(StaticBuf(b"\r\n0\r\n\r\n")); dst.buffer(buf); }, _ => { dst.buffer(msg); }, } } } impl<B> Buf for EncodedBuf<B> where B: Buf, { #[inline] fn remaining(&self) -> usize { match self.kind { BufKind::Exact(ref b) => b.remaining(), BufKind::Limited(ref b) => b.remaining(), BufKind::Chunked(ref b) => b.remaining(), BufKind::ChunkedEnd(ref b) => b.remaining(), } } #[inline] fn bytes(&self) -> &[u8] { match self.kind { BufKind::Exact(ref b) => b.bytes(), BufKind::Limited(ref b) => b.bytes(), BufKind::Chunked(ref b) => b.bytes(), BufKind::ChunkedEnd(ref b) => b.bytes(), } } #[inline] fn advance(&mut self, cnt: usize) { match self.kind { BufKind::Exact(ref mut b) => b.advance(cnt), BufKind::Limited(ref mut b) => b.advance(cnt), BufKind::Chunked(ref mut b) => b.advance(cnt), BufKind::ChunkedEnd(ref mut b) => b.advance(cnt), } } #[inline] fn bytes_vec<'t>(&'t self, dst: &mut [&'t IoVec]) -> usize { match self.kind { BufKind::Exact(ref b) => b.bytes_vec(dst), BufKind::Limited(ref b) => b.bytes_vec(dst), BufKind::Chunked(ref b) => b.bytes_vec(dst), BufKind::ChunkedEnd(ref b) => b.bytes_vec(dst), } } } #[cfg(target_pointer_width = "32")] const USIZE_BYTES: usize = 4; #[cfg(target_pointer_width = "64")] const USIZE_BYTES: usize = 8; // each byte will become 2 hex const CHUNK_SIZE_MAX_BYTES: usize = USIZE_BYTES * 2; #[derive(Clone, Copy)] struct ChunkSize { bytes: [u8; CHUNK_SIZE_MAX_BYTES + 2], pos: u8, len: u8, } impl ChunkSize { fn new(len: usize) -> ChunkSize { use std::fmt::Write; let mut size = 
ChunkSize { bytes: [0; CHUNK_SIZE_MAX_BYTES + 2], pos: 0, len: 0, }; write!(&mut size, "{:X}\r\n", len) .expect("CHUNK_SIZE_MAX_BYTES should fit any usize"); size } } impl Buf for ChunkSize { #[inline] fn remaining(&self) -> usize { (self.len - self.pos).into() } #[inline] fn bytes(&self) -> &[u8] { &self.bytes[self.pos.into() .. self.len.into()] } #[inline] fn advance(&mut self, cnt: usize) { assert!(cnt <= self.remaining()); self.pos += cnt as u8; // just asserted cnt fits in u8 } } impl fmt::Debug for ChunkSize { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("ChunkSize") .field("bytes", &&self.bytes[..self.len.into()]) .field("pos", &self.pos) .finish() } } impl fmt::Write for ChunkSize { fn write_str(&mut self, num: &str) -> fmt::Result { use std::io::Write; (&mut self.bytes[self.len.into()..]).write(num.as_bytes()) .expect("&mut [u8].write() cannot error"); self.len += num.len() as u8; // safe because bytes is never bigger than 256 Ok(()) } } impl<B: Buf> From<B> for EncodedBuf<B> { fn from(buf: B) -> Self { EncodedBuf { kind: BufKind::Exact(buf), } } } impl<B: Buf> From<Take<B>> for EncodedBuf<B> { fn from(buf: Take<B>) -> Self { EncodedBuf { kind: BufKind::Limited(buf), } } } impl<B: Buf> From<Chain<Chain<ChunkSize, B>, StaticBuf>> for EncodedBuf<B> { fn from(buf: Chain<Chain<ChunkSize, B>, StaticBuf>) -> Self { EncodedBuf { kind: BufKind::Chunked(buf), } } } #[cfg(test)] mod tests { use bytes::{BufMut}; use super::super::io::Cursor; use super::Encoder; #[test] fn chunked() { let mut encoder = Encoder::chunked(); let mut dst = Vec::new(); let msg1 = b"foo bar".as_ref(); let buf1 = encoder.encode(msg1); dst.put(buf1); assert_eq!(dst, b"7\r\nfoo bar\r\n"); let msg2 = b"baz quux herp".as_ref(); let buf2 = encoder.encode(msg2); dst.put(buf2); assert_eq!(dst, b"7\r\nfoo bar\r\nD\r\nbaz quux herp\r\n"); let end = encoder.end::<Cursor<Vec<u8>>>().unwrap().unwrap(); dst.put(end); assert_eq!(dst, b"7\r\nfoo bar\r\nD\r\nbaz quux 
herp\r\n0\r\n\r\n".as_ref()); } #[test] fn length() { let max_len = 8; let mut encoder = Encoder::length(max_len as u64); let mut dst = Vec::new(); let msg1 = b"foo bar".as_ref(); let buf1 = encoder.encode(msg1); dst.put(buf1); assert_eq!(dst, b"foo bar"); assert!(!encoder.is_eof()); encoder.end::<()>().unwrap_err(); let msg2 = b"baz".as_ref(); let buf2 = encoder.encode(msg2); dst.put(buf2); assert_eq!(dst.len(), max_len); assert_eq!(dst, b"foo barb"); assert!(encoder.is_eof()); assert!(encoder.end::<()>().unwrap().is_none()); } #[test] fn eof() { let mut encoder = Encoder::close_delimited(); let mut dst = Vec::new(); let msg1 = b"foo bar".as_ref(); let buf1 = encoder.encode(msg1); dst.put(buf1); assert_eq!(dst, b"foo bar"); assert!(!encoder.is_eof()); encoder.end::<()>().unwrap_err(); let msg2 = b"baz".as_ref(); let buf2 = encoder.encode(msg2); dst.put(buf2); assert_eq!(dst, b"foo barbaz"); assert!(!encoder.is_eof()); encoder.end::<()>().unwrap_err(); } }
26.93128
98
0.49934
2fe9333fa9920f5cc9a5a73cdcfd97ad527ccd6c
2,977
#[derive(Copy, Clone, Eq, PartialEq, Debug)] pub struct RiffChunk { id: [u8; 4], length: u32 } use std::fmt; impl fmt::Display for RiffChunk { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::ascii::escape_default as esc; write!(f, "chunk \"{}{}{}{}\", length: {}", esc(self.id[0]), esc(self.id[1]), esc(self.id[2]), esc(self.id[3]), self.length ) } } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub struct WavFormat { pub channels: u16, pub sample_rate: u32, pub bit_depth: u16 } impl fmt::Display for WavFormat { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{} channels, {} Hz, {}-bit", self.channels, self.sample_rate, self.bit_depth ) } } #[derive(Clone)] pub struct WavSample { pub format: WavFormat, pub data: Vec<u8> } impl fmt::Display for WavSample { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{}, {} samples", self.format, // num_bytes / bytes_per_sample self.data.len() / ((self.format.bit_depth / 8) * self.format.channels) as usize ) } } use byteorder::{LE, ReadBytesExt}; use std::io; impl RiffChunk { pub fn read_from<R: io::Read>(mut f: R) -> io::Result<RiffChunk> { let mut id = [0; 4]; f.read_exact(&mut id)?; let length = f.read_u32::<LE>()?; Ok(RiffChunk { id, length }) } } impl WavSample { pub fn read_from<R: io::Read>(mut f: R) -> io::Result<WavSample> { let riff = RiffChunk::read_from(&mut f)?; match &riff.id { b"RIFF" => {}, b"RIFX" => panic!("Cannot handle RIFX data!"), _ => panic!("Expected RIFF signature, found {}", riff) } let mut rfmt = [0; 4]; f.read_exact(&mut rfmt)?; assert_eq!(rfmt, *b"WAVE"); let fmt = RiffChunk::read_from(&mut f)?; assert_eq!(fmt.id, *b"fmt "); //assert_eq!(fmt.length, 16); let afmt = f.read_u16::<LE>()?; debug_assert!(afmt == 1); let channels = f.read_u16::<LE>()?; let samples = f.read_u32::<LE>()?; let _brate = f.read_u32::<LE>()?; let _balgn = f.read_u16::<LE>()?; let bits = f.read_u16::<LE>()?; let data = RiffChunk::read_from(&mut f)?; assert_eq!(data.id, *b"data"); 
let mut buf = vec![0; data.length as usize]; f.read_exact(&mut buf)?; Ok( WavSample { format: WavFormat { channels, sample_rate: samples, bit_depth: bits }, data: buf } ) } }
24.603306
91
0.472288
e2e8850af8f3da28e24c94de68d3e10d212c9ed0
3,396
use std::{net::SocketAddr, str::FromStr};

use anyhow::Context;
use axum::{
    extract::OriginalUri,
    handler::Handler,
    response::{Html, IntoResponse, Redirect},
    routing::get,
    AddExtensionLayer, Router,
};
use clap::{crate_authors, crate_name, crate_version};
use config::Config;
use http::{Method, StatusCode, Uri};
use tokio::{fs::File, io::AsyncReadExt};
use tokio_modbus::client::rtu;
use tokio_serial::SerialStream;
use tower::ServiceBuilder;
use tower_http::{cors::CorsLayer, trace::TraceLayer};
use tracing::{error, info, instrument, warn};

use crate::{api::api_routes, state::State};

mod api;
mod bus_state;
mod cli;
mod config;
mod model;
mod state;
mod swagger_ui;

#[cfg(test)]
mod tests;

/// Fallback handler for unmatched routes: logs the request (method + URI)
/// and answers with a static 404 page compiled into the binary.
async fn default_404(method: Method, original_uri: OriginalUri) -> impl IntoResponse {
    warn!(
        method = %method,
        uri = %original_uri.0,
        "HTTP request on unknown path"
    );
    (
        StatusCode::NOT_FOUND,
        Html(include_str!("resources/404.html")),
    )
}

/// Reads the TOML config file at `path` and parses it into a [`Config`].
///
/// Both the open and the read error paths carry human-readable context;
/// TOML parse errors are forwarded unchanged via `anyhow`.
#[instrument]
async fn load_config(path: &str) -> anyhow::Result<Config> {
    let mut config_string = String::new();
    File::open(path)
        .await
        .with_context(|| "Error opening the config file")?
        .read_to_string(&mut config_string)
        .await
        .with_context(|| "Error reading the config file")?;
    match toml::from_str::<Config>(&config_string) {
        Ok(config) => {
            info!(device_count = config.devices.len(), "read config");
            Ok(config)
        }
        Err(err) => Err(err.into()),
    }
}

/// Process entry point: parses the CLI, loads config, opens the Modbus RTU
/// serial connection, spawns the background device-state task, and serves
/// the HTTP API (single-threaded tokio runtime).
#[tokio::main(flavor = "current_thread")]
async fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::init();

    // CLI parse errors are wrapped as InvalidInput I/O errors before `?`.
    let params =
        cli::app().map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidInput, err))?;

    info!(include_str!("motd.txt"));

    let config = load_config(&params.config_path)
        .await
        .context("Could not load config")?;

    info!(
        version = crate_version!(),
        authors = crate_authors!(),
        http_port = %params.port,
        serial_path = %params.serial_path,
        serial_boud = %params.serial_boud,
        "starting {}",
        crate_name!()
    );

    // Open the serial port and wrap it in a Modbus RTU client context.
    let builder = tokio_serial::new(&params.serial_path, params.serial_boud);
    let port = SerialStream::open(&builder).context("Could not open the serial device")?;
    let modbus_ctx = rtu::connect(port).await?;

    let state = State::new(params.clone(), config, modbus_ctx)?;

    // Detached background task: presumably polls device state over the bus
    // (see bus_state module — confirm); failures are only logged, and the
    // JoinHandle is intentionally dropped.
    {
        let state = state.clone();
        let _join_handle = tokio::spawn(async move {
            if let Err(err) = state.bus_state().check_state_from_device(&state).await {
                error!("{:?}", err);
            }
        });
    }

    let cors = CorsLayer::permissive();

    // Middleware order: request tracing, shared State extension, then CORS.
    let middleware_stack = ServiceBuilder::new()
        .layer(TraceLayer::new_for_http())
        .layer(AddExtensionLayer::new(state))
        .layer(cors);

    let app = Router::new()
        .route(
            "/",
            // Root redirects straight to the bundled Swagger UI.
            get(|| async { Redirect::found(Uri::from_static("/api/swagger-ui/")) }),
        )
        .nest("/api", api_routes())
        .fallback(default_404.into_service())
        .layer(middleware_stack);

    // Bind on all interfaces (IPv6 any-address) at the configured port.
    let addr = SocketAddr::from_str(&format!("[::]:{}", params.port))?;
    axum::Server::bind(&addr)
        .serve(app.into_make_service())
        .await?;

    Ok(())
}
26.952381
94
0.603357
0eca257667213b74765d954f416b99e9436cb6f0
1,368
//! `NcSelector` widget. //! ╭──────────────────────────╮ //! │This is the primary header│ //! ╭──────────────────────this is the secondary header──────╮ //! │ ↑ │ //! │ option1 Long text #1 │ //! │ option2 Long text #2 │ //! │ option3 Long text #3 │ //! │ option4 Long text #4 │ //! │ option5 Long text #5 │ //! │ option6 Long text #6 │ //! │ ↓ │ //! ╰────────────────────────────────────here's the footer───╯ //! //! selection widget -- an ncplane with a title header and a body section. the //! body section supports infinite scrolling up and down. //! //! At all times, exactly one item is selected. use crate::c_api::ffi; mod builder; mod methods; pub use builder::NcSelectorBuilder; /// High-level widget for selecting one item from a set. pub type NcSelector = ffi::ncselector; /// Options structure for [`NcSelector`]. pub type NcSelectorOptions = ffi::ncselector_options; /// Item structure for [`NcSelector`]. pub type NcSelectorItem = ffi::ncselector_item;
39.085714
78
0.428363
1ec914a979aad5ed6d645ce22cd0055498f36b71
8,847
// Generated from definition io.k8s.api.extensions.v1beta1.DeploymentStatus
//
// NOTE: machine-generated code (see the line above). The hand-rolled serde
// impls below serialize/deserialize the struct with camelCase JSON keys and
// omit fields that are `None`; unknown keys are ignored on input.

/// DeploymentStatus is the most recently observed status of the Deployment.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct DeploymentStatus {
    /// Total number of available pods (ready for at least minReadySeconds) targeted by this deployment.
    pub available_replicas: Option<i32>,

    /// Count of hash collisions for the Deployment. The Deployment controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ReplicaSet.
    pub collision_count: Option<i32>,

    /// Represents the latest available observations of a deployment's current state.
    pub conditions: Option<Vec<crate::v1_14::api::extensions::v1beta1::DeploymentCondition>>,

    /// The generation observed by the deployment controller.
    pub observed_generation: Option<i64>,

    /// Total number of ready pods targeted by this deployment.
    pub ready_replicas: Option<i32>,

    /// Total number of non-terminated pods targeted by this deployment (their labels match the selector).
    pub replicas: Option<i32>,

    /// Total number of unavailable pods targeted by this deployment. This is the total number of pods that are still required for the deployment to have 100% available capacity. They may either be pods that are running but not yet available or pods that still have not been created.
    pub unavailable_replicas: Option<i32>,

    /// Total number of non-terminated pods targeted by this deployment that have the desired template spec.
    pub updated_replicas: Option<i32>,
}

impl<'de> serde::Deserialize<'de> for DeploymentStatus {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        // Internal key enum: maps each camelCase JSON key to a variant;
        // anything unrecognized falls into `Other` and is skipped.
        #[allow(non_camel_case_types)]
        enum Field {
            Key_available_replicas,
            Key_collision_count,
            Key_conditions,
            Key_observed_generation,
            Key_ready_replicas,
            Key_replicas,
            Key_unavailable_replicas,
            Key_updated_replicas,
            Other,
        }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;

                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;

                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        write!(f, "field identifier")
                    }

                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        Ok(match v {
                            "availableReplicas" => Field::Key_available_replicas,
                            "collisionCount" => Field::Key_collision_count,
                            "conditions" => Field::Key_conditions,
                            "observedGeneration" => Field::Key_observed_generation,
                            "readyReplicas" => Field::Key_ready_replicas,
                            "replicas" => Field::Key_replicas,
                            "unavailableReplicas" => Field::Key_unavailable_replicas,
                            "updatedReplicas" => Field::Key_updated_replicas,
                            _ => Field::Other,
                        })
                    }
                }

                deserializer.deserialize_identifier(Visitor)
            }
        }

        // Map visitor: every field is optional, so all accumulators start
        // as `None` and the struct is built from whatever keys were seen.
        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = DeploymentStatus;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                write!(f, "struct DeploymentStatus")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                let mut value_available_replicas: Option<i32> = None;
                let mut value_collision_count: Option<i32> = None;
                let mut value_conditions: Option<Vec<crate::v1_14::api::extensions::v1beta1::DeploymentCondition>> = None;
                let mut value_observed_generation: Option<i64> = None;
                let mut value_ready_replicas: Option<i32> = None;
                let mut value_replicas: Option<i32> = None;
                let mut value_unavailable_replicas: Option<i32> = None;
                let mut value_updated_replicas: Option<i32> = None;

                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_available_replicas => value_available_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_collision_count => value_collision_count = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_conditions => value_conditions = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_observed_generation => value_observed_generation = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_ready_replicas => value_ready_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_replicas => value_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_unavailable_replicas => value_unavailable_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_updated_replicas => value_updated_replicas = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }

                Ok(DeploymentStatus {
                    available_replicas: value_available_replicas,
                    collision_count: value_collision_count,
                    conditions: value_conditions,
                    observed_generation: value_observed_generation,
                    ready_replicas: value_ready_replicas,
                    replicas: value_replicas,
                    unavailable_replicas: value_unavailable_replicas,
                    updated_replicas: value_updated_replicas,
                })
            }
        }

        deserializer.deserialize_struct(
            "DeploymentStatus",
            &[
                "availableReplicas",
                "collisionCount",
                "conditions",
                "observedGeneration",
                "readyReplicas",
                "replicas",
                "unavailableReplicas",
                "updatedReplicas",
            ],
            Visitor,
        )
    }
}

impl serde::Serialize for DeploymentStatus {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        // Field count is the number of `Some` fields — `None`s are omitted.
        let mut state = serializer.serialize_struct(
            "DeploymentStatus",
            self.available_replicas.as_ref().map_or(0, |_| 1) +
            self.collision_count.as_ref().map_or(0, |_| 1) +
            self.conditions.as_ref().map_or(0, |_| 1) +
            self.observed_generation.as_ref().map_or(0, |_| 1) +
            self.ready_replicas.as_ref().map_or(0, |_| 1) +
            self.replicas.as_ref().map_or(0, |_| 1) +
            self.unavailable_replicas.as_ref().map_or(0, |_| 1) +
            self.updated_replicas.as_ref().map_or(0, |_| 1),
        )?;
        if let Some(value) = &self.available_replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "availableReplicas", value)?;
        }
        if let Some(value) = &self.collision_count {
            serde::ser::SerializeStruct::serialize_field(&mut state, "collisionCount", value)?;
        }
        if let Some(value) = &self.conditions {
            serde::ser::SerializeStruct::serialize_field(&mut state, "conditions", value)?;
        }
        if let Some(value) = &self.observed_generation {
            serde::ser::SerializeStruct::serialize_field(&mut state, "observedGeneration", value)?;
        }
        if let Some(value) = &self.ready_replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "readyReplicas", value)?;
        }
        if let Some(value) = &self.replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "replicas", value)?;
        }
        if let Some(value) = &self.unavailable_replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "unavailableReplicas", value)?;
        }
        if let Some(value) = &self.updated_replicas {
            serde::ser::SerializeStruct::serialize_field(&mut state, "updatedReplicas", value)?;
        }
        serde::ser::SerializeStruct::end(state)
    }
}
49.424581
283
0.588787
dd18d620f313cd8d4d7cef97a3d08ef9b6ace1f5
1,844
use gnuplot::{
    AutoOption, AxesCommon, Figure,
    PlotOption::{Caption, Color, LineWidth},
};
use naca::{NACAAirfoil, NACA4};

/// Renders NACA 4-digit airfoil plots to PNG files: camber `m` from 1–9 %,
/// camber position `p` from 0–99 %, fixed 12 % thickness. One worker thread
/// per camber value, each producing 100 images.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut workers = Vec::new();

    for i in 1..10 {
        // `i` is Copy, so the move closure captures its own copy; the
        // original redundant `let i = i;` rebinding has been removed.
        let handle = std::thread::spawn(move || {
            for j in 0..100 {
                // Sequential image number across all threads: 0..=899.
                let num = (i - 1) * 100 + j;
                println!("-- {}", num);

                let m = i as f32 / 100.0; // max camber, fraction of chord
                let p = j as f32 / 100.0; // camber position, fraction of chord
                let naca4 = NACA4 { m, p, t: 0.12 };

                // Sample the airfoil at 10001 chordwise stations; `all`
                // yields ((upper, lower), mean-line) point tuples.
                // (Renamed from `m` to avoid shadowing the camber value.)
                let ((upper, lower), mean): ((Vec<_>, Vec<_>), Vec<_>) = (0..=10000)
                    .map(|x| x as f32 / 10000.0)
                    .map(|x| naca4.all(x))
                    .unzip();
                let (xu, yu): (Vec<_>, Vec<_>) = upper.into_iter().unzip();
                let (xl, yl): (Vec<_>, Vec<_>) = lower.into_iter().unzip();
                let (xm, ym): (Vec<_>, Vec<_>) = mean.into_iter().unzip();

                let mut figure = Figure::new();
                figure
                    .axes2d()
                    .lines(xu, yu, &[Color("red"), LineWidth(2.5)])
                    .lines(xl, yl, &[Color("red"), LineWidth(2.5)])
                    .lines(xm, ym, &[Caption("Mid"), Color("purple"), LineWidth(2.5)])
                    .lines([0, 1], [0, 0], &[Color("black"), LineWidth(1.0)])
                    .set_aspect_ratio(AutoOption::Fix(0.5))
                    .set_y_range(AutoOption::Fix(-0.25), AutoOption::Fix(0.25))
                    .set_x_range(AutoOption::Fix(0.0), AutoOption::Fix(1.0));

                // Report failed renders instead of silently discarding the
                // error (previously swallowed with `.ok()`); rendering of
                // the remaining images continues either way.
                if let Err(err) = figure.save_to_png(format!("NACA{:0>4}.png", num), 1920, 1080) {
                    eprintln!("failed to save NACA{:0>4}.png: {:?}", num, err);
                }
            }
        });
        workers.push(handle);
    }

    for handle in workers {
        handle.join().unwrap();
    }

    Ok(())
}
37.632653
86
0.41269
e646a202f23ee68484ae89d8d425a0fa2d235f32
33,671
/* * Copyright 2018 Bitwise IO, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ----------------------------------------------------------------------------- */ //! The `receipts` module contains structs that supply information on the processing //! of `Transaction`s use protobuf::Message; use crate::protos; use crate::protos::{ FromBytes, FromNative, FromProto, IntoBytes, IntoNative, IntoProto, ProtoConversionError, }; use std::error::Error as StdError; use std::fmt; /// A change to be applied to state, in terms of keys and values. /// /// A `StateChange` represents the basic level of changes that can be applied to /// values in state. This covers the setting of a key/value pair, or the /// deletion of a key. #[derive(Eq, Hash, PartialEq)] pub enum StateChange { Set { key: String, value: Vec<u8> }, Delete { key: String }, } impl StateChange { /// Compares StateChanges based on the key, regardless of variant pub fn has_key(&self, k: &str) -> bool { if let StateChange::Set { key, .. 
} = self { key == k } else if let StateChange::Delete { key } = self { key == k } else { false } } } impl fmt::Debug for StateChange { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { StateChange::Set { key, value } => { f.write_str("StateChange{ ")?; write!(f, "key: {:?}, ", key)?; let value_len = value.len(); write!( f, "value: <{} byte{}>", value_len, if value_len == 1 { "" } else { "s" } )?; f.write_str(" }") } StateChange::Delete { key } => write!(f, "StateChange::Delete{{ key: {:?} }})", key), } } } impl Clone for StateChange { fn clone(&self) -> Self { match self { StateChange::Set { key, value } => StateChange::Set { key: key.clone(), value: value.clone(), }, StateChange::Delete { key } => StateChange::Delete { key: key.clone() }, } } } impl FromProto<protos::transaction_receipt::StateChange> for StateChange { fn from_proto( state_change: protos::transaction_receipt::StateChange, ) -> Result<Self, ProtoConversionError> { match state_change.get_field_type() { protos::transaction_receipt::StateChange_Type::SET => Ok(StateChange::Set { key: state_change.get_address().to_string(), value: state_change.get_value().to_vec(), }), protos::transaction_receipt::StateChange_Type::DELETE => Ok(StateChange::Delete { key: state_change.get_address().to_string(), }), protos::transaction_receipt::StateChange_Type::TYPE_UNSET => { Err(ProtoConversionError::InvalidTypeError( "Cannot convert StateChange with type unset. / StageChange type must be StateChange_Type::SET or StateChange_Type::DELETE." 
.to_string(), )) } } } } impl FromNative<StateChange> for protos::transaction_receipt::StateChange { fn from_native(state_change: StateChange) -> Result<Self, ProtoConversionError> { let mut proto_state_change = protos::transaction_receipt::StateChange::new(); match state_change { StateChange::Set { key, value } => { proto_state_change.set_value(value); proto_state_change.set_address(key); proto_state_change .set_field_type(protos::transaction_receipt::StateChange_Type::SET); Ok(proto_state_change) } StateChange::Delete { key } => { proto_state_change.set_address(key); proto_state_change .set_field_type(protos::transaction_receipt::StateChange_Type::DELETE); Ok(proto_state_change) } } } } impl FromBytes<StateChange> for StateChange { fn from_bytes(bytes: &[u8]) -> Result<StateChange, ProtoConversionError> { let proto: protos::transaction_receipt::StateChange = protobuf::parse_from_bytes(bytes) .map_err(|_| { ProtoConversionError::SerializationError( "Unable to get StateChange from bytes".to_string(), ) })?; proto.into_native() } } impl IntoBytes for StateChange { fn into_bytes(self) -> Result<Vec<u8>, ProtoConversionError> { let proto = self.into_proto()?; let bytes = proto.write_to_bytes().map_err(|_| { ProtoConversionError::SerializationError( "Unable to get bytes from StateChanger".to_string(), ) })?; Ok(bytes) } } impl IntoProto<protos::transaction_receipt::StateChange> for StateChange {} impl IntoNative<StateChange> for protos::transaction_receipt::StateChange {} /// A `TransactionReceipt` has the state changes associated with a valid transaction. #[derive(Debug, Clone, Eq, Hash, PartialEq)] pub struct TransactionReceipt { /// Updates to state that were generated by the transaction. pub state_changes: Vec<StateChange>, /// Events fired by this transaction. pub events: Vec<Event>, /// Transaction family defined data. 
pub data: Vec<Vec<u8>>, pub transaction_id: String, } impl FromProto<protos::transaction_receipt::TransactionReceipt> for TransactionReceipt { fn from_proto( transaction_receipt: protos::transaction_receipt::TransactionReceipt, ) -> Result<Self, ProtoConversionError> { Ok(TransactionReceipt { state_changes: transaction_receipt .get_state_changes() .to_vec() .into_iter() .map(StateChange::from_proto) .collect::<Result<Vec<StateChange>, ProtoConversionError>>()?, events: transaction_receipt .get_events() .to_vec() .into_iter() .map(Event::from_proto) .collect::<Result<Vec<Event>, ProtoConversionError>>()?, data: transaction_receipt.get_data().to_vec(), transaction_id: transaction_receipt.get_transaction_id().to_string(), }) } } impl FromNative<TransactionReceipt> for protos::transaction_receipt::TransactionReceipt { fn from_native(transaction_receipt: TransactionReceipt) -> Result<Self, ProtoConversionError> { let mut proto_transaction_receipt = protos::transaction_receipt::TransactionReceipt::new(); proto_transaction_receipt.set_state_changes( protobuf::RepeatedField::from_vec(transaction_receipt .state_changes .into_iter() .map(protos::transaction_receipt::StateChange::from_native) .collect::<Result<Vec<protos::transaction_receipt::StateChange>, ProtoConversionError>>()?, )); proto_transaction_receipt.set_events(protobuf::RepeatedField::from_vec( transaction_receipt .events .into_iter() .map(protos::events::Event::from_native) .collect::<Result<Vec<protos::events::Event>, ProtoConversionError>>()?, )); proto_transaction_receipt .set_data(protobuf::RepeatedField::from_vec(transaction_receipt.data)); proto_transaction_receipt.set_transaction_id(transaction_receipt.transaction_id); Ok(proto_transaction_receipt) } } impl FromBytes<TransactionReceipt> for TransactionReceipt { fn from_bytes(bytes: &[u8]) -> Result<TransactionReceipt, ProtoConversionError> { let proto: protos::transaction_receipt::TransactionReceipt = protobuf::parse_from_bytes(bytes).map_err(|_| { 
ProtoConversionError::SerializationError( "Unable to get TransactionReceipt from bytes".to_string(), ) })?; proto.into_native() } } impl IntoBytes for TransactionReceipt { fn into_bytes(self) -> Result<Vec<u8>, ProtoConversionError> { let proto = self.into_proto()?; let bytes = proto.write_to_bytes().map_err(|_| { ProtoConversionError::SerializationError( "Unable to get bytes from TransactionReceipt".to_string(), ) })?; Ok(bytes) } } impl IntoProto<protos::transaction_receipt::TransactionReceipt> for TransactionReceipt {} impl IntoNative<TransactionReceipt> for protos::transaction_receipt::TransactionReceipt {} /// An `Event` is metadata about a `Transaction`'s processing. Events are /// transmitted by `transact` but are not verified or saved to state. `Event`s are /// generated by any of the ways that `Transaction`s are processed in `transact`. #[derive(Debug, Clone, Eq, Hash, PartialEq)] pub struct Event { /// A human readable string that identifies this event /// Determined by the client. pub event_type: String, /// Human readable information about the transaction. 
pub attributes: Vec<(String, String)>, /// Opaque data, about the transaction, that the client can deserialize pub data: Vec<u8>, } impl FromProto<protos::events::Event> for Event { fn from_proto(event: protos::events::Event) -> Result<Self, ProtoConversionError> { Ok(Event { event_type: event.get_event_type().to_string(), attributes: event .get_attributes() .to_vec() .into_iter() .map(|attr| (attr.get_key().to_string(), attr.get_value().to_string())) .collect(), data: event.get_data().to_vec(), }) } } impl FromNative<Event> for protos::events::Event { fn from_native(event: Event) -> Result<Self, ProtoConversionError> { let mut proto_event = protos::events::Event::new(); proto_event.set_event_type(event.event_type); proto_event.set_attributes( event .attributes .iter() .map(|(key, value)| { let mut event_attr = protos::events::Event_Attribute::new(); event_attr.set_key(key.to_string()); event_attr.set_value(value.to_string()); event_attr }) .collect::<protobuf::RepeatedField<protos::events::Event_Attribute>>(), ); proto_event.set_data(event.data); Ok(proto_event) } } impl FromBytes<Event> for Event { fn from_bytes(bytes: &[u8]) -> Result<Event, ProtoConversionError> { let proto: protos::events::Event = protobuf::parse_from_bytes(bytes).map_err(|_| { ProtoConversionError::SerializationError( "Unable to get TransactionReceipt from bytes".to_string(), ) })?; proto.into_native() } } impl IntoBytes for Event { fn into_bytes(self) -> Result<Vec<u8>, ProtoConversionError> { let proto = self.into_proto()?; let bytes = proto.write_to_bytes().map_err(|_| { ProtoConversionError::SerializationError("Unable to get bytes from Event".to_string()) })?; Ok(bytes) } } impl IntoProto<protos::events::Event> for Event {} impl IntoNative<Event> for protos::events::Event {} #[derive(Debug)] pub enum EventBuilderError { MissingField(String), } impl StdError for EventBuilderError { fn description(&self) -> &str { match *self { EventBuilderError::MissingField(ref msg) => msg, } } fn 
cause(&self) -> Option<&dyn StdError> { match *self { EventBuilderError::MissingField(_) => None, } } } impl std::fmt::Display for EventBuilderError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match *self { EventBuilderError::MissingField(ref s) => write!(f, "MissingField: {}", s), } } } #[derive(Default, Clone)] pub struct EventBuilder { pub event_type: Option<String>, pub attributes: Vec<(String, String)>, pub data: Vec<u8>, } impl EventBuilder { pub fn new() -> Self { EventBuilder::default() } pub fn with_event_type(mut self, event_type: String) -> EventBuilder { self.event_type = Some(event_type); self } pub fn with_attributes(mut self, attributes: Vec<(String, String)>) -> EventBuilder { self.attributes = attributes; self } pub fn with_data(mut self, data: Vec<u8>) -> EventBuilder { self.data = data; self } pub fn build(self) -> Result<Event, EventBuilderError> { let event_type = self.event_type.ok_or_else(|| { EventBuilderError::MissingField("'event_type' field is required".to_string()) })?; Ok(Event { event_type, attributes: self.attributes, data: self.data, }) } } #[derive(Debug)] pub enum TransactionReceiptBuilderError { MissingField(String), } impl StdError for TransactionReceiptBuilderError { fn description(&self) -> &str { match *self { TransactionReceiptBuilderError::MissingField(ref msg) => msg, } } fn cause(&self) -> Option<&dyn StdError> { match *self { TransactionReceiptBuilderError::MissingField(_) => None, } } } impl std::fmt::Display for TransactionReceiptBuilderError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match *self { TransactionReceiptBuilderError::MissingField(ref s) => write!(f, "MissingField: {}", s), } } } #[derive(Default, Clone)] pub struct TransactionReceiptBuilder { pub state_changes: Vec<StateChange>, pub events: Vec<Event>, pub data: Vec<Vec<u8>>, pub transaction_id: Option<String>, } impl TransactionReceiptBuilder { pub fn new() -> Self { TransactionReceiptBuilder::default() } pub 
fn with_state_changes( mut self, state_changes: Vec<StateChange>, ) -> TransactionReceiptBuilder { self.state_changes = state_changes; self } pub fn with_events(mut self, events: Vec<Event>) -> TransactionReceiptBuilder { self.events = events; self } pub fn with_data(mut self, data: Vec<Vec<u8>>) -> TransactionReceiptBuilder { self.data = data; self } pub fn with_transaction_id(mut self, transaction_id: String) -> TransactionReceiptBuilder { self.transaction_id = Some(transaction_id); self } pub fn build(self) -> Result<TransactionReceipt, TransactionReceiptBuilderError> { let transaction_id = self.transaction_id.ok_or_else(|| { TransactionReceiptBuilderError::MissingField( "'transaction_id' field is required".to_string(), ) })?; Ok(TransactionReceipt { state_changes: self.state_changes, events: self.events, data: self.data, transaction_id, }) } } #[cfg(test)] mod tests { use super::*; #[cfg(feature = "sawtooth-compat")] use sawtooth_sdk; static ADDRESS: &str = "5b7349700e158b598043efd6d7610345a75a00b22ac14c9278db53f586179a92b72fbd"; static BYTES1: [u8; 4] = [0x01, 0x02, 0x03, 0x04]; static BYTES2: [u8; 4] = [0x05, 0x06, 0x07, 0x08]; static BYTES3: [u8; 4] = [0x09, 0x0a, 0x0b, 0x0c]; static EVENT_TYPE1: &str = "sawtooth/block-commit"; static EVENT_TYPE2: &str = "sawtooth/state-delta"; static ATTR1: (&str, &str) = ( "block_id", "f40b90d06b4a9074af2ab09e0187223da7466be75ec0f472 \ f2edd5f22960d76e402e6c07c90b7816374891d698310dd25d9b88dce7dbcba8219d9f7c9cae1861", ); static ATTR2: (&str, &str) = ("block_num", "3"); static ATTR3: (&str, &str) = ( "address", "5b7349700e158b598043efd6d7610345a75a00b22ac14c9278db53f586179a92b72fbd", ); static TRANSACTION_ID: &str = "24b168aaf5ea4a76a6c316924a1c26df0878908682ea5740dd70814e \ 7c400d56354dee788191be8e28393c70398906fb467fac8db6279e90e4e61619589d42bf"; pub fn make_event_1() -> Event { Event { event_type: EVENT_TYPE1.to_string(), attributes: vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), 
ATTR2.1.to_string()), ], data: BYTES2.to_vec(), } } pub fn make_event_2() -> Event { Event { event_type: EVENT_TYPE2.to_string(), attributes: vec![(ATTR3.0.to_string(), ATTR3.1.to_string())], data: BYTES3.to_vec(), } } #[test] fn state_change_fields() { let state_change_set = StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }; check_state_change(state_change_set); let state_change_delete = StateChange::Delete { key: ADDRESS.to_string(), }; check_state_change(state_change_delete); } fn check_state_change(state_change: StateChange) { match state_change { StateChange::Set { key, value } => { assert_eq!(ADDRESS, key); assert_eq!(BYTES1.to_vec(), value); } StateChange::Delete { key } => { assert_eq!(ADDRESS, key); } } } #[test] fn event_fields() { check_event(make_event_1()); } fn check_event(event: Event) { assert_eq!(EVENT_TYPE1, event.event_type); assert_eq!( (ATTR1.0.to_string(), ATTR1.1.to_string()), event.attributes[0] ); assert_eq!( (ATTR2.0.to_string(), ATTR2.1.to_string()), event.attributes[1] ); assert_eq!(BYTES2.to_vec(), event.data); } #[test] fn transaction_receipt_fields() { let transaction_receipt = TransactionReceipt { state_changes: vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ], events: vec![make_event_1(), make_event_2()], data: vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()], transaction_id: TRANSACTION_ID.to_string(), }; check_transaction_receipt(transaction_receipt) } #[test] // test that the transaction receipts can be converted into bytes and back correctly fn transaction_receipt_bytes() { let original = TransactionReceipt { state_changes: vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ], events: vec![make_event_1(), make_event_2()], data: vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()], transaction_id: TRANSACTION_ID.to_string(), }; let 
receipt_bytes = original.clone().into_bytes().unwrap(); let receipt = TransactionReceipt::from_bytes(&receipt_bytes).unwrap(); check_transaction_receipt(receipt.clone()); assert_eq!(original.transaction_id, receipt.transaction_id); } fn check_transaction_receipt(transaction_receipt: TransactionReceipt) { for state_change in transaction_receipt.state_changes { check_state_change(state_change) } assert_eq!( vec!(make_event_1(), make_event_2()), transaction_receipt.events ); assert_eq!( vec!(BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()), transaction_receipt.data ); } #[cfg(feature = "sawtooth-compat")] #[test] fn transaction_receipt_sawtooth10_compatibility() { let mut proto_transaction_receipt = sawtooth_sdk::messages::transaction_receipt::TransactionReceipt::new(); let mut proto_state_change_set = sawtooth_sdk::messages::transaction_receipt::StateChange::new(); proto_state_change_set.set_address(ADDRESS.to_string()); proto_state_change_set .set_field_type(sawtooth_sdk::messages::transaction_receipt::StateChange_Type::SET); proto_state_change_set.set_value(BYTES1.to_vec()); let mut proto_state_change_delete = sawtooth_sdk::messages::transaction_receipt::StateChange::new(); proto_state_change_delete.set_address(ADDRESS.to_string()); proto_state_change_delete .set_field_type(sawtooth_sdk::messages::transaction_receipt::StateChange_Type::DELETE); proto_transaction_receipt.set_state_changes(protobuf::RepeatedField::from_vec(vec![ proto_state_change_set, proto_state_change_delete, ])); let mut proto_event = sawtooth_sdk::messages::events::Event::new(); proto_event.set_event_type(EVENT_TYPE2.to_string()); let mut event_attr = sawtooth_sdk::messages::events::Event_Attribute::new(); event_attr.set_key(ATTR3.0.to_string()); event_attr.set_value(ATTR3.1.to_string()); proto_event.set_attributes(protobuf::RepeatedField::from_vec(vec![event_attr])); proto_event.set_data(BYTES3.to_vec()); 
proto_transaction_receipt.set_events(protobuf::RepeatedField::from_vec(vec![proto_event])); proto_transaction_receipt .set_data(protobuf::RepeatedField::from_vec(vec![BYTES2.to_vec()])); proto_transaction_receipt.set_transaction_id(TRANSACTION_ID.to_string()); let transaction_receipt_bytes = protobuf::Message::write_to_bytes(&proto_transaction_receipt).unwrap(); let proto: protos::transaction_receipt::TransactionReceipt = protobuf::parse_from_bytes(&transaction_receipt_bytes).unwrap(); let transaction_receipt: TransactionReceipt = proto.into_native().unwrap(); for state_change in transaction_receipt.state_changes { check_state_change(state_change) } assert_eq!(vec!(make_event_2()), transaction_receipt.events); assert_eq!(vec!(BYTES2.to_vec(),), transaction_receipt.data); } #[test] fn transaction_receipt_builder_chain() { let transaction_receipt = TransactionReceiptBuilder::new() .with_state_changes(vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ]) .with_events(vec![make_event_1(), make_event_2()]) .with_data(vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()]) .with_transaction_id(TRANSACTION_ID.to_string()) .build() .unwrap(); check_transaction_receipt(transaction_receipt) } #[test] fn transaction_receipt_builder_separate() { let mut transaction_receipt_builder = TransactionReceiptBuilder::new(); transaction_receipt_builder = transaction_receipt_builder.with_state_changes(vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ]); transaction_receipt_builder = transaction_receipt_builder.with_events(vec![make_event_1(), make_event_2()]); transaction_receipt_builder = transaction_receipt_builder.with_data(vec![ BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec(), ]); transaction_receipt_builder = transaction_receipt_builder.with_transaction_id(TRANSACTION_ID.to_string()); let transaction_receipt = 
transaction_receipt_builder.build().unwrap(); check_transaction_receipt(transaction_receipt) } #[test] fn event_builder_chain() { let event = EventBuilder::new() .with_event_type(EVENT_TYPE1.to_string()) .with_attributes(vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ]) .with_data(BYTES2.to_vec()) .build() .unwrap(); check_event(event); } #[test] // test that the transaction receipts can be converted into bytes and back correctly fn event_builder_chain_bytes() { let original = EventBuilder::new() .with_event_type(EVENT_TYPE1.to_string()) .with_attributes(vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ]) .with_data(BYTES2.to_vec()) .build() .unwrap(); let event_bytes = original.clone().into_bytes().unwrap(); let event = Event::from_bytes(&event_bytes).unwrap(); check_event(event); } #[test] fn event_builder_separate() { let mut event_builder = EventBuilder::new(); event_builder = event_builder.with_event_type(EVENT_TYPE1.to_string()); event_builder = event_builder.with_attributes(vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ]); event_builder = event_builder.with_data(BYTES2.to_vec()); let event = event_builder.build().unwrap(); check_event(event); } } #[cfg(all(feature = "nightly", test))] mod benchmarks { extern crate test; use super::tests::{make_event_1, make_event_2}; use super::*; use test::Bencher; static ADDRESS: &str = "5b7349700e158b598043efd6d7610345a75a00b22ac14c9278db53f586179a92b72fbd"; static BYTES1: [u8; 4] = [0x01, 0x02, 0x03, 0x04]; static BYTES2: [u8; 4] = [0x05, 0x06, 0x07, 0x08]; static BYTES3: [u8; 4] = [0x09, 0x0a, 0x0b, 0x0c]; static EVENT_TYPE1: &str = "sawtooth/block-commit"; static ATTR1: (&str, &str) = ( "block_id", "f40b90d06b4a9074af2ab09e0187223da7466be75ec0f472 \ f2edd5f22960d76e402e6c07c90b7816374891d698310dd25d9b88dce7dbcba8219d9f7c9cae1861", ); static ATTR2: (&str, &str) = ("block_num", "3"); 
static ATTR3: (&str, &str) = ( "address", "5b7349700e158b598043efd6d7610345a75a00b22ac14c9278db53f586179a92b72fbd", ); static TRANSACTION_ID: &str = "24b168aaf5ea4a76a6c316924a1c26df0878908682ea5740dd70814e \ 7c400d56354dee788191be8e28393c70398906fb467fac8db6279e90e4e61619589d42bf"; #[bench] fn bench_txn_receipt_creation(b: &mut Bencher) { b.iter(|| TransactionReceipt { state_changes: vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ], events: vec![make_event_1(), make_event_2()], data: vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()], transaction_id: TRANSACTION_ID.to_string(), }); } #[bench] fn bench_txn_receipt_builder(b: &mut Bencher) { let transaction_receipt = TransactionReceiptBuilder::new() .with_state_changes(vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ]) .with_events(vec![make_event_1(), make_event_2()]) .with_data(vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()]) .with_transaction_id(TRANSACTION_ID.to_string()); b.iter(|| transaction_receipt.clone().build()); } #[bench] fn bench_txn_receipt_into_native(b: &mut Bencher) { let mut proto_transaction_receipt = protos::transaction_receipt::TransactionReceipt::new(); let mut proto_state_change = protos::transaction_receipt::StateChange::new(); proto_state_change.set_address(ADDRESS.to_string()); proto_state_change.set_value(BYTES1.to_vec()); proto_state_change.set_field_type(protos::transaction_receipt::StateChange_Type::SET); proto_transaction_receipt .set_state_changes(protobuf::RepeatedField::from_vec(vec![proto_state_change])); let mut proto_event = protos::events::Event::new(); proto_event.set_event_type(EVENT_TYPE1.to_string()); let mut proto_event_attribute = protos::events::Event_Attribute::new(); proto_event_attribute.set_key(ATTR3.0.to_string()); proto_event_attribute.set_value(ATTR3.1.to_string()); 
proto_event.set_attributes(protobuf::RepeatedField::from_vec(vec![ proto_event_attribute, ])); proto_event.set_data(BYTES3.to_vec()); proto_transaction_receipt.set_events(protobuf::RepeatedField::from_vec(vec![proto_event])); proto_transaction_receipt .set_data(protobuf::RepeatedField::from_vec(vec![BYTES2.to_vec()])); proto_transaction_receipt.set_transaction_id(TRANSACTION_ID.to_string()); b.iter(|| proto_transaction_receipt.clone().into_native()); } #[bench] fn bench_txn_receipt_into_proto(b: &mut Bencher) { let transaction_receipt = TransactionReceipt { state_changes: vec![ StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }, StateChange::Delete { key: ADDRESS.to_string(), }, ], events: vec![make_event_1(), make_event_2()], data: vec![BYTES1.to_vec(), BYTES2.to_vec(), BYTES3.to_vec()], transaction_id: TRANSACTION_ID.to_string(), }; b.iter(|| transaction_receipt.clone().into_proto()); } #[bench] fn bench_state_change_into_native(b: &mut Bencher) { let mut proto_state_change = protos::transaction_receipt::StateChange::new(); proto_state_change.set_address(ADDRESS.to_string()); proto_state_change.set_value(BYTES1.to_vec()); proto_state_change.set_field_type(protos::transaction_receipt::StateChange_Type::SET); b.iter(|| proto_state_change.clone().into_native()); } #[bench] fn bench_state_change_into_proto(b: &mut Bencher) { let state_change_set = StateChange::Set { key: ADDRESS.to_string(), value: BYTES1.to_vec(), }; b.iter(|| state_change_set.clone().into_proto()); } #[bench] fn bench_event_creation(b: &mut Bencher) { b.iter(|| Event { event_type: EVENT_TYPE1.to_string(), attributes: vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ], data: BYTES2.to_vec(), }); } #[bench] fn bench_event_builder(b: &mut Bencher) { let event = EventBuilder::new() .with_event_type(EVENT_TYPE1.to_string()) .with_attributes(vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ]) 
.with_data(BYTES2.to_vec()); b.iter(|| event.clone().build()); } #[bench] fn bench_event_into_proto(b: &mut Bencher) { let event = Event { event_type: EVENT_TYPE1.to_string(), attributes: vec![ (ATTR1.0.to_string(), ATTR1.1.to_string()), (ATTR2.0.to_string(), ATTR2.1.to_string()), ], data: BYTES2.to_vec(), }; b.iter(|| event.clone().into_proto()); } #[bench] fn bench_event_into_native(b: &mut Bencher) { let mut proto_event = protos::events::Event::new(); proto_event.set_event_type(EVENT_TYPE1.to_string()); let mut proto_event_attribute = protos::events::Event_Attribute::new(); proto_event_attribute.set_key(ATTR3.0.to_string()); proto_event_attribute.set_value(ATTR3.1.to_string()); proto_event.set_attributes(protobuf::RepeatedField::from_vec(vec![ proto_event_attribute, ])); proto_event.set_data(BYTES3.to_vec()); b.iter(|| proto_event.clone().into_native()); } }
36.08896
107
0.602804
7596a7103fd6554c84a5c5cbfe52a106708cb27d
984
//! # Thread Local Storage Models
//!
//! A variable may be defined as thread_local, which means that it will
//! not be shared by threads (each thread will have a separated copy
//! of the variable). Not all targets support thread-local variables.
//! Optionally, a TLS model may be specified.
//!
//! If no explicit model is given, the “general dynamic” model is used.
//!
//! https://llvm.org/docs/LangRef.html#thread-local-storage-models

/// An explicit thread-local storage model, rendered with LLVM IR syntax
/// as `thread_local(<model>)` by the [`std::fmt::Display`] impl below.
///
/// Leaving the model out entirely (i.e. not using this type at all)
/// corresponds to LLVM's default “general dynamic” model, which is why
/// no variant exists for it here.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum ThreadLocalStorage {
    /// Rendered as `thread_local(localdynamic)`.
    LocalDynamic,
    /// Rendered as `thread_local(initialexec)`.
    InitialExec,
    /// Rendered as `thread_local(localexec)`.
    LocalExec,
}

impl std::fmt::Display for ThreadLocalStorage {
    // Emit the LLVM IR spelling of the model, e.g. `thread_local(localexec)`.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_str(match self {
            Self::LocalDynamic => "thread_local(localdynamic)",
            Self::InitialExec => "thread_local(initialexec)",
            Self::LocalExec => "thread_local(localexec)",
        })
    }
}
32.8
77
0.666667
481b3c7aa7f4822b139538e6498542fca1268f71
16,816
use { super::*, crate::{ app::{AppContext, LineNumber}, command::{ScrollCommand, move_sel}, display::{Screen, W}, errors::*, pattern::{InputPattern, NameMatch}, skin::PanelSkin, task_sync::Dam, }, crossterm::{ cursor, style::{Color, Print, SetBackgroundColor, SetForegroundColor}, QueueableCommand, }, memmap::Mmap, once_cell::sync::Lazy, std::{ fs::File, io::{BufRead, BufReader}, path::{Path, PathBuf}, str, }, syntect::highlighting::Style, termimad::{Area, CropWriter, SPACE_FILLING}, }; /// Homogeneously colored piece of a line #[derive(Debug)] pub struct Region { pub fg: Color, pub string: String, } /// when the file is bigger, we don't style it and we don't keep /// it in memory: we just keep the offsets of the lines in the /// file. const MAX_SIZE_FOR_STYLING: u64 = 2_000_000; impl Region { pub fn from_syntect(region: &(Style, &str)) -> Self { let fg = Color::Rgb { r: region.0.foreground.r, g: region.0.foreground.g, b: region.0.foreground.b, }; let string = region.1.to_string(); Self { fg, string } } } #[derive(Debug)] pub struct Line { pub number: LineNumber, // starting at 1 pub start: usize, // offset in the file, in bytes pub len: usize, // len in bytes pub regions: Vec<Region>, // not always computed pub name_match: Option<NameMatch>, } pub struct SyntacticView { pub path: PathBuf, pub pattern: InputPattern, lines: Vec<Line>, scroll: usize, page_height: usize, selection_idx: Option<usize>, // index in lines of the selection, if any total_lines_count: usize, // including lines not filtered out } impl SyntacticView { /// Return a prepared text view with syntax coloring if possible. /// May return Ok(None) only when a pattern is given and there /// was an event before the end of filtering. 
pub fn new( path: &Path, pattern: InputPattern, dam: &mut Dam, con: &AppContext, no_style: bool, ) -> Result<Option<Self>, ProgramError> { let mut sv = Self { path: path.to_path_buf(), pattern, lines: Vec::new(), scroll: 0, page_height: 0, selection_idx: None, total_lines_count: 0, }; if sv.read_lines(dam, con, no_style)? { sv.select_first(); Ok(Some(sv)) } else { Ok(None) } } /// Return true when there was no interruption fn read_lines( &mut self, dam: &mut Dam, con: &AppContext, no_style: bool, ) -> Result<bool, ProgramError> { let f = File::open(&self.path)?; { // if we detect the file isn't mappable, we'll // let the ZeroLenFilePreview try to read it let mmap = unsafe { Mmap::map(&f) }; if mmap.is_err() { return Err(ProgramError::UnmappableFile); } } let md = f.metadata()?; if md.len() == 0 { return Err(ProgramError::ZeroLenFile); } let with_style = !no_style && md.len() < MAX_SIZE_FOR_STYLING; let mut reader = BufReader::new(f); self.lines.clear(); let mut line = String::new(); self.total_lines_count = 0; let mut offset = 0; let mut number = 0; static SYNTAXER: Lazy<Syntaxer> = Lazy::new(Syntaxer::default); let mut highlighter = if with_style { SYNTAXER.highlighter_for(&self.path, con) } else { None }; let pattern = &self.pattern.pattern; while reader.read_line(&mut line)? > 0 { number += 1; self.total_lines_count += 1; let start = offset; offset += line.len(); for c in line.chars() { if !is_char_printable(c) { debug!("unprintable char: {:?}", c); return Err(ProgramError::UnprintableFile); } } // We don't remove '\n' or '\r' at this point because some syntax sets // need them for correct detection of comments. See #477 // Those chars are removed on printing if pattern.is_empty() || pattern.score_of_string(&line).is_some() { let name_match = pattern.search_string(&line); let regions = if let Some(highlighter) = highlighter.as_mut() { highlighter .highlight(&line, &SYNTAXER.syntax_set) .map_err(|e| ProgramError::SyntectCrashed { details: e.to_string() })? 
.iter() .map(Region::from_syntect) .collect() } else { Vec::new() }; self.lines.push(Line { regions, start, len: line.len(), name_match, number, }); } line.clear(); if dam.has_event() { info!("event interrupted preview filtering"); return Ok(false); } } Ok(true) } /// Give the count of lines which can be seen when scrolling, /// total count including filtered ones pub fn line_counts(&self) -> (usize, usize) { (self.lines.len(), self.total_lines_count) } fn ensure_selection_is_visible(&mut self) { if self.page_height >= self.lines.len() { self.scroll = 0; } else if let Some(idx) = self.selection_idx { let padding = self.padding(); if idx < self.scroll + padding || idx + padding > self.scroll + self.page_height { if idx <= padding { self.scroll = 0; } else if idx + padding > self.lines.len() { self.scroll = self.lines.len() - self.page_height; } else if idx < self.scroll + self.page_height / 2 { self.scroll = idx - padding; } else { self.scroll = idx + padding - self.page_height; } } } } fn padding(&self) -> usize { (self.page_height / 4).min(4) } pub fn get_selected_line(&self) -> Option<String> { self.selection_idx .and_then(|idx| self.lines.get(idx)) .and_then(|line| { File::open(&self.path) .and_then(|file| unsafe { Mmap::map(&file) }) .ok() .filter(|mmap| mmap.len() >= line.start + line.len) .and_then(|mmap| { String::from_utf8( (&mmap[line.start..line.start + line.len]).to_vec(), ).ok() }) }) } pub fn get_selected_line_number(&self) -> Option<LineNumber> { self.selection_idx .map(|idx| self.lines[idx].number) } pub fn unselect(&mut self) { self.selection_idx = None; } pub fn try_select_y(&mut self, y: u16) -> bool { let idx = y as usize + self.scroll; if idx < self.lines.len() { self.selection_idx = Some(idx); true } else { false } } pub fn select_first(&mut self) { if !self.lines.is_empty() { self.selection_idx = Some(0); self.scroll = 0; } } pub fn select_last(&mut self) { self.selection_idx = Some(self.lines.len() - 1); if self.page_height < self.lines.len() 
{ self.scroll = self.lines.len() - self.page_height; } } pub fn try_select_line_number(&mut self, number: LineNumber) -> bool { // this could obviously be optimized for (idx, line) in self.lines.iter().enumerate() { if line.number == number { self.selection_idx = Some(idx); self.ensure_selection_is_visible(); return true; } } false } pub fn move_selection(&mut self, dy: i32, cycle: bool) { if let Some(idx) = self.selection_idx { self.selection_idx = Some(move_sel(idx, self.lines.len(), dy, cycle)); } else if !self.lines.is_empty() { self.selection_idx = Some(0) } self.ensure_selection_is_visible(); } pub fn try_scroll( &mut self, cmd: ScrollCommand, ) -> bool { let old_scroll = self.scroll; self.scroll = cmd.apply(self.scroll, self.lines.len(), self.page_height); if let Some(idx) = self.selection_idx { if self.scroll == old_scroll { let old_selection = self.selection_idx; if cmd.is_up() { self.selection_idx = Some(0); } else { self.selection_idx = Some(self.lines.len() - 1); } return self.selection_idx == old_selection; } else if idx >= old_scroll && idx < old_scroll + self.page_height { if idx + self.scroll < old_scroll { self.selection_idx = Some(0); } else if idx + self.scroll - old_scroll >= self.lines.len() { self.selection_idx = Some(self.lines.len() - 1); } else { self.selection_idx = Some(idx + self.scroll - old_scroll); } } } self.scroll != old_scroll } pub fn display( &mut self, w: &mut W, _screen: Screen, panel_skin: &PanelSkin, area: &Area, con: &AppContext, ) -> Result<(), ProgramError> { if area.height as usize != self.page_height { self.page_height = area.height as usize; self.ensure_selection_is_visible(); } let max_number_len = self.lines.last().map_or(0, |l|l.number).to_string().len(); let show_line_number = area.width > 55 || ( self.pattern.is_some() && area.width > 8 ); let line_count = area.height as usize; let styles = &panel_skin.styles; let normal_fg = styles.preview.get_fg() .or_else(|| styles.default.get_fg()) 
.unwrap_or(Color::AnsiValue(252)); let normal_bg = styles.preview.get_bg() .or_else(|| styles.default.get_bg()) .unwrap_or(Color::AnsiValue(238)); let selection_bg = styles.selected_line.get_bg() .unwrap_or(Color::AnsiValue(240)); let match_bg = styles.preview_match.get_bg().unwrap_or(Color::AnsiValue(28)); let code_width = area.width as usize - 1; // 1 char left for scrollbar let scrollbar = area.scrollbar(self.scroll, self.lines.len()); let scrollbar_fg = styles.scrollbar_thumb.get_fg() .or_else(|| styles.preview.get_fg()) .unwrap_or(Color::White); for y in 0..line_count { w.queue(cursor::MoveTo(area.left, y as u16 + area.top))?; let mut cw = CropWriter::new(w, code_width); let line_idx = self.scroll as usize + y; let selected = self.selection_idx == Some(line_idx); let bg = if selected { selection_bg } else { normal_bg }; let mut op_mmap: Option<Mmap> = None; if let Some(line) = self.lines.get(line_idx) { let mut regions = &line.regions; let regions_ur; if regions.is_empty() && line.len > 0 { if op_mmap.is_none() { let file = File::open(&self.path)?; let mmap = unsafe { Mmap::map(&file)? 
}; op_mmap = Some(mmap); } if op_mmap.as_ref().unwrap().len() < line.start + line.len { warn!("file truncated since parsing"); } else { // an UTF8 error can only happen if file modified during display let string = String::from_utf8( // we copy the memmap slice, as it's not immutable (&op_mmap.unwrap()[line.start..line.start + line.len]).to_vec(), ) .unwrap_or_else(|_| "Bad UTF8".to_string()); regions_ur = vec![Region { fg: normal_fg, string, }]; regions = &regions_ur; } } cw.w.queue(SetBackgroundColor(bg))?; if show_line_number { cw.queue_g_string( &styles.preview_line_number, format!(" {:w$} ", line.number, w = max_number_len), )?; } else { cw.queue_unstyled_str(" ")?; } cw.w.queue(SetBackgroundColor(bg))?; if con.show_selection_mark { cw.queue_unstyled_char(if selected { '▶' } else { ' ' })?; } if let Some(nm) = &line.name_match { let mut dec = 0; let pos = &nm.pos; let mut pos_idx: usize = 0; for content in regions { let s = content.string.trim_end_matches(is_char_end_of_line); cw.w.queue(SetForegroundColor(content.fg))?; if pos_idx < pos.len() { for (cand_idx, cand_char) in s.chars().enumerate() { if pos_idx < pos.len() && pos[pos_idx] == cand_idx + dec { cw.w.queue(SetBackgroundColor(match_bg))?; cw.queue_unstyled_char(cand_char)?; cw.w.queue(SetBackgroundColor(bg))?; pos_idx += 1; } else { cw.queue_unstyled_char(cand_char)?; } } dec += s.chars().count(); } else { cw.queue_unstyled_str(s)?; } } } else { for content in regions { cw.w.queue(SetForegroundColor(content.fg))?; cw.queue_unstyled_str(content.string.trim_end_matches(is_char_end_of_line))?; } } } cw.fill( if selected { &styles.selected_line } else { &styles.preview }, &SPACE_FILLING, )?; w.queue(SetBackgroundColor(bg))?; if is_thumb(y + area.top as usize, scrollbar) { w.queue(SetForegroundColor(scrollbar_fg))?; w.queue(Print('▐'))?; } else { w.queue(Print(' '))?; } } Ok(()) } pub fn display_info( &mut self, w: &mut W, _screen: Screen, panel_skin: &PanelSkin, area: &Area, ) -> Result<(), ProgramError> { 
let width = area.width as usize; let mut s = if self.pattern.is_some() { format!("{}/{}", self.lines.len(), self.total_lines_count) } else { format!("{}", self.total_lines_count) }; if s.len() > width { return Ok(()); } if s.len() + "lines: ".len() < width { s = format!("lines: {}", s); } w.queue(cursor::MoveTo( area.left + area.width - s.len() as u16, area.top, ))?; panel_skin.styles.default.queue(w, s)?; Ok(()) } } fn is_thumb(y: usize, scrollbar: Option<(u16, u16)>) -> bool { scrollbar.map_or(false, |(sctop, scbottom)| { let y = y as u16; sctop <= y && y <= scbottom }) } /// Tell whether the character is normal enough to be displayed by the /// syntactic view (if not we'll use a hex view) fn is_char_printable(c: char) -> bool { // the tab is printable because it's replaced by spaces c == '\t' || c == '\n' || c == '\r' || !c.is_control() } fn is_char_end_of_line(c: char) -> bool { c == '\n' || c == '\r' }
35.931624
101
0.485728
f99420ab92e81a019f3a0133013c8bf2b3b15333
748
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "TestTanh.rs" #pragma rs_fp_relaxed // Don't edit this file! It is auto-generated by frameworks/rs/api/gen_runtime.
35.619048
80
0.740642
0965c22edb55cf5e16a2e76a3ca063f1acc56724
5,343
#![allow(clippy::unwrap_used)]
//! Tests for `CspCertificateChain` construction, ordering conventions and
//! conversion from an OpenSSL `StackRef<X509>`.
//!
//! Ordering conventions exercised below:
//! - `CspCertificateChain::new` takes certificates root-first, leaf-last;
//! - the OpenSSL stack passed to `try_from` is leaf-first, root-last.
use crate::tls_stub::cert_chain::{CspCertificateChain, CspCertificateChainCreationError};
use ic_crypto_test_utils::tls::x509_certificates::CertWithPrivateKey;
use openssl::stack::Stack;
use openssl::x509::{X509Ref, X509};
use rand::thread_rng;
use rand::Rng;
use std::convert::TryFrom;

#[test]
fn should_fail_to_create_empty_chain() {
    let err = CspCertificateChain::new(vec![]).unwrap_err();
    assert_eq!(err, CspCertificateChainCreationError::ChainEmpty);
}

#[test]
fn should_return_correct_root_and_leaf_for_chain_with_single_entry() {
    // A single certificate is simultaneously root and leaf.
    let cert = x509_cert();
    let certs = vec![cert.clone()];

    let cert_chain = CspCertificateChain::new(certs).unwrap();

    assert_eq_certs(cert_chain.root(), &cert);
    assert_eq_certs(cert_chain.leaf(), &cert);
    assert_eq!(cert_chain.chain().len(), 1);
}

#[test]
fn should_return_correct_root_and_leaf_for_chain_with_two_entries() {
    let (root, leaf, _) = three_x509_certs();
    let certs = vec![root.clone(), leaf.clone()];

    let cert_chain = CspCertificateChain::new(certs).unwrap();

    assert_eq_certs(cert_chain.root(), &root);
    assert_eq_certs(cert_chain.leaf(), &leaf);
    assert_eq!(cert_chain.chain().len(), 2);
}

#[test]
fn should_return_correct_root_and_leaf_for_chain_with_three_entries() {
    let (root, intermediate, leaf) = three_x509_certs();
    let certs = vec![root.clone(), intermediate.clone(), leaf.clone()];

    let cert_chain = CspCertificateChain::new(certs).unwrap();

    assert_eq_certs(cert_chain.root(), &root);
    assert_eq_certs(cert_chain.leaf(), &leaf);
    // Intermediates sit between root (index 0) and leaf (last index).
    assert_eq_certs(cert_chain.chain().get(1).unwrap(), &intermediate);
    assert_eq!(cert_chain.chain().len(), 3);
}

#[test]
fn should_correctly_convert_from_x509_stackref_with_single_entry() {
    let cert = x509_cert();
    let mut x509_stack = Stack::<X509>::new().unwrap();
    assert!(x509_stack.push(cert.clone()).is_ok());

    let cert_chain = CspCertificateChain::try_from(x509_stack.as_ref()).unwrap();

    assert_eq_certs(cert_chain.root(), &cert);
    assert_eq_certs(cert_chain.leaf(), &cert);
    assert_eq!(x509_stack.len(), cert_chain.chain().len());
}

#[test]
fn should_correctly_convert_from_x509_stackref_with_two_entries() {
    let (root, leaf, _) = three_x509_certs();
    // Build a stack in OpenSSL order (leaf-first) and double-check that
    // iteration yields the certificates in the order they were pushed.
    let x509_stack = {
        let mut x509_stack = Stack::<X509>::new().unwrap();
        assert!(x509_stack.push(leaf.clone()).is_ok());
        assert!(x509_stack.push(root.clone()).is_ok());
        let mut x509_stack_iter = x509_stack.iter();
        assert_eq_cert_refs(x509_stack_iter.next().unwrap(), &leaf);
        assert_eq_cert_refs(x509_stack_iter.next().unwrap(), &root);
        assert!(x509_stack_iter.next().is_none());
        x509_stack
    };

    let cert_chain = CspCertificateChain::try_from(x509_stack.as_ref()).unwrap();

    assert_eq_certs(cert_chain.root(), &root);
    assert_eq_certs(cert_chain.leaf(), &leaf);
    assert_eq!(x509_stack.len(), cert_chain.chain().len());
}

#[test]
fn should_correctly_convert_from_x509_stackref_with_three_entries() {
    let (root, intermediate, leaf) = three_x509_certs();
    // Build a stack in OpenSSL order (leaf-first) and double-check that
    // iteration yields the certificates in the order they were pushed.
    let x509_stack = {
        let mut x509_stack = Stack::<X509>::new().unwrap();
        assert!(x509_stack.push(leaf.clone()).is_ok());
        assert!(x509_stack.push(intermediate.clone()).is_ok());
        assert!(x509_stack.push(root.clone()).is_ok());
        let mut x509_stack_iter = x509_stack.iter();
        assert_eq_cert_refs(x509_stack_iter.next().unwrap(), &leaf);
        assert_eq_cert_refs(x509_stack_iter.next().unwrap(), &intermediate);
        assert_eq_cert_refs(x509_stack_iter.next().unwrap(), &root);
        assert!(x509_stack_iter.next().is_none());
        x509_stack
    };

    let cert_chain = CspCertificateChain::try_from(x509_stack.as_ref()).unwrap();

    assert_eq_certs(cert_chain.root(), &root);
    assert_eq_certs(cert_chain.leaf(), &leaf);
    assert_eq_certs(cert_chain.chain().get(1).unwrap(), &intermediate);
    assert_eq!(x509_stack.len(), cert_chain.chain().len());
}

#[test]
fn should_fail_to_convert_from_x509_stackref_if_empty() {
    let x509_stack = Stack::<X509>::new().unwrap();
    let err = CspCertificateChain::try_from(x509_stack.as_ref()).unwrap_err();
    assert_eq!(err, CspCertificateChainCreationError::ChainEmpty);
}

/// Generates a self-signed Ed25519 certificate with a random numeric CN,
/// so repeated calls produce distinct certificates.
fn x509_cert() -> X509 {
    CertWithPrivateKey::builder()
        .cn(format!("{}", thread_rng().gen::<u64>()))
        .build_ed25519()
        .x509()
}

/// Generates three certificates and asserts they are pairwise distinct.
fn three_x509_certs() -> (X509, X509, X509) {
    let c1 = x509_cert();
    let c2 = x509_cert();
    let c3 = x509_cert();
    assert_ne_certs(&c1, &c2);
    assert_ne_certs(&c1, &c3);
    assert_ne_certs(&c2, &c3);
    (c1, c2, c3)
}

/// Asserts two certificates have identical DER encodings.
fn assert_eq_certs(c1: &X509, c2: &X509) {
    // `X509` derefs to `X509Ref`, so delegate instead of duplicating the
    // DER comparison logic.
    assert_eq_cert_refs(c1, c2);
}

/// Asserts two certificate references have identical DER encodings.
fn assert_eq_cert_refs(c1: &X509Ref, c2: &X509Ref) {
    let c1_der = c1.to_der().expect("failed to encode as DER");
    let c2_der = c2.to_der().expect("failed to encode as DER");
    assert_eq!(c1_der, c2_der);
}

/// Asserts two certificates have different DER encodings.
fn assert_ne_certs(c1: &X509, c2: &X509) {
    let c1_der = c1.to_der().expect("failed to encode as DER");
    let c2_der = c2.to_der().expect("failed to encode as DER");
    assert_ne!(c1_der, c2_der);
}
34.031847
89
0.683698
4a01ada3f11bfca91a990c89f30614f0399b4315
2,547
//! When MSRV is 1.48, replace with `core::future::Ready` and `core::future::ready()`.

use core::{
    future::Future,
    pin::Pin,
    task::{Context, Poll},
};

/// Future for the [`ready`](ready()) function.
///
/// Panic will occur if polled more than once.
///
/// # Examples
/// ```
/// use actix_utils::future::ready;
///
/// // async
/// # async fn run() {
/// let a = ready(1);
/// assert_eq!(a.await, 1);
/// # }
///
/// // sync
/// let a = ready(1);
/// assert_eq!(a.into_inner(), 1);
/// ```
#[derive(Debug, Clone)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Ready<T> {
    // `Some` until the value is consumed by `poll` or `into_inner`.
    value: Option<T>,
}

impl<T> Ready<T> {
    /// Unwraps the value from this immediately ready future.
    #[inline]
    pub fn into_inner(mut self) -> T {
        self.value.take().unwrap()
    }
}

// No self-references are stored, so the future is freely movable after pinning.
impl<T> Unpin for Ready<T> {}

impl<T> Future for Ready<T> {
    type Output = T;

    #[inline]
    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<T> {
        // Safe to unpin: `Ready<T>: Unpin` is implemented above.
        let value = self.get_mut().value.take();
        Poll::Ready(value.expect("Ready polled after completion"))
    }
}

/// Creates a future that is immediately ready with a value.
///
/// # Examples
/// ```no_run
/// use actix_utils::future::ready;
///
/// # async fn run() {
/// let a = ready(1);
/// assert_eq!(a.await, 1);
/// # }
///
/// // sync
/// let a = ready(1);
/// assert_eq!(a.into_inner(), 1);
/// ```
pub fn ready<T>(val: T) -> Ready<T> {
    Ready { value: Some(val) }
}

/// Creates a future that is immediately ready with a success value.
///
/// # Examples
/// ```no_run
/// use actix_utils::future::ok;
///
/// # async fn run() {
/// let a = ok::<_, ()>(1);
/// assert_eq!(a.await, Ok(1));
/// # }
/// ```
pub fn ok<T, E>(val: T) -> Ready<Result<T, E>> {
    // Delegate to `ready` so the wrapping logic lives in one place.
    ready(Ok(val))
}

/// Creates a future that is immediately ready with an error value.
///
/// # Examples
/// ```no_run
/// use actix_utils::future::err;
///
/// # async fn run() {
/// let a = err::<(), _>(1);
/// assert_eq!(a.await, Err(1));
/// # }
/// ```
pub fn err<T, E>(err: E) -> Ready<Result<T, E>> {
    ready(Err(err))
}

#[cfg(test)]
mod tests {
    use futures_util::task::noop_waker;

    use super::*;

    #[test]
    #[should_panic]
    fn multiple_poll_panics() {
        let waker = noop_waker();
        let mut cx = Context::from_waker(&waker);

        let mut ready = ready(1);
        assert_eq!(Pin::new(&mut ready).poll(&mut cx), Poll::Ready(1));

        // panic!
        let _ = Pin::new(&mut ready).poll(&mut cx);
    }
}
20.707317
86
0.542207
d6c907dac5c3f38c896679570719c2ffc41930e4
1,880
use ramhorns::{Template, Content};

/// Criterion benchmark: render a `size` x `size` table of integers through
/// the `SOURCE` template. The table and the compiled template are built
/// once outside the measured closure, so only `Template::render` is timed.
pub fn big_table(b: &mut criterion::Bencher<'_>, size: &usize) {
    let mut table = Vec::with_capacity(*size);
    for _ in 0..*size {
        let mut inner = Vec::with_capacity(*size);
        for i in 0..*size {
            inner.push(i);
        }
        table.push(inner);
    }
    let tpl = Template::new(SOURCE).unwrap();
    let ctx = BigTable { table };
    b.iter(|| {
        tpl.render(&ctx)
    });
}

/// Render context for [`big_table`]: a square table of integers.
#[derive(Content)]
struct BigTable {
    table: Vec<Vec<usize>>,
}

// Mustache-style template: `{{#table}}` iterates the rows and `{{#.}}`
// iterates the cells of each row.
static SOURCE: &'static str = "<html>
{{#table}}
<tr>{{#.}}<td>{{.}}</td>{{/.}}</tr>
{{/table}}
</html>";

/// Criterion benchmark: render a small, fixed league-table context through
/// `TEAMS_TEMPLATE`. Only `Template::render` is timed.
pub fn teams(b: &mut criterion::Bencher<'_>) {
    let tpl = Template::new(TEAMS_TEMPLATE).unwrap();
    let teams = Teams {
        year: 2015,
        teams: vec![
            Team {
                name: "Jiangsu".into(),
                class: "champion".into(),
                score: 43,
            },
            Team {
                name: "Beijing".into(),
                class: String::new(),
                score: 27,
            },
            Team {
                name: "Guangzhou".into(),
                class: String::new(),
                score: 22,
            },
            Team {
                name: "Shandong".into(),
                class: String::new(),
                score: 12,
            },
        ],
    };
    b.iter(|| {
        tpl.render(&teams)
    });
}

/// Render context for [`teams`].
#[derive(Content)]
struct Teams {
    year: u16,
    teams: Vec<Team>,
}

/// One row of the league table; `class` becomes the list item's CSS class
/// ("champion" for the first team, empty otherwise).
#[derive(Content)]
struct Team {
    name: String,
    class: String,
    score: u8,
}

static TEAMS_TEMPLATE: &'static str = "<html>
<head>
<title>{{year}}</title>
</head>
<body>
<h1>CSL {{year}}</h1>
<ul>
{{#teams}}
<li class=\"{{class}}\">
<b>{{name}}</b>: {{score}}
</li>
{{/teams}}
</ul>
</body>
</html>";
20.888889
64
0.439362
fbe02f34b770e495625e8d4ebc8be0bfc75c33f3
265
use std::rc::Rc;

/// A textual description of some content.
///
/// The string is wrapped in an `Rc`, so cloning a `Description` only bumps
/// a reference count instead of copying the text.
#[derive(Debug, Clone)]
pub struct Description(pub Rc<String>);

/// A log view that borrows its backing [`ContentProvider`].
// NOTE(review): `provider` is never read in this file; presumably other
// modules use it — confirm before treating it as dead code.
pub struct ContentLog<'a> {
    provider: &'a dyn ContentProvider,
}

/// A source of content that can describe itself and expose a log view.
pub trait ContentProvider {
    /// Returns a description of the content, or `None` if unavailable.
    fn description(&self) -> Option<Description>;

    /// Returns a [`ContentLog`] borrowing from this provider
    /// (the elided lifetime ties the log to `&self`).
    fn log(&self) -> ContentLog;
}
20.384615
49
0.679245
ed138210e4748d16e7632bf4565d4ac6863b56e0
1,464
use std::path::Path; use super::{CliError, LalResult, Lockfile, Manifest}; use crate::storage::CachedBackend; /// Saves current build `./OUTPUT` to the local cache under a specific name /// /// This tars up `/OUTPUT` similar to how `build` is generating a tarball, /// then copies this to `~/.lal/cache/stash/${name}/`. /// /// This file can then be installed via `update` using a component=${name} argument. pub fn stash(component_dir: &Path, backend: &dyn CachedBackend, mf: &Manifest, name: &str) -> LalResult<()> { info!("Stashing OUTPUT into cache under {}/{}", mf.name, name); // sanity: verify name does NOT parse as a u32 if let Ok(n) = name.parse::<u32>() { return Err(CliError::InvalidStashName(n)); } let outputdir = component_dir.join("./OUTPUT"); if !outputdir.is_dir() { return Err(CliError::MissingBuild); } // convenience edit for lal status here: // we edit the lockfile's version key to be "${stashname}" // rather than the ugly colony default of "EXPERIMENTAL-${hex}" // stashed builds are only used locally so this allows easier inspection // full version list is available in `lal ls -f` let lf_path = component_dir.join("OUTPUT").join("lockfile.json"); let mut lf = Lockfile::from_path(&lf_path, &mf.name)?; lf.version = name.to_string(); lf.write(&lf_path)?; // main operation: backend.stash_output(&component_dir, &mf.name, name)?; Ok(()) }
36.6
109
0.659836
678d837a7efcb52098f367100d056c70b4f3159e
12,806
//! Navigate an endless amount of content with a scrollbar. use crate::{ column, input::{mouse, ButtonState}, layout, Align, Column, Element, Event, Hasher, Layout, Length, Point, Rectangle, Size, Widget, }; use std::{f32, hash::Hash, u32}; /// A widget that can vertically display an infinite amount of content with a /// scrollbar. #[allow(missing_debug_implementations)] pub struct Scrollable<'a, Message, Renderer> { state: &'a mut State, height: Length, max_height: u32, content: Column<'a, Message, Renderer>, } impl<'a, Message, Renderer> Scrollable<'a, Message, Renderer> { /// Creates a new [`Scrollable`] with the given [`State`]. /// /// [`Scrollable`]: struct.Scrollable.html /// [`State`]: struct.State.html pub fn new(state: &'a mut State) -> Self { Scrollable { state, height: Length::Shrink, max_height: u32::MAX, content: Column::new(), } } /// Sets the vertical spacing _between_ elements. /// /// Custom margins per element do not exist in Iced. You should use this /// method instead! While less flexible, it helps you keep spacing between /// elements consistent. pub fn spacing(mut self, units: u16) -> Self { self.content = self.content.spacing(units); self } /// Sets the padding of the [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html pub fn padding(mut self, units: u16) -> Self { self.content = self.content.padding(units); self } /// Sets the width of the [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html pub fn width(mut self, width: Length) -> Self { self.content = self.content.width(width); self } /// Sets the height of the [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html pub fn height(mut self, height: Length) -> Self { self.height = height; self } /// Sets the maximum width of the [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html pub fn max_width(mut self, max_width: u32) -> Self { self.content = self.content.max_width(max_width); self } /// Sets the maximum height of the [`Scrollable`] in pixels. 
/// /// [`Scrollable`]: struct.Scrollable.html pub fn max_height(mut self, max_height: u32) -> Self { self.max_height = max_height; self } /// Sets the horizontal alignment of the contents of the [`Scrollable`] . /// /// [`Scrollable`]: struct.Scrollable.html pub fn align_items(mut self, align_items: Align) -> Self { self.content = self.content.align_items(align_items); self } /// Adds an element to the [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html pub fn push<E>(mut self, child: E) -> Self where E: Into<Element<'a, Message, Renderer>>, { self.content = self.content.push(child); self } } impl<'a, Message, Renderer> Widget<Message, Renderer> for Scrollable<'a, Message, Renderer> where Renderer: self::Renderer + column::Renderer, { fn width(&self) -> Length { Length::Fill } fn height(&self) -> Length { self.height } fn layout( &self, renderer: &Renderer, limits: &layout::Limits, ) -> layout::Node { let limits = limits .max_height(self.max_height) .width(Length::Fill) .height(self.height); let child_limits = layout::Limits::new( Size::new(limits.min().width, 0.0), Size::new(limits.max().width, f32::INFINITY), ); let content = self.content.layout(renderer, &child_limits); let size = limits.resolve(content.size()); layout::Node::with_children(size, vec![content]) } fn on_event( &mut self, event: Event, layout: Layout<'_>, cursor_position: Point, messages: &mut Vec<Message>, renderer: &Renderer, ) { let bounds = layout.bounds(); let is_mouse_over = bounds.contains(cursor_position); let content = layout.children().next().unwrap(); let content_bounds = content.bounds(); let is_mouse_over_scrollbar = renderer.is_mouse_over_scrollbar( bounds, content_bounds, cursor_position, ); // TODO: Event capture. Nested scrollables should capture scroll events. if is_mouse_over { match event { Event::Mouse(mouse::Event::WheelScrolled { delta }) => { match delta { mouse::ScrollDelta::Lines { y, .. } => { // TODO: Configurable speed (?) 
self.state.scroll(y * 60.0, bounds, content_bounds); } mouse::ScrollDelta::Pixels { y, .. } => { self.state.scroll(y, bounds, content_bounds); } } } _ => {} } } if self.state.is_scrollbar_grabbed() || is_mouse_over_scrollbar { match event { Event::Mouse(mouse::Event::Input { button: mouse::Button::Left, state, }) => match state { ButtonState::Pressed => { self.state.scroll_to( cursor_position.y / (bounds.y + bounds.height), bounds, content_bounds, ); self.state.scrollbar_grabbed_at = Some(cursor_position); } ButtonState::Released => { self.state.scrollbar_grabbed_at = None; } }, Event::Mouse(mouse::Event::CursorMoved { .. }) => { if let Some(scrollbar_grabbed_at) = self.state.scrollbar_grabbed_at { let ratio = content_bounds.height / bounds.height; let delta = scrollbar_grabbed_at.y - cursor_position.y; self.state.scroll( delta * ratio, bounds, content_bounds, ); self.state.scrollbar_grabbed_at = Some(cursor_position); } } _ => {} } } let cursor_position = if is_mouse_over && !(is_mouse_over_scrollbar || self.state.scrollbar_grabbed_at.is_some()) { Point::new( cursor_position.x, cursor_position.y + self.state.offset(bounds, content_bounds) as f32, ) } else { // TODO: Make `cursor_position` an `Option<Point>` so we can encode // cursor availability. // This will probably happen naturally once we add multi-window // support. 
Point::new(cursor_position.x, -1.0) }; self.content.on_event( event, content, cursor_position, messages, renderer, ) } fn draw( &self, renderer: &mut Renderer, layout: Layout<'_>, cursor_position: Point, ) -> Renderer::Output { let bounds = layout.bounds(); let content_layout = layout.children().next().unwrap(); let content_bounds = content_layout.bounds(); let offset = self.state.offset(bounds, content_bounds); let is_mouse_over = bounds.contains(cursor_position); let is_mouse_over_scrollbar = renderer.is_mouse_over_scrollbar( bounds, content_bounds, cursor_position, ); let content = { let cursor_position = if is_mouse_over && !is_mouse_over_scrollbar { Point::new(cursor_position.x, cursor_position.y + offset as f32) } else { Point::new(cursor_position.x, -1.0) }; self.content.draw(renderer, content_layout, cursor_position) }; self::Renderer::draw( renderer, &self.state, bounds, content_layout.bounds(), is_mouse_over, is_mouse_over_scrollbar, offset, content, ) } fn hash_layout(&self, state: &mut Hasher) { std::any::TypeId::of::<Scrollable<'static, (), ()>>().hash(state); self.height.hash(state); self.max_height.hash(state); self.content.hash_layout(state) } } /// The local state of a [`Scrollable`]. /// /// [`Scrollable`]: struct.Scrollable.html #[derive(Debug, Clone, Copy, Default)] pub struct State { scrollbar_grabbed_at: Option<Point>, offset: f32, } impl State { /// Creates a new [`State`] with the scrollbar located at the top. /// /// [`State`]: struct.State.html pub fn new() -> Self { State::default() } /// Apply a scrolling offset to the current [`State`], given the bounds of /// the [`Scrollable`] and its contents. 
/// /// [`Scrollable`]: struct.Scrollable.html /// [`State`]: struct.State.html pub fn scroll( &mut self, delta_y: f32, bounds: Rectangle, content_bounds: Rectangle, ) { if bounds.height >= content_bounds.height { return; } self.offset = (self.offset - delta_y) .max(0.0) .min((content_bounds.height - bounds.height) as f32); } /// Moves the scroll position to a relative amount, given the bounds of /// the [`Scrollable`] and its contents. /// /// `0` represents scrollbar at the top, while `1` represents scrollbar at /// the bottom. /// /// [`Scrollable`]: struct.Scrollable.html /// [`State`]: struct.State.html pub fn scroll_to( &mut self, percentage: f32, bounds: Rectangle, content_bounds: Rectangle, ) { self.offset = ((content_bounds.height - bounds.height) * percentage).max(0.0); } /// Returns the current scrolling offset of the [`State`], given the bounds /// of the [`Scrollable`] and its contents. /// /// [`Scrollable`]: struct.Scrollable.html /// [`State`]: struct.State.html pub fn offset(&self, bounds: Rectangle, content_bounds: Rectangle) -> u32 { let hidden_content = (content_bounds.height - bounds.height).max(0.0).round() as u32; self.offset.min(hidden_content as f32) as u32 } /// Returns whether the scrollbar is currently grabbed or not. pub fn is_scrollbar_grabbed(&self) -> bool { self.scrollbar_grabbed_at.is_some() } } /// The renderer of a [`Scrollable`]. /// /// Your [renderer] will need to implement this trait before being /// able to use a [`Scrollable`] in your user interface. /// /// [`Scrollable`]: struct.Scrollable.html /// [renderer]: ../../renderer/index.html pub trait Renderer: crate::Renderer + Sized { /// Returns whether the mouse is over the scrollbar given the bounds of /// the [`Scrollable`] and its contents. /// /// [`Scrollable`]: struct.Scrollable.html fn is_mouse_over_scrollbar( &self, bounds: Rectangle, content_bounds: Rectangle, cursor_position: Point, ) -> bool; /// Draws the [`Scrollable`]. 
/// /// It receives: /// - the [`State`] of the [`Scrollable`] /// - the bounds of the [`Scrollable`] /// - whether the mouse is over the [`Scrollable`] or not /// - whether the mouse is over the scrollbar or not /// - the scrolling offset /// - the drawn content /// /// [`Scrollable`]: struct.Scrollable.html /// [`State`]: struct.State.html fn draw( &mut self, scrollable: &State, bounds: Rectangle, content_bounds: Rectangle, is_mouse_over: bool, is_mouse_over_scrollbar: bool, offset: u32, content: Self::Output, ) -> Self::Output; } impl<'a, Message, Renderer> From<Scrollable<'a, Message, Renderer>> for Element<'a, Message, Renderer> where Renderer: 'a + self::Renderer + column::Renderer, Message: 'static, { fn from( scrollable: Scrollable<'a, Message, Renderer>, ) -> Element<'a, Message, Renderer> { Element::new(scrollable) } }
30.490476
80
0.543183
eb135a5ec3710feed8865c5ea77be56476d12afc
2,205
use crate::{ error::Error, uma2::{Uma2Config, Uma2Provider}, Claims, Client, Config, Configurable, Provider, }; use biscuit::CompactJson; use url::Url; pub struct DiscoveredUma2(Uma2Config); impl Provider for DiscoveredUma2 { fn auth_uri(&self) -> &Url { &self.config().authorization_endpoint } fn token_uri(&self) -> &Url { &self.config().token_endpoint } } impl Configurable for DiscoveredUma2 { fn config(&self) -> &Config { &self.0.config } } impl From<Uma2Config> for DiscoveredUma2 { fn from(value: Uma2Config) -> Self { Self(value) } } impl Uma2Provider for DiscoveredUma2 { fn uma2_discovered(&self) -> bool { self.0.resource_registration_endpoint.is_some() } fn resource_registration_uri(&self) -> Option<&Url> { self.0.resource_registration_endpoint.as_ref() } fn permission_uri(&self) -> Option<&Url> { self.0.permission_endpoint.as_ref() } fn uma_policy_uri(&self) -> Option<&Url> { self.0.policy_endpoint.as_ref() } } impl<C: CompactJson + Claims> Client<DiscoveredUma2, C> { /// Constructs a client from an issuer url and client parameters via discovery pub async fn discover_uma2( id: String, secret: String, redirect: Option<String>, issuer: Url, ) -> Result<Self, Error> { let http_client = reqwest::Client::new(); let uma2_config = discover_uma2(&http_client, &issuer).await?; let jwks = crate::discovered::jwks(&http_client, uma2_config.config.jwks_uri.clone()).await?; let provider = uma2_config.into(); Ok(Self::new( provider, id, secret, redirect, http_client, Some(jwks), )) } } pub async fn discover_uma2(client: &reqwest::Client, issuer: &Url) -> Result<Uma2Config, Error> { let mut issuer = issuer.clone(); issuer .path_segments_mut() .map_err(|_| Error::CannotBeABase)? .extend(&[".well-known", "uma2-configuration"]); let resp = client.get(issuer).send().await?; resp.json().await.map_err(Error::from) }
25.639535
97
0.611791
21e8315ea893c36386974963427817cf106965dd
891
//! IRI specs. use core::fmt; // Note that this MUST be private module. // See <https://rust-lang.github.io/api-guidelines/future-proofing.html> about // sealed trait. mod internal; /// A trait for spec types. /// /// This trait is not intended to be implemented by crate users. // Note that all types which implement `Spec` also implement `SpecInternal`. pub trait Spec: internal::Sealed + Copy + fmt::Debug {} /// A type that represents specification of IRI. /// /// About IRI, see [RFC 3987]. /// /// [RFC 3987]: https://tools.ietf.org/html/rfc3987 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum IriSpec {} impl Spec for IriSpec {} /// A type that represents specification of URI. /// /// About URI, see [RFC 3986]. /// /// [RFC 3986]: https://tools.ietf.org/html/rfc3986 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum UriSpec {} impl Spec for UriSpec {}
25.457143
78
0.683502
2ff0163a949cc142cca998c698aafbd3277cfb90
804
// Copyright 2018-2021 Cargill Incorporated // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod handler; mod payloads; pub use handler::{fetch_organization, list_organizations}; pub use payloads::{ AlternateIdSlice, OrganizationListSlice, OrganizationMetadataSlice, OrganizationSlice, };
36.545455
90
0.764925
9c7e3bb544336bb1210769d9d367b9ffdc6b28d0
1,433
use super::output::*; use futures::prelude::*; /// /// Converts a stream of command outputs into characters for display on a terminal /// pub fn to_char_output<InputStream>(input: InputStream, _format_width: u32) -> impl Stream<Item=char>+Send+Unpin where InputStream: Stream<Item=FloCommandOutput>+Send+Unpin { input .map(|output| { use self::FloCommandOutput::*; match output { BeginCommand(_cmd) => stream::iter(vec![]).boxed(), Message(msg) => stream::iter((msg + "\n").chars().collect::<Vec<_>>()).boxed(), BeginOutput(_file) => stream::iter(vec![]).boxed(), Output(_output) => stream::iter(vec![]).boxed(), Error(err) => stream::iter((err + "\n").chars().collect::<Vec<_>>()).boxed(), FinishCommand(_cmd) => stream::iter(vec![]).boxed(), State(_new_state) => stream::iter(vec![]).boxed(), Failure(error) => stream::iter(format!("\nERROR: {}\n", error).chars().collect::<Vec<_>>()).boxed(), StartTask(_task_name) => stream::iter(vec![]).boxed(), TaskProgress(_done, _todo) => stream::iter(vec![]).boxed(), FinishTask => stream::iter(vec![]).boxed() } }) .flatten() }
46.225806
129
0.497558
8f63d5253266f0d7bcf6a1a2df1115305bcadc9c
25,442
// Internal use crate::{ build::{AppSettings as AS, Arg, ArgSettings}, output::Usage, parse::{ errors::{Error, ErrorKind, Result as ClapResult}, ArgMatcher, MatchedArg, ParseResult, Parser, ValueType, }, util::{ChildGraph, Id}, INTERNAL_ERROR_MSG, INVALID_UTF8, }; pub(crate) struct Validator<'help, 'app, 'parser> { p: &'parser mut Parser<'help, 'app>, c: ChildGraph<Id>, } impl<'help, 'app, 'parser> Validator<'help, 'app, 'parser> { pub(crate) fn new(p: &'parser mut Parser<'help, 'app>) -> Self { Validator { p, c: ChildGraph::with_capacity(5), } } pub(crate) fn validate( &mut self, needs_val_of: ParseResult, is_subcmd: bool, matcher: &mut ArgMatcher, ) -> ClapResult<()> { debug!("Validator::validate"); let mut reqs_validated = false; self.p.add_env(matcher)?; self.p.add_defaults(matcher)?; if let ParseResult::Opt(a) = needs_val_of { debug!("Validator::validate: needs_val_of={:?}", a); self.validate_required(matcher)?; let o = &self.p.app[&a]; reqs_validated = true; let should_err = if let Some(v) = matcher.0.args.get(&o.id) { v.vals.is_empty() && !(o.min_vals.is_some() && o.min_vals.unwrap() == 0) } else { true }; if should_err { return Err(Error::empty_value( o, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } } if matcher.is_empty() && matcher.subcommand_name().is_none() && self.p.is_set(AS::ArgRequiredElseHelp) { let message = self.p.write_help_err()?; return Err(Error { message, kind: ErrorKind::DisplayHelpOnMissingArgumentOrSubcommand, info: vec![], }); } self.validate_conflicts(matcher)?; if !(self.p.is_set(AS::SubcommandsNegateReqs) && is_subcmd || reqs_validated) { self.validate_required(matcher)?; self.validate_required_unless(matcher)?; } self.validate_matched_args(matcher)?; Ok(()) } fn validate_arg_values( &self, arg: &Arg, ma: &MatchedArg, matcher: &ArgMatcher, ) -> ClapResult<()> { debug!("Validator::validate_arg_values: arg={:?}", arg.name); for val in &ma.vals { if self.p.is_set(AS::StrictUtf8) && val.to_str().is_none() { 
debug!( "Validator::validate_arg_values: invalid UTF-8 found in val {:?}", val ); return Err(Error::invalid_utf8( Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } if !arg.possible_vals.is_empty() { debug!( "Validator::validate_arg_values: possible_vals={:?}", arg.possible_vals ); let val_str = val.to_string_lossy(); let ok = if arg.is_set(ArgSettings::IgnoreCase) { arg.possible_vals .iter() .any(|pv| pv.eq_ignore_ascii_case(&val_str)) } else { arg.possible_vals.contains(&&*val_str) }; if !ok { let used: Vec<Id> = matcher .arg_names() .filter(|&n| { self.p.app.find(n).map_or(true, |a| { !(a.is_set(ArgSettings::Hidden) || self.p.required.contains(&a.id)) }) }) .cloned() .collect(); return Err(Error::invalid_value( val_str.to_string(), &arg.possible_vals, arg, Usage::new(self.p).create_usage_with_title(&used), self.p.app.color(), )); } } if !arg.is_set(ArgSettings::AllowEmptyValues) && val.is_empty() && matcher.contains(&arg.id) { debug!("Validator::validate_arg_values: illegal empty val found"); return Err(Error::empty_value( arg, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } // FIXME: `(&mut *vtor)(args...)` can be simplified to `vtor(args...)` // once https://github.com/rust-lang/rust/pull/72280 is landed on stable // (about 10 weeks from now) if let Some(ref vtor) = arg.validator { debug!("Validator::validate_arg_values: checking validator..."); let mut vtor = vtor.lock().unwrap(); if let Err(e) = (&mut *vtor)(&*val.to_string_lossy()) { debug!("error"); return Err(Error::value_validation( arg.to_string(), val.to_string_lossy().to_string(), e, self.p.app.color(), )); } else { debug!("good"); } } if let Some(ref vtor) = arg.validator_os { debug!("Validator::validate_arg_values: checking validator_os..."); let mut vtor = vtor.lock().unwrap(); if let Err(e) = (&mut *vtor)(val) { debug!("error"); return Err(Error::value_validation( arg.to_string(), val.to_string_lossy().into(), e, self.p.app.color(), )); } else { 
debug!("good"); } } } Ok(()) } fn build_conflict_err_usage( &self, matcher: &ArgMatcher, retained_arg: &Arg, conflicting_key: &Id, ) -> String { let retained_blacklist = &retained_arg.blacklist; let used_filtered: Vec<Id> = matcher .arg_names() .filter(|key| *key != conflicting_key) .filter(|key| !retained_blacklist.contains(key)) .cloned() .collect(); let required: Vec<Id> = used_filtered .iter() .filter_map(|key| self.p.app.find(key)) .flat_map(|key_arg| key_arg.requires.iter().map(|item| &item.1)) .filter(|item| !used_filtered.contains(item)) .filter(|key| *key != conflicting_key) .filter(|key| !retained_blacklist.contains(key)) .chain(used_filtered.iter()) .cloned() .collect(); Usage::new(self.p).create_usage_with_title(&required) } fn build_conflict_err(&self, name: &Id, matcher: &ArgMatcher) -> ClapResult<()> { debug!("Validator::build_conflict_err: name={:?}", name); if let Some(checked_arg) = self.p.app.find(name) { for k in matcher.arg_names() { if let Some(a) = self.p.app.find(k) { if a.blacklist.contains(&name) { let (_former, former_arg, latter, latter_arg) = { let name_pos = matcher.arg_names().position(|key| key == name); let k_pos = matcher.arg_names().position(|key| key == k); if name_pos < k_pos { (name, checked_arg, k, a) } else { (k, a, name, checked_arg) } }; let usg = self.build_conflict_err_usage(matcher, former_arg, latter); return Err(Error::argument_conflict( latter_arg, Some(former_arg.to_string()), usg, self.p.app.color(), )); } } } } else if let Some(g) = self.p.app.groups.iter().find(|x| x.id == *name) { let usg = Usage::new(self.p).create_usage_with_title(&[]); let args_in_group = self.p.app.unroll_args_in_group(&g.id); let first = matcher .arg_names() .find(|x| args_in_group.contains(x)) .expect(INTERNAL_ERROR_MSG); let c_with = matcher .arg_names() .find(|x| x != &first && args_in_group.contains(x)) .map(|x| self.p.app[x].to_string()); debug!("Validator::build_conflict_err: c_with={:?}:group", c_with); return 
Err(Error::argument_conflict( &self.p.app[first], c_with, usg, self.p.app.color(), )); } panic!(INTERNAL_ERROR_MSG); } fn validate_conflicts(&mut self, matcher: &mut ArgMatcher) -> ClapResult<()> { debug!("Validator::validate_conflicts"); self.validate_exclusive(matcher)?; self.gather_conflicts(matcher); for name in self.c.iter() { debug!("Validator::validate_conflicts:iter:{:?}", name); let mut should_err = false; if let Some(g) = self .p .app .groups .iter() .find(|g| !g.multiple && &g.id == name) { let conf_with_self = self .p .app .unroll_args_in_group(&g.id) .iter() .filter(|&a| matcher.contains(a)) .count() > 1; let conf_with_arg = g.conflicts.iter().any(|x| matcher.contains(x)); let arg_conf_with_gr = matcher .arg_names() .filter_map(|x| self.p.app.find(x)) .any(|x| x.blacklist.iter().any(|c| *c == g.id)); should_err = conf_with_self || conf_with_arg || arg_conf_with_gr; } else if let Some(ma) = matcher.get(name) { debug!( "Validator::validate_conflicts:iter:{:?}: matcher contains it...", name ); should_err = ma.occurs > 0; } if should_err { return self.build_conflict_err(name, matcher); } } Ok(()) } fn validate_exclusive(&mut self, matcher: &mut ArgMatcher) -> ClapResult<()> { debug!("Validator::validate_exclusive"); let args_count = matcher.arg_names().count(); for name in matcher.arg_names() { debug!("Validator::validate_exclusive:iter:{:?}", name); if let Some(arg) = self.p.app.find(name) { if arg.exclusive && args_count > 1 { let c_with: Option<String> = None; return Err(Error::argument_conflict( arg, c_with, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } } } Ok(()) } // Gathers potential conflicts based on used argument, but without considering requirements // and such fn gather_conflicts(&mut self, matcher: &mut ArgMatcher) { debug!("Validator::gather_conflicts"); for name in matcher.arg_names() { debug!("Validator::gather_conflicts:iter: id={:?}", name); // if arg is "present" only because it got default value // it 
doesn't conflict with anything // // TODO: @refactor Do it in a more elegant way if matcher .get(name) .map_or(false, |a| a.ty == ValueType::DefaultValue) { debug!("Validator::gather_conflicts:iter: This is default value, skipping.",); continue; } if let Some(arg) = self.p.app.find(name) { // Since an arg was used, every arg it conflicts with is added to the conflicts for conf in &arg.blacklist { if self.p.app.find(conf).is_some() { if conf != name { self.c.insert(conf.clone()); } } else { // for g_arg in self.p.app.unroll_args_in_group(conf) { // if &g_arg != name { self.c.insert(conf.clone()); // TODO ERROR is here - groups allow one arg but this line disallows all group args // } // } } } // Now we need to know which groups this arg was a member of, to add all other // args in that group to the conflicts, as well as any args those args conflict // with for grp in self.p.app.groups_for_arg(&name) { if let Some(g) = self .p .app .groups .iter() .find(|g| !g.multiple && g.id == grp) { // for g_arg in self.p.app.unroll_args_in_group(&g.name) { // if &g_arg != name { self.c.insert(g.id.clone()); // } // } } } } else if let Some(g) = self .p .app .groups .iter() .find(|g| !g.multiple && g.id == *name) { debug!("Validator::gather_conflicts:iter:{:?}:group", name); self.c.insert(g.id.clone()); } } } fn gather_requirements(&mut self, matcher: &ArgMatcher) { debug!("Validator::gather_requirements"); for name in matcher.arg_names() { debug!("Validator::gather_requirements:iter:{:?}", name); if let Some(arg) = self.p.app.find(name) { for req in self.p.app.unroll_requirements_for_arg(&arg.id, matcher) { self.p.required.insert(req); } } else if let Some(g) = self.p.app.groups.iter().find(|grp| grp.id == *name) { debug!("Validator::gather_conflicts:iter:{:?}:group", name); for r in &g.requires { self.p.required.insert(r.clone()); } } } } fn validate_matched_args(&self, matcher: &mut ArgMatcher) -> ClapResult<()> { debug!("Validator::validate_matched_args"); for (name, ma) in 
matcher.iter() { debug!( "Validator::validate_matched_args:iter:{:?}: vals={:#?}", name, ma.vals ); if let Some(arg) = self.p.app.find(name) { self.validate_arg_num_vals(arg, ma)?; self.validate_arg_values(arg, ma, matcher)?; self.validate_arg_requires(arg, ma, matcher)?; self.validate_arg_num_occurs(arg, ma)?; } else { let grp = self .p .app .groups .iter() .find(|g| g.id == *name) .expect(INTERNAL_ERROR_MSG); if grp.requires.iter().any(|n| !matcher.contains(n)) { return self.missing_required_error(matcher, Some(name)); } } } Ok(()) } fn validate_arg_num_occurs(&self, a: &Arg, ma: &MatchedArg) -> ClapResult<()> { debug!( "Validator::validate_arg_num_occurs: {:?}={}", a.name, ma.occurs ); if ma.occurs > 1 && !a.is_set(ArgSettings::MultipleOccurrences) { // Not the first time, and we don't allow multiples return Err(Error::unexpected_multiple_usage( a, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } Ok(()) } fn validate_arg_num_vals(&self, a: &Arg, ma: &MatchedArg) -> ClapResult<()> { debug!("Validator::validate_arg_num_vals"); if let Some(num) = a.num_vals { debug!("Validator::validate_arg_num_vals: num_vals set...{}", num); let should_err = if a.is_set(ArgSettings::MultipleValues) { ((ma.vals.len() as u64) % num) != 0 } else { num != (ma.vals.len() as u64) }; if should_err { debug!("Validator::validate_arg_num_vals: Sending error WrongNumberOfValues"); return Err(Error::wrong_number_of_values( a, num, if a.is_set(ArgSettings::MultipleValues) { ma.vals.len() % num as usize } else { ma.vals.len() }, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } } if let Some(num) = a.max_vals { debug!("Validator::validate_arg_num_vals: max_vals set...{}", num); if (ma.vals.len() as u64) > num { debug!("Validator::validate_arg_num_vals: Sending error TooManyValues"); return Err(Error::too_many_values( ma.vals .iter() .last() .expect(INTERNAL_ERROR_MSG) .to_str() .expect(INVALID_UTF8) .to_string(), a, 
Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } } let min_vals_zero = if let Some(num) = a.min_vals { debug!("Validator::validate_arg_num_vals: min_vals set: {}", num); if (ma.vals.len() as u64) < num && num != 0 { debug!("Validator::validate_arg_num_vals: Sending error TooFewValues"); return Err(Error::too_few_values( a, num, ma.vals.len(), Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } num == 0 } else { false }; // Issue 665 (https://github.com/kbknapp/clap-rs/issues/665) // Issue 1105 (https://github.com/kbknapp/clap-rs/issues/1105) if a.is_set(ArgSettings::TakesValue) && !min_vals_zero && ma.vals.is_empty() { return Err(Error::empty_value( a, Usage::new(self.p).create_usage_with_title(&[]), self.p.app.color(), )); } Ok(()) } fn validate_arg_requires( &self, a: &Arg<'help>, ma: &MatchedArg, matcher: &ArgMatcher, ) -> ClapResult<()> { debug!("Validator::validate_arg_requires:{:?}", a.name); for (val, name) in &a.requires { if let Some(val) = val { let missing_req = |v| v == val && !matcher.contains(&name); if ma.vals.iter().any(missing_req) { return self.missing_required_error(matcher, Some(&a.id)); } } else if !matcher.contains(&name) { return self.missing_required_error(matcher, Some(&name)); } } Ok(()) } fn validate_required(&mut self, matcher: &ArgMatcher) -> ClapResult<()> { debug!( "Validator::validate_required: required={:?}", self.p.required ); self.gather_requirements(matcher); for arg_or_group in self.p.required.iter().filter(|r| !matcher.contains(r)) { debug!("Validator::validate_required:iter:aog={:?}", arg_or_group); if let Some(arg) = self.p.app.find(&arg_or_group) { debug!("Validator::validate_required:iter: This is an arg"); if !self.is_missing_required_ok(arg, matcher) { return self.missing_required_error(matcher, None); } } else if let Some(group) = self.p.app.groups.iter().find(|g| g.id == *arg_or_group) { debug!("Validator::validate_required:iter: This is a group"); if !self .p .app 
.unroll_args_in_group(&group.id) .iter() .any(|a| matcher.contains(a)) { return self.missing_required_error(matcher, None); } } } // Validate the conditionally required args for a in self.p.app.args.args.iter() { for (other, val) in &a.r_ifs { if let Some(ma) = matcher.get(other) { if ma.contains_val(val) && !matcher.contains(&a.id) { return self.missing_required_error(matcher, Some(&a.id)); } } } } Ok(()) } fn is_missing_required_ok(&self, a: &Arg<'help>, matcher: &ArgMatcher) -> bool { debug!("Validator::is_missing_required_ok: {}", a.name); self.validate_arg_conflicts(a, matcher) || self.p.overridden.contains(&a.id) } fn validate_arg_conflicts(&self, a: &Arg<'help>, matcher: &ArgMatcher) -> bool { debug!("Validator::validate_arg_conflicts: a={:?}", a.name); a.blacklist.iter().any(|conf| { matcher.contains(conf) || self .p .app .groups .iter() .find(|g| g.id == *conf) .map_or(false, |g| g.args.iter().any(|arg| matcher.contains(arg))) }) } fn validate_required_unless(&self, matcher: &ArgMatcher) -> ClapResult<()> { debug!("Validator::validate_required_unless"); for a in self .p .app .args .args .iter() .filter(|a| !a.r_unless.is_empty()) .filter(|a| !matcher.contains(&a.id)) { debug!("Validator::validate_required_unless:iter:{}", a.name); if self.fails_arg_required_unless(a, matcher) { return self.missing_required_error(matcher, Some(&a.id)); } } Ok(()) } // Failing a required unless means, the arg's "unless" wasn't present, and neither were they fn fails_arg_required_unless(&self, a: &Arg<'help>, matcher: &ArgMatcher) -> bool { debug!("Validator::fails_arg_required_unless: a={:?}", a.name); if a.is_set(ArgSettings::RequiredUnlessAll) { debug!("Validator::fails_arg_required_unless:{}:All", a.name); !a.r_unless.iter().all(|id| matcher.contains(id)) } else { debug!("Validator::fails_arg_required_unless:{}:Any", a.name); !a.r_unless.iter().any(|id| matcher.contains(id)) } } // `incl`: an arg to include in the error even if not used fn missing_required_error(&self, 
matcher: &ArgMatcher, incl: Option<&Id>) -> ClapResult<()> { debug!("Validator::missing_required_error; incl={:?}", incl); debug!( "Validator::missing_required_error: reqs={:?}", self.p.required ); let usg = Usage::new(self.p); let req_args = if let Some(x) = incl { usg.get_required_usage_from(&[x.clone()], Some(matcher), true) } else { usg.get_required_usage_from(&[], Some(matcher), true) }; debug!( "Validator::missing_required_error: req_args={:#?}", req_args ); let used: Vec<Id> = matcher .arg_names() .filter(|n| { self.p.app.find(n).map_or(true, |a| { !(a.is_set(ArgSettings::Hidden) || self.p.required.contains(&a.id)) }) }) .cloned() .chain(incl.cloned()) .collect(); Err(Error::missing_required_argument( req_args, usg.create_usage_with_title(&*used), self.p.app.color(), )) } }
38.316265
136
0.459752
21908b71dabdee07d680f1f14bd946cb79d63353
661
use std::str::FromStr; use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; use crate::ActorId; impl<'de> Deserialize<'de> for ActorId { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let s = String::deserialize(deserializer)?; ActorId::from_str(&s) .map_err(|_| de::Error::invalid_value(de::Unexpected::Str(&s), &"A valid ActorID")) } } impl Serialize for ActorId { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(self.to_hex_string().as_str()) } }
25.423077
95
0.617247
e852250eca2a2e69313d2741e480ad3be6a39496
2,228
use std::env; use std::fs::File; use std::io::{self, BufRead}; use std::path::Path; use std::collections::HashSet; #[derive(PartialEq,Eq,Hash,Debug,Copy,Clone)] struct Point { x: u32, y: u32, } fn flip(offset: u32, val: u32) -> u32 { (2 * offset).checked_sub(val).unwrap() } impl Point { fn flip_x(&self, offset: u32) -> Point { if self.x > offset { Point { x: flip(offset, self.x), y: self.y } } else { *self } } fn flip_y(&self, offset: u32) -> Point { if self.y > offset { Point { x: self.x, y: flip(offset, self.y) } } else { *self } } } fn main() { let args: Vec<String> = env::args().collect(); if args.len() != 2 { panic!("Expected exactly 1 argument, got {}", args.len()); } if let Ok(lines) = read_lines(&args[1]) { let mut dots: HashSet<Point> = HashSet::new(); for line in lines { let l = line.unwrap(); if ! l.starts_with("fold along") { if l.len() > 0 { let parts = l.split(',').collect::<Vec<_>>(); let x = parts[0].parse::<u32>().unwrap(); let y = parts[1].parse::<u32>().unwrap(); dots.insert(Point { x: x, y: y }); } } else { let offset: u32; { let offset_str = &l[13..]; offset = offset_str.parse::<u32>().unwrap(); } let mut new_dots: HashSet<Point> = HashSet::new(); match l.chars().take(12).last().unwrap() { 'x' => { for d in dots { new_dots.insert(d.flip_x(offset)); } }, 'y' => { for d in dots { new_dots.insert(d.flip_y(offset)); } }, c => panic!("unexpected axis \"{}\"", c), } dots = new_dots; println!("{}", dots.len()); return; } } } else { panic!("Failed to read file"); } } // The output is wrapped in a Result to allow matching on errors // Returns an Iterator to the Reader of the lines of the file. fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>> where P: AsRef<Path>, { let file = File::open(filename)?; Ok(io::BufReader::new(file).lines()) }
22.734694
75
0.504937
f728d42efb87c62a4d3887ec0c6e12cb425c8bad
794
/* * Copyright (C) 2015 Benjamin Fry <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ //! TCP protocol related components for DNS mod tcp_client_stream; mod tcp_stream; pub use self::tcp_client_stream::TcpClientStream; pub use self::tcp_stream::TcpStream;
33.083333
75
0.74937
eb7050e9dbff8b332dda0064d799c9a9a810bc60
5,969
// Copyright 2021 IOTA Stiftung // SPDX-License-Identifier: Apache-2.0 //! Delete access operations. use crate::{storage::Storage, trees::*}; use bee_common::packable::Packable; use bee_ledger::types::{ snapshot::info::SnapshotInfo, Balance, ConsumedOutput, CreatedOutput, LedgerIndex, OutputDiff, Receipt, TreasuryOutput, Unspent, }; use bee_message::{ address::{Address, Ed25519Address}, milestone::{Milestone, MilestoneIndex}, output::OutputId, payload::indexation::PaddedIndex, Message, MessageId, }; use bee_storage::{access::Delete, backend::StorageBackend}; use bee_tangle::{ metadata::MessageMetadata, solid_entry_point::SolidEntryPoint, unreferenced_message::UnreferencedMessage, }; impl Delete<MessageId, Message> for Storage { fn delete(&self, message_id: &MessageId) -> Result<(), <Self as StorageBackend>::Error> { self.inner.open_tree(TREE_MESSAGE_ID_TO_MESSAGE)?.remove(message_id)?; Ok(()) } } impl Delete<MessageId, MessageMetadata> for Storage { fn delete(&self, message_id: &MessageId) -> Result<(), <Self as StorageBackend>::Error> { self.inner.open_tree(TREE_MESSAGE_ID_TO_METADATA)?.remove(message_id)?; Ok(()) } } impl Delete<(MessageId, MessageId), ()> for Storage { fn delete(&self, (parent, child): &(MessageId, MessageId)) -> Result<(), <Self as StorageBackend>::Error> { let mut key = parent.as_ref().to_vec(); key.extend_from_slice(child.as_ref()); self.inner.open_tree(TREE_MESSAGE_ID_TO_MESSAGE_ID)?.remove(key)?; Ok(()) } } impl Delete<(PaddedIndex, MessageId), ()> for Storage { fn delete(&self, (index, message_id): &(PaddedIndex, MessageId)) -> Result<(), <Self as StorageBackend>::Error> { let mut key = index.as_ref().to_vec(); key.extend_from_slice(message_id.as_ref()); self.inner.open_tree(TREE_INDEX_TO_MESSAGE_ID)?.remove(key)?; Ok(()) } } impl Delete<OutputId, CreatedOutput> for Storage { fn delete(&self, output_id: &OutputId) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_OUTPUT_ID_TO_CREATED_OUTPUT)? 
.remove(output_id.pack_new())?; Ok(()) } } impl Delete<OutputId, ConsumedOutput> for Storage { fn delete(&self, output_id: &OutputId) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_OUTPUT_ID_TO_CONSUMED_OUTPUT)? .remove(output_id.pack_new())?; Ok(()) } } impl Delete<Unspent, ()> for Storage { fn delete(&self, unspent: &Unspent) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_OUTPUT_ID_UNSPENT)? .remove(unspent.pack_new())?; Ok(()) } } impl Delete<(Ed25519Address, OutputId), ()> for Storage { fn delete(&self, (address, output_id): &(Ed25519Address, OutputId)) -> Result<(), <Self as StorageBackend>::Error> { let mut key = address.as_ref().to_vec(); key.extend_from_slice(&output_id.pack_new()); self.inner.open_tree(TREE_ED25519_ADDRESS_TO_OUTPUT_ID)?.remove(key)?; Ok(()) } } impl Delete<(), LedgerIndex> for Storage { fn delete(&self, (): &()) -> Result<(), <Self as StorageBackend>::Error> { self.inner.open_tree(TREE_LEDGER_INDEX)?.remove([0x00u8])?; Ok(()) } } impl Delete<MilestoneIndex, Milestone> for Storage { fn delete(&self, index: &MilestoneIndex) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_MILESTONE_INDEX_TO_MILESTONE)? .remove(index.pack_new())?; Ok(()) } } impl Delete<(), SnapshotInfo> for Storage { fn delete(&self, (): &()) -> Result<(), <Self as StorageBackend>::Error> { self.inner.open_tree(TREE_SNAPSHOT_INFO)?.remove([0x00u8])?; Ok(()) } } impl Delete<SolidEntryPoint, MilestoneIndex> for Storage { fn delete(&self, sep: &SolidEntryPoint) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_SOLID_ENTRY_POINT_TO_MILESTONE_INDEX)? .remove(sep.as_ref())?; Ok(()) } } impl Delete<MilestoneIndex, OutputDiff> for Storage { fn delete(&self, index: &MilestoneIndex) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_MILESTONE_INDEX_TO_OUTPUT_DIFF)? 
.remove(index.pack_new())?; Ok(()) } } impl Delete<Address, Balance> for Storage { fn delete(&self, address: &Address) -> Result<(), <Self as StorageBackend>::Error> { self.inner .open_tree(TREE_ADDRESS_TO_BALANCE)? .remove(address.pack_new())?; Ok(()) } } impl Delete<(MilestoneIndex, UnreferencedMessage), ()> for Storage { fn delete( &self, (index, unreferenced_message): &(MilestoneIndex, UnreferencedMessage), ) -> Result<(), <Self as StorageBackend>::Error> { let mut key = index.pack_new(); key.extend_from_slice(unreferenced_message.as_ref()); self.inner .open_tree(TREE_MILESTONE_INDEX_TO_UNREFERENCED_MESSAGE)? .remove(key)?; Ok(()) } } impl Delete<(MilestoneIndex, Receipt), ()> for Storage { fn delete(&self, (index, receipt): &(MilestoneIndex, Receipt)) -> Result<(), <Self as StorageBackend>::Error> { let mut key = index.pack_new(); key.extend_from_slice(&receipt.pack_new()); self.inner.open_tree(TREE_MILESTONE_INDEX_TO_RECEIPT)?.remove(key)?; Ok(()) } } impl Delete<(bool, TreasuryOutput), ()> for Storage { fn delete(&self, (spent, output): &(bool, TreasuryOutput)) -> Result<(), <Self as StorageBackend>::Error> { let mut key = spent.pack_new(); key.extend_from_slice(&output.pack_new()); self.inner.open_tree(TREE_SPENT_TO_TREASURY_OUTPUT)?.remove(key)?; Ok(()) } }
30.299492
120
0.633439
9bf35bd4284722c62a4f39880f5fbd70d543a042
25,008
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Type substitutions. pub use self::ParamSpace::*; pub use self::RegionSubsts::*; use middle::ty::{self, Ty}; use middle::ty_fold::{self, TypeFoldable, TypeFolder}; use util::ppaux::Repr; use std::fmt; use std::iter::IntoIterator; use std::slice::Iter; use std::vec::{Vec, IntoIter}; use syntax::codemap::{Span, DUMMY_SP}; /////////////////////////////////////////////////////////////////////////// /// A substitution mapping type/region parameters to new values. We /// identify each in-scope parameter by an *index* and a *parameter /// space* (which indices where the parameter is defined; see /// `ParamSpace`). #[derive(Clone, PartialEq, Eq, Hash, Debug)] pub struct Substs<'tcx> { pub types: VecPerParamSpace<Ty<'tcx>>, pub regions: RegionSubsts, } /// Represents the values to use when substituting lifetime parameters. /// If the value is `ErasedRegions`, then this subst is occurring during /// trans, and all region parameters will be replaced with `ty::ReStatic`. 
#[derive(Clone, PartialEq, Eq, Hash, Debug)] pub enum RegionSubsts { ErasedRegions, NonerasedRegions(VecPerParamSpace<ty::Region>) } impl<'tcx> Substs<'tcx> { pub fn new(t: VecPerParamSpace<Ty<'tcx>>, r: VecPerParamSpace<ty::Region>) -> Substs<'tcx> { Substs { types: t, regions: NonerasedRegions(r) } } pub fn new_type(t: Vec<Ty<'tcx>>, r: Vec<ty::Region>) -> Substs<'tcx> { Substs::new(VecPerParamSpace::new(t, Vec::new(), Vec::new()), VecPerParamSpace::new(r, Vec::new(), Vec::new())) } pub fn new_trait(t: Vec<Ty<'tcx>>, r: Vec<ty::Region>, s: Ty<'tcx>) -> Substs<'tcx> { Substs::new(VecPerParamSpace::new(t, vec!(s), Vec::new()), VecPerParamSpace::new(r, Vec::new(), Vec::new())) } pub fn erased(t: VecPerParamSpace<Ty<'tcx>>) -> Substs<'tcx> { Substs { types: t, regions: ErasedRegions } } pub fn empty() -> Substs<'tcx> { Substs { types: VecPerParamSpace::empty(), regions: NonerasedRegions(VecPerParamSpace::empty()), } } pub fn trans_empty() -> Substs<'tcx> { Substs { types: VecPerParamSpace::empty(), regions: ErasedRegions } } pub fn is_noop(&self) -> bool { let regions_is_noop = match self.regions { ErasedRegions => false, // may be used to canonicalize NonerasedRegions(ref regions) => regions.is_empty(), }; regions_is_noop && self.types.is_empty() } pub fn type_for_def(&self, ty_param_def: &ty::TypeParameterDef) -> Ty<'tcx> { *self.types.get(ty_param_def.space, ty_param_def.index as uint) } pub fn has_regions_escaping_depth(&self, depth: u32) -> bool { self.types.iter().any(|&t| ty::type_escapes_depth(t, depth)) || { match self.regions { ErasedRegions => false, NonerasedRegions(ref regions) => regions.iter().any(|r| r.escapes_depth(depth)), } } } pub fn self_ty(&self) -> Option<Ty<'tcx>> { self.types.get_self().map(|&t| t) } pub fn with_self_ty(&self, self_ty: Ty<'tcx>) -> Substs<'tcx> { assert!(self.self_ty().is_none()); let mut s = (*self).clone(); s.types.push(SelfSpace, self_ty); s } pub fn erase_regions(self) -> Substs<'tcx> { let Substs { types, regions: _ } = 
self; Substs { types: types, regions: ErasedRegions } } /// Since ErasedRegions are only to be used in trans, most of the compiler can use this method /// to easily access the set of region substitutions. pub fn regions<'a>(&'a self) -> &'a VecPerParamSpace<ty::Region> { match self.regions { ErasedRegions => panic!("Erased regions only expected in trans"), NonerasedRegions(ref r) => r } } /// Since ErasedRegions are only to be used in trans, most of the compiler can use this method /// to easily access the set of region substitutions. pub fn mut_regions<'a>(&'a mut self) -> &'a mut VecPerParamSpace<ty::Region> { match self.regions { ErasedRegions => panic!("Erased regions only expected in trans"), NonerasedRegions(ref mut r) => r } } pub fn with_method(self, m_types: Vec<Ty<'tcx>>, m_regions: Vec<ty::Region>) -> Substs<'tcx> { let Substs { types, regions } = self; let types = types.with_vec(FnSpace, m_types); let regions = regions.map(m_regions, |r, m_regions| r.with_vec(FnSpace, m_regions)); Substs { types: types, regions: regions } } } impl RegionSubsts { fn map<A, F>(self, a: A, op: F) -> RegionSubsts where F: FnOnce(VecPerParamSpace<ty::Region>, A) -> VecPerParamSpace<ty::Region>, { match self { ErasedRegions => ErasedRegions, NonerasedRegions(r) => NonerasedRegions(op(r, a)) } } pub fn is_erased(&self) -> bool { match *self { ErasedRegions => true, NonerasedRegions(_) => false, } } } /////////////////////////////////////////////////////////////////////////// // ParamSpace #[derive(PartialOrd, Ord, PartialEq, Eq, Copy, Clone, Hash, RustcEncodable, RustcDecodable, Debug)] pub enum ParamSpace { TypeSpace, // Type parameters attached to a type definition, trait, or impl SelfSpace, // Self parameter on a trait FnSpace, // Type parameters attached to a method or fn } impl ParamSpace { pub fn all() -> [ParamSpace; 3] { [TypeSpace, SelfSpace, FnSpace] } pub fn to_uint(self) -> uint { match self { TypeSpace => 0, SelfSpace => 1, FnSpace => 2, } } pub fn from_uint(u: 
uint) -> ParamSpace { match u { 0 => TypeSpace, 1 => SelfSpace, 2 => FnSpace, _ => panic!("Invalid ParamSpace: {}", u) } } } /// Vector of things sorted by param space. Used to keep /// the set of things declared on the type, self, or method /// distinct. #[derive(PartialEq, Eq, Clone, Hash, RustcEncodable, RustcDecodable)] pub struct VecPerParamSpace<T> { // This was originally represented as a tuple with one Vec<T> for // each variant of ParamSpace, and that remains the abstraction // that it provides to its clients. // // Here is how the representation corresponds to the abstraction // i.e. the "abstraction function" AF: // // AF(self) = (self.content[..self.type_limit], // self.content[self.type_limit..self.self_limit], // self.content[self.self_limit..]) type_limit: uint, self_limit: uint, content: Vec<T>, } /// The `split` function converts one `VecPerParamSpace` into this /// `SeparateVecsPerParamSpace` structure. pub struct SeparateVecsPerParamSpace<T> { pub types: Vec<T>, pub selfs: Vec<T>, pub fns: Vec<T>, } impl<T: fmt::Debug> fmt::Debug for VecPerParamSpace<T> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { try!(write!(fmt, "VecPerParamSpace {{")); for space in &ParamSpace::all() { try!(write!(fmt, "{:?}: {:?}, ", *space, self.get_slice(*space))); } try!(write!(fmt, "}}")); Ok(()) } } impl<T> VecPerParamSpace<T> { fn limits(&self, space: ParamSpace) -> (uint, uint) { match space { TypeSpace => (0, self.type_limit), SelfSpace => (self.type_limit, self.self_limit), FnSpace => (self.self_limit, self.content.len()), } } pub fn empty() -> VecPerParamSpace<T> { VecPerParamSpace { type_limit: 0, self_limit: 0, content: Vec::new() } } pub fn params_from_type(types: Vec<T>) -> VecPerParamSpace<T> { VecPerParamSpace::empty().with_vec(TypeSpace, types) } /// `t` is the type space. /// `s` is the self space. /// `a` is the assoc space. /// `f` is the fn space. 
pub fn new(t: Vec<T>, s: Vec<T>, f: Vec<T>) -> VecPerParamSpace<T> { let type_limit = t.len(); let self_limit = type_limit + s.len(); let mut content = t; content.extend(s.into_iter()); content.extend(f.into_iter()); VecPerParamSpace { type_limit: type_limit, self_limit: self_limit, content: content, } } fn new_internal(content: Vec<T>, type_limit: uint, self_limit: uint) -> VecPerParamSpace<T> { VecPerParamSpace { type_limit: type_limit, self_limit: self_limit, content: content, } } /// Appends `value` to the vector associated with `space`. /// /// Unlike the `push` method in `Vec`, this should not be assumed /// to be a cheap operation (even when amortized over many calls). pub fn push(&mut self, space: ParamSpace, value: T) { let (_, limit) = self.limits(space); match space { TypeSpace => { self.type_limit += 1; self.self_limit += 1; } SelfSpace => { self.self_limit += 1; } FnSpace => { } } self.content.insert(limit, value); } /// Appends `values` to the vector associated with `space`. /// /// Unlike the `extend` method in `Vec`, this should not be assumed /// to be a cheap operation (even when amortized over many calls). pub fn extend<I:Iterator<Item=T>>(&mut self, space: ParamSpace, values: I) { // This could be made more efficient, obviously. for item in values { self.push(space, item); } } pub fn pop(&mut self, space: ParamSpace) -> Option<T> { let (start, limit) = self.limits(space); if start == limit { None } else { match space { TypeSpace => { self.type_limit -= 1; self.self_limit -= 1; } SelfSpace => { self.self_limit -= 1; } FnSpace => {} } if self.content.is_empty() { None } else { Some(self.content.remove(limit - 1)) } } } pub fn truncate(&mut self, space: ParamSpace, len: uint) { // FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n). while self.len(space) > len { self.pop(space); } } pub fn replace(&mut self, space: ParamSpace, elems: Vec<T>) { // FIXME (#15435): slow; O(n^2); could enhance vec to make it O(n). 
self.truncate(space, 0); for t in elems { self.push(space, t); } } pub fn get_self<'a>(&'a self) -> Option<&'a T> { let v = self.get_slice(SelfSpace); assert!(v.len() <= 1); if v.len() == 0 { None } else { Some(&v[0]) } } pub fn len(&self, space: ParamSpace) -> uint { self.get_slice(space).len() } pub fn is_empty_in(&self, space: ParamSpace) -> bool { self.len(space) == 0 } pub fn get_slice<'a>(&'a self, space: ParamSpace) -> &'a [T] { let (start, limit) = self.limits(space); &self.content[start.. limit] } pub fn get_mut_slice<'a>(&'a mut self, space: ParamSpace) -> &'a mut [T] { let (start, limit) = self.limits(space); &mut self.content[start.. limit] } pub fn opt_get<'a>(&'a self, space: ParamSpace, index: uint) -> Option<&'a T> { let v = self.get_slice(space); if index < v.len() { Some(&v[index]) } else { None } } pub fn get<'a>(&'a self, space: ParamSpace, index: uint) -> &'a T { &self.get_slice(space)[index] } pub fn iter<'a>(&'a self) -> Iter<'a,T> { self.content.iter() } pub fn into_iter(self) -> IntoIter<T> { self.content.into_iter() } pub fn iter_enumerated<'a>(&'a self) -> EnumeratedItems<'a,T> { EnumeratedItems::new(self) } pub fn as_slice(&self) -> &[T] { &self.content } pub fn into_vec(self) -> Vec<T> { self.content } pub fn all_vecs<P>(&self, mut pred: P) -> bool where P: FnMut(&[T]) -> bool, { let spaces = [TypeSpace, SelfSpace, FnSpace]; spaces.iter().all(|&space| { pred(self.get_slice(space)) }) } pub fn all<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool { self.iter().all(pred) } pub fn any<P>(&self, pred: P) -> bool where P: FnMut(&T) -> bool { self.iter().any(pred) } pub fn is_empty(&self) -> bool { self.all_vecs(|v| v.is_empty()) } pub fn map<U, P>(&self, pred: P) -> VecPerParamSpace<U> where P: FnMut(&T) -> U { let result = self.iter().map(pred).collect(); VecPerParamSpace::new_internal(result, self.type_limit, self.self_limit) } pub fn map_enumerated<U, P>(&self, pred: P) -> VecPerParamSpace<U> where P: FnMut((ParamSpace, uint, &T)) -> 
U, { let result = self.iter_enumerated().map(pred).collect(); VecPerParamSpace::new_internal(result, self.type_limit, self.self_limit) } pub fn map_move<U, F>(self, mut pred: F) -> VecPerParamSpace<U> where F: FnMut(T) -> U, { let SeparateVecsPerParamSpace { types: t, selfs: s, fns: f } = self.split(); VecPerParamSpace::new(t.into_iter().map(|p| pred(p)).collect(), s.into_iter().map(|p| pred(p)).collect(), f.into_iter().map(|p| pred(p)).collect()) } pub fn split(self) -> SeparateVecsPerParamSpace<T> { let VecPerParamSpace { type_limit, self_limit, content } = self; let mut content_iter = content.into_iter(); SeparateVecsPerParamSpace { types: content_iter.by_ref().take(type_limit).collect(), selfs: content_iter.by_ref().take(self_limit - type_limit).collect(), fns: content_iter.collect() } } pub fn with_vec(mut self, space: ParamSpace, vec: Vec<T>) -> VecPerParamSpace<T> { assert!(self.is_empty_in(space)); self.replace(space, vec); self } } #[derive(Clone)] pub struct EnumeratedItems<'a,T:'a> { vec: &'a VecPerParamSpace<T>, space_index: uint, elem_index: uint } impl<'a,T> EnumeratedItems<'a,T> { fn new(v: &'a VecPerParamSpace<T>) -> EnumeratedItems<'a,T> { let mut result = EnumeratedItems { vec: v, space_index: 0, elem_index: 0 }; result.adjust_space(); result } fn adjust_space(&mut self) { let spaces = ParamSpace::all(); while self.space_index < spaces.len() && self.elem_index >= self.vec.len(spaces[self.space_index]) { self.space_index += 1; self.elem_index = 0; } } } impl<'a,T> Iterator for EnumeratedItems<'a,T> { type Item = (ParamSpace, uint, &'a T); fn next(&mut self) -> Option<(ParamSpace, uint, &'a T)> { let spaces = ParamSpace::all(); if self.space_index < spaces.len() { let space = spaces[self.space_index]; let index = self.elem_index; let item = self.vec.get(space, index); self.elem_index += 1; self.adjust_space(); Some((space, index, item)) } else { None } } } impl<T> IntoIterator for VecPerParamSpace<T> { type Item = T; type IntoIter = IntoIter<T>; fn 
into_iter(self) -> IntoIter<T> { self.into_vec().into_iter() } } impl<'a,T> IntoIterator for &'a VecPerParamSpace<T> { type Item = &'a T; type IntoIter = Iter<'a, T>; fn into_iter(self) -> Iter<'a, T> { self.as_slice().into_iter() } } /////////////////////////////////////////////////////////////////////////// // Public trait `Subst` // // Just call `foo.subst(tcx, substs)` to perform a substitution across // `foo`. Or use `foo.subst_spanned(tcx, substs, Some(span))` when // there is more information available (for better errors). pub trait Subst<'tcx> : Sized { fn subst(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>) -> Self { self.subst_spanned(tcx, substs, None) } fn subst_spanned(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>, span: Option<Span>) -> Self; } impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { fn subst_spanned(&self, tcx: &ty::ctxt<'tcx>, substs: &Substs<'tcx>, span: Option<Span>) -> T { let mut folder = SubstFolder { tcx: tcx, substs: substs, span: span, root_ty: None, ty_stack_depth: 0, region_binders_passed: 0 }; (*self).fold_with(&mut folder) } } /////////////////////////////////////////////////////////////////////////// // The actual substitution engine itself is a type folder. struct SubstFolder<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, substs: &'a Substs<'tcx>, // The location for which the substitution is performed, if available. span: Option<Span>, // The root type that is being substituted, if available. 
root_ty: Option<Ty<'tcx>>, // Depth of type stack ty_stack_depth: uint, // Number of region binders we have passed through while doing the substitution region_binders_passed: u32, } impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { fn tcx(&self) -> &ty::ctxt<'tcx> { self.tcx } fn enter_region_binder(&mut self) { self.region_binders_passed += 1; } fn exit_region_binder(&mut self) { self.region_binders_passed -= 1; } fn fold_region(&mut self, r: ty::Region) -> ty::Region { // Note: This routine only handles regions that are bound on // type declarations and other outer declarations, not those // bound in *fn types*. Region substitution of the bound // regions that appear in a function signature is done using // the specialized routine `ty::replace_late_regions()`. match r { ty::ReEarlyBound(_, space, i, region_name) => { match self.substs.regions { ErasedRegions => ty::ReStatic, NonerasedRegions(ref regions) => match regions.opt_get(space, i as uint) { Some(&r) => { self.shift_region_through_binders(r) } None => { let span = self.span.unwrap_or(DUMMY_SP); self.tcx().sess.span_bug( span, &format!("Type parameter out of range \ when substituting in region {} (root type={}) \ (space={:?}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), space, i)[]); } } } } _ => r } } fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { if !ty::type_needs_subst(t) { return t; } // track the root type we were asked to substitute let depth = self.ty_stack_depth; if depth == 0 { self.root_ty = Some(t); } self.ty_stack_depth += 1; let t1 = match t.sty { ty::ty_param(p) => { self.ty_for_param(p, t) } _ => { ty_fold::super_fold_ty(self, t) } }; assert_eq!(depth + 1, self.ty_stack_depth); self.ty_stack_depth -= 1; if depth == 0 { self.root_ty = None; } return t1; } } impl<'a,'tcx> SubstFolder<'a,'tcx> { fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> { // Look up the type in the substitutions. It really should be in there. 
let opt_ty = self.substs.types.opt_get(p.space, p.idx as uint); let ty = match opt_ty { Some(t) => *t, None => { let span = self.span.unwrap_or(DUMMY_SP); self.tcx().sess.span_bug( span, &format!("Type parameter `{}` ({}/{:?}/{}) out of range \ when substituting (root type={}) substs={}", p.repr(self.tcx()), source_ty.repr(self.tcx()), p.space, p.idx, self.root_ty.repr(self.tcx()), self.substs.repr(self.tcx()))[]); } }; self.shift_regions_through_binders(ty) } /// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs /// when we are substituting a type with escaping regions into a context where we have passed /// through region binders. That's quite a mouthful. Let's see an example: /// /// ``` /// type Func<A> = fn(A); /// type MetaFunc = for<'a> fn(Func<&'a int>) /// ``` /// /// The type `MetaFunc`, when fully expanded, will be /// /// for<'a> fn(fn(&'a int)) /// ^~ ^~ ^~~ /// | | | /// | | DebruijnIndex of 2 /// Binders /// /// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the /// inner one. Therefore, that appearance will have a DebruijnIndex of 2, because we must skip /// over the inner binder (remember that we count Debruijn indices from 1). However, in the /// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a /// debruijn index of 1. It's only during the substitution that we can see we must increase the /// depth by 1 to account for the binder that we passed through. /// /// As a second example, consider this twist: /// /// ``` /// type FuncTuple<A> = (A,fn(A)); /// type MetaFuncTuple = for<'a> fn(FuncTuple<&'a int>) /// ``` /// /// Here the final type will be: /// /// for<'a> fn((&'a int, fn(&'a int))) /// ^~~ ^~~ /// | | /// DebruijnIndex of 1 | /// DebruijnIndex of 2 /// /// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the /// first case we do not increase the Debruijn index and in the second case we do. 
The reason /// is that only in the second case have we passed through a fn binder. fn shift_regions_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> { debug!("shift_regions(ty={:?}, region_binders_passed={:?}, type_has_escaping_regions={:?})", ty.repr(self.tcx()), self.region_binders_passed, ty::type_has_escaping_regions(ty)); if self.region_binders_passed == 0 || !ty::type_has_escaping_regions(ty) { return ty; } let result = ty_fold::shift_regions(self.tcx(), self.region_binders_passed, &ty); debug!("shift_regions: shifted result = {:?}", result.repr(self.tcx())); result } fn shift_region_through_binders(&self, region: ty::Region) -> ty::Region { ty_fold::shift_region(region, self.region_binders_passed) } }
32.64752
100
0.526991
eb2159d57bfa7458e027d9bf5f23a9dd0a465163
5,526
use std::hash::{BuildHasher, Hash}; use crate::{BloomFilter, DefaultHashBuilder}; // growth factor `s` const GROWTH_FACTOR: usize = 2; // tightening ratio `r` const TIGHTENING_RATIO: f64 = 0.85; /// A variant of a Bloom filter that can adapt to to the number of elements inserted into the /// filter, targeting a given false positive probability. /// /// This is effectively done by layering bloom filters with larger capacities. /// /// See "[Scalable Bloom Filters]" (2007) by Almeida, Paulo Sérgio, et al. for a formal /// description. /// /// [Scalable Bloom Filters]: https://dl.acm.org/citation.cfm?id=1224501 /// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// /// // false positive probability /// const P: f64 = 0.0001; /// // expected number of inserted values /// const N: usize = 64; /// /// let mut filter = ScalableBloomFilter::new(P, N); /// /// filter.insert("a"); /// filter.insert("b"); /// /// assert!(filter.contains("a")); /// assert!(filter.contains("b")); /// assert!(!filter.contains("c")); /// ``` pub struct ScalableBloomFilter<S = DefaultHashBuilder> { // total number of elements inserted n: usize, // total capacity of all filters total_capacity: usize, // a list of all filters in order they were created filters: Vec<BloomFilter<S>>, // the (tightened) false positive probably of the last created filter last_fpp: f64, } impl ScalableBloomFilter<DefaultHashBuilder> { /// Creates a new scalable Bloom filter that targets a false positive probability `p` ([0.0, /// 1.0]) with an initial expected number of inserted elements `n`. 
/// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// let _filter = ScalableBloomFilter::new(0.0001, 64); /// ``` pub fn new(p: f64, n: usize) -> Self { Self::with_hashers(p, n, DefaultHashBuilder::new(), DefaultHashBuilder::new()) } } impl<S> ScalableBloomFilter<S> where S: BuildHasher + Default, { /// Creates a new scalable Bloom filter that targets a false positive probability `p` ([0.0, /// 1.0]) with an initial expected number of inserted elements `n`, using `builder_1` and /// `builder_2` to hash the data in the initial filter. /// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// let _filter = ScalableBloomFilter::new(0.0001, 64); /// ``` pub fn with_hashers(p: f64, n: usize, builder_1: S, builder_2: S) -> Self { let initial_filter = BloomFilter::from_fpp_with_hashers(p, n, builder_1, builder_2); Self { n: 0, total_capacity: n, filters: vec![initial_filter], last_fpp: p, } } /// Tests all filters for whether an element may be in the filter or definitely not in the filter. /// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// /// let mut filter = ScalableBloomFilter::new(0.0001, 64); /// /// filter.insert("a"); /// /// assert!(filter.contains("a")); /// assert!(!filter.contains("b")); /// ``` pub fn contains<H: Hash + ?Sized>(&self, key: &H) -> bool { self.filters.iter().any(|f| f.contains(key)) } /// Adds a value to the bloom filter. /// /// Returns whether the value is already (maybe) in the _last_ filter or not. Duplicate values /// may be present in the scalable Bloom filter but not in the last filter. When a duplicate /// value is in the last filter, it does not affect the load factor. 
/// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// /// let mut filter = ScalableBloomFilter::new(0.0001, 64); /// assert!(filter.insert("a")); /// assert!(filter.insert("b")); /// assert!(!filter.insert("b")); /// ``` pub fn insert<H: Hash + ?Sized>(&mut self, key: &H) -> bool { if self.n >= self.total_capacity { self.grow(); } let i = self.filters.len() - 1; let filter = &mut self.filters[i]; let inserted = filter.insert(key); if inserted { self.n += 1; } inserted } /// Adds a value to a Bloom filter if it is not already present. /// /// When there are > 1 filters, this is only slightly faster than calling both `contains` and /// `insert`, as the last filter does not have to be checked twice. /// /// Returns whether the value is (maybe) in the filter or not. /// /// # Examples /// /// ``` /// use bbloom::ScalableBloomFilter; /// /// let mut filter = ScalableBloomFilter::new(0.0001, 64); /// /// assert!(!filter.contains_or_insert("a")); /// assert!(!filter.contains_or_insert("b")); /// assert!(filter.contains_or_insert("b")); /// ``` pub fn contains_or_insert<H: Hash + ?Sized>(&mut self, key: &H) -> bool { let n = if self.filters.len() == 1 { 1 } else { self.filters.len() - 1 }; if self.filters.iter().take(n).any(|f| f.contains(key)) { true } else { !self.insert(key) } } fn grow(&mut self) { let p = self.last_fpp * TIGHTENING_RATIO; let n = self.total_capacity * GROWTH_FACTOR; let filter = BloomFilter::from_fpp_with_hashers(p, n, S::default(), S::default()); self.filters.push(filter); self.total_capacity += n; self.last_fpp = p; } }
30.196721
102
0.586138
216bf25b500ac35e7d6c47dfe61d5d403803ef90
873
#[doc = "Writer for register CTR_NONCE1_W3"] pub type W = crate::W<u32, super::CTR_NONCE1_W3>; #[doc = "Register CTR_NONCE1_W3 `reset()`'s with value 0"] impl crate::ResetValue for super::CTR_NONCE1_W3 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Write proxy for field `NONCE13`"] pub struct NONCE13_W<'a> { w: &'a mut W, } impl<'a> NONCE13_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff); self.w } } impl W { #[doc = "Bits 0:31 - Nonce1 from software for CTR, for region1. Nonce1={Nonce13,Nonce12,Nonce11,Nonce10}"] #[inline(always)] pub fn nonce13(&mut self) -> NONCE13_W { NONCE13_W { w: self } } }
29.1
110
0.600229
03c0bd08723b4c8a4e06e9a45c1ebd5177701ba8
4,986
use crate::prelude::BACKEND_INTERNAL; use crate::Result; use amethyst::{ assets::Handle, assets::{AssetStorage, Loader}, ecs::prelude::*, renderer::{ImageFormat, SpriteSheet, Texture}, utils::application_root_dir, }; pub struct Font { pub tile_size: (u32, u32), pub filename: String, pub ss: Option<Handle<SpriteSheet>>, } impl Font { pub fn load<S: ToString>(filename: S, tile_size: (u32, u32)) -> Font { Font { tile_size, filename: filename.to_string(), ss: None, } } pub fn setup_gl_texture(&mut self, _gl: &crate::hal::BTermPlatform) -> Result<()> { Ok(()) } pub fn bind_texture(&self, _gl: &crate::hal::BTermPlatform) {} } pub fn initialize_fonts(world: &mut World) -> Result<()> { use crate::embedding; use amethyst::renderer::rendy::texture::TextureBuilder; use amethyst::renderer::types::TextureData; use amethyst::renderer::Format; use amethyst::renderer::Sprite; let loader = world.read_resource::<Loader>(); let texture_storage = world.read_resource::<AssetStorage<Texture>>(); let ss_storage = world.read_resource::<AssetStorage<SpriteSheet>>(); let app_root = application_root_dir().expect("Fail"); use amethyst::renderer::rendy::*; use image::GenericImageView; for font in BACKEND_INTERNAL.lock().unwrap().fonts.iter_mut() { let resource = embedding::EMBED .lock()? 
.get_resource(font.filename.to_string()); let handle; if let Some(data) = resource { let png = image::load_from_memory(data).expect("Failed to load texture from memory"); // This sets black pixels to be transparent const MIN_VAL: u8 = 10; let mut raw_pixels = png.raw_pixels().clone(); for i in 0..raw_pixels.len() / 4 { if raw_pixels[(i * 4)] < MIN_VAL && raw_pixels[(i * 4) + 1] < MIN_VAL && raw_pixels[(i * 4) + 2] < MIN_VAL { raw_pixels[(i * 4) + 3] = 0; // Make it transparent } } let texture_builder = TextureBuilder::new() .with_data_width(png.width()) .with_data_height(png.height()) .with_kind(hal::image::Kind::D2(png.width(), png.height(), 1, 1)) .with_view_kind(hal::image::ViewKind::D2) //.with_sampler_info(hal::image::SamplerInfo::new(hal::image::Filter::Nearest, hal::image::WrapMode::Clamp)) .with_sampler_info(hal::image::SamplerInfo { min_filter: hal::image::Filter::Nearest, mag_filter: hal::image::Filter::Nearest, mip_filter: hal::image::Filter::Nearest, wrap_mode: ( hal::image::WrapMode::Clamp, hal::image::WrapMode::Clamp, hal::image::WrapMode::Clamp, ), lod_bias: 0.0.into(), lod_range: std::ops::Range { start: 0.0.into(), end: 1000.0.into(), }, comparison: None, border: hal::image::PackedColor(0), anisotropic: hal::image::Anisotropic::Off, normalized: true, }) .with_raw_data(raw_pixels, Format::Rgba8Srgb); handle = loader.load_from_data(TextureData(texture_builder), (), &texture_storage); } else { let filename = app_root.join(font.filename.clone()); handle = loader.load( filename .to_str() .ok_or("Couldn't convert filename to string")?, ImageFormat::default(), (), &texture_storage, ); } // Make a font-specific sprite sheet let offsets = [ 0.0 - (font.tile_size.0 as f32 / 2.0), 0.0 - (font.tile_size.1 as f32 / 2.0), ]; let mut sprites = Vec::with_capacity(256); for y in 0..16 { for x in 0..16 { let sprite = Sprite::from_pixel_values( font.tile_size.0 * 16, font.tile_size.1 * 16, font.tile_size.0, font.tile_size.1, x * font.tile_size.0, y * 
font.tile_size.1, offsets, false, false, ); sprites.push(sprite); } } let ss_handle = loader.load_from_data( SpriteSheet { texture: handle.clone(), sprites, }, (), &ss_storage, ); font.ss = Some(ss_handle); } Ok(()) }
34.867133
124
0.497994
e6ac54e6cc8e87efc6b0a77c60857801f6fb0533
4,589
use std::env; use std::fs::{self, DirEntry}; use std::io; use std::path::PathBuf; use std::process; use std::ffi::{OsStr, OsString}; use std::iter::once; use std::mem; use std::os::windows::ffi::OsStrExt; use std::os::windows::prelude::*; use std::ptr::null_mut; extern crate winapi; use winapi::shared::minwindef; use winapi::shared::ntdef::NULL; use winapi::shared::winerror; use winapi::um::accctrl; use winapi::um::aclapi; use winapi::um::fileapi; use winapi::um::handleapi; use winapi::um::winbase; use winapi::um::winnt; mod config; mod format; const NORMAL: i32 = 0; const NON_FATAL: i32 = 1; const FATAL: i32 = 2; fn main() { let mut exit_code = NORMAL; let mut config = config::LsConfig::new(); if let Err(e) = config.parse(env::args().collect()) { eprintln!("{}", e); process::exit(FATAL); } for path in config.paths { let entries = match dir(&path) { Ok(entries) => entries, Err(e) => { eprintln!("{}", e); exit_code = NON_FATAL; continue; } }; if config.one_per_line { print!("{}", format::one_per_line(entries)); } else { print!("{}", format::long_list(entries)); } process::exit(exit_code); } } fn dir(path: &PathBuf) -> io::Result<Vec<DirEntry>> { let mut entries: Vec<DirEntry> = Vec::new(); for item in fs::read_dir(path)? 
{ if let Ok(entry) = item { entries.push(entry); } else { eprintln!("{}", item.unwrap_err()); } } Ok(entries) } fn owner(entry: &DirEntry) -> String { let wide_path: Vec<u16> = OsStr::new(&entry.path()) .encode_wide() .chain(once(0)) .collect(); let h_file = unsafe { fileapi::CreateFileW( wide_path.as_ptr(), winnt::GENERIC_READ, winnt::FILE_SHARE_READ, null_mut(), fileapi::OPEN_EXISTING, winnt::FILE_ATTRIBUTE_NORMAL | winbase::FILE_FLAG_BACKUP_SEMANTICS, NULL, ) }; if h_file == handleapi::INVALID_HANDLE_VALUE { return format!("{:16} {:16}", "unknown", "unknown"); } let mut p_sid_owner: winnt::PSID = null_mut(); let mut p_descriptor: winnt::PSECURITY_DESCRIPTOR = null_mut(); let dw_code = unsafe { aclapi::GetSecurityInfo( h_file, accctrl::SE_FILE_OBJECT, winnt::OWNER_SECURITY_INFORMATION, &mut p_sid_owner, null_mut(), null_mut(), null_mut(), &mut p_descriptor, ) }; if dw_code != winerror::ERROR_SUCCESS { return format!("{:16} {:16}", "unknown", "unknown"); } let mut account_size: minwindef::DWORD = 50; let mut domain_size: minwindef::DWORD = 50; let mut account_name: Vec<u16> = unsafe { vec![mem::zeroed(); 50] }; let mut domain_name: Vec<u16> = unsafe { vec![mem::zeroed(); 50] }; let mut pe_use: winnt::SID_NAME_USE = winnt::SidTypeUnknown; let b_success = unsafe { winbase::LookupAccountSidW( null_mut(), p_sid_owner, account_name.as_mut_ptr(), &mut account_size, domain_name.as_mut_ptr(), &mut domain_size, &mut pe_use, ) }; if b_success == 0 { return format!("{:16} {:16}", "unknown", "unknown"); } let account_size = account_name.iter().take_while(|&&c| c != 0).count(); let domain_size = domain_name.iter().take_while(|&&c| c != 0).count(); let account_name = OsString::from_wide(&account_name[..account_size]); let domain_name = OsString::from_wide(&domain_name[..domain_size]); format!( "{:16} {:16}", domain_name.to_string_lossy(), account_name.to_string_lossy() ) } #[cfg(test)] mod tests { use super::*; use coreutils::test_utils; use std::path::PathBuf; #[test] fn 
normal_dir() { let path = PathBuf::from(test_utils::get_dir()); let entries: Vec<DirEntry> = crate::dir(&path).unwrap(); let names: Vec<String> = entries .iter() .map(|x| x.path().file_name().unwrap().to_string_lossy().into_owned()) .collect(); let expected: Vec<&str> = vec![ "ascii.txt", "hello.bin", "hidden", "link", "mklink", "normal", "notouch.txt", ]; assert_eq!(names, expected) } }
26.073864
82
0.54696
623d7e5f60d5b7594144c7713e1219e7d2fb2970
2,032
// Copyright 2020 ChainSafe Systems // SPDX-License-Identifier: Apache-2.0, MIT use super::TxnID; use address::Address; use cid::Cid; use clock::ChainEpoch; use encoding::{tuple::*, Cbor}; use num_bigint::bigint_ser; use vm::TokenAmount; /// Multisig actor state #[derive(Serialize_tuple, Deserialize_tuple, Clone)] pub struct State { pub signers: Vec<Address>, pub num_approvals_threshold: usize, pub next_tx_id: TxnID, // Linear unlock #[serde(with = "bigint_ser")] pub initial_balance: TokenAmount, pub start_epoch: ChainEpoch, pub unlock_duration: ChainEpoch, pub pending_txs: Cid, } impl State { /// Returns amount locked in multisig contract pub fn amount_locked(&self, elapsed_epoch: ChainEpoch) -> TokenAmount { if elapsed_epoch >= self.unlock_duration { return TokenAmount::from(0); } let unit_locked: TokenAmount = self.initial_balance.clone() / self.unlock_duration; unit_locked * (self.unlock_duration - elapsed_epoch) } pub(crate) fn check_available( &self, balance: TokenAmount, amount_to_spend: &TokenAmount, curr_epoch: ChainEpoch, ) -> Result<(), String> { if amount_to_spend < &0.into() { return Err(format!( "amount to spend {} less than zero", amount_to_spend )); } if &balance < amount_to_spend { return Err(format!( "current balance {} less than amount to spend {}", balance, amount_to_spend )); } let remaining_balance = balance - amount_to_spend; let amount_locked = self.amount_locked(curr_epoch - self.start_epoch); if remaining_balance < amount_locked { return Err(format!( "actor balance if spent {} would be less than required locked amount {}", remaining_balance, amount_locked )); } Ok(()) } } impl Cbor for State {}
29.028571
91
0.613189
394d05132611ebf4503e10d2f76abca5175a3057
3,173
#[doc = "Register `SHIFTBUFBIS[%s]` reader"] pub struct R(crate::R<SHIFTBUFBIS_SPEC>); impl core::ops::Deref for R { type Target = crate::R<SHIFTBUFBIS_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl From<crate::R<SHIFTBUFBIS_SPEC>> for R { #[inline(always)] fn from(reader: crate::R<SHIFTBUFBIS_SPEC>) -> Self { R(reader) } } #[doc = "Register `SHIFTBUFBIS[%s]` writer"] pub struct W(crate::W<SHIFTBUFBIS_SPEC>); impl core::ops::Deref for W { type Target = crate::W<SHIFTBUFBIS_SPEC>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } impl core::ops::DerefMut for W { #[inline(always)] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.0 } } impl From<crate::W<SHIFTBUFBIS_SPEC>> for W { #[inline(always)] fn from(writer: crate::W<SHIFTBUFBIS_SPEC>) -> Self { W(writer) } } #[doc = "Field `SHIFTBUFBIS` reader - Shift Buffer"] pub struct SHIFTBUFBIS_R(crate::FieldReader<u32, u32>); impl SHIFTBUFBIS_R { #[inline(always)] pub(crate) fn new(bits: u32) -> Self { SHIFTBUFBIS_R(crate::FieldReader::new(bits)) } } impl core::ops::Deref for SHIFTBUFBIS_R { type Target = crate::FieldReader<u32, u32>; #[inline(always)] fn deref(&self) -> &Self::Target { &self.0 } } #[doc = "Field `SHIFTBUFBIS` writer - Shift Buffer"] pub struct SHIFTBUFBIS_W<'a> { w: &'a mut W, } impl<'a> SHIFTBUFBIS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u32) -> &'a mut W { self.w.bits = value; self.w } } impl R { #[doc = "Bits 0:31 - Shift Buffer"] #[inline(always)] pub fn shiftbufbis(&self) -> SHIFTBUFBIS_R { SHIFTBUFBIS_R::new(self.bits) } } impl W { #[doc = "Bits 0:31 - Shift Buffer"] #[inline(always)] pub fn shiftbufbis(&mut self) -> SHIFTBUFBIS_W { SHIFTBUFBIS_W { w: self } } #[doc = "Writes raw bits to the register."] #[inline(always)] pub unsafe fn bits(&mut self, bits: u32) -> &mut Self { self.0.bits(bits); self } } #[doc = "Shifter Buffer N Bit Swapped Register\n\nThis register you can 
[`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [shiftbufbis](index.html) module"] pub struct SHIFTBUFBIS_SPEC; impl crate::RegisterSpec for SHIFTBUFBIS_SPEC { type Ux = u32; } #[doc = "`read()` method returns [shiftbufbis::R](R) reader structure"] impl crate::Readable for SHIFTBUFBIS_SPEC { type Reader = R; } #[doc = "`write(|w| ..)` method takes [shiftbufbis::W](W) writer structure"] impl crate::Writable for SHIFTBUFBIS_SPEC { type Writer = W; } #[doc = "`reset()` method sets SHIFTBUFBIS[%s] to value 0"] impl crate::Resettable for SHIFTBUFBIS_SPEC { #[inline(always)] fn reset_value() -> Self::Ux { 0 } }
30.219048
429
0.618973
339a11eb2347ccad5235e9dd9b535671459135fe
2,721
extern crate handlebars; #[macro_use] extern crate serde_json; use handlebars::{Handlebars, Helper, HelperDef, RenderContext, RenderError}; use serde_json::Value; struct GtHelper; impl HelperDef for GtHelper { fn call_inner( &self, h: &Helper, _: &Handlebars, _: &mut RenderContext, ) -> Result<Option<Value>, RenderError> { let p1 = try!( h.param(0,) .and_then(|v| v.value().as_i64(),) .ok_or(RenderError::new( "Param 0 with i64 type is required for gt helper." ),) ); let p2 = try!( h.param(1,) .and_then(|v| v.value().as_i64(),) .ok_or(RenderError::new( "Param 1 with i64 type is required for gt helper." ),) ); Ok(Some(Value::Bool(p1 > p2))) } } struct NotHelper; impl HelperDef for NotHelper { fn call_inner( &self, h: &Helper, _: &Handlebars, _: &mut RenderContext, ) -> Result<Option<Value>, RenderError> { let p1 = try!( h.param(0,) .and_then(|v| v.value().as_bool(),) .ok_or(RenderError::new( "Param 0 with bool type is required for not helper." ),) ); Ok(Some(Value::Bool(!p1))) } } #[test] fn test_subexpression() { let mut hbs = Handlebars::new(); hbs.register_helper("gt", Box::new(GtHelper)); hbs.register_helper("not", Box::new(NotHelper)); let data = json!({"a": 1, "b": 0, "c": 2}); assert_eq!( hbs.render_template("{{#if (gt a b)}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Success" ); assert_eq!( hbs.render_template("{{#if (gt a c)}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Failed" ); assert_eq!( hbs.render_template("{{#if (not (gt a c))}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Success" ); assert_eq!( hbs.render_template("{{#if (not (gt a b))}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Failed" ); // no argument provided for not assert_eq!( hbs.render_template("{{#if (not)}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Failed" ); // json literal assert_eq!( hbs.render_template("{{#if (not true)}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Failed" ); assert_eq!( hbs.render_template("{{#if (not 
false)}}Success{{else}}Failed{{/if}}", &data) .unwrap(), "Success" ); }
24.963303
88
0.493936
e92634714789495b383426a3d58b654235544f9d
12,931
use rustc_middle::mir; use rustc_middle::ty::layout::HasTyCtxt; use rustc_middle::ty::{self, Ty}; use std::borrow::{Borrow, Cow}; use std::collections::hash_map::Entry; use std::hash::Hash; use rustc_data_structures::fx::FxHashMap; use rustc_ast::ast::Mutability; use rustc_hir::def_id::DefId; use rustc_middle::mir::AssertMessage; use rustc_span::symbol::Symbol; use crate::interpret::{ self, AllocId, Allocation, GlobalId, ImmTy, InterpCx, InterpResult, Memory, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, }; use super::error::*; impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter> { /// Evaluate a const function where all arguments (if any) are zero-sized types. /// The evaluation is memoized thanks to the query system. /// /// Returns `true` if the call has been evaluated. fn try_eval_const_fn_call( &mut self, instance: ty::Instance<'tcx>, ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, args: &[OpTy<'tcx>], ) -> InterpResult<'tcx, bool> { trace!("try_eval_const_fn_call: {:?}", instance); // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot // perform this optimization on items tagged with it. if instance.def.requires_caller_location(self.tcx()) { return Ok(false); } // For the moment we only do this for functions which take no arguments // (or all arguments are ZSTs) so that we don't memoize too much. if args.iter().any(|a| !a.layout.is_zst()) { return Ok(false); } let dest = match ret { Some((dest, _)) => dest, // Don't memoize diverging function calls. None => return Ok(false), }; let gid = GlobalId { instance, promoted: None }; let place = self.const_eval_raw(gid)?; self.copy_op(place.into(), dest)?; self.return_to_block(ret.map(|r| r.1))?; self.dump_place(*dest); Ok(true) } /// "Intercept" a function call to a panic-related function /// because we have something special to do for it. /// If this returns successfully (`Ok`), the function should just be evaluated normally. 
fn hook_panic_fn( &mut self, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], ) -> InterpResult<'tcx> { let def_id = instance.def_id(); if Some(def_id) == self.tcx.lang_items().panic_fn() || Some(def_id) == self.tcx.lang_items().begin_panic_fn() { // &'static str assert!(args.len() == 1); let msg_place = self.deref_operand(args[0])?; let msg = Symbol::intern(self.read_str(msg_place)?); let span = self.find_closest_untracked_caller_location(); let (file, line, col) = self.location_triple_for_span(span); Err(ConstEvalErrKind::Panic { msg, file, line, col }.into()) } else { Ok(()) } } } /// Extra machine state for CTFE, and the Machine instance pub struct CompileTimeInterpreter { /// For now, the number of terminators that can be evaluated before we throw a resource /// exhuastion error. /// /// Setting this to `0` disables the limit and allows the interpreter to run forever. pub steps_remaining: usize, } #[derive(Copy, Clone, Debug)] pub struct MemoryExtra { /// Whether this machine may read from statics pub(super) can_access_statics: bool, } impl CompileTimeInterpreter { pub(super) fn new(const_eval_limit: usize) -> Self { CompileTimeInterpreter { steps_remaining: const_eval_limit } } } impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> { #[inline(always)] fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool where K: Borrow<Q>, { FxHashMap::contains_key(self, k) } #[inline(always)] fn insert(&mut self, k: K, v: V) -> Option<V> { FxHashMap::insert(self, k, v) } #[inline(always)] fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V> where K: Borrow<Q>, { FxHashMap::remove(self, k) } #[inline(always)] fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> { self.iter().filter_map(move |(k, v)| f(k, &*v)).collect() } #[inline(always)] fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> { match self.get(&k) { Some(v) => Ok(v), None => { vacant()?; bug!("The CTFE machine 
shouldn't ever need to extend the alloc_map when reading") } } } #[inline(always)] fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> { match self.entry(k) { Entry::Occupied(e) => Ok(e.into_mut()), Entry::Vacant(e) => { let v = vacant()?; Ok(e.insert(v)) } } } } crate type CompileTimeEvalContext<'mir, 'tcx> = InterpCx<'mir, 'tcx, CompileTimeInterpreter>; impl interpret::MayLeak for ! { #[inline(always)] fn may_leak(self) -> bool { // `self` is uninhabited self } } impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter { type MemoryKind = !; type PointerTag = (); type ExtraFnVal = !; type FrameExtra = (); type MemoryExtra = MemoryExtra; type AllocExtra = (); type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>; const GLOBAL_KIND: Option<!> = None; // no copying of globals from `tcx` to machine memory // We do not check for alignment to avoid having to carry an `Align` // in `ConstValue::ByRef`. const CHECK_ALIGN: bool = false; #[inline(always)] fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool { false // for now, we don't enforce validity } fn find_mir_or_eval_fn( ecx: &mut InterpCx<'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> { debug!("find_mir_or_eval_fn: {:?}", instance); // Only check non-glue functions if let ty::InstanceDef::Item(def_id) = instance.def { // Execution might have wandered off into other crates, so we cannot do a stability- // sensitive check here. But we can at least rule out functions that are not const // at all. if ecx.tcx.is_const_fn_raw(def_id) { // If this function is a `const fn` then under certain circumstances we // can evaluate call via the query system, thus memoizing all future calls. if ecx.try_eval_const_fn_call(instance, ret, args)? 
{ return Ok(None); } } else { // Some functions we support even if they are non-const -- but avoid testing // that for const fn! ecx.hook_panic_fn(instance, args)?; // We certainly do *not* want to actually call the fn // though, so be sure we return here. throw_unsup_format!("calling non-const function `{}`", instance) } } // This is a const fn. Call it. Ok(Some(match ecx.load_mir(instance.def, None) { Ok(body) => *body, Err(err) => { if let err_unsup!(NoMirFor(did)) = err.kind { let path = ecx.tcx.def_path_str(did); return Err(ConstEvalErrKind::NeedsRfc(format!( "calling extern function `{}`", path )) .into()); } return Err(err); } })) } fn call_extra_fn( _ecx: &mut InterpCx<'mir, 'tcx, Self>, fn_val: !, _args: &[OpTy<'tcx>], _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, _unwind: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { match fn_val {} } fn call_intrinsic( ecx: &mut InterpCx<'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, args: &[OpTy<'tcx>], ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>, _unwind: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { if ecx.emulate_intrinsic(instance, args, ret)? { return Ok(()); } // An intrinsic that we do not support let intrinsic_name = ecx.tcx.item_name(instance.def_id()); Err(ConstEvalErrKind::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()) } fn assert_panic( ecx: &mut InterpCx<'mir, 'tcx, Self>, msg: &AssertMessage<'tcx>, _unwind: Option<mir::BasicBlock>, ) -> InterpResult<'tcx> { use rustc_middle::mir::AssertKind::*; // Convert `AssertKind<Operand>` to `AssertKind<u64>`. let err = match msg { BoundsCheck { ref len, ref index } => { let len = ecx .read_immediate(ecx.eval_operand(len, None)?) .expect("can't eval len") .to_scalar()? .to_machine_usize(&*ecx)?; let index = ecx .read_immediate(ecx.eval_operand(index, None)?) .expect("can't eval index") .to_scalar()? 
.to_machine_usize(&*ecx)?; BoundsCheck { len, index } } Overflow(op) => Overflow(*op), OverflowNeg => OverflowNeg, DivisionByZero => DivisionByZero, RemainderByZero => RemainderByZero, ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind), ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind), }; Err(ConstEvalErrKind::AssertFailure(err).into()) } fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> { Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into()) } fn binary_ptr_op( _ecx: &InterpCx<'mir, 'tcx, Self>, _bin_op: mir::BinOp, _left: ImmTy<'tcx>, _right: ImmTy<'tcx>, ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> { Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into()) } #[inline(always)] fn init_allocation_extra<'b>( _memory_extra: &MemoryExtra, _id: AllocId, alloc: Cow<'b, Allocation>, _kind: Option<MemoryKind<!>>, ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) { // We do not use a tag so we can just cheaply forward the allocation (alloc, ()) } #[inline(always)] fn tag_global_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag {} fn box_alloc( _ecx: &mut InterpCx<'mir, 'tcx, Self>, _dest: PlaceTy<'tcx>, ) -> InterpResult<'tcx> { Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into()) } fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { // The step limit has already been hit in a previous call to `before_terminator`. 
if ecx.machine.steps_remaining == 0 { return Ok(()); } ecx.machine.steps_remaining -= 1; if ecx.machine.steps_remaining == 0 { throw_exhaust!(StepLimitReached) } Ok(()) } #[inline(always)] fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { Ok(()) } fn before_access_global( memory_extra: &MemoryExtra, alloc_id: AllocId, allocation: &Allocation, static_def_id: Option<DefId>, is_write: bool, ) -> InterpResult<'tcx> { if is_write && allocation.mutability == Mutability::Not { Err(err_ub!(WriteToReadOnly(alloc_id)).into()) } else if is_write { Err(ConstEvalErrKind::ModifiedGlobal.into()) } else if memory_extra.can_access_statics || static_def_id.is_none() { // `static_def_id.is_none()` indicates this is not a static, but a const or so. Ok(()) } else { Err(ConstEvalErrKind::ConstAccessesStatic.into()) } } } // Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups // so we can end up having a file with just that impl, but for now, let's keep the impl discoverable // at the bottom of this file.
35.04336
100
0.570954