hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
187d7a21dc0760234358aa6a2868f6f7c21b8488 | 130 | mod board;
pub mod constants;
mod hexo;
mod pos;
mod state;
pub use board::*;
pub use hexo::*;
pub use pos::*;
pub use state::*;
| 11.818182 | 18 | 0.653846 |
3836b8fb22dc20cf499e27d7479d2aab70df4339 | 5,146 | #![allow(dead_code)]
#![allow(unused_variables)]
#![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
// Allow some bindgen warnings for now
#![allow(deref_nullptr)]
#![allow(improper_ctypes)]
use std::{any::TypeId, mem::{MaybeUninit} };
// We generate bindings to an actual source file so that we get better IDE integration
// For now do not export Docs for all the Raw bindings.
// We will need to expose types that are part of the Rust api at some point
#[doc(hidden)]
mod bindings;
#[doc(hidden)]
pub use bindings::*;
mod binding_util;
pub use binding_util::*;
mod cache; // Internal only
mod component;
pub use component::*;
mod component_group;
pub use component_group::*;
mod entity;
pub use entity::*;
pub mod filter;
pub use filter::*;
pub mod id;
pub use id::*;
pub mod system;
pub use system::*;
pub mod world;
pub use world::*;
////////////////////////////////////////////////////////////////////////////////////////////////////////
// This Rust binding for flecs is a WIP!!!
//
// Possible TODOs:
// - audit & fix up ALL string usages. rust -> C must null terminate!
// - change all get<> component funcs to return Option<>?
// - validate that term components were named earlier in chain?
// - We can only safely store primitives and raw pointer types within
// components currently, due to how the raw memory is inserted/moved
// need to look in to hooking the lifecycle support to rust, etc
// This could become a bit of a deal breaker for idiomatic rust
// component storage if not solved
// - Implement proper Rusty Query / System APIs that use Tuple generics
/// Marker trait for types that may be stored as flecs components.
///
/// Blanket-implemented for every `'static` type below, so any owned type
/// automatically qualifies; the trait mainly exists as a readable bound.
pub trait Component : 'static { }
impl<T> Component for T where T: 'static {}
/// Conversion to a raw flecs id (`ecs_id_t`) for APIs that accept either
/// an entity or a component id.
pub trait AsEcsId {
    fn id(&self) -> ecs_id_t;
}
impl AsEcsId for EntityId {
    fn id(&self) -> ecs_id_t {
        // NOTE(review): `*self` type-checks as `ecs_id_t`, so `EntityId`
        // appears to be an alias/`Copy` wrapper over the raw id — confirm
        // against the generated bindings.
        *self
    }
}
////////////////////////////////////////////////////////////////////////////////////////////////////////
// C Struct initializer Defaults
//
impl Default for ecs_entity_desc_t {
    /// Returns an all-zero descriptor, mirroring the C convention of
    /// initializing flecs descriptor structs with `{0}`.
    fn default() -> Self {
        // SAFETY: this is a plain C struct generated by bindgen; the
        // all-zero byte pattern is its documented "unset" state.
        unsafe { MaybeUninit::zeroed().assume_init() }
    }
}
impl Default for ecs_system_desc_t {
    /// Returns an all-zero descriptor, mirroring the C convention of
    /// initializing flecs descriptor structs with `{0}`.
    fn default() -> Self {
        // SAFETY: plain C struct from bindgen; all-zero bytes is the
        // "unset" state expected by the flecs C API.
        unsafe { MaybeUninit::zeroed().assume_init() }
    }
}
// TODO - port more C++ tests to Rust!!!
//
// Smoke tests exercising both the safe wrapper API (World, component
// registration, get/set) and the raw generated C bindings.
#[cfg(test)]
mod tests {
    use super::*;
    use std::alloc::Layout;
    #[derive(Default, Debug, PartialEq)]
    struct Position {
        x: f32,
        y: f32,
    }
    #[derive(Default, Debug, PartialEq)]
    struct Velocity {
        x: f32,
        y: f32,
    }
    // Zero-sized component used to tag other entities.
    struct Serializable {}
    #[test]
    fn flecs_multiple_worlds() {
        // Component registrations are unique across worlds!
        let mut world1 = World::new();
        let pos1_e = world1.component::<Position>();
        let mut world2 = World::new();
        world2.component::<Velocity>(); // insert another comp to steal 1st slot
        let pos2_e = world2.component::<Position>();
        assert_ne!(pos1_e, pos2_e);
    }
    #[test]
    fn flecs_wrappers() {
        let mut world = World::new();
        let pos_e = world.component::<Position>();
        let vel_e = world.component::<Velocity>();
        assert_ne!(pos_e, vel_e);
        let entity = world.entity()
            .set(Position { x: 1.0, y: 2.0 })
            .set(Velocity { x: 2.0, y: 4.0 });
        // something broke here??
        let pos = world.get::<Position>(entity).unwrap();
        assert_eq!(pos, &Position { x: 1.0, y: 2.0 });
        let vel = world.get::<Velocity>(entity).unwrap();
        assert_eq!(vel, &Velocity { x: 2.0, y: 4.0 });
    }
    #[test]
    fn flecs_components_are_entities() {
        let mut world = World::new();
        world.component_named::<Position>("Position"); // you can give a comp a name
        world.component::<Serializable>();
        let pos_e = world.id::<Position>().unwrap();
        assert_eq!(world.name(pos_e), "Position");
        // It's possible to add components like you would for any entity
        world.add::<Serializable>(pos_e);
    }
    // Exercises the raw unsafe C API directly, bypassing the wrappers.
    #[test]
    fn flecs_raw_binding_calls() {
        let world = unsafe { ecs_init() };
        let entity = unsafe { ecs_new_id(world) };
        let is_alive = unsafe { ecs_is_alive(world, entity) };
        assert_eq!(is_alive, true);
        let component = register_component(world, ComponentDescriptor {
            symbol: "flecs::tests::A".to_owned(),
            name: "A".to_owned(),
            custom_id: None,
            layout: Layout::from_size_align(16, 4).unwrap()
        });
        let entity = unsafe { ecs_set_id(
            world,
            entity,
            component,
            4, // size
            b"test".as_ptr() as *const ::std::os::raw::c_void, // ptr
        ) };
        // This one should fail/crash due to over size??
        let entity2 = unsafe { ecs_set_id(
            world,
            entity,
            component,
            24, // size
            b"test12345123451234512345".as_ptr() as *const ::std::os::raw::c_void, // ptr
        ) };
        assert_ne!(entity2, 0);
        /*
        // convert this back to readable form...
        let data = unsafe { ecs_get_id(
            world,
            entity,
            component,
        ) }; // -> *const ::std::os::raw::c_void;
        assert_eq!(data, b"test".as_ptr() as *const ::std::os::raw::c_void);
        */
        unsafe { ecs_delete(world, entity) }
        let is_alive = unsafe { ecs_is_alive(world, entity) };
        assert_eq!(is_alive, false);
        unsafe { ecs_fini(world) };
    }
}
| 24.504762 | 104 | 0.631558 |
2637b1a8c7fd72d9c0fde97bf281f29813f29164 | 4,181 | //! This is a ANSI specific implementation for styling related action.
//! This module is used for Windows 10 terminals and Unix terminals by default.
use crate::utils::Result;
use crate::{csi, write_cout};
use super::super::{Attribute, Color, Colored, Style};
/// Builds the CSI (SGR) escape sequence that sets the foreground color.
pub(crate) fn set_fg_csi_sequence(fg_color: Color) -> String {
    format!(csi!("{}m"), Into::<String>::into(Colored::Fg(fg_color)))
}
/// Builds the CSI (SGR) escape sequence that sets the background color.
pub(crate) fn set_bg_csi_sequence(bg_color: Color) -> String {
    format!(csi!("{}m"), Into::<String>::into(Colored::Bg(bg_color)))
}
/// Builds the CSI (SGR) escape sequence enabling a text attribute.
pub(crate) fn set_attr_csi_sequence(attribute: Attribute) -> String {
    // Attributes are encoded directly by their numeric SGR value.
    format!(csi!("{}m"), attribute as i16)
}
/// SGR "reset all attributes" sequence (`ESC [ 0 m`).
pub(crate) static RESET_CSI_SEQUENCE: &'static str = csi!("0m");
/// This struct is an ANSI escape code implementation for color related actions.
pub(crate) struct AnsiColor;
impl AnsiColor {
    pub fn new() -> AnsiColor {
        AnsiColor
    }
}
impl Style for AnsiColor {
    /// Writes the foreground-color escape sequence to the terminal output.
    fn set_fg(&self, fg_color: Color) -> Result<()> {
        write_cout!(set_fg_csi_sequence(fg_color))?;
        Ok(())
    }
    /// Writes the background-color escape sequence to the terminal output.
    fn set_bg(&self, bg_color: Color) -> Result<()> {
        write_cout!(set_bg_csi_sequence(bg_color))?;
        Ok(())
    }
    /// Writes the SGR reset sequence, restoring default colors/attributes.
    fn reset(&self) -> Result<()> {
        write_cout!(RESET_CSI_SEQUENCE)?;
        Ok(())
    }
}
impl From<Colored> for String {
    /// Encodes a `Colored` value as the numeric payload of an SGR escape
    /// sequence (without the leading `ESC [` or trailing `m`).
    ///
    /// `Reset` maps to the dedicated default-color codes `39` (fg) / `49`
    /// (bg); every other color is prefixed with `38;` or `48;` followed by
    /// either a palette code (`5;N`) or a truecolor triple (`2;r;g;b`).
    fn from(colored: Colored) -> Self {
        // Reset short-circuits; otherwise remember fg/bg prefix and color.
        let (prefix, color) = match colored {
            Colored::Fg(Color::Reset) => return String::from("39"),
            Colored::Bg(Color::Reset) => return String::from("49"),
            Colored::Fg(new_color) => ("38;", new_color),
            Colored::Bg(new_color) => ("48;", new_color),
        };
        let payload = match color {
            Color::Black => String::from("5;0"),
            Color::DarkGrey => String::from("5;8"),
            Color::Red => String::from("5;9"),
            Color::DarkRed => String::from("5;1"),
            Color::Green => String::from("5;10"),
            Color::DarkGreen => String::from("5;2"),
            Color::Yellow => String::from("5;11"),
            Color::DarkYellow => String::from("5;3"),
            Color::Blue => String::from("5;12"),
            Color::DarkBlue => String::from("5;4"),
            Color::Magenta => String::from("5;13"),
            Color::DarkMagenta => String::from("5;5"),
            Color::Cyan => String::from("5;14"),
            Color::DarkCyan => String::from("5;6"),
            Color::White => String::from("5;15"),
            Color::Grey => String::from("5;7"),
            Color::Rgb { r, g, b } => format!("2;{};{};{}", r, g, b),
            Color::AnsiValue(val) => format!("5;{}", val),
            // Unknown variants (incl. the already-handled Reset) produce an
            // empty payload, exactly as the previous implementation did.
            _ => String::new(),
        };
        format!("{}{}", prefix, payload)
    }
}
// Round-trip tests for the `Colored` -> SGR payload conversion above.
#[cfg(test)]
mod tests {
    use crate::{Color, Colored};
    #[test]
    fn test_parse_fg_color() {
        let colored = Colored::Fg(Color::Red);
        assert_eq!(Into::<String>::into(colored), "38;5;9");
    }
    #[test]
    fn test_parse_bg_color() {
        let colored = Colored::Bg(Color::Red);
        assert_eq!(Into::<String>::into(colored), "48;5;9");
    }
    #[test]
    fn test_parse_reset_fg_color() {
        let colored = Colored::Fg(Color::Reset);
        assert_eq!(Into::<String>::into(colored), "39");
    }
    #[test]
    fn test_parse_reset_bg_color() {
        let colored = Colored::Bg(Color::Reset);
        assert_eq!(Into::<String>::into(colored), "49");
    }
    // NOTE(review): name says "fg" but the test builds a Bg value — the
    // assertion is internally consistent, only the name is misleading.
    #[test]
    fn test_parse_fg_rgb_color() {
        let colored = Colored::Bg(Color::Rgb { r: 1, g: 2, b: 3 });
        assert_eq!(Into::<String>::into(colored), "48;2;1;2;3");
    }
    #[test]
    fn test_parse_fg_ansi_color() {
        let colored = Colored::Fg(Color::AnsiValue(255));
        assert_eq!(Into::<String>::into(colored), "38;5;255");
    }
}
| 28.060403 | 80 | 0.507534 |
fcf62e1df1a78ae1db88823f099760dc1048e6b6 | 4,020 | use std::string::String;
use std::collections::HashMap;
use std::fs;
use std::io::Result;
use std::path::Path;
use std::process::exit;
use yaml_rust::Yaml;
use yaml_rust::yaml::{Hash, Array};
/// Typealias for a Folder containing sub-folders: `(name, children)`.
type NamedFolder = (String, Vec<Folder>);
/// Represents the two types of "folders" that are given via the yaml config file
pub enum Folder {
    /// A folder that contains nothing (a leaf directory, or a reference to
    /// a named group — disambiguated at creation time).
    Final(String),
    /// A folder that contains sub-folders
    TopLevel(NamedFolder)
}
impl Folder {
    /// Creates a leaf folder from its name.
    fn from_string(name: &str) -> Folder {
        Folder::Final(String::from(name))
    }
    /// Constructs a Vec of Folders from a yaml Array, silently skipping
    /// entries that are neither strings nor hashes.
    pub fn from_array(arr: &Array) -> Vec<Folder> {
        // filter_map replaces the previous filter(is_some)/map(unwrap) chain.
        arr.iter().filter_map(Folder::from).collect()
    }
    /// Constructs a Folder from a yaml Hash object.
    ///
    /// Per config standards, a yaml hash should only contain a single
    /// key/value pair of string/array, so only the first entry is used.
    /// A string key with a non-array value is a user error and aborts the
    /// whole program.
    fn from_hash(hash: &Hash) -> Option<Folder> {
        let (key, value) = hash.iter().next()?;
        match (key, value) {
            (Yaml::String(name), Yaml::Array(arr)) =>
                Some(Folder::TopLevel((String::from(name), Folder::from_array(arr)))),
            // If we have the string as the key, but an invalid value, alert the user and abort
            (Yaml::String(name), _) => {
                println!("folder '{}' is not a list. Aborting...", name);
                exit(1);
            }
            _ => None,
        }
    }
    /// Converts a yaml object to a folder; unsupported yaml node types
    /// yield `None`.
    fn from(yaml: &Yaml) -> Option<Folder> {
        match yaml {
            Yaml::String(name) => Some(Folder::from_string(name)),
            Yaml::Hash(hash) => Folder::from_hash(hash),
            _ => None
        }
    }
}
/// Handles the folder creation process
pub struct FolderEngine<'a> {
    /// The current sub-folder path, used as a stack while recursing so
    /// nested folders are created inside their parents.
    current: Vec<String>,
    /// The named folder groups: a `Final` folder whose name matches a key
    /// here expands to that group instead of a plain directory.
    pub groups: &'a HashMap<String, Vec<Folder>>
}
impl FolderEngine<'_> {
/// Constructs a new FolderEngine
///
/// ## arguments
///
/// * `groups` - HashMap containing the named folder groups and folders they represent
pub fn new(groups: &HashMap<String, Vec<Folder>>) -> FolderEngine {
FolderEngine {
current: vec![],
groups
}
}
/// Creates a single directory.
fn create_dir(&self, name: &String) -> Result<()> {
let folder = if self.current.is_empty() {
format!("./{}", name)
} else {
format!("./{}/{}", self.current.join("/"), name)
};
println!("Creating folder: {}", folder);
fs::create_dir(Path::new(&folder))
}
/// Handles a single folder with no subfolders
fn handle_name(&mut self, name: &String) -> Result<()> {
if let Some(group) = self.groups.get(name) {
self.create_folders(group)
} else {
self.create_dir(name)
}
}
/// Recursively create folders that contain sub-folders
fn handle_named_folder(&mut self, named_folder: &NamedFolder) -> Result<()> {
let (name, folder) = named_folder;
self.create_dir(name)?;
self.current.push(String::from(name));
self.create_folders(folder)?;
self.current.pop();
Ok(())
}
/// Main function to start the folder-creation process
pub fn create_folders(&mut self, folders: &Vec<Folder>) -> Result<()> {
for folder in folders {
match folder {
Folder::Final(name) => self.handle_name(name)?,
Folder::TopLevel(named_folder) => self.handle_named_folder(named_folder)?
}
}
Ok(())
}
} | 30.454545 | 99 | 0.560945 |
f7edc68b2882f95c9dad953f703a20736b2439d1 | 38,995 | // Copyright 2019 Karl Sundequist Blomdahl <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use regex::Regex;
use std::env;
use std::fs::File;
use std::io::{BufRead, BufReader, Read};
use std::time::Instant;
use dg_go::utils::score::{Score, StoneStatus};
use dg_go::utils::sgf::Sgf;
use dg_go::{DEFAULT_KOMI, Board, Color, Point};
use dg_mcts::time_control::{TimeStrategy, RolloutLimit, ByoYomi};
use dg_mcts as mcts;
use dg_utils::config;
mod ponder_service;
mod time_settings;
mod vertex;
use self::vertex::*;
use self::ponder_service::PonderService;
use dg_mcts::options::{SearchOptions, ScoringSearch, StandardSearch};
use dg_mcts::tree::GreedyPath;
/// List containing all implemented commands, this is used to implement
/// the `list_commands` and `known_command` commands.
///
/// Keep the length in sync with the array literal (currently 24 entries)
/// and with the dispatch in `Gtp::parse_command`.
const KNOWN_COMMANDS: [&str; 24] = [
    "protocol_version", "name", "version", "gomill-describe_engine", "gomill-cpu_time",
    "boardsize", "clear_board", "komi", "play",
    "list_commands", "known_command", "showboard", "genmove", "reg_genmove",
    "kgs-genmove_cleanup", "gomill-explain_last_move", "undo",
    "time_settings", "kgs-time_settings", "time_left", "quit",
    "final_score", "final_status_list", "loadsgf"
];
/// How a move generation request should be interpreted.
#[derive(Clone, Debug, PartialEq)]
enum GenMoveMode {
    /// Ordinary `genmove`: search, play, and record the move.
    Normal,
    /// `kgs-genmove_cleanup`: keep playing until the position is scorable.
    CleanUp,
    /// `reg_genmove`: generate the move but do not play it on the board.
    Regression
}
impl GenMoveMode {
    fn is_cleanup(&self) -> bool {
        *self == GenMoveMode::CleanUp
    }
    fn is_regression(&self) -> bool {
        *self == GenMoveMode::Regression
    }
    /// Search options for this mode: clean-up uses the scoring search,
    /// everything else the standard search.
    fn search_strategy(&self) -> Box<dyn SearchOptions + Sync> {
        if self.is_cleanup() {
            Box::new(ScoringSearch::default())
        } else {
            Box::new(StandardSearch::default())
        }
    }
}
/// Internal representation of a parsed GTP command; produced by
/// `Gtp::parse_command` and consumed by `Gtp::process`.
#[derive(Debug, PartialEq)]
enum Command {
    Pass,  // do nothing
    ProtocolVersion,  // report protocol version
    Name,  // report the name of the program
    Version,  // report the version number of the program
    BoardSize(usize),  // set the board size to NxN
    ClearBoard,  // clear the board
    CpuTime,  // write the number of (cpu) seconds spent thinking
    DescribeEngine,  // write a description of the engine
    ExplainLastMove,  // write a description of why the last move was played
    Komi(f32),  // set the komi
    Play(Color, Option<Point>),  // play a stone of the given color at the given vertex (None = pass)
    ListCommands,  // list all available commands
    KnownCommand(String),  // tell whether a command is known
    ShowBoard,  // write the position to stdout
    GenMove(Color, GenMoveMode),  // generate and play the supposedly best move for either color
    FinalScore,  // write the score to stdout
    FinalStatusList(StoneStatus),  // write status of stones to stdout
    LoadSgf(String, usize),  // load SGF file, stopping after the given move number
    Undo,  // undo one move
    TimeSettingsNone,  // set the time settings (no time limit)
    TimeSettingsAbsolute(f32),  // set the time settings (absolute main time)
    TimeSettingsCanadian(f32, f32, usize),  // set the time settings (canadian overtime)
    TimeSettingsByoYomi(f32, f32, usize),  // set the time settings (byo-yomi overtime)
    TimeLeft(Color, f32, usize),  // set the remaining time for the given color
    Quit  // quit
}
/// Prints a GTP success response — `= msg` or `=id msg` — followed by the
/// extra blank line the protocol requires to terminate a response.
macro_rules! success {
    ($id:expr, $message:expr) => ({
        match $id {
            None => println!("= {}\n", $message),
            Some(id) => println!("={} {}\n", id, $message)
        }
    })
}
/// Prints a GTP failure response — `? msg` or `?id msg` — followed by the
/// protocol-mandated blank line.
macro_rules! error {
    ($id:expr, $message:expr) => ({
        match $id {
            None => println!("? {}\n", $message),
            Some(id) => println!("?{} {}\n", id, $message)
        }
    })
}
// Pre-compiled regular expressions for command parsing; compiled once at
// first use instead of on every line. `ID_PREFIX` splits an optional
// numeric GTP id from the rest of the line; the remainder match one
// command each.
lazy_static! {
    static ref ID_PREFIX: Regex = Regex::new(r"^([0-9]+)(?: +(.*)$|$)").unwrap();
    static ref BOARD_SIZE: Regex = Regex::new(r"^boardsize +([0-9]+)").unwrap();
    static ref KOMI: Regex = Regex::new(r"^komi +(-?[0-9\.]+)").unwrap();
    static ref PLAY: Regex = Regex::new(r"^play +([bBwW]) +([a-z][0-9]+|pass)").unwrap();
    static ref KNOWN_COMMAND: Regex = Regex::new(r"^known_command +([^ ]+)").unwrap();
    static ref GENMOVE: Regex = Regex::new(r"^genmove +([bw])").unwrap();
    static ref REG_GENMOVE: Regex = Regex::new(r"^reg_genmove +([bw])").unwrap();
    static ref KGS_GENMOVE_CLEANUP: Regex = Regex::new(r"^kgs-genmove_cleanup +([bw])").unwrap();
    static ref FINAL_STATUS_LIST: Regex = Regex::new(r"^final_status_list +(dead|alive|seki|black_territory|white_territory)").unwrap();
    static ref LOADSGF: Regex = Regex::new(r"^loadsgf +([^ ]+) *([0-9]+)?").unwrap();
    static ref TIME_SETTINGS: Regex = Regex::new(r"^time_settings +([0-9]+\.?[0-9]*) +([0-9]+\.?[0-9]*) +([0-9]+)").unwrap();
    static ref KGS_TIME_SETTINGS_NONE: Regex = Regex::new(r"^kgs-time_settings +none").unwrap();
    static ref KGS_TIME_SETTINGS_ABSOLUTE: Regex = Regex::new(r"^kgs-time_settings +absolute +([0-9]+\.?[0-9]*)").unwrap();
    static ref KGS_TIME_SETTINGS_BYOYOMI: Regex = Regex::new(r"^kgs-time_settings +byoyomi +([0-9]+\.?[0-9]*) +([0-9]+\.?[0-9]*) +([0-9]+)").unwrap();
    static ref KGS_TIME_SETTINGS_CANADIAN: Regex = Regex::new(r"^kgs-time_settings +canadian +([0-9]+\.?[0-9]*) +([0-9]+\.?[0-9]*) +([0-9]+)").unwrap();
    static ref TIME_LEFT: Regex = Regex::new(r"^time_left +([bBwW]) +([0-9]+\.?[0-9]*) +([0-9]+)").unwrap();
}
/// State of one GTP session.
struct Gtp {
    /// Background pondering/search service.
    ponder: PonderService,
    /// All board positions played so far; the last entry is current.
    history: Vec<Board>,
    /// Current komi, also applied retroactively to `history` on change.
    komi: f32,
    // NOTE(review): 3 slots indexed by `Color as usize` — confirm which
    // index values Black/White actually map to.
    time_settings: [Box<dyn time_settings::TimeSettings>; 3],
    /// Pretty-printed search tree of the last generated move, served by
    /// `gomill-explain_last_move`.
    explain_last_move: String,
    /// Cached greedy-playout result of the current position, reused by
    /// `final_score` and `final_status_list`; invalidated on any change.
    finished_board: Option<Result<Board, &'static str>>
}
impl Gtp {
    /// Parse the GTP command in the given string and returns our internal
    /// representation of the given command.
    ///
    /// # Arguments
    ///
    /// * `id` - optional numeric GTP id already stripped from the line
    /// * `line` - the command text (without the id prefix)
    ///
    fn parse_command(id: Option<usize>, line: &str) -> Result<(Option<usize>, Command), &str> {
        // GTP commands are case-insensitive; normalize once up front.
        let line = &line.to_lowercase();
        if line == "protocol_version" {
            Ok((id, Command::ProtocolVersion))
        } else if line == "name" {
            Ok((id, Command::Name))
        } else if line == "version" {
            Ok((id, Command::Version))
        } else if let Some(caps) = BOARD_SIZE.captures(line) {
            let size = caps[1].parse::<usize>().map_err(|_| "syntax error")?;
            Ok((id, Command::BoardSize(size)))
        } else if line == "clear_board" {
            Ok((id, Command::ClearBoard))
        } else if let Some(caps) = KOMI.captures(line) {
            let komi = caps[1].parse::<f32>().map_err(|_| "syntax error")?;
            Ok((id, Command::Komi(komi)))
        } else if let Some(caps) = PLAY.captures(line) {
            let color = caps[1].parse::<Color>().map_err(|_| "syntax error")?;
            let vertex = caps[2].parse::<Vertex>().map_err(|_| "syntax error")?;
            // "pass" is represented as a Play with no point.
            if vertex.is_pass() {
                Ok((id, Command::Play(color, None)))
            } else {
                Ok((id, Command::Play(color, Some(Point::new(vertex.x, vertex.y)))))
            }
        } else if line == "list_commands" {
            Ok((id, Command::ListCommands))
        } else if let Some(caps) = KNOWN_COMMAND.captures(line) {
            let command = &caps[1];
            Ok((id, Command::KnownCommand(command.to_string())))
        } else if line == "showboard" {
            Ok((id, Command::ShowBoard))
        } else if let Some(caps) = GENMOVE.captures(line) {
            let color = caps[1].parse::<Color>().map_err(|_| "syntax error")?;
            // Tromp-Taylor scoring forces clean-up style move generation.
            Ok((id, Command::GenMove(color, if *config::TROMP_TAYLOR { GenMoveMode::CleanUp } else { GenMoveMode::Normal })))
        } else if line == "final_score" {
            Ok((id, Command::FinalScore))
        } else if let Some(caps) = FINAL_STATUS_LIST.captures(line) {
            let status = caps[1].parse::<StoneStatus>().map_err(|_| "syntax error")?;
            Ok((id, Command::FinalStatusList(status)))
        } else if let Some(caps) = REG_GENMOVE.captures(line) {
            let color = caps[1].parse::<Color>().map_err(|_| "syntax error")?;
            Ok((id, Command::GenMove(color, GenMoveMode::Regression)))
        } else if let Some(caps) = KGS_GENMOVE_CLEANUP.captures(line) {
            let color = caps[1].parse::<Color>().map_err(|_| "syntax error")?;
            Ok((id, Command::GenMove(color, GenMoveMode::CleanUp)))
        } else if line == "undo" {
            Ok((id, Command::Undo))
        } else if let Some(caps) = LOADSGF.captures(line) {
            let filename = caps[1].to_string();
            // Missing move number means "load the entire game".
            let move_number = if let Some(move_number) = caps.get(2) {
                move_number.as_str().parse::<usize>().map_err(|_| "syntax error")?
            } else {
                ::std::usize::MAX
            };
            Ok((id, Command::LoadSgf(filename, move_number)))
        } else if let Some(caps) = TIME_SETTINGS.captures(line) {
            let main_time = caps[1].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_time = caps[2].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_stones = caps[3].parse::<usize>().map_err(|_| "syntax error")?;
            if byo_yomi_time > 0.0 && byo_yomi_stones == 0 {
                // we gain extra time every zero stones, so infinite...
                Ok((id, Command::TimeSettingsNone))
            } else if byo_yomi_time == 0.0 {
                // this is effectively absolute time since we gain no extra
                // time.
                Ok((id, Command::TimeSettingsAbsolute(main_time)))
            } else {
                Ok((id, Command::TimeSettingsCanadian(main_time, byo_yomi_time, byo_yomi_stones)))
            }
        } else if let Some(_caps) = KGS_TIME_SETTINGS_NONE.captures(line) {
            Ok((id, Command::TimeSettingsNone))
        } else if let Some(caps) = KGS_TIME_SETTINGS_ABSOLUTE.captures(line) {
            let main_time = caps[1].parse::<f32>().map_err(|_| "syntax error")?;
            Ok((id, Command::TimeSettingsAbsolute(main_time)))
        } else if let Some(caps) = KGS_TIME_SETTINGS_BYOYOMI.captures(line) {
            let main_time = caps[1].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_time = caps[2].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_stones = caps[3].parse::<usize>().map_err(|_| "syntax error")?;
            Ok((id, Command::TimeSettingsByoYomi(main_time, byo_yomi_time, byo_yomi_stones)))
        } else if let Some(caps) = KGS_TIME_SETTINGS_CANADIAN.captures(line) {
            let main_time = caps[1].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_time = caps[2].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_stones = caps[3].parse::<usize>().map_err(|_| "syntax error")?;
            if byo_yomi_stones > 0 {
                Ok((id, Command::TimeSettingsCanadian(main_time, byo_yomi_time, byo_yomi_stones)))
            } else {
                Err("syntax error")
            }
        } else if let Some(caps) = TIME_LEFT.captures(line) {
            let color = caps[1].parse::<Color>().map_err(|_| "syntax error")?;
            let main_time = caps[2].parse::<f32>().map_err(|_| "syntax error")?;
            let byo_yomi_stones = caps[3].parse::<usize>().map_err(|_| "syntax error")?;
            Ok((id, Command::TimeLeft(color, main_time, byo_yomi_stones)))
        } else if line == "gomill-cpu_time" {
            Ok((id, Command::CpuTime))
        } else if line == "gomill-describe_engine" {
            Ok((id, Command::DescribeEngine))
        } else if line == "gomill-explain_last_move" {
            Ok((id, Command::ExplainLastMove))
        } else if line == "quit" {
            Ok((id, Command::Quit))
        } else {
            // Unknown commands are reported immediately and downgraded to a
            // no-op so the session keeps running.
            error!(id, "unknown command");
            Ok((None, Command::Pass))
        }
    }
    /// Parse the GTP command in the given string and returns our internal
    /// representation of the given command.
    ///
    /// Strips `#` comments and an optional numeric id prefix before
    /// delegating to `parse_command`. Returns `None` is never produced
    /// here; parse errors are reported and mapped to `Command::Pass`.
    ///
    /// # Arguments
    ///
    /// * `line` - the raw input line
    ///
    fn parse_line(line: &str) -> Option<(Option<usize>, Command)> {
        let line = line.trim();
        // Everything after '#' is a comment per the GTP specification.
        let line = {
            if let Some(pos) = line.find('#') {
                line[0..pos].to_string()
            } else {
                line.to_string()
            }
        };
        if line.is_empty() {
            Some((None, Command::Pass))
        } else if let Some(caps) = ID_PREFIX.captures(&line) {
            let id = caps[1].parse::<usize>().unwrap();
            let rest = &caps[2];
            match Gtp::parse_command(Some(id), rest.trim()) {
                Ok(result) => Some(result),
                Err(reason) => {
                    error!(Some(id), reason);
                    Some((None, Command::Pass))
                }
            }
        } else {
            match Gtp::parse_command(None, &line) {
                Ok(result) => Some(result),
                Err(reason) => {
                    error!(None as Option<usize>, reason);
                    Some((None, Command::Pass))
                }
            }
        }
    }
    /// Generate a move using the monte carlo tree search engine for the given
    /// color, using the stored search tree if available.
    ///
    /// If the given `color` is not the players whose turn it is according to the
    /// search tree then the tree is fast-forwarded until it is that players turn.
    ///
    /// Prints the GTP response (`pass`, `resign`, or a vertex) itself and
    /// returns the played point, or `None` for pass/resign/error.
    ///
    /// # Arguments
    ///
    /// * `id` - the identifier of the command
    /// * `to_move` - the color to generate the move for
    /// * `mode` - determine whether this is a clean-up move
    ///
    fn generate_move(&mut self, id: Option<usize>, to_move: Color, mode: &GenMoveMode) -> Option<Point> {
        let (main_time, byo_yomi_time, byo_yomi_periods) = self.time_settings[to_move as usize].remaining();
        let board = self.history.last().unwrap();
        let result = self.ponder.service(|service, search_tree, p_state| {
            let search_tree = if search_tree.to_move != to_move {
                // passing moves are not recorded in GTP, so we will just assume
                // the other player passed once if we are in this situation
                mcts::tree::Node::forward(search_tree, 361)
            } else {
                Some(search_tree)
            };
            // Pick the time strategy: finite clock -> byo-yomi budgeting,
            // otherwise a fixed rollout count.
            let search_options: Box<dyn TimeStrategy + Sync> =
                if main_time.is_finite() && byo_yomi_time.is_finite() {
                    let total_visits = search_tree.as_ref()
                        .map(|tree| tree.total_count)
                        .unwrap_or(0);
                    Box::new(ByoYomi::new(board.count(), total_visits, main_time, byo_yomi_time, byo_yomi_periods))
                } else {
                    Box::new(RolloutLimit::new((*config::NUM_ROLLOUT).into()))
                };
            let result = mcts::predict(
                service,
                mode.search_strategy(),
                search_options,
                search_tree,
                &board,
                to_move
            );
            if result.is_none() {
                return (None, None, p_state)
            }
            // disqualify the `pass` move, and any move that is not in contested territory, if
            // we are doing clean-up and the board is not scorable.
            let (value, index, mut tree) = result.unwrap();
            let (value, index) = if mode.is_cleanup() && index == 361 && !board.is_scorable() {
                tree.disqualify(361);
                for &index in &board.get_scorable_territory() {
                    tree.disqualify(index.to_packed_index());
                }
                tree.best(0.0)
            } else {
                (value, index)
            };
            let explain_last_move = mcts::tree::to_pretty(&tree).to_string();
            eprintln!("{}", explain_last_move);
            let should_resign = !*config::NO_RESIGN && value.is_finite() && value < 0.1; // 10% chance of winning
            let index = if should_resign { 361 } else { index };
            let (vertex, tree, other) = if index >= 361 { // passing move
                (None, mcts::tree::Node::forward(tree, 361), board.clone())
            } else {
                let at_point = Point::from_packed_parts(index);
                let mut other = board.clone();
                other.place(to_move, at_point);
                (Some(at_point), mcts::tree::Node::forward(tree, index), other)
            };
            (Some((vertex, should_resign, explain_last_move)), tree, (other, to_move.opposite()))
        });
        if let Ok(Some((point, should_resign, explain_last_move))) = result {
            self.explain_last_move = explain_last_move;
            // Any cached endgame playout is stale once a new move exists.
            self.finished_board = None;
            if should_resign {
                success!(id, "resign");
                None
            } else if let Some(point) = point { // passing move
                success!(id, &format!("{}", Vertex::from(point)));
                Some(point)
            } else {
                success!(id, "pass");
                None
            }
        } else if let Ok(None) = result {
            error!(id, "unrecognized error");
            None
        } else {
            error!(id, result.err().unwrap());
            None
        }
    }
    /// Plays the remainder of the game greedily from `board` and returns the
    /// finished position, used for scoring and stone-status queries.
    ///
    /// The result is cached in `self.finished_board` (errors are not cached)
    /// so repeated `final_score` / `final_status_list` calls are cheap.
    fn greedy_playout(&mut self, board: &Board) -> Result<Board, &'static str> {
        let mut finished_board = self.finished_board.clone();
        // Drop a cached error so we retry instead of re-reporting it.
        if finished_board.as_ref().map(|f| f.is_err()).unwrap_or(false) {
            finished_board = None;
        }
        let result = finished_board.get_or_insert_with(|| {
            self.ponder.service(|pool, original_search_tree, p_state| {
                // if the search tree is too small, the expand it before continuing
                let mut board = board.clone();
                let mut to_move = board.to_move();
                let search_tree = match mcts::predict(
                    pool,
                    Box::new(ScoringSearch::default()),
                    Box::new(RolloutLimit::new((*config::NUM_ROLLOUT).into())),
                    None,
                    &board,
                    to_move
                ) {
                    Some((_value, _index, search_tree)) => search_tree,
                    None => { return (board, None, p_state); }
                };
                // before doing a greedy walk, traverse the current best path in any search tree
                // we have computed
                for index in GreedyPath::new(&search_tree, 8) {
                    if index != 361 {
                        board._place(to_move, Point::from_packed_parts(index));
                    }
                    to_move = to_move.opposite();
                }
                // greedy rollout of the rest of the game
                let (finished, _rollout) = mcts::greedy_score(
                    pool.predictor(),
                    &board,
                    to_move
                );
                (finished, Some(original_search_tree), p_state)
            })
        }).clone();
        self.finished_board = Some(result.clone());
        result
    }
fn process(&mut self, id: Option<usize>, cmd: Command) {
match cmd {
Command::Quit => {}
Command::Pass => {},
Command::ProtocolVersion => { success!(id, "2"); },
Command::Name => {
success!(id, get_name());
},
Command::Version => {
success!(id, get_version());
},
Command::DescribeEngine => {
success!(id, format!(
"{} {}\n{}",
get_name(),
get_version(),
config::get_description()
));
},
Command::BoardSize(size) => {
if size != 19 {
error!(id, "unacceptable size");
} else {
success!(id, "");
}
},
Command::ClearBoard => {
if self.history.len() > 1 {
self.history = vec![Board::new(self.komi)];
self.explain_last_move = String::new();
self.finished_board = None;
self.ponder = PonderService::new(Board::new(self.komi));
}
success!(id, "");
},
Command::Komi(komi) => {
if self.komi != komi {
self.komi = komi;
for board in self.history.iter_mut() {
(*board).set_komi(komi);
}
// restart the pondering service, since we have been thinking
// with the wrong komi.
let board = self.history.last().unwrap().clone();
self.ponder = PonderService::new(board);
}
success!(id, "");
},
Command::Play(color, at_point) => {
let next_board = {
let board = self.history.last().unwrap();
if let Some(at_point) = at_point {
if board.is_valid(color, at_point) {
let mut other = board.clone();
other.place(color, at_point);
self.ponder.forward(color, Some(at_point));
Some(other)
} else {
None
}
} else {
self.ponder.forward(color, None);
Some(board.clone())
}
};
if let Some(next_board) = next_board {
self.history.push(next_board);
success!(id, "");
} else {
error!(id, "illegal move");
}
},
Command::ListCommands => {
success!(id, KNOWN_COMMANDS.join("\n"));
},
Command::KnownCommand(other) => {
success!(id, {
if KNOWN_COMMANDS.iter().any(|&c| other == c) {
"true"
} else {
"false"
}
});
},
Command::ShowBoard => {
let board = self.history.last().unwrap();
success!(id, &format!("\n{}", board));
},
Command::GenMove(color, mode) => {
let start_time = Instant::now();
let at_point = self.generate_move(id, color, &mode);
if !mode.is_regression() {
if let Some(at_point) = at_point {
let mut board = self.history.last().unwrap().clone();
board.place(color, at_point);
self.history.push(board);
}
}
// update the remaining main time, saturating at zero instead of
// overflowing.
let elapsed = start_time.elapsed();
let elapsed_secs = elapsed.as_secs() as f32 + elapsed.subsec_nanos() as f32 / 1e9;
let c = color as usize;
self.time_settings[c].update(elapsed_secs);
},
Command::ExplainLastMove => {
success!(id, self.explain_last_move);
},
Command::FinalScore => {
let board = self.history.last().unwrap().clone();
let result = self.greedy_playout(&board);
if let Ok(finished) = result {
let (black, white) = board.get_guess_score(&finished);
eprintln!("Black: {}", black);
eprintln!("White: {} + {}", white, self.komi);
let black = black as f32;
let white = white as f32 + self.komi;
if black == white {
success!(id, "0");
} else if black > white {
success!(id, &format!("B+{:.1}", black - white));
} else if white > black {
success!(id, &format!("W+{:.1}", white - black));
}
} else {
error!(id, result.err().unwrap());
}
},
Command::FinalStatusList(status) => {
let board = self.history.last().unwrap().clone();
let result = self.greedy_playout(&board);
if let Ok(finished) = result {
let status_list = board.get_stone_status(&finished);
let vertices = status_list.into_iter()
.filter_map(|(index, stone_status)| {
if stone_status.contains(&status) {
Some(format!("{}", Vertex::from(index)))
} else {
None
}
})
.collect::<Vec<String>>();
success!(id, vertices.join(" "));
} else {
error!(id, result.err().unwrap());
}
},
Command::LoadSgf(filename, move_number) => {
if let Ok(file) = File::open(filename) {
let mut buf_reader = BufReader::new(file);
let mut content = vec! [];
if let Err(_reason) = buf_reader.read_to_end(&mut content) {
error!(id, "cannot read file content");
}
self.history = vec! [];
self.explain_last_move = String::new();
self.finished_board = None;
for entry in Sgf::new(&content, self.komi).take(move_number) {
match entry {
Ok(entry) => {
self.history.push(entry.board);
},
Err(_reason) => {
error!(id, "failed to parse file");
return;
}
}
}
// start the pondering agent
let board = self.history.last().unwrap().clone();
self.ponder = PonderService::new(board);
success!(id, "");
} else {
error!(id, "cannot open file");
}
},
Command::Undo => {
if self.history.len() > 1 {
self.history.pop();
// update the ponder state with the new board position
let board = self.history.last().unwrap().clone();
self.explain_last_move = String::new();
self.finished_board = None;
self.ponder = PonderService::new(board);
success!(id, "");
} else {
error!(id, "cannot undo");
}
},
Command::TimeSettingsNone => {
for &c in &[Color::Black, Color::White] {
self.time_settings[c as usize] = Box::new(time_settings::None::new());
}
success!(id, "");
},
Command::TimeSettingsAbsolute(main_time) => {
for &c in &[Color::Black, Color::White] {
self.time_settings[c as usize] = Box::new(time_settings::Absolute::new(main_time));
}
success!(id, "");
},
Command::TimeSettingsByoYomi(main_time, byo_yomi_time, byo_yomi_stones) => {
for &c in &[Color::Black, Color::White] {
self.time_settings[c as usize] = Box::new(time_settings::ByoYomi::new(
main_time,
byo_yomi_time,
byo_yomi_stones
));
}
success!(id, "");
},
Command::TimeSettingsCanadian(main_time, byo_yomi_time, byo_yomi_stones) => {
for &c in &[Color::Black, Color::White] {
self.time_settings[c as usize] = Box::new(time_settings::Canadian::new(
main_time,
byo_yomi_time,
byo_yomi_stones
));
}
success!(id, "");
},
Command::TimeLeft(color, main_time, byo_yomi_stones) => {
let c = color as usize;
self.time_settings[c].time_left(main_time, byo_yomi_stones);
success!(id, "");
},
Command::CpuTime => {
let cpu_time = self.ponder.cpu_time();
let secs = cpu_time.as_secs() as f64 + cpu_time.subsec_nanos() as f64 / 1e6;
success!(id, format!("{:.4}", secs));
}
}
}
}
/// Returns the name of this engine.
///
/// The `DG_NAME` environment variable takes priority; otherwise the crate
/// name baked in at compile time is used.
pub fn get_name() -> String {
    match env::var("DG_NAME") {
        Ok(name) => name,
        Err(_) => env!("CARGO_PKG_NAME").to_string(),
    }
}
/// Returns the version of this engine.
///
/// The `DG_VERSION` environment variable takes priority; otherwise the crate
/// version baked in at compile time is used.
pub fn get_version() -> String {
    match env::var("DG_VERSION") {
        Ok(version) => version,
        Err(_) => env!("CARGO_PKG_VERSION").to_string(),
    }
}
/// Run the GTP (Go Text Protocol) client that reads from standard input
/// and writes to standard output. This client implements the minimum
/// necessary feature-set of a GTP client.
pub fn run() {
    let stdin = ::std::io::stdin();
    let stdin_lock = stdin.lock();
    // Fresh engine state: an empty game at the default komi, with no time
    // controls configured for either colour.
    let mut gtp = Gtp {
        ponder: PonderService::new(Board::new(DEFAULT_KOMI)),
        history: vec! [Board::new(DEFAULT_KOMI)],
        komi: DEFAULT_KOMI,
        explain_last_move: String::new(),
        finished_board: None,
        time_settings: [
            Box::new(time_settings::None::new()),
            Box::new(time_settings::None::new()),
            Box::new(time_settings::None::new()),
        ],
    };
    for maybe_line in stdin_lock.lines() {
        // Any read error terminates the session, just like EOF.
        let line = match maybe_line {
            Ok(line) => line,
            Err(_) => break,
        };
        match Gtp::parse_line(&line) {
            // `quit` is acknowledged before we stop reading.
            Some((id, Command::Quit)) => {
                success!(id, "");
                break;
            },
            Some((id, cmd)) => gtp.process(id, cmd),
            // Unparseable input ends the session.
            None => break,
        }
    }
}
#[cfg(test)]
mod tests {
    //! Parser-level tests for the GTP command grammar: each case feeds a raw
    //! protocol line to `Gtp::parse_line` and checks the decoded id/command.
    use dg_go::*;
    use gtp::*;
    #[test]
    // Renamed from `protocol_verion` (typo) so the test name matches the
    // command under test.
    fn protocol_version() {
        assert_eq!(Gtp::parse_line("1 protocol_version"), Some((Some(1), Command::ProtocolVersion)));
        assert_eq!(Gtp::parse_line("protocol_version"), Some((None, Command::ProtocolVersion)));
    }
    #[test]
    fn name() {
        assert_eq!(Gtp::parse_line("1 name"), Some((Some(1), Command::Name)));
        assert_eq!(Gtp::parse_line("name"), Some((None, Command::Name)));
    }
    #[test]
    fn version() {
        assert_eq!(Gtp::parse_line("1 version"), Some((Some(1), Command::Version)));
        assert_eq!(Gtp::parse_line("version"), Some((None, Command::Version)));
    }
    #[test]
    fn boardsize() {
        assert_eq!(Gtp::parse_line("1 boardsize 7"), Some((Some(1), Command::BoardSize(7))));
        assert_eq!(Gtp::parse_line("boardsize 13"), Some((None, Command::BoardSize(13))));
    }
    #[test]
    fn clear_board() {
        assert_eq!(Gtp::parse_line("1 clear_board"), Some((Some(1), Command::ClearBoard)));
        assert_eq!(Gtp::parse_line("clear_board"), Some((None, Command::ClearBoard)));
    }
    #[test]
    fn komi() {
        assert_eq!(Gtp::parse_line("1 komi 0.5"), Some((Some(1), Command::Komi(0.5))));
        assert_eq!(Gtp::parse_line("komi 10"), Some((None, Command::Komi(10.0))));
        assert_eq!(Gtp::parse_line("komi -7.5"), Some((None, Command::Komi(-7.5))));
    }
    #[test]
    fn play() {
        assert_eq!(Gtp::parse_line("1 play b c2"), Some((Some(1), Command::Play(Color::Black, Some(Point::new(2, 1))))));
        assert_eq!(Gtp::parse_line("play w a1"), Some((None, Command::Play(Color::White, Some(Point::new(0, 0))))));
    }
    #[test]
    fn list_commands() {
        assert_eq!(Gtp::parse_line("1 list_commands"), Some((Some(1), Command::ListCommands)));
        assert_eq!(Gtp::parse_line("list_commands"), Some((None, Command::ListCommands)));
    }
    #[test]
    fn known_command() {
        assert_eq!(Gtp::parse_line("1 known_command aaaa"), Some((Some(1), Command::KnownCommand("aaaa".to_string()))));
        assert_eq!(Gtp::parse_line("known_command genmove"), Some((None, Command::KnownCommand("genmove".to_string()))));
    }
    #[test]
    fn showboard() {
        assert_eq!(Gtp::parse_line("1 showboard"), Some((Some(1), Command::ShowBoard)));
        assert_eq!(Gtp::parse_line("showboard"), Some((None, Command::ShowBoard)));
    }
    #[test]
    fn genmove() {
        assert_eq!(Gtp::parse_line("1 genmove b"), Some((Some(1), Command::GenMove(Color::Black, GenMoveMode::Normal))));
        assert_eq!(Gtp::parse_line("genmove w"), Some((None, Command::GenMove(Color::White, GenMoveMode::Normal))));
    }
    #[test]
    fn final_score() {
        assert_eq!(Gtp::parse_line("1 final_score"), Some((Some(1), Command::FinalScore)));
        assert_eq!(Gtp::parse_line("final_score"), Some((None, Command::FinalScore)));
    }
    #[test]
    fn final_status_list() {
        assert_eq!(Gtp::parse_line("1 final_status_list dead"), Some((Some(1), Command::FinalStatusList(StoneStatus::Dead))));
        assert_eq!(Gtp::parse_line("final_status_list alive"), Some((None, Command::FinalStatusList(StoneStatus::Alive))));
        assert_eq!(Gtp::parse_line("final_status_list dead"), Some((None, Command::FinalStatusList(StoneStatus::Dead))));
        assert_eq!(Gtp::parse_line("final_status_list seki"), Some((None, Command::FinalStatusList(StoneStatus::Seki))));
        assert_eq!(Gtp::parse_line("final_status_list black_territory"), Some((None, Command::FinalStatusList(StoneStatus::BlackTerritory))));
        assert_eq!(Gtp::parse_line("final_status_list white_territory"), Some((None, Command::FinalStatusList(StoneStatus::WhiteTerritory))));
    }
    #[test]
    fn reg_genmove() {
        assert_eq!(Gtp::parse_line("1 reg_genmove b"), Some((Some(1), Command::GenMove(Color::Black, GenMoveMode::Regression))));
        assert_eq!(Gtp::parse_line("reg_genmove w"), Some((None, Command::GenMove(Color::White, GenMoveMode::Regression))));
    }
    #[test]
    fn kgs_genmove_cleanup() {
        assert_eq!(Gtp::parse_line("1 kgs-genmove_cleanup b"), Some((Some(1), Command::GenMove(Color::Black, GenMoveMode::CleanUp))));
        assert_eq!(Gtp::parse_line("kgs-genmove_cleanup w"), Some((None, Command::GenMove(Color::White, GenMoveMode::CleanUp))));
    }
    #[test]
    fn loadsgf() {
        // Without an explicit move number the whole game record is replayed.
        assert_eq!(Gtp::parse_line("1 loadsgf x.sgf"), Some((Some(1), Command::LoadSgf("x.sgf".into(), ::std::usize::MAX))));
        assert_eq!(Gtp::parse_line("loadsgf x.sgf"), Some((None, Command::LoadSgf("x.sgf".into(), ::std::usize::MAX))));
        assert_eq!(Gtp::parse_line("loadsgf x/y/z.sgf 120"), Some((None, Command::LoadSgf("x/y/z.sgf".into(), 120))));
    }
    #[test]
    fn undo() {
        assert_eq!(Gtp::parse_line("1 undo"), Some((Some(1), Command::Undo)));
        assert_eq!(Gtp::parse_line("undo"), Some((None, Command::Undo)));
    }
    #[test]
    fn time_settings() {
        assert_eq!(Gtp::parse_line("1 time_settings 0 1 0"), Some((Some(1), Command::TimeSettingsNone)));
        assert_eq!(Gtp::parse_line("1 time_settings 30.2 0 0"), Some((Some(1), Command::TimeSettingsAbsolute(30.2))));
        assert_eq!(Gtp::parse_line("time_settings 300 3.14 1"), Some((None, Command::TimeSettingsCanadian(300.0, 3.14, 1))));
    }
    #[test]
    fn kgs_time_settings() {
        assert_eq!(Gtp::parse_line("1 kgs-time_settings none"), Some((Some(1), Command::TimeSettingsNone)));
        assert_eq!(Gtp::parse_line("kgs-time_settings none"), Some((None, Command::TimeSettingsNone)));
        assert_eq!(Gtp::parse_line("2 kgs-time_settings absolute 30.2"), Some((Some(2), Command::TimeSettingsAbsolute(30.2))));
        assert_eq!(Gtp::parse_line("kgs-time_settings absolute 300"), Some((None, Command::TimeSettingsAbsolute(300.0))));
        assert_eq!(Gtp::parse_line("3 kgs-time_settings byoyomi 30.2 0 0"), Some((Some(3), Command::TimeSettingsByoYomi(30.2, 0.0, 0))));
        assert_eq!(Gtp::parse_line("kgs-time_settings byoyomi 300 3.14 1"), Some((None, Command::TimeSettingsByoYomi(300.0, 3.14, 1))));
        assert_eq!(Gtp::parse_line("4 kgs-time_settings canadian 30.2 1 1"), Some((Some(4), Command::TimeSettingsCanadian(30.2, 1.0, 1))));
        assert_eq!(Gtp::parse_line("kgs-time_settings canadian 300 3.14 1"), Some((None, Command::TimeSettingsCanadian(300.0, 3.14, 1))));
    }
    #[test]
    fn time_left() {
        assert_eq!(Gtp::parse_line("1 time_left b 3.14 0"), Some((Some(1), Command::TimeLeft(Color::Black, 3.14, 0))));
        assert_eq!(Gtp::parse_line("time_left W 278.1 1"), Some((None, Command::TimeLeft(Color::White, 278.1, 1))));
    }
    #[test]
    fn gomill_explain_last_move() {
        assert_eq!(Gtp::parse_line("1 gomill-explain_last_move"), Some((Some(1), Command::ExplainLastMove)));
        assert_eq!(Gtp::parse_line("gomill-explain_last_move"), Some((None, Command::ExplainLastMove)));
    }
    #[test]
    fn gomill_describe_engine() {
        assert_eq!(Gtp::parse_line("1 gomill-describe_engine"), Some((Some(1), Command::DescribeEngine)));
        assert_eq!(Gtp::parse_line("gomill-describe_engine"), Some((None, Command::DescribeEngine)));
    }
    #[test]
    fn gomill_cpu_time() {
        assert_eq!(Gtp::parse_line("1 gomill-cpu_time"), Some((Some(1), Command::CpuTime)));
        assert_eq!(Gtp::parse_line("gomill-cpu_time"), Some((None, Command::CpuTime)));
    }
    #[test]
    fn quit() {
        assert_eq!(Gtp::parse_line("1 quit"), Some((Some(1), Command::Quit)));
        assert_eq!(Gtp::parse_line("quit"), Some((None, Command::Quit)));
    }
    #[test]
    fn empty() {
        // A blank line is treated as a pass command rather than an error.
        assert_eq!(Gtp::parse_line(""), Some((None, Command::Pass)));
    }
}
| 40.662148 | 152 | 0.52876 |
72b2b8691c0646bcc9459edd81dabab973d299f6 | 6,918 | #[doc = "Register `IO3` reader"]
// svd2rust-generated newtype over the shared generic register reader, so the
// IO3-specific field accessors (`pin`, `port`, `connect`) can be attached.
pub struct R(crate::R<IO3_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<IO3_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        // Expose the inner generic reader (raw `bits()`, etc.) transparently.
        &self.0
    }
}
impl From<crate::R<IO3_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<IO3_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `IO3` writer"]
// svd2rust-generated newtype over the shared generic register writer; field
// proxies (`PIN_W`, `PORT_W`, `CONNECT_W`) mutate it through `DerefMut`.
pub struct W(crate::W<IO3_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<IO3_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // Mutable access to the staged register value being built up.
        &mut self.0
    }
}
impl From<crate::W<IO3_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<IO3_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Field `PIN` reader - Pin number"]
// Reader proxy for the 5-bit PIN field (bits 0:4, mask 0x1f).
pub struct PIN_R(crate::FieldReader<u8, u8>);
impl PIN_R {
    #[inline(always)]
    pub(crate) fn new(bits: u8) -> Self {
        PIN_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for PIN_R {
    type Target = crate::FieldReader<u8, u8>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `PIN` writer - Pin number"]
pub struct PIN_W<'a> {
    w: &'a mut W,
}
impl<'a> PIN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Read-modify-write of bits 0:4 only; the rest of the word is kept.
        self.w.bits = (self.w.bits & !0x1f) | (value as u32 & 0x1f);
        self.w
    }
}
#[doc = "Field `PORT` reader - Port number"]
// Reader proxy for the single-bit PORT field (bit 5).
pub struct PORT_R(crate::FieldReader<bool, bool>);
impl PORT_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        PORT_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for PORT_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `PORT` writer - Port number"]
pub struct PORT_W<'a> {
    w: &'a mut W,
}
impl<'a> PORT_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 5 only; other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5);
        self.w
    }
}
#[doc = "Connection\n\nValue on reset: 1"]
// Enumerated values for the single-bit CONNECT field (bit 31).
// Reset value is 1, i.e. the pin starts out disconnected.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CONNECT_A {
    #[doc = "1: Disconnect"]
    DISCONNECTED = 1,
    #[doc = "0: Connect"]
    CONNECTED = 0,
}
impl From<CONNECT_A> for bool {
    #[inline(always)]
    fn from(variant: CONNECT_A) -> Self {
        // Maps the enum discriminant (0/1) straight to the field bit.
        variant as u8 != 0
    }
}
#[doc = "Field `CONNECT` reader - Connection"]
pub struct CONNECT_R(crate::FieldReader<bool, CONNECT_A>);
impl CONNECT_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        CONNECT_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> CONNECT_A {
        match self.bits {
            true => CONNECT_A::DISCONNECTED,
            false => CONNECT_A::CONNECTED,
        }
    }
    #[doc = "Checks if the value of the field is `DISCONNECTED`"]
    #[inline(always)]
    pub fn is_disconnected(&self) -> bool {
        **self == CONNECT_A::DISCONNECTED
    }
    #[doc = "Checks if the value of the field is `CONNECTED`"]
    #[inline(always)]
    pub fn is_connected(&self) -> bool {
        **self == CONNECT_A::CONNECTED
    }
}
impl core::ops::Deref for CONNECT_R {
    type Target = crate::FieldReader<bool, CONNECT_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `CONNECT` writer - Connection"]
pub struct CONNECT_W<'a> {
    w: &'a mut W,
}
impl<'a> CONNECT_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: CONNECT_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Disconnect"]
    #[inline(always)]
    pub fn disconnected(self) -> &'a mut W {
        self.variant(CONNECT_A::DISCONNECTED)
    }
    #[doc = "Connect"]
    #[inline(always)]
    pub fn connected(self) -> &'a mut W {
        self.variant(CONNECT_A::CONNECTED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 31 only; other bits are preserved.
        self.w.bits = (self.w.bits & !(0x01 << 31)) | ((value as u32 & 0x01) << 31);
        self.w
    }
}
// Field extraction methods: each slices its field out of the 32-bit value.
impl R {
    #[doc = "Bits 0:4 - Pin number"]
    #[inline(always)]
    pub fn pin(&self) -> PIN_R {
        PIN_R::new((self.bits & 0x1f) as u8)
    }
    #[doc = "Bit 5 - Port number"]
    #[inline(always)]
    pub fn port(&self) -> PORT_R {
        PORT_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 31 - Connection"]
    #[inline(always)]
    pub fn connect(&self) -> CONNECT_R {
        CONNECT_R::new(((self.bits >> 31) & 0x01) != 0)
    }
}
// Field writer factories: each borrows `self` so writes can be chained.
impl W {
    #[doc = "Bits 0:4 - Pin number"]
    #[inline(always)]
    pub fn pin(&mut self) -> PIN_W {
        PIN_W { w: self }
    }
    #[doc = "Bit 5 - Port number"]
    #[inline(always)]
    pub fn port(&mut self) -> PORT_W {
        PORT_W { w: self }
    }
    #[doc = "Bit 31 - Connection"]
    #[inline(always)]
    pub fn connect(&mut self) -> CONNECT_W {
        CONNECT_W { w: self }
    }
    #[doc = r"Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses the typed field writers; caller must supply a valid layout.
        self.0.bits(bits);
        self
    }
}
#[doc = "Pin select for serial data HOLD/IO3.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [io3](index.html) module"]
// Zero-sized marker type that ties the generic register machinery to IO3.
pub struct IO3_SPEC;
impl crate::RegisterSpec for IO3_SPEC {
    // The register is 32 bits wide.
    type Ux = u32;
}
#[doc = "`read()` method returns [io3::R](R) reader structure"]
impl crate::Readable for IO3_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [io3::W](W) writer structure"]
impl crate::Writable for IO3_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets IO3 to value 0xffff_ffff"]
impl crate::Resettable for IO3_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        // All-ones reset: pin/port at maximum and CONNECT = disconnected.
        0xffff_ffff
    }
}
| 28.236735 | 420 | 0.56577 |
8fa17a722480a2eacaa43512fcfbead954aa5cc1 | 1,545 | //! An extension trait to add a convenient way to display keystrokes
use std::borrow::Cow;
use tuikit::prelude::Key;
/// An extension trait to add a convenient way to display keystrokes
pub trait KeyDisplay {
    /// Return a compact string representing this keystroke
    /// (for example `^c` for a Ctrl chord or `<Esc>` for the escape key).
    fn compact_string(self) -> Cow<'static, str>;
}
impl KeyDisplay for Key {
    /// Render this keystroke in its shortest conventional notation.
    fn compact_string(self) -> Cow<'static, str> {
        match self {
            // Dynamic forms: printable characters, control chords, and
            // function keys all need a freshly built string.
            Key::Char(c) => Cow::from(c.to_string()),
            Key::Ctrl(c) => Cow::from(format!("^{}", c)),
            Key::F(num) => Cow::from(format!("<F{}>", num)),
            // Named keys map to fixed notation.
            Key::Up => Cow::from("<Up>"),
            Key::Down => Cow::from("<Down>"),
            Key::Left => Cow::from("<Left>"),
            Key::Right => Cow::from("<Right>"),
            Key::ESC => Cow::from("<Esc>"),
            Key::Backspace => Cow::from("<BS>"),
            Key::Delete => Cow::from("<Del>"),
            Key::Tab => Cow::from("<Tab>"),
            Key::Enter => Cow::from("<CR>"),
            Key::Insert => Cow::from("<Insert>"),
            Key::Home => Cow::from("<Home>"),
            Key::End => Cow::from("<End>"),
            Key::PageUp => Cow::from("<PageUp>"),
            Key::PageDown => Cow::from("<PageDown>"),
            // Anything else falls back to its debug representation.
            _ => Cow::from(format!("{:?}", self)),
        }
    }
}
/// Converts a slice of buffered `Key`s into a single display `String` by
/// concatenating each keystroke's compact representation.
pub fn keystrokes_to_string(keystroke_buffer: &[Key]) -> String {
    let mut rendered = String::new();
    for key in keystroke_buffer {
        rendered.push_str(&key.compact_string());
    }
    rendered
}
| 30.294118 | 68 | 0.509385 |
cc6a985641b1ce725adf82f48169a9e387819ae8 | 29,049 | //! Data structure representing the live range of an SSA value.
//!
//! Live ranges are tracked per SSA value, not per variable or virtual register. The live range of
//! an SSA value begins where it is defined and extends to all program points where the value is
//! still needed.
//!
//! # Local Live Ranges
//!
//! Inside a single extended basic block, the live range of a value is always an interval between
//! two program points (if the value is live in the EBB at all). The starting point is either:
//!
//! 1. The instruction that defines the value, or
//! 2. The EBB header, because the value is an argument to the EBB, or
//! 3. The EBB header, because the value is defined in another EBB and live-in to this one.
//!
//! The ending point of the local live range is the last of the following program points in the
//! EBB:
//!
//! 1. The last use in the EBB, where a *use* is an instruction that has the value as an argument.
//! 2. The last branch or jump instruction in the EBB that can reach a use.
//! 3. If the value has no uses anywhere (a *dead value*), the program point that defines it.
//!
//! Note that 2. includes loop back-edges to the same EBB. In general, if a value is defined
//! outside a loop and used inside the loop, it will be live in the entire loop.
//!
//! # Global Live Ranges
//!
//! Values that appear in more than one EBB have a *global live range* which can be seen as the
//! disjoint union of the per-EBB local intervals for all of the EBBs where the value is live.
//! Together with a `ProgramOrder` which provides a linear ordering of the EBBs, the global live
//! range becomes a linear sequence of disjoint intervals, at most one per EBB.
//!
//! In the special case of a dead value, the global live range is a single interval where the start
//! and end points are the same. The global live range of a value is never completely empty.
//!
//! # Register interference
//!
//! The register allocator uses live ranges to determine if values *interfere*, which means that
//! they can't be stored in the same register. Two live ranges interfere if and only if any of
//! their intervals overlap.
//!
//! If one live range ends at an instruction that defines another live range, those two live ranges
//! are not considered to interfere. This is because most ISAs allow instructions to reuse an input
//! register for an output value. If Cranelift gets support for inline assembly, we will need to
//! handle *early clobbers* which are output registers that are not allowed to alias any input
//! registers.
//!
//! If `i1 < i2 < i3` are program points, we have:
//!
//! - `i1-i2` and `i1-i3` interfere because the intervals overlap.
//! - `i1-i2` and `i2-i3` don't interfere.
//! - `i1-i3` and `i2-i2` do interfere because the dead def would clobber the register.
//! - `i1-i2` and `i2-i2` don't interfere.
//! - `i2-i3` and `i2-i2` do interfere.
//!
//! Because of this behavior around interval end points, live range interference is not completely
//! equivalent to mathematical intersection of open or half-open intervals.
//!
//! # Implementation notes
//!
//! A few notes about the implementation of this data structure. This should not concern someone
//! only looking to use the public interface.
//!
//! ## EBB ordering
//!
//! The relative order of EBBs is used to maintain a sorted list of live-in intervals and to
//! coalesce adjacent live-in intervals when the prior interval covers the whole EBB. This doesn't
//! depend on any property of the program order, so alternative orderings are possible:
//!
//! 1. The EBB layout order. This is what we currently use.
//! 2. A topological order of the dominator tree. All the live-in intervals would come after the
//! def interval.
//! 3. A numerical order by EBB number. Performant because it doesn't need to indirect through the
//! `ProgramOrder` for comparisons.
//!
//! These orderings will cause small differences in coalescing opportunities, but all of them would
//! do a decent job of compressing a long live range. The numerical order might be preferable
//! because:
//!
//! - It has better performance because EBB numbers can be compared directly without any table
//! lookups.
//! - If EBB numbers are not reused, it is safe to allocate new EBBs without getting spurious
//! live-in intervals from any coalesced representations that happen to cross a new EBB.
//!
//! For comparing instructions, the layout order is always what we want.
//!
//! ## Alternative representation
//!
//! Since a local live-in interval always begins at its EBB header, it is uniquely described by its
//! end point instruction alone. We can use the layout to look up the EBB containing the end point.
//! This means that a sorted `Vec<Inst>` would be enough to represent the set of live-in intervals.
//!
//! Coalescing is an important compression technique because some live ranges can span thousands of
//! EBBs. We can represent that by switching to a sorted `Vec<ProgramPoint>` representation where
//! an `[Ebb, Inst]` pair represents a coalesced range, while an `Inst` entry without a preceding
//! `Ebb` entry represents a single live-in interval.
//!
//! This representation is more compact for a live range with many uncoalesced live-in intervals.
//! It is more complicated to work with, though, so it is probably not worth it. The performance
//! benefits of switching to a numerical EBB order only appears if the binary search is doing
//! EBB-EBB comparisons.
//!
//! ## B-tree representation
//!
//! A `BTreeMap<Ebb, Inst>` could also be used for the live-in intervals. It looks like the
//! standard library B-tree doesn't provide the necessary interface for an efficient implementation
//! of coalescing, so we would need to roll our own.
//!
use bforest;
use entity::SparseMapValue;
use ir::{Ebb, ExpandedProgramPoint, Inst, Layout, ProgramOrder, ProgramPoint, Value};
use regalloc::affinity::Affinity;
use std::cmp::Ordering;
use std::marker::PhantomData;
/// Global live range of a single SSA value.
///
/// As [explained in the module documentation](index.html#local-live-ranges), the live range of an
/// SSA value is the disjoint union of a set of intervals, each local to a single EBB, and with at
/// most one interval per EBB. We further distinguish between:
///
/// 1. The *def interval* is the local interval in the EBB where the value is defined, and
/// 2. The *live-in intervals* are the local intervals in the remaining EBBs.
///
/// A live-in interval always begins at the EBB header, while the def interval can begin at the
/// defining instruction, or at the EBB header for an EBB argument value.
///
/// All values have a def interval, but a large proportion of values don't have any live-in
/// intervals. These are called *local live ranges*.
///
/// # Program order requirements
///
/// The internal representation of a `LiveRange` depends on a consistent `ProgramOrder` both for
/// ordering instructions inside an EBB *and* for ordering EBBs. The methods that depend on the
/// ordering take an explicit `ProgramOrder` object, and it is the caller's responsibility to
/// ensure that the provided ordering is consistent between calls.
///
/// In particular, changing the order of EBBs or inserting new EBBs will invalidate live ranges.
///
/// Inserting new instructions in the layout is safe, but removing instructions is not. Besides the
/// instructions using or defining their value, `LiveRange` structs can contain references to
/// branch and jump instructions.
pub type LiveRange = GenLiveRange<Layout>;
/// Generic live range implementation.
///
/// The intended generic parameter is `PO=Layout`, but tests are simpler with a mock order.
/// Use `LiveRange` instead of using this generic directly.
pub struct GenLiveRange<PO: ProgramOrder> {
    /// The value described by this live range.
    /// This member can't be modified in case the live range is stored in a `SparseMap`.
    value: Value,
    /// The preferred register allocation for this value.
    pub affinity: Affinity,
    /// The instruction or EBB header where this value is defined.
    def_begin: ProgramPoint,
    /// The end point of the def interval. This must always belong to the same EBB as `def_begin`.
    ///
    /// We always have `def_begin <= def_end` with equality implying a dead def live range with no
    /// uses.
    def_end: ProgramPoint,
    /// Additional live-in intervals sorted in program order.
    ///
    /// This map is empty for most values which are only used in one EBB.
    ///
    /// A map entry `ebb -> inst` means that the live range is live-in to `ebb`, continuing up to
    /// `inst` which may belong to a later EBB in the program order.
    ///
    /// The entries are non-overlapping, and none of them overlap the EBB where the value is
    /// defined.
    liveins: bforest::Map<Ebb, Inst>,
    /// Zero-sized marker tying this type to a specific `ProgramOrder` without storing one.
    po: PhantomData<*const PO>,
}
/// Context information needed to query a `LiveRange`.
pub struct LiveRangeContext<'a, PO: 'a + ProgramOrder> {
    /// Ordering of EBBs.
    pub order: &'a PO,
    /// Memory pool holding the live-in interval B-trees.
    pub forest: &'a bforest::MapForest<Ebb, Inst>,
}
impl<'a, PO: ProgramOrder> LiveRangeContext<'a, PO> {
    /// Bundle an EBB ordering and a live-in interval pool into a query context.
    pub fn new(
        order: &'a PO,
        forest: &'a bforest::MapForest<Ebb, Inst>,
    ) -> Self {
        Self { order, forest }
    }
}
// Manual impls because `PO` itself need not be `Clone`/`Copy`; the context
// only holds shared references, which always are.
impl<'a, PO: ProgramOrder> Clone for LiveRangeContext<'a, PO> {
    fn clone(&self) -> Self {
        // Both members are references, so a bitwise copy is sufficient.
        *self
    }
}
impl<'a, PO: ProgramOrder> Copy for LiveRangeContext<'a, PO> {}
/// Forest of B-trees used for storing live ranges.
pub type LiveRangeForest = bforest::MapForest<Ebb, Inst>;
// Adapter that lets a `ProgramOrder` serve as the B-tree key comparator, so
// live-in intervals are kept sorted in program order.
struct Cmp<'a, PO: ProgramOrder + 'a>(&'a PO);
impl<'a, PO: ProgramOrder> bforest::Comparator<Ebb> for Cmp<'a, PO> {
    fn cmp(&self, a: Ebb, b: Ebb) -> Ordering {
        self.0.cmp(a, b)
    }
}
impl<PO: ProgramOrder> GenLiveRange<PO> {
/// Create a new live range for `value` defined at `def`.
///
/// The live range will be created as dead, but it can be extended with `extend_in_ebb()`.
pub fn new(value: Value, def: ProgramPoint, affinity: Affinity) -> Self {
Self {
value,
affinity,
def_begin: def,
def_end: def,
liveins: bforest::Map::new(),
po: PhantomData,
}
}
    /// Extend the local interval for `ebb` so it reaches `to` which must belong to `ebb`.
    /// Create a live-in interval if necessary.
    ///
    /// If the live range already has a local interval in `ebb`, extend its end point so it
    /// includes `to`, and return false.
    ///
    /// If the live range did not previously have a local interval in `ebb`, add one so the value
    /// is live-in to `ebb`, extending to `to`. Return true.
    ///
    /// The return value can be used to detect if we just learned that the value is live-in to
    /// `ebb`. This can trigger recursive extensions in `ebb`'s CFG predecessor blocks.
    pub fn extend_in_ebb(
        &mut self,
        ebb: Ebb,
        to: Inst,
        order: &PO,
        forest: &mut bforest::MapForest<Ebb, Inst>,
    ) -> bool {
        // First check if we're extending the def interval.
        //
        // We're assuming here that `to` never precedes `def_begin` in the same EBB, but we can't
        // check it without a method for getting `to`'s EBB.
        if order.cmp(ebb, self.def_end) != Ordering::Greater
            && order.cmp(to, self.def_begin) != Ordering::Less
        {
            let to_pp = to.into();
            debug_assert_ne!(
                to_pp, self.def_begin,
                "Can't use value in the defining instruction."
            );
            // Only push `def_end` forward; never shrink the interval.
            if order.cmp(to, self.def_end) == Ordering::Greater {
                self.def_end = to_pp;
            }
            // The def interval was extended (or already covered `to`), so
            // this is never a newly discovered live-in.
            return false;
        }
        // Now check if we're extending any of the existing live-in intervals.
        let cmp = Cmp(order);
        let mut c = self.liveins.cursor(forest, &cmp);
        let first_time_livein;
        if let Some(end) = c.goto(ebb) {
            // There's an interval beginning at `ebb`. See if it extends.
            first_time_livein = false;
            if order.cmp(end, to) == Ordering::Less {
                *c.value_mut().unwrap() = to;
            } else {
                // Already covered; nothing to coalesce either.
                return first_time_livein;
            }
        } else if let Some((_, end)) = c.prev() {
            // There's no interval beginning at `ebb`, but we could still be live-in at `ebb` with
            // a coalesced interval that begins before and ends after.
            if order.cmp(end, ebb) == Ordering::Greater {
                // Yep, the previous interval overlaps `ebb`.
                first_time_livein = false;
                if order.cmp(end, to) == Ordering::Less {
                    *c.value_mut().unwrap() = to;
                } else {
                    return first_time_livein;
                }
            } else {
                first_time_livein = true;
                // The current interval does not overlap `ebb`, but it may still be possible to
                // coalesce with it: if the previous interval ends exactly at the
                // gap before `ebb`, just stretch it instead of adding an entry.
                if order.is_ebb_gap(end, ebb) {
                    *c.value_mut().unwrap() = to;
                } else {
                    c.insert(ebb, to);
                }
            }
        } else {
            // There is no existing interval before `ebb`.
            first_time_livein = true;
            c.insert(ebb, to);
        }
        // The cursor `c` is now pointing at an interval that ends at `to`.
        debug_assert_eq!(c.value(), Some(to));
        // See if it can be coalesced with the following interval.
        if let Some((next_ebb, next_end)) = c.next() {
            if order.is_ebb_gap(to, next_ebb) {
                // Remove this interval and extend the previous end point to `next_end`.
                c.remove();
                c.prev();
                *c.value_mut().unwrap() = next_end;
            }
        }
        first_time_livein
    }
    /// Is this the live range of a dead value?
    ///
    /// A dead value has no uses, and its live range ends at the same program point where it is
    /// defined.
    pub fn is_dead(&self) -> bool {
        self.def_begin == self.def_end
    }
    /// Is this a local live range?
    ///
    /// A local live range is only used in the same EBB where it was defined. It is allowed to span
    /// multiple basic blocks within that EBB.
    pub fn is_local(&self) -> bool {
        // No live-in intervals means the value never escapes its defining EBB.
        self.liveins.is_empty()
    }
    /// Get the program point where this live range is defined.
    ///
    /// This will be an EBB header when the value is an EBB argument, otherwise it is the defining
    /// instruction.
    pub fn def(&self) -> ProgramPoint {
        self.def_begin
    }
    /// Move the definition of this value to a new program point.
    ///
    /// It is only valid to move the definition within the same EBB, and it can't be moved beyond
    /// `def_local_end()`.
    pub fn move_def_locally(&mut self, def: ProgramPoint) {
        // NOTE(review): the documented precondition (same EBB, not past
        // `def_local_end()`) is not asserted here; callers must uphold it.
        self.def_begin = def;
    }
    /// Get the local end-point of this live range in the EBB where it is defined.
    ///
    /// This can be the EBB header itself in the case of a dead EBB argument.
    /// Otherwise, it will be the last local use or branch/jump that can reach a use.
    pub fn def_local_end(&self) -> ProgramPoint {
        self.def_end
    }
    /// Get the local end-point of this live range in an EBB where it is live-in.
    ///
    /// If this live range is not live-in to `ebb`, return `None`. Otherwise, return the end-point
    /// of this live range's local interval in `ebb`.
    ///
    /// If the live range is live through all of `ebb`, the terminator of `ebb` is a correct
    /// answer, but it is also possible that an even later program point is returned. So don't
    /// depend on the returned `Inst` to belong to `ebb`.
    pub fn livein_local_end(&self, ebb: Ebb, ctx: LiveRangeContext<PO>) -> Option<Inst> {
        let cmp = Cmp(ctx.order);
        // Intervals are stored compressed: look up the nearest entry at or
        // before `ebb`, then check whether its end point actually extends past
        // the start of `ebb`.
        self.liveins
            .get_or_less(ebb, ctx.forest, &cmp)
            .and_then(|(_, inst)| {
                // We have an entry that ends at `inst`.
                if ctx.order.cmp(inst, ebb) == Ordering::Greater {
                    Some(inst)
                } else {
                    None
                }
            })
    }
    /// Is this value live-in to `ebb`?
    ///
    /// An EBB argument is not considered to be live in.
    pub fn is_livein(&self, ebb: Ebb, ctx: LiveRangeContext<PO>) -> bool {
        // Delegates to the interval lookup; only a covering live-in interval
        // produces `Some`.
        self.livein_local_end(ebb, ctx).is_some()
    }
    /// Get all the live-in intervals.
    ///
    /// Note that the intervals are stored in a compressed form so each entry may span multiple
    /// EBBs where the value is live in.
    pub fn liveins<'a>(&'a self, ctx: LiveRangeContext<'a, PO>) -> bforest::MapIter<'a, Ebb, Inst> {
        // Iterates `(first_ebb, last_inst)` pairs straight out of the B-forest.
        self.liveins.iter(ctx.forest)
    }
    /// Check if this live range overlaps a definition in `ebb`.
    pub fn overlaps_def(
        &self,
        def: ExpandedProgramPoint,
        ebb: Ebb,
        ctx: LiveRangeContext<PO>,
    ) -> bool {
        // Two defs at the same program point always overlap, even if one is dead.
        if def == self.def_begin.into() {
            return true;
        }
        // Check for an overlap with the local range.
        // The local interval is half-open: `def_begin <= def < def_end`.
        if ctx.order.cmp(def, self.def_begin) != Ordering::Less
            && ctx.order.cmp(def, self.def_end) == Ordering::Less
        {
            return true;
        }
        // Check for an overlap with a live-in range (also exclusive at the end).
        match self.livein_local_end(ebb, ctx) {
            Some(inst) => ctx.order.cmp(def, inst) == Ordering::Less,
            None => false,
        }
    }
    /// Check if this live range reaches a use at `user` in `ebb`.
    pub fn reaches_use(&self, user: Inst, ebb: Ebb, ctx: LiveRangeContext<PO>) -> bool {
        // Check for an overlap with the local range.
        // Unlike `overlaps_def`, the end point is inclusive here:
        // `def_begin < user <= def_end`.
        if ctx.order.cmp(user, self.def_begin) == Ordering::Greater
            && ctx.order.cmp(user, self.def_end) != Ordering::Greater
        {
            return true;
        }
        // Check for an overlap with a live-in range (end point inclusive).
        match self.livein_local_end(ebb, ctx) {
            Some(inst) => ctx.order.cmp(user, inst) != Ordering::Greater,
            None => false,
        }
    }
    /// Check if this live range is killed at `user` in `ebb`.
    pub fn killed_at(&self, user: Inst, ebb: Ebb, ctx: LiveRangeContext<PO>) -> bool {
        // A kill is a use at the exact end point of either the local def
        // interval or the live-in interval covering `ebb`.
        self.def_local_end() == user.into() || self.livein_local_end(ebb, ctx) == Some(user)
    }
}
/// Allow a `LiveRange` to be stored in a `SparseMap` indexed by values.
impl<PO: ProgramOrder> SparseMapValue<Value> for GenLiveRange<PO> {
    /// The sparse map key is the SSA value this live range describes.
    fn key(&self) -> Value {
        self.value
    }
}
#[cfg(test)]
mod tests {
    use super::{GenLiveRange, LiveRangeContext};
    use bforest;
    use entity::EntityRef;
    use ir::{Ebb, Inst, Value};
    use ir::{ExpandedProgramPoint, ProgramOrder};
    use std::cmp::Ordering;
    use std::vec::Vec;

    // Dummy program order which simply compares indexes.
    // It is assumed that EBBs have indexes that are multiples of 10, and instructions have indexes
    // in between. `is_ebb_gap` assumes that terminator instructions have indexes of the form
    // ebb * 10 + 1. This is used in the coalesce test.
    struct ProgOrder {}

    impl ProgramOrder for ProgOrder {
        fn cmp<A, B>(&self, a: A, b: B) -> Ordering
        where
            A: Into<ExpandedProgramPoint>,
            B: Into<ExpandedProgramPoint>,
        {
            // Map either kind of program point to its raw index so both
            // compare uniformly.
            fn idx(pp: ExpandedProgramPoint) -> usize {
                match pp {
                    ExpandedProgramPoint::Inst(i) => i.index(),
                    ExpandedProgramPoint::Ebb(e) => e.index(),
                }
            }

            let ia = idx(a.into());
            let ib = idx(b.into());
            ia.cmp(&ib)
        }

        fn is_ebb_gap(&self, inst: Inst, ebb: Ebb) -> bool {
            inst.index() % 10 == 1 && ebb.index() / 10 == inst.index() / 10 + 1
        }
    }

    impl ProgOrder {
        // Get the EBB corresponding to `inst`.
        fn inst_ebb(&self, inst: Inst) -> Ebb {
            let i = inst.index();
            Ebb::new(i - i % 10)
        }

        // Get the EBB of a program point.
        fn pp_ebb<PP: Into<ExpandedProgramPoint>>(&self, pp: PP) -> Ebb {
            match pp.into() {
                ExpandedProgramPoint::Inst(i) => self.inst_ebb(i),
                ExpandedProgramPoint::Ebb(e) => e,
            }
        }

        // Validate the live range invariants.
        fn validate(&self, lr: &GenLiveRange<ProgOrder>, forest: &bforest::MapForest<Ebb, Inst>) {
            // The def interval must cover a single EBB.
            let def_ebb = self.pp_ebb(lr.def_begin);
            assert_eq!(def_ebb, self.pp_ebb(lr.def_end));

            // Check that the def interval isn't backwards.
            match self.cmp(lr.def_begin, lr.def_end) {
                Ordering::Equal => assert!(lr.liveins.is_empty()),
                Ordering::Greater => {
                    panic!("Backwards def interval: {}-{}", lr.def_begin, lr.def_end)
                }
                Ordering::Less => {}
            }

            // Check the live-in intervals: each must be forward, they must be
            // strictly ordered, and none may overlap the def EBB.
            let mut prev_end = None;
            for (begin, end) in lr.liveins.iter(forest) {
                assert_eq!(self.cmp(begin, end), Ordering::Less);
                if let Some(e) = prev_end {
                    assert_eq!(self.cmp(e, begin), Ordering::Less);
                }
                assert!(
                    self.cmp(lr.def_end, begin) == Ordering::Less
                        || self.cmp(lr.def_begin, end) == Ordering::Greater,
                    "Interval can't overlap the def EBB"
                );

                // Save for next round.
                prev_end = Some(end);
            }
        }
    }

    // Singleton `ProgramOrder` for tests below.
    // `'static` is implied for references in `const` items, so spelling it
    // out was redundant (clippy::redundant_static_lifetimes).
    const PO: &ProgOrder = &ProgOrder {};

    #[test]
    fn dead_def_range() {
        let v0 = Value::new(0);
        let e0 = Ebb::new(0);
        let i1 = Inst::new(1);
        let i2 = Inst::new(2);
        let e2 = Ebb::new(2);
        let lr = GenLiveRange::new(v0, i1.into(), Default::default());
        let forest = &bforest::MapForest::new();
        let ctx = LiveRangeContext::new(PO, forest);
        assert!(lr.is_dead());
        assert!(lr.is_local());
        assert_eq!(lr.def(), i1.into());
        assert_eq!(lr.def_local_end(), i1.into());
        assert_eq!(lr.livein_local_end(e2, ctx), None);
        PO.validate(&lr, ctx.forest);

        // A dead live range overlaps its own def program point.
        assert!(lr.overlaps_def(i1.into(), e0, ctx));
        assert!(!lr.overlaps_def(i2.into(), e0, ctx));
        assert!(!lr.overlaps_def(e0.into(), e0, ctx));
    }

    #[test]
    fn dead_arg_range() {
        let v0 = Value::new(0);
        let e2 = Ebb::new(2);
        let lr = GenLiveRange::new(v0, e2.into(), Default::default());
        let forest = &bforest::MapForest::new();
        let ctx = LiveRangeContext::new(PO, forest);
        assert!(lr.is_dead());
        assert!(lr.is_local());
        assert_eq!(lr.def(), e2.into());
        assert_eq!(lr.def_local_end(), e2.into());
        // The def interval of an EBB argument does not count as live-in.
        assert_eq!(lr.livein_local_end(e2, ctx), None);
        PO.validate(&lr, ctx.forest);
    }

    #[test]
    fn local_def() {
        let v0 = Value::new(0);
        let e10 = Ebb::new(10);
        let i11 = Inst::new(11);
        let i12 = Inst::new(12);
        let i13 = Inst::new(13);
        let mut lr = GenLiveRange::new(v0, i11.into(), Default::default());
        let forest = &mut bforest::MapForest::new();

        assert_eq!(lr.extend_in_ebb(e10, i13, PO, forest), false);
        PO.validate(&lr, forest);
        assert!(!lr.is_dead());
        assert!(lr.is_local());
        assert_eq!(lr.def(), i11.into());
        assert_eq!(lr.def_local_end(), i13.into());

        // Extending to an already covered inst should not change anything.
        assert_eq!(lr.extend_in_ebb(e10, i12, PO, forest), false);
        PO.validate(&lr, forest);
        assert_eq!(lr.def(), i11.into());
        assert_eq!(lr.def_local_end(), i13.into());
    }

    #[test]
    fn local_arg() {
        let v0 = Value::new(0);
        let e10 = Ebb::new(10);
        let i11 = Inst::new(11);
        let i12 = Inst::new(12);
        let i13 = Inst::new(13);
        let mut lr = GenLiveRange::new(v0, e10.into(), Default::default());
        let forest = &mut bforest::MapForest::new();

        // Extending a dead EBB argument in its own block should not indicate that a live-in
        // interval was created.
        assert_eq!(lr.extend_in_ebb(e10, i12, PO, forest), false);
        PO.validate(&lr, forest);
        assert!(!lr.is_dead());
        assert!(lr.is_local());
        assert_eq!(lr.def(), e10.into());
        assert_eq!(lr.def_local_end(), i12.into());

        // Extending to an already covered inst should not change anything.
        assert_eq!(lr.extend_in_ebb(e10, i11, PO, forest), false);
        PO.validate(&lr, forest);
        assert_eq!(lr.def(), e10.into());
        assert_eq!(lr.def_local_end(), i12.into());

        // Extending further.
        assert_eq!(lr.extend_in_ebb(e10, i13, PO, forest), false);
        PO.validate(&lr, forest);
        assert_eq!(lr.def(), e10.into());
        assert_eq!(lr.def_local_end(), i13.into());
    }

    #[test]
    fn global_def() {
        let v0 = Value::new(0);
        let e10 = Ebb::new(10);
        let i11 = Inst::new(11);
        let i12 = Inst::new(12);
        let e20 = Ebb::new(20);
        let i21 = Inst::new(21);
        let i22 = Inst::new(22);
        let i23 = Inst::new(23);
        let mut lr = GenLiveRange::new(v0, i11.into(), Default::default());
        let forest = &mut bforest::MapForest::new();

        assert_eq!(lr.extend_in_ebb(e10, i12, PO, forest), false);

        // Adding a live-in interval.
        assert_eq!(lr.extend_in_ebb(e20, i22, PO, forest), true);
        PO.validate(&lr, forest);
        assert_eq!(
            lr.livein_local_end(e20, LiveRangeContext::new(PO, forest)),
            Some(i22)
        );

        // Non-extending the live-in.
        assert_eq!(lr.extend_in_ebb(e20, i21, PO, forest), false);
        assert_eq!(
            lr.livein_local_end(e20, LiveRangeContext::new(PO, forest)),
            Some(i22)
        );

        // Extending the existing live-in.
        assert_eq!(lr.extend_in_ebb(e20, i23, PO, forest), false);
        PO.validate(&lr, forest);
        assert_eq!(
            lr.livein_local_end(e20, LiveRangeContext::new(PO, forest)),
            Some(i23)
        );
    }

    #[test]
    fn coalesce() {
        let v0 = Value::new(0);
        let i11 = Inst::new(11);
        let e20 = Ebb::new(20);
        let i21 = Inst::new(21);
        let e30 = Ebb::new(30);
        let i31 = Inst::new(31);
        let e40 = Ebb::new(40);
        let i41 = Inst::new(41);
        let mut lr = GenLiveRange::new(v0, i11.into(), Default::default());
        let forest = &mut bforest::MapForest::new();

        assert_eq!(lr.extend_in_ebb(e30, i31, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e30, i31)]
        );

        // Coalesce to previous
        assert_eq!(lr.extend_in_ebb(e40, i41, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e30, i41)]
        );

        // Coalesce to next
        assert_eq!(lr.extend_in_ebb(e20, i21, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e20, i41)]
        );

        let mut lr = GenLiveRange::new(v0, i11.into(), Default::default());

        assert_eq!(lr.extend_in_ebb(e40, i41, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e40, i41)]
        );

        assert_eq!(lr.extend_in_ebb(e20, i21, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e20, i21), (e40, i41)]
        );

        // Coalesce to previous and next
        assert_eq!(lr.extend_in_ebb(e30, i31, PO, forest), true);
        assert_eq!(
            lr.liveins(LiveRangeContext::new(PO, forest))
                .collect::<Vec<_>>(),
            [(e20, i41)]
        );
    }

    // TODO: Add more tests that exercise the binary search algorithm.
}
| 38.783712 | 100 | 0.599745 |
9b518ed425efff198c4ae2f29e9c39dab257cdf6 | 388 | // modules1.rs
// Make me compile! Execute `rustlings hint modules1` for hints :)
mod sausage_factory {
    // Deliberately private: nothing outside this module may see the recipe.
    fn get_secret_recipe() -> String {
        String::from("Ginger")
    }

    /// Public entry point: consults the secret recipe and announces the result.
    pub fn make_sausage() {
        let _recipe = get_secret_recipe();
        println!("sausage!");
    }
}
fn main() {
    // Only the module's public function is reachable from here.
    sausage_factory::make_sausage();
}
| 20.421053 | 66 | 0.615979 |
fef91537b6cc3c4abcbe07c9016f3a63e0131ffb | 11,485 | // Copyright 2018 Flavien Raynaud
// Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
//
// Portions of this file are derived from the ToAvro implementation for
// serde_json::Value that is shipped with the avro_rs project. The original
// source code was retrieved on April 25, 2019 from:
//
// https://github.com/flavray/avro-rs/blob/c4971ac08f52750db6bc95559c2b5faa6c0c9a06/src/types.rs
//
// The original source code is subject to the terms of the MIT license, a copy
// of which can be found in the LICENSE file at the root of this repository.
use std::collections::HashMap;
use std::convert::{TryFrom, TryInto};
use std::num::TryFromIntError;
use serde_json::Value as JsonValue;
// Re-export components from the various other Avro libraries, so that other
// testdrive modules can import just this one.
pub use interchange::avro::parse_schema;
use mz_avro::schema::SchemaKind;
pub use mz_avro::schema::{Schema, SchemaNode, SchemaPiece, SchemaPieceOrNamed};
pub use mz_avro::types::{DecimalValue, ToAvro, Value};
pub use mz_avro::{from_avro_datum, to_avro_datum, Codec, Reader, Writer};
// This function is derived from code in the avro_rs project. Update the license
// header on this file accordingly if you move it to a new home.
/// Converts a JSON value into the Avro value described by `schema`.
///
/// Returns an error string when `json` cannot be represented in the requested
/// schema (wrong JSON type, out-of-range number, unknown enum variant, etc.).
pub fn from_json(json: &JsonValue, schema: SchemaNode) -> Result<Value, String> {
    match (json, schema.inner) {
        (JsonValue::Null, SchemaPiece::Null) => Ok(Value::Null),
        (JsonValue::Bool(b), SchemaPiece::Boolean) => Ok(Value::Boolean(*b)),
        // JSON numbers are read as i64 and narrowed; values that do not fit in
        // an i32 are reported as errors rather than silently truncated.
        (JsonValue::Number(ref n), SchemaPiece::Int) => Ok(Value::Int(
            n.as_i64()
                .unwrap()
                .try_into()
                .map_err(|e: TryFromIntError| e.to_string())?,
        )),
        (JsonValue::Number(ref n), SchemaPiece::Long) => Ok(Value::Long(n.as_i64().unwrap())),
        (JsonValue::Number(ref n), SchemaPiece::Float) => {
            Ok(Value::Float(n.as_f64().unwrap() as f32))
        }
        (JsonValue::Number(ref n), SchemaPiece::Double) => Ok(Value::Double(n.as_f64().unwrap())),
        // Avro dates are days since the Unix epoch.
        (JsonValue::Number(ref n), SchemaPiece::Date) => Ok(Value::Date(
            chrono::NaiveDate::from_ymd(1970, 1, 1) + chrono::Duration::days(n.as_i64().unwrap()),
        )),
        (JsonValue::Number(ref n), SchemaPiece::TimestampMilli) => {
            // `NaiveDateTime::from_timestamp` takes (seconds, *nanoseconds*).
            // The previous code passed the millisecond remainder directly as
            // nanoseconds (and truncated `ts` to u32 before the modulo),
            // producing wrong sub-second values. Euclidean division keeps the
            // sub-second part non-negative for pre-epoch timestamps, too.
            let ts = n.as_i64().unwrap();
            Ok(Value::Timestamp(chrono::NaiveDateTime::from_timestamp(
                ts.div_euclid(1_000),
                ts.rem_euclid(1_000) as u32 * 1_000_000,
            )))
        }
        (JsonValue::Number(ref n), SchemaPiece::TimestampMicro) => {
            // Same fix as above: scale the microsecond remainder to
            // nanoseconds.
            let ts = n.as_i64().unwrap();
            Ok(Value::Timestamp(chrono::NaiveDateTime::from_timestamp(
                ts.div_euclid(1_000_000),
                ts.rem_euclid(1_000_000) as u32 * 1_000,
            )))
        }
        (JsonValue::Array(items), SchemaPiece::Array(inner)) => Ok(Value::Array(
            items
                .iter()
                .map(|x| from_json(x, schema.step(&**inner)))
                .collect::<Result<_, _>>()?,
        )),
        (JsonValue::String(s), SchemaPiece::String) => Ok(Value::String(s.clone())),
        // Decimals are encoded as a JSON array of unscaled big-endian bytes.
        (
            JsonValue::Array(items),
            SchemaPiece::Decimal {
                precision, scale, ..
            },
        ) => {
            let bytes = match items
                .iter()
                .map(|x| x.as_i64().and_then(|x| u8::try_from(x).ok()))
                .collect::<Option<Vec<u8>>>()
            {
                Some(bytes) => bytes,
                None => return Err("decimal was not represented by byte array".into()),
            };
            Ok(Value::Decimal(DecimalValue {
                unscaled: bytes,
                precision: *precision,
                scale: *scale,
            }))
        }
        (JsonValue::Array(items), SchemaPiece::Fixed { size }) => {
            let bytes = match items
                .iter()
                .map(|x| x.as_i64().and_then(|x| u8::try_from(x).ok()))
                .collect::<Option<Vec<u8>>>()
            {
                Some(bytes) => bytes,
                None => return Err("fixed was not represented by byte array".into()),
            };
            // Fixed values must match the schema-declared length exactly.
            if *size != bytes.len() {
                Err(format!(
                    "expected fixed size {}, got {}",
                    *size,
                    bytes.len()
                ))
            } else {
                Ok(Value::Fixed(*size, bytes))
            }
        }
        // JSON-typed columns arrive as a string holding an embedded document.
        (JsonValue::String(s), SchemaPiece::Json) => {
            let j = serde_json::from_str(s).map_err(|e| e.to_string())?;
            Ok(Value::Json(j))
        }
        (JsonValue::String(s), SchemaPiece::Uuid) => {
            let u = uuid::Uuid::parse_str(s).map_err(|e| e.to_string())?;
            Ok(Value::Uuid(u))
        }
        (JsonValue::String(s), SchemaPiece::Enum { symbols, .. }) => {
            if symbols.contains(s) {
                Ok(Value::String(s.clone()))
            } else {
                Err(format!("Unrecognized enum variant: {}", s))
            }
        }
        (JsonValue::Object(items), SchemaPiece::Record { .. }) => {
            let mut builder = mz_avro::types::Record::new(schema)
                .expect("`Record::new` should never fail if schema piece is a record!");
            for (key, val) in items {
                let field = builder
                    .field_by_name(key)
                    .ok_or_else(|| format!("No such key in record: {}", key))?;
                let val = from_json(val, schema.step(&field.schema))?;
                builder.put(key, val);
            }
            Ok(builder.avro())
        }
        (JsonValue::Object(items), SchemaPiece::Map(m)) => {
            let mut map = HashMap::new();
            for (k, v) in items {
                let (inner, name) = m.get_piece_and_name(schema.root);
                map.insert(
                    k.to_owned(),
                    from_json(
                        v,
                        SchemaNode {
                            root: schema.root,
                            inner,
                            name,
                        },
                    )?,
                );
            }
            Ok(Value::Map(map))
        }
        (val, SchemaPiece::Union(us)) => {
            let variants = us.variants();
            let null_variant = variants
                .iter()
                .position(|v| v == &SchemaPieceOrNamed::Piece(SchemaPiece::Null));
            // A bare JSON `null` selects the union's null variant, if any.
            if let JsonValue::Null = val {
                return if let Some(nv) = null_variant {
                    Ok(Value::Union {
                        index: nv,
                        inner: Box::new(Value::Null),
                        n_variants: variants.len(),
                        null_variant,
                    })
                } else {
                    Err("No `null` value in union schema.".to_string())
                };
            }
            // Non-null union values use the Avro JSON encoding: a one-element
            // object mapping the variant's type name to its value.
            let items = match val {
                JsonValue::Object(items) => items,
                _ => return Err(format!("Union schema element must be `null` or a map from type name to value; found {:?}", val))
            };
            let (name, val) = if items.len() == 1 {
                (items.keys().next().unwrap(), items.values().next().unwrap())
            } else {
                return Err(format!(
                    "Expected one-element object to match union schema: {:?} vs {:?}",
                    json, schema
                ));
            };
            for (i, variant) in variants.iter().enumerate() {
                // Named variants may be referenced by base name or by their
                // fully-qualified dotted name.
                let name_matches = match variant {
                    SchemaPieceOrNamed::Piece(piece) => SchemaKind::from(piece).name() == name,
                    SchemaPieceOrNamed::Named(idx) => {
                        let schema_name = &schema.root.lookup(*idx).name;
                        if name.chars().any(|ch| ch == '.') {
                            name == &schema_name.to_string()
                        } else {
                            name == schema_name.base_name()
                        }
                    }
                };
                if name_matches {
                    match from_json(val, schema.step(variant)) {
                        Ok(avro) => {
                            return Ok(Value::Union {
                                index: i,
                                inner: Box::new(avro),
                                n_variants: variants.len(),
                                null_variant,
                            })
                        }
                        Err(msg) => return Err(msg),
                    }
                }
            }
            Err(format!("Type not found in union: {}", name))
        }
        _ => Err(format!(
            "unable to match JSON value to schema: {:?} vs {:?}",
            json, schema
        )),
    }
}
/// Compares the expected key/value JSON lines against the actual Avro records
/// captured from a sink, reporting the first mismatching record, then any
/// missing or extra records.
pub fn validate_sink<I>(
    key_schema: Option<&Schema>,
    value_schema: &Schema,
    expected: I,
    actual: &[(Option<Value>, Value)],
) -> Result<(), String>
where
    I: IntoIterator,
    I::Item: AsRef<str>,
{
    // Each expected line holds one JSON document for the key (only when a key
    // schema is present) followed by one for the value; both are converted to
    // Avro values through `from_json` for comparison.
    let expected = expected
        .into_iter()
        .map(|v| {
            let mut deserializer = serde_json::Deserializer::from_str(v.as_ref()).into_iter();
            let key = if let Some(key_schema) = key_schema {
                let key: serde_json::Value = match deserializer.next() {
                    None => Err("key missing in input line".to_string()),
                    Some(r) => r.map_err(|e| format!("parsing json: {}", e)),
                }?;
                Some(from_json(&key, key_schema.top_node())?)
            } else {
                None
            };
            let value: serde_json::Value = match deserializer.next() {
                None => Err("value missing in input line".to_string()),
                Some(r) => r.map_err(|e| format!("parsing json: {}", e)),
            }?;
            let value = from_json(&value, value_schema.top_node())?;
            Ok((key, value))
        })
        .collect::<Result<Vec<_>, String>>()?;
    // Walk both sequences in lock-step so mismatches are reported with their
    // record index.
    let mut expected = expected.iter();
    let mut actual = actual.iter();
    let mut index = 0..;
    loop {
        let i = index.next().expect("known to exist");
        match (expected.next(), actual.next()) {
            (Some(e), Some(a)) => {
                if e != a {
                    return Err(format!(
                        "record {} did not match\nexpected:\n{:#?}\n\nactual:\n{:#?}",
                        i, e, a
                    ));
                }
            }
            (Some(e), None) => return Err(format!("missing record {}: {:#?}", i, e)),
            (None, Some(a)) => return Err(format!("extra record {}: {:#?}", i, a)),
            (None, None) => break,
        }
    }
    // The loop above only breaks once both iterators are drained, so these
    // collections are empty in practice; kept as defensive reporting.
    let expected: Vec<_> = expected.map(|e| format!("{:#?}", e)).collect();
    let actual: Vec<_> = actual.map(|a| format!("{:#?}", a)).collect();
    if !expected.is_empty() {
        Err(format!("missing records:\n{}", expected.join("\n")))
    } else if !actual.is_empty() {
        Err(format!("extra records:\n{}", actual.join("\n")))
    } else {
        Ok(())
    }
}
91897017fe03c73c2e2fddd06e646c11f5e31642 | 99 | pub mod game;
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
// Top-level application state. Only the in-game state exists today; the enum
// leaves room for additional states (menus, pause, etc.).
pub enum GameState {
    Game,
}
| 14.142857 | 50 | 0.656566 |
e9418e15229ad67d72a9735bdec599c050a25a8a | 1,812 | use mouscache;
// Simple two-field payload used to exercise concurrent cache reads below.
#[derive(Cacheable, Clone, Debug)]
struct ConcurrentData {
    field1: u16,
    field2: u32,
}
#[test]
fn memory_cache_concurrency_test() {
    use std::thread;

    // Seed the shared in-memory cache with a single entry.
    let data = ConcurrentData {
        field1: 42,
        field2: 123456789,
    };
    let cache = mouscache::memory();
    let _ = cache.insert("test", data.clone());

    // Spawn nine readers that all fetch the same key concurrently and verify
    // each observes exactly the value that was inserted.
    let handles: Vec<_> = (1..10)
        .map(|_| {
            let expected = data.clone();
            let cache = cache.clone();
            thread::spawn(move || {
                let got: ConcurrentData = cache.get("test").unwrap().unwrap();
                assert_eq!(expected.field1, got.field1);
                assert_eq!(expected.field2, got.field2);
            })
        })
        .collect();

    for handle in handles {
        let _ = handle.join();
        println!("thread joined");
    }
}
#[test]
fn redis_cache_concurrency_test() {
    use std::thread;
    let data = ConcurrentData {
        field1: 42,
        field2: 123456789,
    };
    // Bail out quietly when no local Redis instance is reachable, instead of
    // failing the whole suite on machines without Redis.
    let cache = match mouscache::redis("localhost", Some("123456"), None) {
        Ok(c) => c,
        Err(e) => {
            println!("{:?}", e);
            return;
        }
    };
    let _ = cache.insert("test", data.clone());
    // Nine concurrent readers must all see the inserted value.
    let mut handle_vec = Vec::new();
    for _i in 1..10 {
        let assert_data = data.clone();
        let cache_clone = cache.clone();
        let h = thread::spawn(move || {
            let rdata: ConcurrentData = cache_clone.get("test").unwrap().unwrap();
            assert_eq!(assert_data.field1, rdata.field1);
            assert_eq!(assert_data.field2, rdata.field2);
        });
        handle_vec.push(h);
    }
    for handle in handle_vec {
        let _ = handle.join();
        println!("thread joined");
    }
}
e48f57bb5518bcf88902dc5b733dad5e2b509e28 | 477 | #[macro_use] extern crate rocket;
#[derive(FromFormValue)] // NOTE(review): likely an expected-failure fixture — presumably the derive rejects structs
struct Foo1;
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — tuple struct, not an enum
struct Foo2(usize);
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — named-field struct, not an enum
struct Foo3 {
    foo: usize,
}
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — data-carrying variant
enum Foo4 {
    A(usize),
}
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — enum with no variants
enum Foo5 { }
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — generic enum
enum Foo6<T> {
    A(T),
}
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — non-string `value` in form attribute
enum Bar1 {
    #[form(value = 123)]
    A,
}
#[derive(FromFormValue)] // NOTE(review): likely expected to fail — malformed form attribute (missing value)
enum Bar2 {
    #[form(value)]
    A,
}
fn main() { } // Intentionally empty: this file only exercises the derive macro.
| 11.925 | 33 | 0.626834 |
01d864e5432cf0581b9da0abaa46bb77dc111e4f | 4,604 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
use super::{ByteDigest, ElementHasher, Hasher};
use core::{convert::TryInto, fmt::Debug, marker::PhantomData};
use math::{FieldElement, StarkField};
use utils::ByteWriter;
#[cfg(test)]
mod tests;
// BLAKE3 256-BIT OUTPUT
// ================================================================================================
/// Implementation of the [Hasher](super::Hasher) trait for BLAKE3 hash function with 256-bit
/// output.
#[derive(Debug, PartialEq, Eq)]
pub struct Blake3_256<B: StarkField>(PhantomData<B>);
impl<B: StarkField> Hasher for Blake3_256<B> {
    // Full 32-byte BLAKE3 output.
    type Digest = ByteDigest<32>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        ByteDigest(*blake3::hash(bytes).as_bytes())
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        // Hash the two 32-byte digests back to back.
        ByteDigest(blake3::hash(ByteDigest::digests_as_bytes(values)).into())
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // 40 bytes = 32-byte seed digest followed by the little-endian u64.
        let mut data = [0; 40];
        data[..32].copy_from_slice(&seed.0);
        data[32..].copy_from_slice(&value.to_le_bytes());
        ByteDigest(*blake3::hash(&data).as_bytes())
    }
}
impl<B: StarkField> ElementHasher for Blake3_256<B> {
    type BaseField = B;

    fn hash_elements<E: FieldElement<BaseField = Self::BaseField>>(elements: &[E]) -> Self::Digest {
        if B::IS_CANONICAL {
            // when element's internal and canonical representations are the same, we can hash
            // element bytes directly
            let bytes = E::elements_as_bytes(elements);
            ByteDigest(*blake3::hash(bytes).as_bytes())
        } else {
            // when elements' internal and canonical representations differ, we need to serialize
            // them before hashing (the `ByteWriter` impl below feeds the
            // serialized bytes into an incremental BLAKE3 state)
            let mut hasher = BlakeHasher::new();
            hasher.write(elements);
            ByteDigest(hasher.finalize())
        }
    }
}
// BLAKE3 192-BIT OUTPUT
// ================================================================================================
/// Implementation of the [Hasher](super::Hasher) trait for BLAKE3 hash function with 192-bit
/// output.
#[derive(Debug, PartialEq, Eq)]
pub struct Blake3_192<B: StarkField>(PhantomData<B>);
impl<B: StarkField> Hasher for Blake3_192<B> {
    // BLAKE3 output truncated to the first 24 bytes (192 bits).
    type Digest = ByteDigest<24>;

    fn hash(bytes: &[u8]) -> Self::Digest {
        let result = blake3::hash(bytes);
        ByteDigest(result.as_bytes()[..24].try_into().unwrap())
    }

    fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
        let result = blake3::hash(ByteDigest::digests_as_bytes(values));
        ByteDigest(result.as_bytes()[..24].try_into().unwrap())
    }

    fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
        // 32 bytes = 24-byte seed digest followed by the little-endian u64.
        let mut data = [0; 32];
        data[..24].copy_from_slice(&seed.0);
        data[24..].copy_from_slice(&value.to_le_bytes());
        let result = blake3::hash(&data);
        ByteDigest(result.as_bytes()[..24].try_into().unwrap())
    }
}
impl<B: StarkField> ElementHasher for Blake3_192<B> {
    type BaseField = B;

    fn hash_elements<E: FieldElement<BaseField = Self::BaseField>>(elements: &[E]) -> Self::Digest {
        if B::IS_CANONICAL {
            // when element's internal and canonical representations are the same, we can hash
            // element bytes directly
            let bytes = E::elements_as_bytes(elements);
            let result = blake3::hash(bytes);
            ByteDigest(result.as_bytes()[..24].try_into().unwrap())
        } else {
            // when elements' internal and canonical representations differ, we need to serialize
            // them before hashing; the full 32-byte result is then truncated
            // to this hasher's 24-byte digest
            let mut hasher = BlakeHasher::new();
            hasher.write(elements);
            let result = hasher.finalize();
            ByteDigest(result[..24].try_into().unwrap())
        }
    }
}
// BLAKE HASHER
// ================================================================================================
/// Wrapper around BLAKE3 hasher to implement [ByteWriter] trait for it.
struct BlakeHasher(blake3::Hasher);
impl BlakeHasher {
    /// Creates a fresh incremental BLAKE3 state.
    pub fn new() -> Self {
        Self(blake3::Hasher::new())
    }

    /// Finishes the hash and returns the full 32-byte output.
    pub fn finalize(&self) -> [u8; 32] {
        *self.0.finalize().as_bytes()
    }
}
// Feeds serialized bytes directly into the incremental BLAKE3 state.
impl ByteWriter for BlakeHasher {
    fn write_u8(&mut self, value: u8) {
        self.0.update(&[value]);
    }

    fn write_u8_slice(&mut self, values: &[u8]) {
        self.0.update(values);
    }
}
| 33.605839 | 100 | 0.582537 |
f44596f297e2cc64a766595dc2b510b149b2268b | 1,315 | use std::os::raw::c_int;
pub type BlstPairing = blst::Pairing;
pub type BlstFp = blst::blst_fp;
pub type BlstFp2 = blst::blst_fp2;
pub type BlstFp12 = blst::blst_fp12;
pub type BlstFp6 = blst::blst_fp6;
pub type BlstFr = blst::blst_fr;
pub type BlstP1 = blst::blst_p1;
pub type BlstP1Affine = blst::blst_p1_affine;
pub type BlstP2 = blst::blst_p2;
pub type BlstP2Affine = blst::blst_p2_affine;
pub type BlstScalar = blst::blst_scalar;
pub type BlstUniq = blst::blst_uniq;
pub enum CurveType {
    // NOTE(review): the explicit discriminants appear to mirror curve
    // identifiers of the underlying mcl library — confirm against mcl headers.
    BN254 = 0,
    BN381 = 1,
    SNARK = 4,
    BLS12_381 = 5,
}
const MCLBN_FP_UNIT_SIZE: usize = 6;
const MCLBN_FR_UNIT_SIZE: usize = 4;
const MCLBN_COMPILED_TIME_VAR: c_int =
MCLBN_FR_UNIT_SIZE as c_int * 10 + MCLBN_FP_UNIT_SIZE as c_int;
#[macro_use]
pub mod init_def;
pub mod mcl_methods;
pub mod utilities;
pub mod data_types {
pub mod fp;
pub mod fp2;
pub mod fr;
pub mod g1;
pub mod g2;
pub mod gt;
}
pub mod trait_implementations {
pub mod das;
pub mod fft_fr;
pub mod fft_g1;
pub mod fft_settings;
pub mod fr;
pub mod poly;
pub mod zero_poly;
pub mod g1;
pub mod g2;
pub mod kzg_settings;
pub mod fk20;
}
pub mod das;
pub mod data_recovery;
pub mod fk20_fft;
pub mod fk20_matrix;
pub mod kzg10;
pub mod zero_poly;
pub mod kzg_settings; | 22.288136 | 67 | 0.69962 |
7a5f5a7d526bf0d5a8b6f611c7e8d188222d41b3 | 2,282 | use operational_transform::OperationSeq;
#[path = "../src/utilities.rs"]
mod utilities;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use utilities::Rng;
pub fn compose(c: &mut Criterion) {
    let mut rng = Rng::from_seed(Default::default());
    // Pre-generate 1000 pairs of sequentially-composable operations outside
    // the timed loop so the benchmark measures `compose` alone.
    let input = (0..1000)
        .map(|_| {
            let s = rng.gen_string(20);
            let a = rng.gen_operation_seq(&s);
            let after_a = a.apply(&s).unwrap();
            let b = rng.gen_operation_seq(&after_a);
            (a, b)
        })
        .collect::<Vec<_>>();
    c.bench_function("compose", |b| {
        b.iter(|| {
            for (a, b) in input.iter() {
                // `black_box` keeps the optimizer from eliding the call.
                let _ = a.compose(black_box(b));
            }
        })
    });
}
pub fn transform(c: &mut Criterion) {
    let mut rng = Rng::from_seed(Default::default());
    // Both operations are generated against the same base string, as
    // `transform` requires.
    let input = (0..1000)
        .map(|_| {
            let s = rng.gen_string(20);
            let a = rng.gen_operation_seq(&s);
            let b = rng.gen_operation_seq(&s);
            (a, b)
        })
        .collect::<Vec<_>>();
    c.bench_function("transform", |b| {
        b.iter(|| {
            for (a, b) in input.iter() {
                let _ = a.transform(black_box(b));
            }
        })
    });
}
pub fn invert(c: &mut Criterion) {
    let mut rng = Rng::from_seed(Default::default());
    // Each operation is inverted against the base string it was generated on.
    let input = (0..1000)
        .map(|_| {
            let s = rng.gen_string(50);
            let o = rng.gen_operation_seq(&s);
            (o, s)
        })
        .collect::<Vec<_>>();
    c.bench_function("invert", |b| {
        b.iter(|| {
            for (o, s) in input.iter() {
                let _ = o.invert(black_box(&s));
            }
        })
    });
}
pub fn apply(c: &mut Criterion) {
    let mut rng = Rng::from_seed(Default::default());
    // Each operation is applied to the base string it was generated on.
    let input = (0..1000)
        .map(|_| {
            let s = rng.gen_string(50);
            let o = rng.gen_operation_seq(&s);
            (o, s)
        })
        .collect::<Vec<_>>();
    c.bench_function("apply", |b| {
        b.iter(|| {
            for (o, s) in input.iter() {
                let _ = o.apply(black_box(&s));
            }
        })
    });
}
criterion_group!(benches, compose, transform, invert, apply);
criterion_main!(benches);
| 26.534884 | 71 | 0.475898 |
2872df7d57dddc4b0042bd3a276332aff177b345 | 6,380 | // SPDX-License-Identifier: Apache-2.0
//! syscall interface layer between assembler and rust
use crate::hostcall::{HostCall, UserMemScope};
use crate::spin::{Locked, RacyCell};
use core::arch::asm;
use core::mem::size_of;
use sallyport::guest;
use sallyport::guest::Handler;
use sallyport::item::enarxcall::SYS_GETATT;
#[cfg(feature = "dbg")]
use sallyport::libc::{SYS_write, STDERR_FILENO, STDOUT_FILENO};
use spinning::Lazy;
// Two-register return value: `rax` carries the syscall result and `rdx` is
// handed back unchanged. A 16-byte `#[repr(C)]` struct is returned in
// rax:rdx under the sysv64 ABI, which the assembly stub relies on.
#[repr(C)]
struct X8664DoubleReturn {
    rax: u64,
    rdx: u64,
}
/// syscall service routine
///
/// Entry point installed for the `syscall` instruction: switches from the
/// user stack to the kernel stack, forwards the arguments to `syscall_rust`,
/// and returns to userspace with `sysretq`.
///
/// # Safety
///
/// This function must not be called from Rust code; it is only entered via
/// the `syscall` instruction.
#[naked]
pub unsafe extern "sysv64" fn _syscall_enter() -> ! {
    // Byte offsets into the TSS, reachable through the `gs` segment after
    // `swapgs`.
    // TaskStateSegment.privilege_stack_table[0]
    const KERNEL_RSP_OFF: usize = size_of::<u32>();
    // TaskStateSegment.privilege_stack_table[3]
    const USR_RSP_OFF: usize = size_of::<u32>() + 3 * size_of::<u64>();

    asm!(
        // prepare the stack for sysretq and load the kernel rsp
        "swapgs", // set gs segment to TSS

        // swapgs variant of Spectre V1. Disable speculation past this point
        "lfence",

        "mov QWORD PTR gs:{USR}, rsp", // save userspace rsp
        "mov rsp, QWORD PTR gs:{KRN}",// load kernel rsp
        "push QWORD PTR gs:{USR}", // push userspace rsp - stack_pointer_ring_3
        "mov QWORD PTR gs:{USR}, 0x0", // clear userspace rsp in the TSS
        "push r11", // push RFLAGS stored in r11
        "push rcx", // push userspace return pointer
        "push rbp",
        "mov rbp, rsp", // Save stack frame

        // Arguments in registers:
        // SYSV: rdi, rsi, rdx, rcx, r8, r9
        // SYSCALL: rdi, rsi, rdx, r10, r8, r9 and syscall number in rax
        "mov rcx, r10",

        // These will be preserved by `syscall_rust` via the SYS-V ABI
        // rbx, rsp, rbp, r12, r13, r14, r15

        // save registers
        "push rdi",
        "push rsi",
        "push r10",
        "push r9",
        "push r8",

        // syscall number on the stack as the seventh argument
        "push rax",

        "call {syscall_rust}",

        // skip rax pop, as it is the return value
        "add rsp, 0x8",

        // restore registers
        "pop r8",
        "pop r9",
        "pop r10",
        "pop rsi",
        "pop rdi",

        "pop rbp",

        "pop rcx", // Pop userspace return pointer
        "pop r11", // pop rflags to r11

        "pop QWORD PTR gs:{USR}", // Pop userspace rsp
        "mov rsp, gs:{USR}", // Restore userspace rsp

        "swapgs",

        // swapgs variant of Spectre V1. Disable speculation past this point
        "lfence",

        "sysretq",

        USR = const USR_RSP_OFF,
        KRN = const KERNEL_RSP_OFF,

        syscall_rust = sym syscall_rust,

        options(noreturn)
    )
}
/// Thread local storage
///
/// Lazily initialized, process-wide storage handed to the sallyport guest.
/// FIXME: when using multithreading — a single static instance is shared, so
/// this must become per-thread once multiple threads exist.
pub static THREAD_TLS: Lazy<Locked<&mut guest::ThreadLocalStorage>> = Lazy::new(|| unsafe {
    static TLSHANDLE: RacyCell<guest::ThreadLocalStorage> =
        RacyCell::new(guest::ThreadLocalStorage::new());
    Locked::<&mut guest::ThreadLocalStorage>::new(&mut (*TLSHANDLE.get()))
});
/// Handle a syscall in rust
///
/// Called from the `_syscall_enter` assembly stub with the six syscall
/// argument registers plus the syscall number (passed on the stack as the
/// seventh sysv64 argument). Returns the result in `rax` and the value to
/// restore into `rdx`.
#[allow(clippy::many_single_char_names)]
extern "sysv64" fn syscall_rust(
    a: usize,
    b: usize,
    c: usize,
    d: usize,
    e: usize,
    f: usize,
    nr: usize,
) -> X8664DoubleReturn {
    // `c` arrived in rdx; remember it so error paths can hand rdx back
    // unchanged, as a normal syscall would.
    let orig_rdx: usize = c;

    #[cfg(feature = "dbg")]
    if !(nr == SYS_write as usize && (a == STDERR_FILENO as usize || a == STDOUT_FILENO as usize)) {
        // Skip tracing writes to stdout/stderr — tracing them would log every
        // debug print, including these trace lines themselves.
        eprintln!("syscall {} …", nr)
    }

    let mut tls = THREAD_TLS.lock();
    let mut h = HostCall::try_new(&mut tls).unwrap();

    let usermemscope = UserMemScope;

    match nr as i64 {
        // Enarx-specific attestation pseudo-syscall: returns two values, so
        // both rax and rdx come from the handler.
        SYS_GETATT => {
            let ret = h.get_attestation(&usermemscope, a, b, c, d);

            match ret {
                Err(e) => {
                    #[cfg(feature = "dbg")]
                    eprintln!("syscall {} = {}", nr, e.checked_neg().unwrap());

                    X8664DoubleReturn {
                        rax: e.checked_neg().unwrap() as _,
                        // Preserve `rdx` as it is normally not clobbered with a syscall
                        rdx: orig_rdx as _,
                    }
                }
                Ok([rax, rdx]) => {
                    #[cfg(feature = "dbg")]
                    eprintln!("syscall {} = {}", nr, rax);

                    X8664DoubleReturn {
                        rax: rax as _,
                        rdx: rdx as _,
                    }
                }
            }
        }
        // Every other syscall is forwarded to the sallyport handler.
        _ => {
            let ret = unsafe { h.syscall(&usermemscope, [nr, a, b, c, d, e, f]) };

            match ret {
                Err(e) => {
                    #[cfg(feature = "dbg")]
                    if !(nr == SYS_write as usize
                        && (a == STDERR_FILENO as usize || a == STDOUT_FILENO as usize))
                    {
                        eprintln!("syscall {} = {}", nr, e.checked_neg().unwrap());
                    }

                    X8664DoubleReturn {
                        rax: e.checked_neg().unwrap() as _,
                        // Preserve `rdx` as it is normally not clobbered with a syscall
                        rdx: orig_rdx as _,
                    }
                }
                Ok([rax, _]) => {
                    #[cfg(feature = "dbg")]
                    if !(nr == SYS_write as usize
                        && (a == STDERR_FILENO as usize || a == STDOUT_FILENO as usize))
                    {
                        eprintln!("syscall {} = {}", nr, rax);
                    }

                    X8664DoubleReturn {
                        rax: rax as _,
                        // Preserve `rdx` as it is normally not clobbered with a syscall
                        rdx: orig_rdx as _,
                    }
                }
            }
        }
    }
}
| 32.886598 | 104 | 0.465204 |
f5ac27ac976e51ae7aafa5e414a3d05be5229e4a | 19,026 | //! Ncurses-specific backend.
use log::{debug, warn};
use ncurses::mmask_t;
use std::cell::{Cell, RefCell};
use std::ffi::CString;
use std::fs::File;
use std::io;
use std::io::Write;
use crate::backend;
use crate::event::{Event, Key, MouseButton, MouseEvent};
use crate::theme::{Color, ColorPair, Effect};
use crate::utf8;
use crate::Vec2;
use super::split_i32;
// Use AHash instead of the slower SipHash
type HashMap<K, V> = std::collections::HashMap<K, V, ahash::RandomState>;
/// Backend using ncurses.
pub struct Backend {
    // The color pair currently active on the terminal, cached so that
    // `set_color` can skip redundant `attron` calls.
    current_style: Cell<ColorPair>,
    // Maps (front, back) ncurses colors to ncurses pairs
    pairs: RefCell<HashMap<(i16, i16), i16>>,
    // Pre-computed map of ncurses codes to parsed Event
    key_codes: HashMap<i32, Event>,
    // Remember the last pressed button to correctly feed Released Event
    last_mouse_button: Option<MouseButton>,
    // Sometimes a code from ncurses should be split in two Events.
    //
    // So remember the one we didn't return.
    input_buffer: Option<Event>,
}
/// Finds the closest (front, back) color pair supported by the current
/// terminal, as reported by ncurses' `COLORS()`.
fn find_closest_pair(pair: ColorPair) -> (i16, i16) {
    super::find_closest_pair(pair, ncurses::COLORS() as i16)
}
/// Writes some bytes directly to `/dev/tty`
///
/// Since this is not going to be used often, we can afford to re-open the
/// file every time.
///
/// # Errors
///
/// Returns any I/O error from opening or writing to `/dev/tty`.
fn write_to_tty(bytes: &[u8]) -> io::Result<()> {
    // Propagate the open failure instead of panicking: this function already
    // returns `io::Result`, so callers get to decide how to handle a missing
    // tty (previously an `expect` aborted the whole process here).
    let mut tty_output = File::create("/dev/tty")?;
    tty_output.write_all(bytes)?;
    // tty_output will be flushed automatically at the end of the function.
    Ok(())
}
impl Backend {
    /// Creates a new ncurses-based backend.
    ///
    /// Uses `/dev/tty` for input/output, leaving stdin/stdout free for other
    /// purposes.
    pub fn init() -> io::Result<Box<dyn backend::Backend>> {
        Self::init_with_files("/dev/tty", "/dev/tty")
    }
    /// Creates a new ncurses-based backend.
    ///
    /// Uses stdin/stdout for input/output (via their `/dev/std*` device
    /// files).
    pub fn init_stdio() -> io::Result<Box<dyn backend::Backend>> {
        Self::init_with_files("/dev/stdin", "/dev/stdout")
    }
    /// Creates a new ncurses-based backend using the given files for input/output.
    ///
    /// # Errors
    ///
    /// Fails if `$TERM` is unset/empty, or if the terminal cannot be told to
    /// report mouse drag events.
    pub fn init_with_files(
        input_path: &str,
        output_path: &str,
    ) -> io::Result<Box<dyn backend::Backend>> {
        // Check the $TERM variable.
        if std::env::var("TERM")
            .map(|var| var.is_empty())
            .unwrap_or(true)
        {
            return Err(io::Error::new(
                io::ErrorKind::Other,
                "$TERM is unset. Cannot initialize ncurses interface.",
            ));
        }
        // Change the locale.
        // For some reasons it's mandatory to get some UTF-8 support.
        let buf = CString::new("").unwrap();
        unsafe { libc::setlocale(libc::LC_ALL, buf.as_ptr()) };
        // The delay is the time ncurses wait after pressing ESC
        // to see if it's an escape sequence.
        // Default delay is way too long. 25 is imperceptible yet works fine.
        ::std::env::set_var("ESCDELAY", "25");
        // Don't output to standard IO, directly feed into /dev/tty
        // This leaves stdin and stdout usable for other purposes.
        // (The raw `FILE*` handles are handed to `newterm` below.)
        let input = {
            let mode = CString::new("r").unwrap();
            let path = CString::new(input_path).unwrap();
            unsafe { libc::fopen(path.as_ptr(), mode.as_ptr()) }
        };
        let output = {
            let mode = CString::new("w").unwrap();
            let path = CString::new(output_path).unwrap();
            unsafe { libc::fopen(path.as_ptr(), mode.as_ptr()) }
        };
        ncurses::newterm(None, output, input);
        // Enable keypad (like arrows)
        ncurses::keypad(ncurses::stdscr(), true);
        // This disables mouse click detection,
        // and provides 0-delay access to mouse presses.
        ncurses::mouseinterval(0);
        // Listen to all mouse events.
        ncurses::mousemask(
            (ncurses::ALL_MOUSE_EVENTS | ncurses::REPORT_MOUSE_POSITION)
                as mmask_t,
            None,
        );
        // Enable non-blocking input, so getch() immediately returns.
        ncurses::timeout(0);
        // Don't echo user input, we'll take care of that
        ncurses::noecho();
        // This disables buffering and some input processing.
        ncurses::raw();
        // This enables color support.
        ncurses::start_color();
        // Pick up background and text color from the terminal theme.
        ncurses::use_default_colors();
        // Don't print cursors.
        ncurses::curs_set(ncurses::CURSOR_VISIBILITY::CURSOR_INVISIBLE);
        // This asks the terminal to provide us with mouse drag events
        // (Mouse move when a button is pressed).
        // Replacing 1002 with 1003 would give us ANY mouse move.
        write_to_tty(b"\x1B[?1002h")?;
        let c = Backend {
            current_style: Cell::new(ColorPair::from_256colors(0, 0)),
            pairs: RefCell::new(HashMap::default()),
            key_codes: initialize_keymap(),
            last_mouse_button: None,
            input_buffer: None,
        };
        Ok(Box::new(c))
    }
    /// Save a new color pair.
    ///
    /// Allocates the next free ncurses pair id for `(front, back)`. When the
    /// terminal's pair table is exhausted, the highest (most recently
    /// allocated) id is recycled instead, evicting its previous mapping.
    fn insert_color(
        &self,
        pairs: &mut HashMap<(i16, i16), i16>,
        (front, back): (i16, i16),
    ) -> i16 {
        // Pair 0 is reserved by ncurses, so ids start at 1.
        let n = 1 + pairs.len() as i16;
        let target = if ncurses::COLOR_PAIRS() > i32::from(n) {
            // We still have plenty of space for everyone.
            n
        } else {
            // The world is too small for both of us.
            let target = n - 1;
            // Remove the mapping to n-1
            pairs.retain(|_, &mut v| v != target);
            target
        };
        pairs.insert((front, back), target);
        ncurses::init_pair(target, front, back);
        target
    }
    /// Checks the pair in the cache, or re-define a color if needed.
    ///
    /// Returns the ncurses pair id for the closest supported match to `pair`.
    fn get_or_create(&self, pair: ColorPair) -> i16 {
        let mut pairs = self.pairs.borrow_mut();
        // Find if we have this color in stock
        let result = find_closest_pair(pair);
        let lookup = pairs.get(&result).copied();
        lookup.unwrap_or_else(|| self.insert_color(&mut *pairs, result))
    }
    /// Makes `pair` the active color pair on the terminal and records it in
    /// the `current_style` cache.
    fn set_colors(&self, pair: ColorPair) {
        let i = self.get_or_create(pair);
        self.current_style.set(pair);
        let style = ncurses::COLOR_PAIR(i);
        ncurses::attron(style);
    }
    /// Fetches the next pending input event, if any.
    ///
    /// Drains the internal one-event buffer first, then polls ncurses.
    /// Returns `None` when no input is available (non-blocking mode).
    fn parse_next(&mut self) -> Option<Event> {
        if let Some(event) = self.input_buffer.take() {
            return Some(event);
        }
        let ch: i32 = ncurses::getch();
        // Non-blocking input will return -1 as long as no input is available.
        if ch == -1 {
            return None;
        }
        // Is it a printable byte — possibly the start of a UTF-8 sequence?
        // (32..=255 excluding 127/DEL; anything else is a control or special
        // ncurses code.)
        let event = if 32 <= ch && ch <= 255 && ch != 127 {
            utf8::read_char(ch as u8, || Some(ncurses::getch() as u8))
                .map(Event::Char)
                .unwrap_or_else(|e| {
                    warn!("Error reading input: {}", e);
                    Event::Unknown(vec![ch as u8])
                })
        } else {
            self.parse_ncurses_char(ch)
        };
        Some(event)
    }
fn parse_ncurses_char(&mut self, ch: i32) -> Event {
// eprintln!("Found {:?}", ncurses::keyname(ch));
if ch == ncurses::KEY_MOUSE {
self.parse_mouse_event()
} else {
self.key_codes
.get(&ch)
.cloned()
.unwrap_or_else(|| Event::Unknown(split_i32(ch)))
}
}
    /// Reads the pending ncurses mouse event and translates it to an [`Event`].
    ///
    /// When one ncurses code encodes several button events, the second one is
    /// stashed in `self.input_buffer` and returned on the next poll.
    fn parse_mouse_event(&mut self) -> Event {
        let mut mevent = ncurses::MEVENT {
            id: 0,
            x: 0,
            y: 0,
            z: 0,
            bstate: 0,
        };
        if ncurses::getmouse(&mut mevent as *mut ncurses::MEVENT)
            == ncurses::OK
        {
            // Currently unused
            let _ctrl = (mevent.bstate & ncurses::BUTTON_CTRL as mmask_t) != 0;
            let _shift =
                (mevent.bstate & ncurses::BUTTON_SHIFT as mmask_t) != 0;
            let _alt = (mevent.bstate & ncurses::BUTTON_ALT as mmask_t) != 0;
            // Keep the base state, without the modifiers
            mevent.bstate &= !(ncurses::BUTTON_SHIFT
                | ncurses::BUTTON_ALT
                | ncurses::BUTTON_CTRL)
                as mmask_t;
            // This makes a full `Event` from a `MouseEvent`.
            let make_event = |event| Event::Mouse {
                offset: Vec2::zero(),
                position: Vec2::new(mevent.x as usize, mevent.y as usize),
                event,
            };
            if mevent.bstate == ncurses::REPORT_MOUSE_POSITION as mmask_t {
                // The event is either a mouse drag event,
                // or a weird double-release event. :S
                self.last_mouse_button
                    .map(MouseEvent::Hold)
                    .or_else(|| {
                        // In legacy mode, some buttons overlap,
                        // so we need to disambiguate.
                        if mevent.bstate
                            == ncurses::BUTTON5_DOUBLE_CLICKED as mmask_t
                        {
                            Some(MouseEvent::WheelDown)
                        } else {
                            None
                        }
                    })
                    .map(&make_event)
                    .unwrap_or_else(|| Event::Unknown(vec![]))
            } else {
                // Identify the button: keep only the lower 25 bits, which
                // hold the button state flags.
                let mut bare_event = mevent.bstate & ((1 << 25) - 1);
                let mut event = None;
                // ncurses encodes multiple events in the same value.
                // Peel off one set bit (one event) per iteration.
                while bare_event != 0 {
                    let single_event = 1 << bare_event.trailing_zeros();
                    bare_event ^= single_event;
                    // Process single_event
                    on_mouse_event(single_event as i32, |e| {
                        // Keep one event for later,
                        // send the rest through the channel.
                        if event.is_none() {
                            event = Some(e);
                        } else {
                            self.input_buffer = Some(make_event(e));
                        }
                    });
                }
                if let Some(event) = event {
                    // Track presses/releases so drag events (which only carry
                    // REPORT_MOUSE_POSITION) can name the held button.
                    match event {
                        MouseEvent::Press(btn) => {
                            self.last_mouse_button = Some(btn);
                        }
                        MouseEvent::Release(_) => {
                            self.last_mouse_button = None;
                        }
                        _ => (),
                    }
                    make_event(event)
                } else {
                    debug!("No event parsed?...");
                    Event::Unknown(vec![])
                }
            }
        } else {
            debug!("Ncurses event not recognized.");
            Event::Unknown(vec![])
        }
    }
impl Drop for Backend {
    fn drop(&mut self) {
        // Stop mouse drag reporting (counterpart of the `1002h` sequence sent
        // in `init_with_files`), then shut ncurses down.
        write_to_tty(b"\x1B[?1002l").unwrap();
        ncurses::endwin();
    }
}
impl backend::Backend for Backend {
    fn name(&self) -> &str {
        "ncurses"
    }
    fn screen_size(&self) -> Vec2 {
        let mut x: i32 = 0;
        let mut y: i32 = 0;
        ncurses::getmaxyx(ncurses::stdscr(), &mut y, &mut x);
        (x, y).into()
    }
    fn has_colors(&self) -> bool {
        ncurses::has_colors()
    }
    fn poll_event(&mut self) -> Option<Event> {
        self.parse_next()
    }
    // Activates `colors` (skipping the call when already active) and returns
    // the previously active pair.
    fn set_color(&self, colors: ColorPair) -> ColorPair {
        // eprintln!("Color used: {:?}", colors);
        let current = self.current_style.get();
        if current != colors {
            self.set_colors(colors);
        }
        current
    }
    fn set_effect(&self, effect: Effect) {
        let style = match effect {
            Effect::Reverse => ncurses::A_REVERSE(),
            Effect::Simple => ncurses::A_NORMAL(),
            Effect::Bold => ncurses::A_BOLD(),
            Effect::Italic => ncurses::A_ITALIC(),
            // No dedicated ncurses attribute for strikethrough: mapped to a
            // no-op (A_NORMAL).
            Effect::Strikethrough => ncurses::A_NORMAL(),
            Effect::Underline => ncurses::A_UNDERLINE(),
        };
        ncurses::attron(style);
    }
    // NOTE: this match must stay in sync with `set_effect` above.
    fn unset_effect(&self, effect: Effect) {
        let style = match effect {
            Effect::Reverse => ncurses::A_REVERSE(),
            Effect::Simple => ncurses::A_NORMAL(),
            Effect::Bold => ncurses::A_BOLD(),
            Effect::Italic => ncurses::A_ITALIC(),
            Effect::Strikethrough => ncurses::A_NORMAL(),
            Effect::Underline => ncurses::A_UNDERLINE(),
        };
        ncurses::attroff(style);
    }
    fn clear(&self, color: Color) {
        // Use a front==back pair so the whole background takes `color`.
        let id = self.get_or_create(ColorPair {
            front: color,
            back: color,
        });
        ncurses::wbkgd(ncurses::stdscr(), ncurses::COLOR_PAIR(id));
        ncurses::clear();
    }
    fn refresh(&mut self) {
        ncurses::refresh();
    }
    fn print_at(&self, pos: Vec2, text: &str) {
        ncurses::mvaddstr(pos.y as i32, pos.x as i32, text);
    }
    // Prints `text` `repetitions` times in a row starting at `pos`; only the
    // first write needs to move the cursor.
    fn print_at_rep(&self, pos: Vec2, repetitions: usize, text: &str) {
        if repetitions > 0 {
            ncurses::mvaddstr(pos.y as i32, pos.x as i32, text);
            let mut dupes_left = repetitions - 1;
            while dupes_left > 0 {
                ncurses::addstr(text);
                dupes_left -= 1;
            }
        }
    }
}
/// Returns the `MouseButton` concerned by the given bare ncurses button code.
///
/// Returns `MouseButton::Other` for codes that do not match a known button.
fn get_mouse_button(bare_event: i32) -> MouseButton {
    match bare_event {
        ncurses::BUTTON1_RELEASED
        | ncurses::BUTTON1_PRESSED
        | ncurses::BUTTON1_CLICKED
        | ncurses::BUTTON1_DOUBLE_CLICKED
        | ncurses::BUTTON1_TRIPLE_CLICKED => MouseButton::Left,
        ncurses::BUTTON2_RELEASED
        | ncurses::BUTTON2_PRESSED
        | ncurses::BUTTON2_CLICKED
        | ncurses::BUTTON2_DOUBLE_CLICKED
        | ncurses::BUTTON2_TRIPLE_CLICKED => MouseButton::Middle,
        ncurses::BUTTON3_RELEASED
        | ncurses::BUTTON3_PRESSED
        | ncurses::BUTTON3_CLICKED
        | ncurses::BUTTON3_DOUBLE_CLICKED
        | ncurses::BUTTON3_TRIPLE_CLICKED => MouseButton::Right,
        ncurses::BUTTON4_RELEASED
        | ncurses::BUTTON4_PRESSED
        | ncurses::BUTTON4_CLICKED
        | ncurses::BUTTON4_DOUBLE_CLICKED
        | ncurses::BUTTON4_TRIPLE_CLICKED => MouseButton::Button4,
        ncurses::BUTTON5_RELEASED
        | ncurses::BUTTON5_PRESSED
        | ncurses::BUTTON5_CLICKED
        | ncurses::BUTTON5_DOUBLE_CLICKED
        | ncurses::BUTTON5_TRIPLE_CLICKED => MouseButton::Button5,
        _ => MouseButton::Other,
    }
}
/// Parses the given bare button code and calls `f` with the `MouseEvent` it
/// maps to.
///
/// With `mouseinterval(0)` set in `init_with_files`, clicks arrive as
/// separate press/release codes, so each code maps to at most one event.
/// Unrecognized codes are only logged.
fn on_mouse_event<F>(bare_event: i32, mut f: F)
where
    F: FnMut(MouseEvent),
{
    let button = get_mouse_button(bare_event);
    match bare_event {
        ncurses::BUTTON1_RELEASED
        | ncurses::BUTTON2_RELEASED
        | ncurses::BUTTON3_RELEASED => f(MouseEvent::Release(button)),
        ncurses::BUTTON1_PRESSED
        | ncurses::BUTTON2_PRESSED
        | ncurses::BUTTON3_PRESSED => f(MouseEvent::Press(button)),
        // Buttons 4/5 are the scroll wheel.
        ncurses::BUTTON4_PRESSED => f(MouseEvent::WheelUp),
        ncurses::BUTTON5_PRESSED => f(MouseEvent::WheelDown),
        // BUTTON4_RELEASED? BUTTON5_RELEASED?
        // Do they ever happen?
        _ => debug!("Unknown event: {:032b}", bare_event),
    }
}
}
/// Inserts mappings for the 12 function keys into `map`, starting at ncurses
/// code `start`, wrapping each `Key::F*` via `with_key` (e.g. to add a
/// modifier).
fn add_fn<F>(start: i32, with_key: F, map: &mut HashMap<i32, Event>)
where
    F: Fn(Key) -> Event,
{
    for offset in 0..12 {
        let code = start + offset;
        let event = with_key(Key::from_f((offset + 1) as u8));
        map.insert(code, event);
    }
}
/// Builds the static table translating ncurses key codes into `Event`s.
fn initialize_keymap() -> HashMap<i32, Event> {
    // First, define the static mappings.
    let mut map = HashMap::default();
    // Value sent by ncurses when nothing happens
    map.insert(-1, Event::Refresh);
    // Values under 256 are chars and control values
    // Tab is '\t'
    map.insert(9, Event::Key(Key::Tab));
    // Treat '\n' and the numpad Enter the same
    map.insert(10, Event::Key(Key::Enter));
    map.insert(ncurses::KEY_ENTER, Event::Key(Key::Enter));
    // This is the escape key when pressed by itself.
    // When used for control sequences,
    // it should have been caught earlier.
    map.insert(27, Event::Key(Key::Esc));
    // `Backspace` sends 127, but Ctrl-H sends `Backspace`
    map.insert(127, Event::Key(Key::Backspace));
    map.insert(ncurses::KEY_BACKSPACE, Event::Key(Key::Backspace));
    // 410 is the terminal-resize code (KEY_RESIZE on common ncurses builds).
    map.insert(410, Event::WindowResize);
    map.insert(ncurses::KEY_B2, Event::Key(Key::NumpadCenter));
    map.insert(ncurses::KEY_DC, Event::Key(Key::Del));
    map.insert(ncurses::KEY_IC, Event::Key(Key::Ins));
    map.insert(ncurses::KEY_BTAB, Event::Shift(Key::Tab));
    map.insert(ncurses::KEY_SLEFT, Event::Shift(Key::Left));
    map.insert(ncurses::KEY_SRIGHT, Event::Shift(Key::Right));
    map.insert(ncurses::KEY_LEFT, Event::Key(Key::Left));
    map.insert(ncurses::KEY_RIGHT, Event::Key(Key::Right));
    map.insert(ncurses::KEY_UP, Event::Key(Key::Up));
    map.insert(ncurses::KEY_DOWN, Event::Key(Key::Down));
    map.insert(ncurses::KEY_SR, Event::Shift(Key::Up))
;
    map.insert(ncurses::KEY_SF, Event::Shift(Key::Down));
    map.insert(ncurses::KEY_PPAGE, Event::Key(Key::PageUp));
    map.insert(ncurses::KEY_NPAGE, Event::Key(Key::PageDown));
    map.insert(ncurses::KEY_HOME, Event::Key(Key::Home));
    map.insert(ncurses::KEY_END, Event::Key(Key::End));
    map.insert(ncurses::KEY_SHOME, Event::Shift(Key::Home));
    map.insert(ncurses::KEY_SEND, Event::Shift(Key::End));
    map.insert(ncurses::KEY_SDC, Event::Shift(Key::Del));
    map.insert(ncurses::KEY_SNEXT, Event::Shift(Key::PageDown));
    map.insert(ncurses::KEY_SPREVIOUS, Event::Shift(Key::PageUp));
    // Then add some dynamic ones
    for c in 1..=26 {
        let event = match c {
            // Ctrl-i and Ctrl-j are special, they use the same codes as Tab
            // and Enter respecively. There's just no way to detect them. :(
            9 => Event::Key(Key::Tab),
            10 => Event::Key(Key::Enter),
            other => Event::CtrlChar((b'a' - 1 + other as u8) as char),
        };
        map.insert(c, event);
    }
    // Ncurses provides a F1 variable, but no modifiers
    // The hard-coded bases below (277/289/301/313) are the modifier blocks
    // following KEY_F1 — NOTE(review): confirm against the local terminfo.
    add_fn(ncurses::KEY_F1, Event::Key, &mut map);
    add_fn(277, Event::Shift, &mut map);
    add_fn(289, Event::Ctrl, &mut map);
    add_fn(301, Event::CtrlShift, &mut map);
    add_fn(313, Event::Alt, &mut map);
    // Those codes actually vary between ncurses versions...
    super::fill_key_codes(&mut map, ncurses::keyname);
    map
}
| 34.34296 | 83 | 0.555503 |
d6c28bc493f515909f96fd884ab715df68525d9a | 3,809 | use crate::{ImplItem, TypeParamBound};
use syn::{
parse::{Parse, ParseBuffer, ParseStream, Result},
punctuated::Punctuated,
Generics, Ident, ImplItemType, Type, Visibility,
};
/// A loosely-parsed `type` item, accepting both associated-type syntax
/// (`type T: Bound;`) and definitions (`type T = U;`).
#[allow(dead_code)]
struct FlexibleItemType {
    vis: Visibility,
    defaultness: Option<syn::token::Default>,
    type_token: syn::token::Type,
    ident: Ident,
    generics: Generics,
    colon_token: Option<syn::token::Colon>,
    // Bounds following the optional `:` (empty when there is no colon).
    bounds: Punctuated<TypeParamBound, syn::token::Add>,
    // The optional `= Type` definition.
    ty: Option<(syn::token::Eq, Type)>,
    semi_token: syn::token::Semi,
}
impl Parse for FlexibleItemType {
    /// Parses a `type` item, tolerating a bound list (`: Bound + ...`) and/or
    /// a definition (`= Type`) in any combination.
    fn parse(input: ParseStream) -> Result<Self> {
        let vis: Visibility = input.parse()?;
        let defaultness: Option<syn::token::Default> = input.parse()?;
        let type_token: syn::token::Type = input.parse()?;
        let ident: Ident = input.parse()?;
        let mut generics: Generics = input.parse()?;
        let colon_token: Option<syn::token::Colon> = input.parse()?;
        let mut bounds = Punctuated::new();
        if colon_token.is_some() {
            // The bound list ends at `where`, `=`, or `;`. Factored into a
            // closure so the two checks in the loop cannot drift apart.
            let at_end_of_bounds = || {
                input.peek(syn::token::Where)
                    || input.peek(syn::token::Eq)
                    || input.peek(syn::token::Semi)
            };
            loop {
                if at_end_of_bounds() {
                    break;
                }
                bounds.push_value(input.parse::<TypeParamBound>()?);
                if at_end_of_bounds() {
                    break;
                }
                bounds.push_punct(input.parse::<syn::token::Add>()?);
            }
        }
        generics.where_clause = input.parse()?;
        // Optional `= Type` definition.
        let ty = if let Some(eq_token) = input.parse()? {
            Some((eq_token, input.parse::<Type>()?))
        } else {
            None
        };
        let semi_token: syn::token::Semi = input.parse()?;
        Ok(FlexibleItemType {
            vis,
            defaultness,
            type_token,
            ident,
            generics,
            colon_token,
            bounds,
            ty,
            semi_token,
        })
    }
}
/// Helpers for preserving unparsed input as a raw token stream.
pub(super) mod verbatim {
    use proc_macro2::TokenStream as TokenStream2;
    use std::iter;
    use syn::parse::{ParseBuffer, ParseStream};
    /// Collects every token between the `begin` fork and the current position
    /// of `end` into a single `TokenStream`.
    ///
    /// `begin` must be a fork of the same buffer taken before `end` advanced.
    pub fn between<'a>(begin: ParseBuffer<'a>, end: ParseStream<'a>) -> TokenStream2 {
        let end = end.cursor();
        let mut cursor = begin.cursor();
        let mut tokens = TokenStream2::new();
        // Walk token trees one at a time until the two cursors meet.
        while cursor != end {
            let (tt, next) = cursor.token_tree().unwrap();
            tokens.extend(iter::once(tt));
            cursor = next;
        }
        tokens
    }
}
/// Converts a flexibly-parsed `type` item into an `ImplItemType`.
///
/// Items with a `:` bound list or without an `= Type` definition are not
/// valid impl items, so they are preserved verbatim instead.
pub(super) fn parse_impl_item_type(begin: ParseBuffer, input: ParseStream) -> Result<ImplItem> {
    let FlexibleItemType {
        vis,
        defaultness,
        type_token,
        ident,
        generics,
        colon_token,
        bounds: _,
        ty,
        semi_token,
    } = input.parse()?;
    match (colon_token, ty) {
        // No bounds and a definition present: a well-formed impl item.
        (None, Some((eq_token, ty))) => Ok(ImplItem::Type(ImplItemType {
            attrs: Vec::new(),
            vis,
            defaultness,
            type_token,
            ident,
            generics,
            eq_token,
            ty,
            semi_token,
        })),
        // Anything else is kept as raw tokens.
        _ => Ok(ImplItem::Verbatim(verbatim::between(begin, input))),
    }
}
/// Returns `true` if the stream looks like the start of a function signature
/// (optional `const`/`async`/`unsafe`/ABI qualifiers followed by `fn`),
/// without consuming any input.
pub(super) fn peek_signature(input: ParseStream) -> bool {
    let fork = input.fork();
    // Each qualifier is optional, so these parses rarely fail; the decisive
    // check is the trailing `fn` token.
    fork.parse::<Option<syn::token::Const>>().is_ok()
        && fork.parse::<Option<syn::token::Async>>().is_ok()
        && fork.parse::<Option<syn::token::Unsafe>>().is_ok()
        && fork.parse::<Option<syn::Abi>>().is_ok()
        && fork.peek(syn::token::Fn)
}
| 30.717742 | 96 | 0.523497 |
7a2c027193eb350ee58eb56b161dc3924251e737 | 2,205 | use turron_common::{
miette::{NamedSource, Severity, SourceOffset},
regex::Regex,
smol::{self, fs, process::Command},
tracing,
};
pub use errors::{DotnetError, MsBuildError};
mod errors;
/// Runs `dotnet pack` and converts MSBuild diagnostics into structured errors.
///
/// # Errors
///
/// Returns a `DotnetError` if the `dotnet` CLI cannot be located or spawned,
/// if a diagnosed source file cannot be read, or if the pack itself fails —
/// in which case all parsed `MsBuildError` diagnostics are attached.
pub async fn pack() -> Result<(), DotnetError> {
    // `which` is blocking, so run it off the async executor.
    let cli_path = smol::unblock(|| which::which("dotnet")).await?;
    let output = Command::new(cli_path)
        .arg("pack")
        .arg("--nologo")
        .output()
        .await?;
    // TODO: handle bad utf8 errors
    let stdout = String::from_utf8(output.stdout).unwrap_or_else(|_| "".into());
    // Matches MSBuild diagnostic lines of the form:
    //   file(line,column): severity CODE: message
    // (the `(line,column)` part is optional).
    let regex = Regex::new(
        r"^\s*(?P<file>.*?)(\((?P<line>\d+),(?P<column>\d+)\))?\s*:\s+(?P<severity>.*?)\s+(?P<code>.*):\s+(?P<message>.*)$",
    ).expect("TURRON BUG: oops, bad regex?");
    let mut errors = Vec::new();
    for line in stdout.lines() {
        if let Some(captures) = regex.captures(line) {
            let filename: String = captures.name("file").unwrap().as_str().trim().into();
            // Read the offending file so the diagnostic can carry a source span.
            let contents = fs::read_to_string(&filename).await?;
            let line = captures
                .name("line")
                .map(|x| x.as_str().parse::<usize>().unwrap())
                .unwrap_or(0);
            let column = captures
                .name("column")
                .map(|x| x.as_str().parse::<usize>().unwrap())
                .unwrap_or(0);
            let err_offset = SourceOffset::from_location(&contents, line, column);
            errors.push(MsBuildError {
                file: NamedSource::new(filename, contents),
                span: (err_offset, 0.into()).into(),
                code: captures.name("code").unwrap().as_str().trim().into(),
                message: captures.name("message").unwrap().as_str().trim().into(),
                severity: match captures.name("severity").unwrap().as_str().trim() {
                    "warning" => Severity::Warning,
                    "info" => Severity::Advice,
                    _ => Severity::Error,
                },
            });
        } else {
            // Non-diagnostic output is just logged.
            tracing::info!("{}", line);
        }
    }
    if output.status.success() {
        Ok(())
    } else {
        Err(DotnetError::PackFailed(errors))
    }
}
| 36.75 | 128 | 0.509297 |
38561aa82240da16f07629678217a0216f3d6b30 | 13,320 | //! # Queueing Honey Badger
//!
//! This works exactly like Dynamic Honey Badger, but it has a transaction queue built in. Whenever
//! an epoch is output, it will automatically select a list of pending transactions and propose it
//! for the next one. The user can continuously add more pending transactions to the queue.
//!
//! If there are no pending transactions, no validators in the process of being added or
//! removed and not enough other nodes have proposed yet, no automatic proposal will be made: The
//! network then waits until at least _f + 1_ have any content for the next epoch.
//!
//! ## How it works
//!
//! Queueing Honey Badger runs a Dynamic Honey Badger internally, and automatically inputs a list
//! of pending transactions as its contribution at the beginning of each epoch. These are selected
//! by making a random choice of _B / N_ out of the first _B_ entries in the queue, where _B_ is the
//! configurable `batch_size` parameter, and _N_ is the current number of validators.
//!
//! After each output, the transactions that made it into the new batch are removed from the queue.
//!
//! The random choice of transactions is made to reduce redundancy even if all validators have
//! roughly the same entries in their queues. By selecting a random fraction of the first _B_
//! entries, any two nodes will likely make almost disjoint contributions instead of proposing
//! the same transaction multiple times.
use std::marker::PhantomData;
use std::{cmp, iter};
use derivative::Derivative;
use failure::Fail;
use rand::distributions::{Distribution, Standard};
use rand::Rng;
use serde::{de::DeserializeOwned, Serialize};
use crate::crypto::{PublicKey, SecretKey};
use crate::dynamic_honey_badger::{
self, Batch as DhbBatch, DynamicHoneyBadger, FaultKind, JoinPlan, Message, Step as DhbStep,
};
use crate::transaction_queue::TransactionQueue;
use crate::{ConsensusProtocol, Contribution, NetworkInfo, NodeIdT};
pub use crate::dynamic_honey_badger::{Change, ChangeState, Input};
/// Queueing honey badger error variants.
///
/// All variants wrap an error from the inner `DynamicHoneyBadger`, tagged
/// with the operation that produced it.
#[derive(Debug, Fail)]
pub enum Error {
    /// Failed to handle input.
    #[fail(display = "Input error: {}", _0)]
    Input(dynamic_honey_badger::Error),
    /// Failed to handle a message.
    #[fail(display = "Handle message error: {}", _0)]
    HandleMessage(dynamic_honey_badger::Error),
    /// Failed to propose a contribution.
    #[fail(display = "Propose error: {}", _0)]
    Propose(dynamic_honey_badger::Error),
    /// Failed to create a Dynamic Honey Badger instance according to a join plan.
    #[fail(display = "New joining error: {}", _0)]
    NewJoining(dynamic_honey_badger::Error),
}
/// The result of `QueueingHoneyBadger` handling an input or message.
pub type Result<T> = ::std::result::Result<T, Error>;
/// A Queueing Honey Badger builder, to configure the parameters and create new instances of
/// `QueueingHoneyBadger`.
pub struct QueueingHoneyBadgerBuilder<T, N, Q>
where
    T: Contribution + Serialize + DeserializeOwned + Clone,
    N: NodeIdT + Serialize + DeserializeOwned,
{
    /// The wrapped `DynamicHoneyBadger` instance that will drive consensus.
    dyn_hb: DynamicHoneyBadger<Vec<T>, N>,
    /// The target number of transactions to be included in each batch.
    batch_size: usize,
    /// The queue of pending transactions that haven't been output in a batch yet.
    queue: Q,
    /// The initial step of the managed `DynamicHoneyBadger` instance.
    step: Option<DhbStep<Vec<T>, N>>,
    _phantom: PhantomData<T>,
}
type QueueingHoneyBadgerWithStep<T, N, Q> = (QueueingHoneyBadger<T, N, Q>, Step<T, N>);
impl<T, N, Q> QueueingHoneyBadgerBuilder<T, N, Q>
where
    T: Contribution + Serialize + DeserializeOwned + Clone,
    N: NodeIdT + Serialize + DeserializeOwned,
    Q: TransactionQueue<T>,
    Standard: Distribution<N>,
{
    /// Returns a new `QueueingHoneyBadgerBuilder` wrapping the given instance of
    /// `DynamicHoneyBadger`.
    ///
    /// Defaults: batch size 100, an empty (default) queue, no initial step.
    pub fn new(dyn_hb: DynamicHoneyBadger<Vec<T>, N>) -> Self {
        // TODO: Use the defaults from `HoneyBadgerBuilder`.
        QueueingHoneyBadgerBuilder {
            dyn_hb,
            batch_size: 100,
            queue: Default::default(),
            step: None,
            _phantom: PhantomData,
        }
    }
    /// Sets the initial step of the `DynamicHoneyBadger` instance.
    pub fn step(mut self, step: DhbStep<Vec<T>, N>) -> Self {
        self.step = Some(step);
        self
    }
    /// Sets the target number of transactions per batch.
    pub fn batch_size(mut self, batch_size: usize) -> Self {
        self.batch_size = batch_size;
        self
    }
    /// Sets the transaction queue object.
    pub fn queue(mut self, queue: Q) -> Self {
        self.queue = queue;
        self
    }
    /// Creates a new Queueing Honey Badger instance with an empty buffer.
    pub fn build<R: Rng>(self, rng: &mut R) -> Result<QueueingHoneyBadgerWithStep<T, N, Q>> {
        self.build_with_transactions(None, rng)
    }
    /// Returns a new Queueing Honey Badger instance that starts with the given transactions in its
    /// buffer.
    pub fn build_with_transactions<TI, R>(
        mut self,
        txs: TI,
        rng: &mut R,
    ) -> Result<QueueingHoneyBadgerWithStep<T, N, Q>>
    where
        TI: IntoIterator<Item = T>,
        R: Rng,
    {
        self.queue.extend(txs);
        let mut qhb = QueueingHoneyBadger {
            dyn_hb: self.dyn_hb,
            batch_size: self.batch_size,
            queue: self.queue,
        };
        // Immediately propose a first contribution if possible, and merge any
        // initial step supplied via `step()` into the result.
        let mut step = qhb.propose(rng)?;
        if let Some(dhb_step) = self.step {
            step.extend(dhb_step);
        }
        Ok((qhb, step))
    }
}
/// A Honey Badger instance that can handle adding and removing nodes and manages a transaction
/// queue.
#[derive(Derivative)]
#[derivative(Debug)]
pub struct QueueingHoneyBadger<T, N: Ord, Q> {
    /// The target number of transactions to be included in each batch.
    batch_size: usize,
    /// The internal managed `DynamicHoneyBadger` instance.
    dyn_hb: DynamicHoneyBadger<Vec<T>, N>,
    /// The queue of pending transactions that haven't been output in a batch yet.
    /// Entries are removed once they appear in an output batch.
    queue: Q,
}
/// A `QueueingHoneyBadger` step, possibly containing multiple outputs.
pub type Step<T, N> = crate::Step<Message<N>, Batch<T, N>, N, FaultKind>;
impl<T, N, Q> ConsensusProtocol for QueueingHoneyBadger<T, N, Q>
where
    T: Contribution + Serialize + DeserializeOwned + Clone,
    N: NodeIdT + Serialize + DeserializeOwned,
    Q: TransactionQueue<T>,
    Standard: Distribution<N>,
{
    type NodeId = N;
    type Input = Input<T, N>;
    type Output = Batch<T, N>;
    type Message = Message<N>;
    type Error = Error;
    type FaultKind = FaultKind;
    fn handle_input<R: Rng>(&mut self, input: Self::Input, rng: &mut R) -> Result<Step<T, N>> {
        // User transactions are forwarded to `HoneyBadger` right away. Internal messages are
        // in addition signed and broadcast.
        match input {
            Input::User(tx) => self.push_transaction(tx, rng),
            Input::Change(change) => self.vote_for(change, rng),
        }
    }
    // Delegates to the inherent `handle_message` below.
    fn handle_message<R: Rng>(
        &mut self,
        sender_id: &N,
        message: Self::Message,
        rng: &mut R,
    ) -> Result<Step<T, N>> {
        self.handle_message(sender_id, message, rng)
    }
    // Queueing Honey Badger runs indefinitely; it never terminates on its own.
    fn terminated(&self) -> bool {
        false
    }
    fn our_id(&self) -> &N {
        self.dyn_hb.our_id()
    }
}
impl<T, N, Q> QueueingHoneyBadger<T, N, Q>
where
    T: Contribution + Serialize + DeserializeOwned + Clone,
    N: NodeIdT + Serialize + DeserializeOwned,
    Q: TransactionQueue<T>,
    Standard: Distribution<N>,
{
    /// Returns a new `QueueingHoneyBadgerBuilder` configured to use the node IDs and cryptographic
    /// keys specified by `netinfo`.
    pub fn builder(dyn_hb: DynamicHoneyBadger<Vec<T>, N>) -> QueueingHoneyBadgerBuilder<T, N, Q> {
        QueueingHoneyBadgerBuilder::new(dyn_hb)
    }
    /// Creates a new `QueueingHoneyBadgerBuilder` for joining the network specified in the
    /// `JoinPlan`.
    ///
    /// Returns a `QueueingHoneyBadgerBuilder` or an error if creation of the managed
    /// `DynamicHoneyBadger` instance has failed.
    pub fn builder_joining<R: Rng>(
        our_id: N,
        secret_key: SecretKey,
        join_plan: JoinPlan<N>,
        rng: &mut R,
    ) -> Result<QueueingHoneyBadgerBuilder<T, N, Q>> {
        let (dhb, step) = DynamicHoneyBadger::new_joining(our_id, secret_key, join_plan, rng)
            .map_err(Error::NewJoining)?;
        Ok(QueueingHoneyBadgerBuilder::new(dhb).step(step))
    }
    /// Adds a transaction to the queue.
    ///
    /// This can be called at any time to append to the transaction queue. The new transaction will
    /// be proposed in some future epoch.
    ///
    /// If no proposal has yet been made for the current epoch, this may trigger one. In this case,
    /// a nonempty step will returned, with the corresponding messages. (Or, if we are the only
    /// validator, even with the completed batch as an output.)
    pub fn push_transaction<R: Rng>(&mut self, tx: T, rng: &mut R) -> Result<Step<T, N>> {
        self.queue.extend(iter::once(tx));
        self.propose(rng)
    }
    /// Casts a vote to change the set of validators.
    ///
    /// This stores a pending vote for the change. It will be included in some future batch, and
    /// once enough validators have been voted for the same change, it will take effect.
    pub fn vote_for<R: Rng>(&mut self, change: Change<N>, rng: &mut R) -> Result<Step<T, N>> {
        self.apply(|dyn_hb, _| dyn_hb.vote_for(change), rng)
    }
    /// Casts a vote to add a node as a validator.
    ///
    /// This stores a pending vote for the change. It will be included in some future batch, and
    /// once enough validators have been voted for the same change, it will take effect.
    pub fn vote_to_add<R: Rng>(
        &mut self,
        node_id: N,
        pub_key: PublicKey,
        rng: &mut R,
    ) -> Result<Step<T, N>> {
        self.apply(|dyn_hb, _| dyn_hb.vote_to_add(node_id, pub_key), rng)
    }
    /// Casts a vote to demote a validator to observer.
    ///
    /// This stores a pending vote for the change. It will be included in some future batch, and
    /// once enough validators have been voted for the same change, it will take effect.
    pub fn vote_to_remove<R: Rng>(&mut self, node_id: &N, rng: &mut R) -> Result<Step<T, N>> {
        self.apply(|dyn_hb, _| dyn_hb.vote_to_remove(node_id), rng)
    }
    /// Handles a message received from `sender_id`.
    ///
    /// This must be called with every message we receive from another node.
    pub fn handle_message<R: Rng>(
        &mut self,
        sender_id: &N,
        message: Message<N>,
        rng: &mut R,
    ) -> Result<Step<T, N>> {
        self.apply(
            |dyn_hb, rng| dyn_hb.handle_message(sender_id, message, rng),
            rng,
        )
    }
    /// Returns a reference to the internal managed `DynamicHoneyBadger` instance.
    pub fn dyn_hb(&self) -> &DynamicHoneyBadger<Vec<T>, N> {
        &self.dyn_hb
    }
    /// Returns the information about the node IDs in the network, and the cryptographic keys.
    pub fn netinfo(&self) -> &NetworkInfo<N> {
        self.dyn_hb.netinfo()
    }
    /// Returns the current queue of the `QueueingHoneyBadger`.
    pub fn queue(&self) -> &Q {
        &self.queue
    }
    /// Applies a function `f` to the `DynamicHoneyBadger` instance and processes the step.
    ///
    /// Transactions that appear in any output batch are removed from the
    /// pending queue, and a follow-up proposal is made if possible.
    fn apply<R, F>(&mut self, f: F, rng: &mut R) -> Result<Step<T, N>>
    where
        F: FnOnce(
            &mut DynamicHoneyBadger<Vec<T>, N>,
            &mut R,
        ) -> dynamic_honey_badger::Result<Step<T, N>>,
        R: Rng,
    {
        let step = f(&mut self.dyn_hb, rng).map_err(Error::Input)?;
        self.queue
            .remove_multiple(step.output.iter().flat_map(Batch::iter));
        Ok(step.join(self.propose(rng)?))
    }
    /// Returns the epoch of the next batch that will be output.
    pub fn next_epoch(&self) -> u64 {
        self.dyn_hb.next_epoch()
    }
    /// Returns `true` if we are ready to propose our contribution for the next epoch, i.e. if the
    /// previous epoch has completed and we have either pending transactions or we are required to
    /// make a proposal to avoid stalling the network.
    fn can_propose(&self) -> bool {
        if self.dyn_hb.has_input() {
            return false; // Previous epoch is still in progress.
        }
        !self.queue.is_empty() || self.dyn_hb.should_propose()
    }
    /// Initiates the next epoch by proposing a batch from the queue.
    ///
    /// Loops because a proposal can complete an epoch immediately (e.g. with
    /// a single validator), making another proposal possible right away.
    fn propose<R: Rng>(&mut self, rng: &mut R) -> Result<Step<T, N>> {
        let mut step = Step::default();
        while self.can_propose() {
            // Propose a random `B / N` slice of the first `B` queue entries
            // (see the module docs), at least one transaction.
            let amount = cmp::max(1, self.batch_size / self.dyn_hb.netinfo().num_nodes());
            let proposal = self.queue.choose(rng, amount, self.batch_size);
            step.extend(
                self.dyn_hb
                    .handle_input(Input::User(proposal), rng)
                    .map_err(Error::Propose)?,
            );
        }
        Ok(step)
    }
}
/// A batch containing a list of transactions from at least two thirds of the validators.
pub type Batch<T, N> = DhbBatch<Vec<T>, N>;
| 37.206704 | 100 | 0.644595 |
39692836866baab6e7873920c7159ed3bb605be3 | 62,559 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A module for working with processes.
//!
//! This module is mostly concerned with spawning and interacting with child
//! processes, but it also provides [`abort`] and [`exit`] for terminating the
//! current process.
//!
//! # Spawning a process
//!
//! The [`Command`] struct is used to configure and spawn processes:
//!
//! ```
//! use std::process::Command;
//!
//! let output = Command::new("echo")
//! .arg("Hello world")
//! .output()
//! .expect("Failed to execute command");
//!
//! assert_eq!(b"Hello world\n", output.stdout.as_slice());
//! ```
//!
//! Several methods on [`Command`], such as [`spawn`] or [`output`], can be used
//! to spawn a process. In particular, [`output`] spawns the child process and
//! waits until the process terminates, while [`spawn`] will return a [`Child`]
//! that represents the spawned child process.
//!
//! # Handling I/O
//!
//! The [`stdout`], [`stdin`], and [`stderr`] of a child process can be
//! configured by passing an [`Stdio`] to the corresponding method on
//! [`Command`]. Once spawned, they can be accessed from the [`Child`]. For
//! example, piping output from one command into another command can be done
//! like so:
//!
//! ```no_run
//! use std::process::{Command, Stdio};
//!
//! // stdout must be configured with `Stdio::piped` in order to use
//! // `echo_child.stdout`
//! let echo_child = Command::new("echo")
//! .arg("Oh no, a tpyo!")
//! .stdout(Stdio::piped())
//! .spawn()
//! .expect("Failed to start echo process");
//!
//! // Note that `echo_child` is moved here, but we won't be needing
//! // `echo_child` anymore
//! let echo_out = echo_child.stdout.expect("Failed to open echo stdout");
//!
//! let mut sed_child = Command::new("sed")
//! .arg("s/tpyo/typo/")
//! .stdin(Stdio::from(echo_out))
//! .stdout(Stdio::piped())
//! .spawn()
//! .expect("Failed to start sed process");
//!
//! let output = sed_child.wait_with_output().expect("Failed to wait on sed");
//! assert_eq!(b"Oh no, a typo!\n", output.stdout.as_slice());
//! ```
//!
//! Note that [`ChildStderr`] and [`ChildStdout`] implement [`Read`] and
//! [`ChildStdin`] implements [`Write`]:
//!
//! ```no_run
//! use std::process::{Command, Stdio};
//! use std::io::Write;
//!
//! let mut child = Command::new("/bin/cat")
//! .stdin(Stdio::piped())
//! .stdout(Stdio::piped())
//! .spawn()
//! .expect("failed to execute child");
//!
//! {
//! // limited borrow of stdin
//! let stdin = child.stdin.as_mut().expect("failed to get stdin");
//! stdin.write_all(b"test").expect("failed to write to stdin");
//! }
//!
//! let output = child
//! .wait_with_output()
//! .expect("failed to wait on child");
//!
//! assert_eq!(b"test", output.stdout.as_slice());
//! ```
//!
//! [`abort`]: fn.abort.html
//! [`exit`]: fn.exit.html
//!
//! [`Command`]: struct.Command.html
//! [`spawn`]: struct.Command.html#method.spawn
//! [`output`]: struct.Command.html#method.output
//!
//! [`Child`]: struct.Child.html
//! [`ChildStdin`]: struct.ChildStdin.html
//! [`ChildStdout`]: struct.ChildStdout.html
//! [`ChildStderr`]: struct.ChildStderr.html
//! [`Stdio`]: struct.Stdio.html
//!
//! [`stdout`]: struct.Command.html#method.stdout
//! [`stdin`]: struct.Command.html#method.stdin
//! [`stderr`]: struct.Command.html#method.stderr
//!
//! [`Write`]: ../io/trait.Write.html
//! [`Read`]: ../io/trait.Read.html
#![stable(feature = "process", since = "1.0.0")]
use io::prelude::*;
use ffi::OsStr;
use fmt;
use fs;
use io::{self, Initializer};
use path::Path;
use str;
use sys::pipe::{read2, AnonPipe};
use sys::process as imp;
use sys_common::{AsInner, AsInnerMut, FromInner, IntoInner};
/// Representation of a running or exited child process.
///
/// This structure is used to represent and manage child processes. A child
/// process is created via the [`Command`] struct, which configures the
/// spawning process and can itself be constructed using a builder-style
/// interface.
///
/// There is no implementation of [`Drop`] for child processes,
/// so if you do not ensure the `Child` has exited then it will continue to
/// run, even after the `Child` handle to the child process has gone out of
/// scope.
///
/// Calling [`wait`](#method.wait) (or other functions that wrap around it) will make
/// the parent process wait until the child has actually exited before
/// continuing.
///
/// # Examples
///
/// ```should_panic
/// use std::process::Command;
///
/// let mut child = Command::new("/bin/cat")
///     .arg("file.txt")
///     .spawn()
///     .expect("failed to execute child");
///
/// let ecode = child.wait()
///             .expect("failed to wait on child");
///
/// assert!(ecode.success());
/// ```
///
/// [`Command`]: struct.Command.html
/// [`Drop`]: ../../core/ops/trait.Drop.html
/// [`wait`]: #method.wait
#[stable(feature = "process", since = "1.0.0")]
pub struct Child {
    // Platform-specific process handle (see `sys::process`).
    handle: imp::Process,
    /// The handle for writing to the child's standard input (stdin), if it has
    /// been captured.
    #[stable(feature = "process", since = "1.0.0")]
    pub stdin: Option<ChildStdin>,
    /// The handle for reading from the child's standard output (stdout), if it
    /// has been captured.
    #[stable(feature = "process", since = "1.0.0")]
    pub stdout: Option<ChildStdout>,
    /// The handle for reading from the child's standard error (stderr), if it
    /// has been captured.
    #[stable(feature = "process", since = "1.0.0")]
    pub stderr: Option<ChildStderr>,
}
// Internal accessor for the platform-specific process handle.
impl AsInner<imp::Process> for Child {
    fn as_inner(&self) -> &imp::Process { &self.handle }
}
// Builds the public `Child` from the platform handle and its captured pipes,
// wrapping each captured pipe (if any) in its public handle type.
impl FromInner<(imp::Process, imp::StdioPipes)> for Child {
    fn from_inner((handle, io): (imp::Process, imp::StdioPipes)) -> Child {
        Child {
            handle,
            stdin: io.stdin.map(ChildStdin::from_inner),
            stdout: io.stdout.map(ChildStdout::from_inner),
            stderr: io.stderr.map(ChildStderr::from_inner),
        }
    }
}
// Unwraps the platform-specific process handle, consuming the `Child`.
impl IntoInner<imp::Process> for Child {
    fn into_inner(self) -> imp::Process { self.handle }
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for Child {
    // Only the captured I/O handles are shown; the raw process handle is omitted.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Child")
            .field("stdin", &self.stdin)
            .field("stdout", &self.stdout)
            .field("stderr", &self.stderr)
            .finish()
    }
}
/// A handle to a child process's standard input (stdin).
///
/// This struct is used in the [`stdin`] field on [`Child`].
///
/// When an instance of `ChildStdin` is [dropped], the `ChildStdin`'s underlying
/// file handle will be closed. If the child process was blocked on input prior
/// to being dropped, it will become unblocked after dropping.
///
/// [`Child`]: struct.Child.html
/// [`stdin`]: struct.Child.html#structfield.stdin
/// [dropped]: ../ops/trait.Drop.html
#[stable(feature = "process", since = "1.0.0")]
pub struct ChildStdin {
    // Write end of the anonymous pipe connected to the child's stdin.
    inner: AnonPipe
}
#[stable(feature = "process", since = "1.0.0")]
impl Write for ChildStdin {
    // Writes go directly to the underlying pipe.
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.inner.write(buf)
    }

    // No-op: there is no userspace buffering on the pipe to flush.
    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
// Internal conversions between `ChildStdin` and its underlying pipe.
impl AsInner<AnonPipe> for ChildStdin {
    fn as_inner(&self) -> &AnonPipe { &self.inner }
}

impl IntoInner<AnonPipe> for ChildStdin {
    fn into_inner(self) -> AnonPipe { self.inner }
}

impl FromInner<AnonPipe> for ChildStdin {
    fn from_inner(pipe: AnonPipe) -> ChildStdin {
        ChildStdin { inner: pipe }
    }
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for ChildStdin {
    // Opaque representation: the raw pipe handle is intentionally not exposed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.pad("ChildStdin { .. }")
    }
}
/// A handle to a child process's standard output (stdout).
///
/// This struct is used in the [`stdout`] field on [`Child`].
///
/// When an instance of `ChildStdout` is [dropped], the `ChildStdout`'s
/// underlying file handle will be closed.
///
/// [`Child`]: struct.Child.html
/// [`stdout`]: struct.Child.html#structfield.stdout
/// [dropped]: ../ops/trait.Drop.html
#[stable(feature = "process", since = "1.0.0")]
pub struct ChildStdout {
    // Read end of the anonymous pipe connected to the child's stdout.
    inner: AnonPipe
}
#[stable(feature = "process", since = "1.0.0")]
impl Read for ChildStdout {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.inner.read(buf)
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        // The pipe read never inspects the destination buffer, so the buffer
        // does not need to be zero-initialized before reading into it.
        Initializer::nop()
    }
}
// Internal conversions between `ChildStdout` and its underlying pipe.
impl AsInner<AnonPipe> for ChildStdout {
    fn as_inner(&self) -> &AnonPipe { &self.inner }
}

impl IntoInner<AnonPipe> for ChildStdout {
    fn into_inner(self) -> AnonPipe { self.inner }
}

impl FromInner<AnonPipe> for ChildStdout {
    fn from_inner(pipe: AnonPipe) -> ChildStdout {
        ChildStdout { inner: pipe }
    }
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for ChildStdout {
    // Opaque representation: the raw pipe handle is intentionally not exposed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.pad("ChildStdout { .. }")
    }
}
/// A handle to a child process's stderr.
///
/// This struct is used in the [`stderr`] field on [`Child`].
///
/// When an instance of `ChildStderr` is [dropped], the `ChildStderr`'s
/// underlying file handle will be closed.
///
/// [`Child`]: struct.Child.html
/// [`stderr`]: struct.Child.html#structfield.stderr
/// [dropped]: ../ops/trait.Drop.html
#[stable(feature = "process", since = "1.0.0")]
pub struct ChildStderr {
    // Read end of the anonymous pipe connected to the child's stderr.
    inner: AnonPipe
}
#[stable(feature = "process", since = "1.0.0")]
impl Read for ChildStderr {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.inner.read(buf)
    }

    #[inline]
    unsafe fn initializer(&self) -> Initializer {
        // The pipe read never inspects the destination buffer, so the buffer
        // does not need to be zero-initialized before reading into it.
        Initializer::nop()
    }
}
// Internal conversions between `ChildStderr` and its underlying pipe.
impl AsInner<AnonPipe> for ChildStderr {
    fn as_inner(&self) -> &AnonPipe { &self.inner }
}

impl IntoInner<AnonPipe> for ChildStderr {
    fn into_inner(self) -> AnonPipe { self.inner }
}

impl FromInner<AnonPipe> for ChildStderr {
    fn from_inner(pipe: AnonPipe) -> ChildStderr {
        ChildStderr { inner: pipe }
    }
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for ChildStderr {
    // Opaque representation: the raw pipe handle is intentionally not exposed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.pad("ChildStderr { .. }")
    }
}
/// A process builder, providing fine-grained control
/// over how a new process should be spawned.
///
/// A default configuration can be
/// generated using `Command::new(program)`, where `program` gives a path to the
/// program to be executed. Additional builder methods allow the configuration
/// to be changed (for example, by adding arguments) prior to spawning:
///
/// ```
/// use std::process::Command;
///
/// let output = if cfg!(target_os = "windows") {
///     Command::new("cmd")
///             .args(&["/C", "echo hello"])
///             .output()
///             .expect("failed to execute process")
/// } else {
///     Command::new("sh")
///             .arg("-c")
///             .arg("echo hello")
///             .output()
///             .expect("failed to execute process")
/// };
///
/// let hello = output.stdout;
/// ```
#[stable(feature = "process", since = "1.0.0")]
pub struct Command {
    // All configuration (program, args, env, cwd, stdio) lives in the
    // platform-specific implementation.
    inner: imp::Command,
}
impl Command {
    /// Constructs a new `Command` for launching the program at
    /// path `program`, with the following default configuration:
    ///
    /// * No arguments to the program
    /// * Inherit the current process's environment
    /// * Inherit the current process's working directory
    /// * Inherit stdin/stdout/stderr for `spawn` or `status`, but create pipes for `output`
    ///
    /// Builder methods are provided to change these defaults and
    /// otherwise configure the process.
    ///
    /// If `program` is not an absolute path, the `PATH` will be searched in
    /// an OS-defined way.
    ///
    /// The search path to be used may be controlled by setting the
    /// `PATH` environment variable on the Command,
    /// but this has some implementation limitations on Windows
    /// (see <https://github.com/rust-lang/rust/issues/37519>).
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("sh")
    ///         .spawn()
    ///         .expect("sh command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn new<S: AsRef<OsStr>>(program: S) -> Command {
        Command { inner: imp::Command::new(program.as_ref()) }
    }

    /// Add an argument to pass to the program.
    ///
    /// Only one argument can be passed per use. So instead of:
    ///
    /// ```no_run
    /// # std::process::Command::new("sh")
    /// .arg("-C /path/to/repo")
    /// # ;
    /// ```
    ///
    /// usage would be:
    ///
    /// ```no_run
    /// # std::process::Command::new("sh")
    /// .arg("-C")
    /// .arg("/path/to/repo")
    /// # ;
    /// ```
    ///
    /// To pass multiple arguments see [`args`].
    ///
    /// [`args`]: #method.args
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .arg("-l")
    ///         .arg("-a")
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn arg<S: AsRef<OsStr>>(&mut self, arg: S) -> &mut Command {
        self.inner.arg(arg.as_ref());
        self
    }

    /// Add multiple arguments to pass to the program.
    ///
    /// To pass a single argument see [`arg`].
    ///
    /// [`arg`]: #method.arg
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .args(&["-l", "-a"])
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn args<I, S>(&mut self, args: I) -> &mut Command
        where I: IntoIterator<Item=S>, S: AsRef<OsStr>
    {
        for arg in args {
            self.arg(arg.as_ref());
        }
        self
    }

    /// Inserts or updates an environment variable mapping.
    ///
    /// Note that environment variable names are case-insensitive (but case-preserving) on Windows,
    /// and case-sensitive on all other platforms.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .env("PATH", "/bin")
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn env<K, V>(&mut self, key: K, val: V) -> &mut Command
        where K: AsRef<OsStr>, V: AsRef<OsStr>
    {
        self.inner.env_mut().set(key.as_ref(), val.as_ref());
        self
    }

    /// Add or update multiple environment variable mappings.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    /// use std::env;
    /// use std::collections::HashMap;
    ///
    /// let filtered_env : HashMap<String, String> =
    ///     env::vars().filter(|&(ref k, _)|
    ///         k == "TERM" || k == "TZ" || k == "LANG" || k == "PATH"
    ///     ).collect();
    ///
    /// Command::new("printenv")
    ///         .stdin(Stdio::null())
    ///         .stdout(Stdio::inherit())
    ///         .env_clear()
    ///         .envs(&filtered_env)
    ///         .spawn()
    ///         .expect("printenv failed to start");
    /// ```
    #[stable(feature = "command_envs", since = "1.19.0")]
    pub fn envs<I, K, V>(&mut self, vars: I) -> &mut Command
        where I: IntoIterator<Item=(K, V)>, K: AsRef<OsStr>, V: AsRef<OsStr>
    {
        // `ref` bindings let us borrow out of each yielded tuple without
        // moving its components.
        for (ref key, ref val) in vars {
            self.inner.env_mut().set(key.as_ref(), val.as_ref());
        }
        self
    }

    /// Removes an environment variable mapping.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .env_remove("PATH")
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn env_remove<K: AsRef<OsStr>>(&mut self, key: K) -> &mut Command {
        self.inner.env_mut().remove(key.as_ref());
        self
    }

    /// Clears the entire environment map for the child process.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .env_clear()
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn env_clear(&mut self) -> &mut Command {
        self.inner.env_mut().clear();
        self
    }

    /// Sets the working directory for the child process.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .current_dir("/bin")
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn current_dir<P: AsRef<Path>>(&mut self, dir: P) -> &mut Command {
        // The first `as_ref` yields `&Path`, the second its underlying `&OsStr`.
        self.inner.cwd(dir.as_ref().as_ref());
        self
    }

    /// Configuration for the child process's standard input (stdin) handle.
    ///
    /// Defaults to [`inherit`] when used with `spawn` or `status`, and
    /// defaults to [`piped`] when used with `output`.
    ///
    /// [`inherit`]: struct.Stdio.html#method.inherit
    /// [`piped`]: struct.Stdio.html#method.piped
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// Command::new("ls")
    ///         .stdin(Stdio::null())
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn stdin<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {
        self.inner.stdin(cfg.into().0);
        self
    }

    /// Configuration for the child process's standard output (stdout) handle.
    ///
    /// Defaults to [`inherit`] when used with `spawn` or `status`, and
    /// defaults to [`piped`] when used with `output`.
    ///
    /// [`inherit`]: struct.Stdio.html#method.inherit
    /// [`piped`]: struct.Stdio.html#method.piped
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// Command::new("ls")
    ///         .stdout(Stdio::null())
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn stdout<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {
        self.inner.stdout(cfg.into().0);
        self
    }

    /// Configuration for the child process's standard error (stderr) handle.
    ///
    /// Defaults to [`inherit`] when used with `spawn` or `status`, and
    /// defaults to [`piped`] when used with `output`.
    ///
    /// [`inherit`]: struct.Stdio.html#method.inherit
    /// [`piped`]: struct.Stdio.html#method.piped
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// Command::new("ls")
    ///         .stderr(Stdio::null())
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn stderr<T: Into<Stdio>>(&mut self, cfg: T) -> &mut Command {
        self.inner.stderr(cfg.into().0);
        self
    }

    /// Executes the command as a child process, returning a handle to it.
    ///
    /// By default, stdin, stdout and stderr are inherited from the parent.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// Command::new("ls")
    ///         .spawn()
    ///         .expect("ls command failed to start");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn spawn(&mut self) -> io::Result<Child> {
        // Default unconfigured streams to `Inherit`; the boolean presumably
        // selects the default stdin behavior — see `sys::process::Command::spawn`.
        self.inner.spawn(imp::Stdio::Inherit, true).map(Child::from_inner)
    }

    /// Executes the command as a child process, waiting for it to finish and
    /// collecting all of its output.
    ///
    /// By default, stdout and stderr are captured (and used to provide the
    /// resulting output). Stdin is not inherited from the parent and any
    /// attempt by the child process to read from the stdin stream will result
    /// in the stream immediately closing.
    ///
    /// # Examples
    ///
    /// ```should_panic
    /// use std::process::Command;
    /// let output = Command::new("/bin/cat")
    ///                      .arg("file.txt")
    ///                      .output()
    ///                      .expect("failed to execute process");
    ///
    /// println!("status: {}", output.status);
    /// println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
    /// println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
    ///
    /// assert!(output.status.success());
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn output(&mut self) -> io::Result<Output> {
        // Default unconfigured streams to pipes, then wait and collect them.
        self.inner.spawn(imp::Stdio::MakePipe, false).map(Child::from_inner)
            .and_then(|p| p.wait_with_output())
    }

    /// Executes a command as a child process, waiting for it to finish and
    /// collecting its exit status.
    ///
    /// By default, stdin, stdout and stderr are inherited from the parent.
    ///
    /// # Examples
    ///
    /// ```should_panic
    /// use std::process::Command;
    ///
    /// let status = Command::new("/bin/cat")
    ///                      .arg("file.txt")
    ///                      .status()
    ///                      .expect("failed to execute process");
    ///
    /// println!("process exited with: {}", status);
    ///
    /// assert!(status.success());
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn status(&mut self) -> io::Result<ExitStatus> {
        self.inner.spawn(imp::Stdio::Inherit, true).map(Child::from_inner)
                  .and_then(|mut p| p.wait())
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Command {
    /// Format the program and arguments of a Command for display. Any
    /// non-utf8 data is lossily converted using the utf8 replacement
    /// character.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to the platform-specific implementation's formatter.
        self.inner.fmt(f)
    }
}
// Internal accessors for the platform-specific command configuration.
impl AsInner<imp::Command> for Command {
    fn as_inner(&self) -> &imp::Command { &self.inner }
}

impl AsInnerMut<imp::Command> for Command {
    fn as_inner_mut(&mut self) -> &mut imp::Command { &mut self.inner }
}
/// The output of a finished process.
///
/// This is returned in a Result by either the [`output`] method of a
/// [`Command`], or the [`wait_with_output`] method of a [`Child`]
/// process.
///
/// [`Command`]: struct.Command.html
/// [`Child`]: struct.Child.html
/// [`output`]: struct.Command.html#method.output
/// [`wait_with_output`]: struct.Child.html#method.wait_with_output
#[derive(PartialEq, Eq, Clone)]
#[stable(feature = "process", since = "1.0.0")]
pub struct Output {
    /// The status (exit code) of the process.
    #[stable(feature = "process", since = "1.0.0")]
    pub status: ExitStatus,
    /// The data that the process wrote to stdout.
    #[stable(feature = "process", since = "1.0.0")]
    pub stdout: Vec<u8>,
    /// The data that the process wrote to stderr.
    #[stable(feature = "process", since = "1.0.0")]
    pub stderr: Vec<u8>,
}
// If either stderr or stdout are valid utf8 strings it prints the valid
// strings, otherwise it prints the byte sequence instead
#[stable(feature = "process_output_debug", since = "1.7.0")]
impl fmt::Debug for Output {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Prefer the `&str` Debug impl (quoted, escaped) when the bytes are
        // valid UTF-8; fall back to the raw `Vec<u8>` Debug impl otherwise.
        let stdout_utf8 = str::from_utf8(&self.stdout);
        let stdout_debug: &dyn fmt::Debug = match stdout_utf8 {
            Ok(ref str) => str,
            Err(_) => &self.stdout
        };

        let stderr_utf8 = str::from_utf8(&self.stderr);
        let stderr_debug: &dyn fmt::Debug = match stderr_utf8 {
            Ok(ref str) => str,
            Err(_) => &self.stderr
        };

        fmt.debug_struct("Output")
            .field("status", &self.status)
            .field("stdout", stdout_debug)
            .field("stderr", stderr_debug)
            .finish()
    }
}
/// Describes what to do with a standard I/O stream for a child process when
/// passed to the [`stdin`], [`stdout`], and [`stderr`] methods of [`Command`].
///
/// [`stdin`]: struct.Command.html#method.stdin
/// [`stdout`]: struct.Command.html#method.stdout
/// [`stderr`]: struct.Command.html#method.stderr
/// [`Command`]: struct.Command.html
// Newtype over the platform-specific stdio configuration.
#[stable(feature = "process", since = "1.0.0")]
pub struct Stdio(imp::Stdio);
impl Stdio {
    /// A new pipe should be arranged to connect the parent and child processes.
    ///
    /// # Examples
    ///
    /// With stdout:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// let output = Command::new("echo")
    ///     .arg("Hello, world!")
    ///     .stdout(Stdio::piped())
    ///     .output()
    ///     .expect("Failed to execute command");
    ///
    /// assert_eq!(String::from_utf8_lossy(&output.stdout), "Hello, world!\n");
    /// // Nothing echoed to console
    /// ```
    ///
    /// With stdin:
    ///
    /// ```no_run
    /// use std::io::Write;
    /// use std::process::{Command, Stdio};
    ///
    /// let mut child = Command::new("rev")
    ///     .stdin(Stdio::piped())
    ///     .stdout(Stdio::piped())
    ///     .spawn()
    ///     .expect("Failed to spawn child process");
    ///
    /// {
    ///     let mut stdin = child.stdin.as_mut().expect("Failed to open stdin");
    ///     stdin.write_all("Hello, world!".as_bytes()).expect("Failed to write to stdin");
    /// }
    ///
    /// let output = child.wait_with_output().expect("Failed to read stdout");
    /// assert_eq!(String::from_utf8_lossy(&output.stdout), "!dlrow ,olleH\n");
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn piped() -> Stdio { Stdio(imp::Stdio::MakePipe) }

    /// The child inherits from the corresponding parent descriptor.
    ///
    /// # Examples
    ///
    /// With stdout:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// let output = Command::new("echo")
    ///     .arg("Hello, world!")
    ///     .stdout(Stdio::inherit())
    ///     .output()
    ///     .expect("Failed to execute command");
    ///
    /// assert_eq!(String::from_utf8_lossy(&output.stdout), "");
    /// // "Hello, world!" echoed to console
    /// ```
    ///
    /// With stdin:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// let output = Command::new("rev")
    ///     .stdin(Stdio::inherit())
    ///     .stdout(Stdio::piped())
    ///     .output()
    ///     .expect("Failed to execute command");
    ///
    /// println!("You piped in the reverse of: {}", String::from_utf8_lossy(&output.stdout));
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn inherit() -> Stdio { Stdio(imp::Stdio::Inherit) }

    /// This stream will be ignored. This is the equivalent of attaching the
    /// stream to `/dev/null`
    ///
    /// # Examples
    ///
    /// With stdout:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// let output = Command::new("echo")
    ///     .arg("Hello, world!")
    ///     .stdout(Stdio::null())
    ///     .output()
    ///     .expect("Failed to execute command");
    ///
    /// assert_eq!(String::from_utf8_lossy(&output.stdout), "");
    /// // Nothing echoed to console
    /// ```
    ///
    /// With stdin:
    ///
    /// ```no_run
    /// use std::process::{Command, Stdio};
    ///
    /// let output = Command::new("rev")
    ///     .stdin(Stdio::null())
    ///     .stdout(Stdio::piped())
    ///     .output()
    ///     .expect("Failed to execute command");
    ///
    /// assert_eq!(String::from_utf8_lossy(&output.stdout), "");
    /// // Ignores any piped-in input
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn null() -> Stdio { Stdio(imp::Stdio::Null) }
}
// Internal constructor from the platform-specific stdio configuration.
impl FromInner<imp::Stdio> for Stdio {
    fn from_inner(inner: imp::Stdio) -> Stdio {
        Stdio(inner)
    }
}

#[stable(feature = "std_debug", since = "1.16.0")]
impl fmt::Debug for Stdio {
    // Opaque representation: the configured variant is intentionally not exposed.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.pad("Stdio { .. }")
    }
}
// The following conversions let an existing handle (a child's pipe or an open
// file) be used directly as a child process's stdin/stdout/stderr.
#[stable(feature = "stdio_from", since = "1.20.0")]
impl From<ChildStdin> for Stdio {
    fn from(child: ChildStdin) -> Stdio {
        Stdio::from_inner(child.into_inner().into())
    }
}

#[stable(feature = "stdio_from", since = "1.20.0")]
impl From<ChildStdout> for Stdio {
    fn from(child: ChildStdout) -> Stdio {
        Stdio::from_inner(child.into_inner().into())
    }
}

#[stable(feature = "stdio_from", since = "1.20.0")]
impl From<ChildStderr> for Stdio {
    fn from(child: ChildStderr) -> Stdio {
        Stdio::from_inner(child.into_inner().into())
    }
}

#[stable(feature = "stdio_from", since = "1.20.0")]
impl From<fs::File> for Stdio {
    fn from(file: fs::File) -> Stdio {
        Stdio::from_inner(file.into_inner().into())
    }
}
/// Describes the result of a process after it has terminated.
///
/// This `struct` is used to represent the exit status of a child process.
/// Child processes are created via the [`Command`] struct and their exit
/// status is exposed through the [`status`] method.
///
/// [`Command`]: struct.Command.html
/// [`status`]: struct.Command.html#method.status
// Newtype over the platform-specific exit-status representation.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
#[stable(feature = "process", since = "1.0.0")]
pub struct ExitStatus(imp::ExitStatus);
impl ExitStatus {
    /// Was termination successful? Signal termination is not considered a
    /// success, and success is defined as a zero exit status.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use std::process::Command;
    ///
    /// let status = Command::new("mkdir")
    ///                      .arg("projects")
    ///                      .status()
    ///                      .expect("failed to execute mkdir");
    ///
    /// if status.success() {
    ///     println!("'projects/' directory created");
    /// } else {
    ///     println!("failed to create 'projects/' directory");
    /// }
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn success(&self) -> bool {
        self.0.success()
    }

    /// Returns the exit code of the process, if any.
    ///
    /// On Unix, this will return `None` if the process was terminated
    /// by a signal; `std::os::unix` provides an extension trait for
    /// extracting the signal and other details from the `ExitStatus`.
    ///
    /// # Examples
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// let status = Command::new("mkdir")
    ///                      .arg("projects")
    ///                      .status()
    ///                      .expect("failed to execute mkdir");
    ///
    /// match status.code() {
    ///     Some(code) => println!("Exited with status code: {}", code),
    ///     None       => println!("Process terminated by signal")
    /// }
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn code(&self) -> Option<i32> {
        self.0.code()
    }
}
// Internal conversions between `ExitStatus` and its platform representation.
impl AsInner<imp::ExitStatus> for ExitStatus {
    fn as_inner(&self) -> &imp::ExitStatus { &self.0 }
}

impl FromInner<imp::ExitStatus> for ExitStatus {
    fn from_inner(s: imp::ExitStatus) -> ExitStatus {
        ExitStatus(s)
    }
}

#[stable(feature = "process", since = "1.0.0")]
impl fmt::Display for ExitStatus {
    // Delegates to the platform-specific Display implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        self.0.fmt(f)
    }
}
/// This type represents the status code a process can return to its
/// parent under normal termination.
///
/// Numeric values used in this type don't have portable meanings, and
/// different platforms may mask different amounts of them.
///
/// For the platform's canonical successful and unsuccessful codes, see
/// the [`SUCCESS`] and [`FAILURE`] associated items.
///
/// [`SUCCESS`]: #associatedconstant.SUCCESS
/// [`FAILURE`]: #associatedconstant.FAILURE
///
/// **Warning**: While various forms of this were discussed in [RFC #1937],
/// it was ultimately cut from that RFC, and thus this type is more subject
/// to change even than the usual unstable item churn.
///
/// [RFC #1937]: https://github.com/rust-lang/rfcs/pull/1937
// Newtype over the platform-specific exit-code representation.
#[derive(Clone, Copy, Debug)]
#[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
pub struct ExitCode(imp::ExitCode);
#[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
impl ExitCode {
    /// The canonical ExitCode for successful termination on this platform.
    ///
    /// Note that a `()`-returning `main` implicitly results in a successful
    /// termination, so there's no need to return this from `main` unless
    /// you're also returning other possible codes.
    #[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
    pub const SUCCESS: ExitCode = ExitCode(imp::ExitCode::SUCCESS);

    /// The canonical ExitCode for unsuccessful termination on this platform.
    ///
    /// If you're only returning this and `SUCCESS` from `main`, consider
    /// instead returning `Err(_)` and `Ok(())` respectively, which will
    /// return the same codes (but will also `eprintln!` the error).
    #[unstable(feature = "process_exitcode_placeholder", issue = "48711")]
    pub const FAILURE: ExitCode = ExitCode(imp::ExitCode::FAILURE);
}
impl Child {
    /// Forces the child process to exit. If the child has already exited, an [`InvalidInput`]
    /// error is returned.
    ///
    /// The mapping to [`ErrorKind`]s is not part of the compatibility contract of the function,
    /// especially the [`Other`] kind might change to more specific kinds in the future.
    ///
    /// This is equivalent to sending a SIGKILL on Unix platforms.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// let mut command = Command::new("yes");
    /// if let Ok(mut child) = command.spawn() {
    ///     child.kill().expect("command wasn't running");
    /// } else {
    ///     println!("yes command didn't start");
    /// }
    /// ```
    ///
    /// [`ErrorKind`]: ../io/enum.ErrorKind.html
    /// [`InvalidInput`]: ../io/enum.ErrorKind.html#variant.InvalidInput
    /// [`Other`]: ../io/enum.ErrorKind.html#variant.Other
    #[stable(feature = "process", since = "1.0.0")]
    pub fn kill(&mut self) -> io::Result<()> {
        // Delegate to the platform-specific process handle (SIGKILL on Unix,
        // per the docs above).
        self.handle.kill()
    }
    /// Returns the OS-assigned process identifier associated with this child.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// let mut command = Command::new("ls");
    /// if let Ok(child) = command.spawn() {
    ///     println!("Child's id is {}", child.id());
    /// } else {
    ///     println!("ls command didn't start");
    /// }
    /// ```
    #[stable(feature = "process_id", since = "1.3.0")]
    pub fn id(&self) -> u32 {
        self.handle.id()
    }
    /// Waits for the child to exit completely, returning the status that it
    /// exited with. This function will continue to have the same return value
    /// after it has been called at least once.
    ///
    /// The stdin handle to the child process, if any, will be closed
    /// before waiting. This helps avoid deadlock: it ensures that the
    /// child does not block waiting for input from the parent, while
    /// the parent waits for the child to exit.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// let mut command = Command::new("ls");
    /// if let Ok(mut child) = command.spawn() {
    ///     child.wait().expect("command wasn't running");
    ///     println!("Child has finished its execution!");
    /// } else {
    ///     println!("ls command didn't start");
    /// }
    /// ```
    #[stable(feature = "process", since = "1.0.0")]
    pub fn wait(&mut self) -> io::Result<ExitStatus> {
        // Close our end of the child's stdin first (see deadlock note above).
        drop(self.stdin.take());
        self.handle.wait().map(ExitStatus)
    }
    /// Attempts to collect the exit status of the child if it has already
    /// exited.
    ///
    /// This function will not block the calling thread and will only advisorily
    /// check to see if the child process has exited or not. If the child has
    /// exited then on Unix the process id is reaped. This function is
    /// guaranteed to repeatedly return a successful exit status so long as the
    /// child has already exited.
    ///
    /// If the child has exited, then `Ok(Some(status))` is returned. If the
    /// exit status is not available at this time then `Ok(None)` is returned.
    /// If an error occurs, then that error is returned.
    ///
    /// Note that unlike `wait`, this function will not attempt to drop stdin.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```no_run
    /// use std::process::Command;
    ///
    /// let mut child = Command::new("ls").spawn().unwrap();
    ///
    /// match child.try_wait() {
    ///     Ok(Some(status)) => println!("exited with: {}", status),
    ///     Ok(None) => {
    ///         println!("status not ready yet, let's really wait");
    ///         let res = child.wait();
    ///         println!("result: {:?}", res);
    ///     }
    ///     Err(e) => println!("error attempting to wait: {}", e),
    /// }
    /// ```
    #[stable(feature = "process_try_wait", since = "1.18.0")]
    pub fn try_wait(&mut self) -> io::Result<Option<ExitStatus>> {
        // Non-blocking: `None` from the handle means "still running".
        Ok(self.handle.try_wait()?.map(ExitStatus))
    }
    /// Simultaneously waits for the child to exit and collect all remaining
    /// output on the stdout/stderr handles, returning an `Output`
    /// instance.
    ///
    /// The stdin handle to the child process, if any, will be closed
    /// before waiting. This helps avoid deadlock: it ensures that the
    /// child does not block waiting for input from the parent, while
    /// the parent waits for the child to exit.
    ///
    /// By default, stdin, stdout and stderr are inherited from the parent.
    /// In order to capture the output into this `Result<Output>` it is
    /// necessary to create new pipes between parent and child. Use
    /// `stdout(Stdio::piped())` or `stderr(Stdio::piped())`, respectively.
    ///
    /// # Examples
    ///
    /// ```should_panic
    /// use std::process::{Command, Stdio};
    ///
    /// let child = Command::new("/bin/cat")
    ///     .arg("file.txt")
    ///     .stdout(Stdio::piped())
    ///     .spawn()
    ///     .expect("failed to execute child");
    ///
    /// let output = child
    ///     .wait_with_output()
    ///     .expect("failed to wait on child");
    ///
    /// assert!(output.status.success());
    /// ```
    ///
    #[stable(feature = "process", since = "1.0.0")]
    pub fn wait_with_output(mut self) -> io::Result<Output> {
        // Close stdin first so the child can't block on reading from us.
        drop(self.stdin.take());
        let (mut stdout, mut stderr) = (Vec::new(), Vec::new());
        match (self.stdout.take(), self.stderr.take()) {
            // Neither pipe was captured: nothing to collect.
            (None, None) => {}
            (Some(mut out), None) => {
                let res = out.read_to_end(&mut stdout);
                res.unwrap();
            }
            (None, Some(mut err)) => {
                let res = err.read_to_end(&mut stderr);
                res.unwrap();
            }
            (Some(out), Some(err)) => {
                // Both pipes captured: `read2` drains them together —
                // presumably to avoid blocking on one pipe while the child
                // fills the other (TODO confirm against `read2`'s docs).
                let res = read2(out.inner, &mut stdout, err.inner, &mut stderr);
                res.unwrap();
            }
        }
        let status = self.wait()?;
        Ok(Output {
            status,
            stdout,
            stderr,
        })
    }
}
/// Terminates the current process with the specified exit code.
///
/// This function will never return and will immediately terminate the current
/// process. The exit code is passed through to the underlying OS and will be
/// available for consumption by another process.
///
/// Note that because this function never returns, and that it terminates the
/// process, no destructors on the current stack or any other thread's stack
/// will be run. If a clean shutdown is needed it is recommended to only call
/// this function at a known point where there are no more destructors left
/// to run.
///
/// ## Platform-specific behavior
///
/// **Unix**: On Unix-like platforms, it is unlikely that all 32 bits of `exit`
/// will be visible to a parent process inspecting the exit code. On most
/// Unix-like platforms, only the eight least-significant bits are considered.
///
/// # Examples
///
/// Due to this function’s behavior regarding destructors, a conventional way
/// to use the function is to extract the actual computation to another
/// function and compute the exit code from its return value:
///
/// ```
/// fn run_app() -> Result<(), ()> {
/// // Application logic here
/// Ok(())
/// }
///
/// fn main() {
/// ::std::process::exit(match run_app() {
/// Ok(_) => 0,
/// Err(err) => {
/// eprintln!("error: {:?}", err);
/// 1
/// }
/// });
/// }
/// ```
///
/// Due to [platform-specific behavior], the exit code for this example will be
/// `0` on Linux, but `256` on Windows:
///
/// ```no_run
/// use std::process;
///
/// process::exit(0x0100);
/// ```
///
/// [platform-specific behavior]: #platform-specific-behavior
#[stable(feature = "rust1", since = "1.0.0")]
pub fn exit(code: i32) -> ! {
    // Run std's internal cleanup, then terminate via the platform exit.
    // User destructors are deliberately not run (see docs above).
    ::sys_common::cleanup();
    ::sys::os::exit(code)
}
/// Terminates the process in an abnormal fashion.
///
/// The function will never return and will immediately terminate the current
/// process in a platform specific "abnormal" manner.
///
/// Note that because this function never returns, and that it terminates the
/// process, no destructors on the current stack or any other thread's stack
/// will be run.
///
/// This is in contrast to the default behaviour of [`panic!`] which unwinds
/// the current thread's stack and calls all destructors.
/// When `panic="abort"` is set, either as an argument to `rustc` or in a
/// crate's Cargo.toml, [`panic!`] and `abort` are similar. However,
/// [`panic!`] will still call the [panic hook] while `abort` will not.
///
/// If a clean shutdown is needed it is recommended to only call
/// this function at a known point where there are no more destructors left
/// to run.
///
/// # Examples
///
/// ```no_run
/// use std::process;
///
/// fn main() {
/// println!("aborting");
///
/// process::abort();
///
/// // execution never gets here
/// }
/// ```
///
/// The `abort` function terminates the process, so the destructor will not
/// get run on the example below:
///
/// ```no_run
/// use std::process;
///
/// struct HasDrop;
///
/// impl Drop for HasDrop {
/// fn drop(&mut self) {
/// println!("This will never be printed!");
/// }
/// }
///
/// fn main() {
/// let _x = HasDrop;
/// process::abort();
/// // the destructor implemented for HasDrop will never get run
/// }
/// ```
///
/// [`panic!`]: ../../std/macro.panic.html
/// [panic hook]: ../../std/panic/fn.set_hook.html
#[stable(feature = "process_abort", since = "1.17.0")]
pub fn abort() -> ! {
    // Diverges: the platform abort never returns, matching the `!` type.
    unsafe { ::sys::abort_internal() };
}
/// Returns the OS-assigned process identifier associated with this process.
///
/// # Examples
///
/// Basic usage:
///
/// ```no_run
/// use std::process;
///
/// println!("My pid is {}", process::id());
/// ```
///
///
#[stable(feature = "getpid", since = "1.26.0")]
pub fn id() -> u32 {
    // Thin wrapper over the platform's pid query.
    ::sys::os::getpid()
}
/// A trait for implementing arbitrary return types in the `main` function.
///
/// The c-main function only supports to return integers as return type.
/// So, every type implementing the `Termination` trait has to be converted
/// to an integer.
///
/// The default implementations are returning `libc::EXIT_SUCCESS` to indicate
/// a successful execution. In case of a failure, `libc::EXIT_FAILURE` is returned.
#[cfg_attr(not(test), lang = "termination")]
#[unstable(feature = "termination_trait_lib", issue = "43301")]
#[rustc_on_unimplemented(
    message="`main` has invalid return type `{Self}`",
    label="`main` can only return types that implement `{Termination}`")]
pub trait Termination {
    /// Is called to get the representation of the value as status code.
    /// This status code is returned to the operating system.
    // Takes `self` by value: reporting consumes the terminating value.
    fn report(self) -> i32;
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for () {
    #[inline]
    // A unit-returning `main` always terminates successfully.
    fn report(self) -> i32 { ExitCode::SUCCESS.report() }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl<E: fmt::Debug> Termination for Result<(), E> {
    fn report(self) -> i32 {
        match self {
            Ok(()) => ().report(),
            // Re-wrap as `Result<!, E>` so error printing lives in one impl.
            Err(err) => Err::<!, _>(err).report(),
        }
    }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for ! {
    // `!` is uninhabited, so this body can never actually execute.
    fn report(self) -> i32 { self }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl<E: fmt::Debug> Termination for Result<!, E> {
    fn report(self) -> i32 {
        // `Ok(!)` is uninhabited, so only `Err` can occur: print and fail.
        let Err(err) = self;
        eprintln!("Error: {:?}", err);
        ExitCode::FAILURE.report()
    }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for ExitCode {
    #[inline]
    fn report(self) -> i32 {
        // Forward the platform-specific exit code as an `i32`.
        self.0.as_i32()
    }
}
#[cfg(all(test, not(any(target_os = "cloudabi", target_os = "emscripten"))))]
mod tests {
use io::prelude::*;
use io::ErrorKind;
use str;
use super::{Command, Output, Stdio};
// FIXME(#10380) these tests should not all be ignored on android.
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn smoke() {
let p = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 0"]).spawn()
} else {
Command::new("true").spawn()
};
assert!(p.is_ok());
let mut p = p.unwrap();
assert!(p.wait().unwrap().success());
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn smoke_failure() {
match Command::new("if-this-is-a-binary-then-the-world-has-ended").spawn() {
Ok(..) => panic!(),
Err(..) => {}
}
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn exit_reported_right() {
let p = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 1"]).spawn()
} else {
Command::new("false").spawn()
};
assert!(p.is_ok());
let mut p = p.unwrap();
assert!(p.wait().unwrap().code() == Some(1));
drop(p.wait());
}
#[test]
#[cfg(unix)]
#[cfg_attr(target_os = "android", ignore)]
fn signal_reported_right() {
use os::unix::process::ExitStatusExt;
let mut p = Command::new("/bin/sh")
.arg("-c").arg("read a")
.stdin(Stdio::piped())
.spawn().unwrap();
p.kill().unwrap();
match p.wait().unwrap().signal() {
Some(9) => {},
result => panic!("not terminated by signal 9 (instead, {:?})",
result),
}
}
pub fn run_output(mut cmd: Command) -> String {
let p = cmd.spawn();
assert!(p.is_ok());
let mut p = p.unwrap();
assert!(p.stdout.is_some());
let mut ret = String::new();
p.stdout.as_mut().unwrap().read_to_string(&mut ret).unwrap();
assert!(p.wait().unwrap().success());
return ret;
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn stdout_works() {
if cfg!(target_os = "windows") {
let mut cmd = Command::new("cmd");
cmd.args(&["/C", "echo foobar"]).stdout(Stdio::piped());
assert_eq!(run_output(cmd), "foobar\r\n");
} else {
let mut cmd = Command::new("echo");
cmd.arg("foobar").stdout(Stdio::piped());
assert_eq!(run_output(cmd), "foobar\n");
}
}
#[test]
#[cfg_attr(any(windows, target_os = "android"), ignore)]
fn set_current_dir_works() {
let mut cmd = Command::new("/bin/sh");
cmd.arg("-c").arg("pwd")
.current_dir("/")
.stdout(Stdio::piped());
assert_eq!(run_output(cmd), "/\n");
}
#[test]
#[cfg_attr(any(windows, target_os = "android"), ignore)]
fn stdin_works() {
let mut p = Command::new("/bin/sh")
.arg("-c").arg("read line; echo $line")
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn().unwrap();
p.stdin.as_mut().unwrap().write("foobar".as_bytes()).unwrap();
drop(p.stdin.take());
let mut out = String::new();
p.stdout.as_mut().unwrap().read_to_string(&mut out).unwrap();
assert!(p.wait().unwrap().success());
assert_eq!(out, "foobar\n");
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
#[cfg(unix)]
fn uid_works() {
use os::unix::prelude::*;
use libc;
let mut p = Command::new("/bin/sh")
.arg("-c").arg("true")
.uid(unsafe { libc::getuid() })
.gid(unsafe { libc::getgid() })
.spawn().unwrap();
assert!(p.wait().unwrap().success());
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
#[cfg(unix)]
fn uid_to_root_fails() {
use os::unix::prelude::*;
use libc;
// if we're already root, this isn't a valid test. Most of the bots run
// as non-root though (android is an exception).
if unsafe { libc::getuid() == 0 } { return }
assert!(Command::new("/bin/ls").uid(0).gid(0).spawn().is_err());
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_process_status() {
let mut status = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 1"]).status().unwrap()
} else {
Command::new("false").status().unwrap()
};
assert!(status.code() == Some(1));
status = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 0"]).status().unwrap()
} else {
Command::new("true").status().unwrap()
};
assert!(status.success());
}
#[test]
fn test_process_output_fail_to_start() {
match Command::new("/no-binary-by-this-name-should-exist").output() {
Err(e) => assert_eq!(e.kind(), ErrorKind::NotFound),
Ok(..) => panic!()
}
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_process_output_output() {
let Output {status, stdout, stderr}
= if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "echo hello"]).output().unwrap()
} else {
Command::new("echo").arg("hello").output().unwrap()
};
let output_str = str::from_utf8(&stdout).unwrap();
assert!(status.success());
assert_eq!(output_str.trim().to_string(), "hello");
assert_eq!(stderr, Vec::new());
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_process_output_error() {
let Output {status, stdout, stderr}
= if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "mkdir ."]).output().unwrap()
} else {
Command::new("mkdir").arg("./").output().unwrap()
};
assert!(status.code() == Some(1));
assert_eq!(stdout, Vec::new());
assert!(!stderr.is_empty());
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_finish_once() {
let mut prog = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap()
} else {
Command::new("false").spawn().unwrap()
};
assert!(prog.wait().unwrap().code() == Some(1));
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_finish_twice() {
let mut prog = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "exit 1"]).spawn().unwrap()
} else {
Command::new("false").spawn().unwrap()
};
assert!(prog.wait().unwrap().code() == Some(1));
assert!(prog.wait().unwrap().code() == Some(1));
}
#[test]
#[cfg_attr(target_os = "android", ignore)]
fn test_wait_with_output_once() {
let prog = if cfg!(target_os = "windows") {
Command::new("cmd").args(&["/C", "echo hello"]).stdout(Stdio::piped()).spawn().unwrap()
} else {
Command::new("echo").arg("hello").stdout(Stdio::piped()).spawn().unwrap()
};
let Output {status, stdout, stderr} = prog.wait_with_output().unwrap();
let output_str = str::from_utf8(&stdout).unwrap();
assert!(status.success());
assert_eq!(output_str.trim().to_string(), "hello");
assert_eq!(stderr, Vec::new());
}
#[cfg(all(unix, not(target_os="android")))]
pub fn env_cmd() -> Command {
Command::new("env")
}
#[cfg(target_os="android")]
pub fn env_cmd() -> Command {
let mut cmd = Command::new("/system/bin/sh");
cmd.arg("-c").arg("set");
cmd
}
#[cfg(windows)]
pub fn env_cmd() -> Command {
let mut cmd = Command::new("cmd");
cmd.arg("/c").arg("set");
cmd
}
#[test]
fn test_inherit_env() {
use env;
let result = env_cmd().output().unwrap();
let output = String::from_utf8(result.stdout).unwrap();
for (ref k, ref v) in env::vars() {
// Don't check android RANDOM variable which seems to change
// whenever the shell runs, and our `env_cmd` is indeed running a
// shell which means it'll get a different RANDOM than we probably
// have.
//
// Also skip env vars with `-` in the name on android because, well,
// I'm not sure. It appears though that the `set` command above does
// not print env vars with `-` in the name, so we just skip them
// here as we won't find them in the output. Note that most env vars
// use `_` instead of `-`, but our build system sets a few env vars
// with `-` in the name.
if cfg!(target_os = "android") &&
(*k == "RANDOM" || k.contains("-")) {
continue
}
// Windows has hidden environment variables whose names start with
// equals signs (`=`). Those do not show up in the output of the
// `set` command.
assert!((cfg!(windows) && k.starts_with("=")) ||
k.starts_with("DYLD") ||
output.contains(&format!("{}={}", *k, *v)) ||
output.contains(&format!("{}='{}'", *k, *v)),
"output doesn't contain `{}={}`\n{}",
k, v, output);
}
}
#[test]
fn test_override_env() {
use env;
// In some build environments (such as chrooted Nix builds), `env` can
// only be found in the explicitly-provided PATH env variable, not in
// default places such as /bin or /usr/bin. So we need to pass through
// PATH to our sub-process.
let mut cmd = env_cmd();
cmd.env_clear().env("RUN_TEST_NEW_ENV", "123");
if let Some(p) = env::var_os("PATH") {
cmd.env("PATH", &p);
}
let result = cmd.output().unwrap();
let output = String::from_utf8_lossy(&result.stdout).to_string();
assert!(output.contains("RUN_TEST_NEW_ENV=123"),
"didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output);
}
#[test]
fn test_add_to_env() {
let result = env_cmd().env("RUN_TEST_NEW_ENV", "123").output().unwrap();
let output = String::from_utf8_lossy(&result.stdout).to_string();
assert!(output.contains("RUN_TEST_NEW_ENV=123"),
"didn't find RUN_TEST_NEW_ENV inside of:\n\n{}", output);
}
#[test]
fn test_capture_env_at_spawn() {
use env;
let mut cmd = env_cmd();
cmd.env("RUN_TEST_NEW_ENV1", "123");
// This variable will not be present if the environment has already
// been captured above.
env::set_var("RUN_TEST_NEW_ENV2", "456");
let result = cmd.output().unwrap();
env::remove_var("RUN_TEST_NEW_ENV2");
let output = String::from_utf8_lossy(&result.stdout).to_string();
assert!(output.contains("RUN_TEST_NEW_ENV1=123"),
"didn't find RUN_TEST_NEW_ENV1 inside of:\n\n{}", output);
assert!(output.contains("RUN_TEST_NEW_ENV2=456"),
"didn't find RUN_TEST_NEW_ENV2 inside of:\n\n{}", output);
}
// Regression tests for #30858.
#[test]
fn test_interior_nul_in_progname_is_error() {
match Command::new("has-some-\0\0s-inside").spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
#[test]
fn test_interior_nul_in_arg_is_error() {
match Command::new("echo").arg("has-some-\0\0s-inside").spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
#[test]
fn test_interior_nul_in_args_is_error() {
match Command::new("echo").args(&["has-some-\0\0s-inside"]).spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
#[test]
fn test_interior_nul_in_current_dir_is_error() {
match Command::new("echo").current_dir("has-some-\0\0s-inside").spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
// Regression tests for #30862.
#[test]
fn test_interior_nul_in_env_key_is_error() {
match env_cmd().env("has-some-\0\0s-inside", "value").spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
#[test]
fn test_interior_nul_in_env_value_is_error() {
match env_cmd().env("key", "has-some-\0\0s-inside").spawn() {
Err(e) => assert_eq!(e.kind(), ErrorKind::InvalidInput),
Ok(_) => panic!(),
}
}
/// Test that process creation flags work by debugging a process.
/// Other creation flags make it hard or impossible to detect
/// behavioral changes in the process.
#[test]
#[cfg(windows)]
fn test_creation_flags() {
use os::windows::process::CommandExt;
use sys::c::{BOOL, DWORD, INFINITE};
#[repr(C, packed)]
struct DEBUG_EVENT {
pub event_code: DWORD,
pub process_id: DWORD,
pub thread_id: DWORD,
// This is a union in the real struct, but we don't
// need this data for the purposes of this test.
pub _junk: [u8; 164],
}
extern "system" {
fn WaitForDebugEvent(lpDebugEvent: *mut DEBUG_EVENT, dwMilliseconds: DWORD) -> BOOL;
fn ContinueDebugEvent(dwProcessId: DWORD, dwThreadId: DWORD,
dwContinueStatus: DWORD) -> BOOL;
}
const DEBUG_PROCESS: DWORD = 1;
const EXIT_PROCESS_DEBUG_EVENT: DWORD = 5;
const DBG_EXCEPTION_NOT_HANDLED: DWORD = 0x80010001;
let mut child = Command::new("cmd")
.creation_flags(DEBUG_PROCESS)
.stdin(Stdio::piped()).spawn().unwrap();
child.stdin.take().unwrap().write_all(b"exit\r\n").unwrap();
let mut events = 0;
let mut event = DEBUG_EVENT {
event_code: 0,
process_id: 0,
thread_id: 0,
_junk: [0; 164],
};
loop {
if unsafe { WaitForDebugEvent(&mut event as *mut DEBUG_EVENT, INFINITE) } == 0 {
panic!("WaitForDebugEvent failed!");
}
events += 1;
if event.event_code == EXIT_PROCESS_DEBUG_EVENT {
break;
}
if unsafe { ContinueDebugEvent(event.process_id,
event.thread_id,
DBG_EXCEPTION_NOT_HANDLED) } == 0 {
panic!("ContinueDebugEvent failed!");
}
}
assert!(events > 0);
}
#[test]
fn test_command_implements_send() {
fn take_send_type<T: Send>(_: T) {}
take_send_type(Command::new(""))
}
}
| 31.95046 | 99 | 0.554692 |
288bad51d8e25ef9cd5a674a1b3de467dc818a9b | 445 | use crate::ics05_port::capabilities::Capability;
use crate::ics24_host::identifier::PortId;
// A context supplying all the necessary read-only dependencies for processing any information regarding a port.
pub trait PortReader {
    /// Looks up the module capability bound to `port_id`, or `None` if the
    /// port has no module registered.
    fn lookup_module_by_port(&self, port_id: &PortId) -> Option<Capability>;
    /// Verifies that `key` is the capability associated with `port_id`.
    // NOTE(review): method name is a typo of `authenticate`; renaming would
    // break implementors and callers, so it is documented rather than fixed.
    fn autenthenticate(&self, key: &Capability, port_id: &PortId) -> bool;
}
// TODO(review): consider changing `lookup_module_by_port` to return
// `Result<Capability, Error>` instead of `Option` (leftover design sketch).
| 40.454545 | 112 | 0.750562 |
db7753b8da773ae8fc7fa4aa5b8cf17725d8cdf5 | 2,495 | cfg_any_client! {
use crate::{constants::Type, Result};
}
/// OPT pseudo-record.
///
/// - [RFC 2671](https://www.rfc-editor.org/rfc/rfc2671.html)
/// - [RFC 6891](https://www.rfc-editor.org/rfc/rfc6891.html)
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Default)]
pub struct Opt {
udp_payload_size: u16,
rcode_extension: u8,
version: u8,
flags: u16,
}
impl Opt {
cfg_any_client! {
#[allow(dead_code)]
#[inline]
pub(crate) fn new(version: u8, udp_payload_size: u16) -> Opt {
Opt {
udp_payload_size,
version,
..Default::default()
}
}
fn ttl(&self) -> u32 {
(self.rcode_extension as u32) << 24 | (self.version as u32) << 16 | self.flags as u32
}
}
#[inline]
pub(crate) fn from_msg(rclass: u16, ttl: u32) -> Opt {
Opt {
udp_payload_size: rclass,
rcode_extension: ((ttl & 0xFF000000u32) >> 24) as u8,
version: ((ttl & 0x00FF0000u32) >> 16) as u8,
flags: (ttl & 0x0000FFFF) as u16,
}
}
/// Returns the UDP payload size
#[inline]
pub fn udp_payload_size(&self) -> u16 {
self.udp_payload_size
}
/// Returns the EDNS `RCODE` extension value (upper 8 bits).
///
/// See [`RCodeValue::extended`] for a way to combine a base `RCODE` value
/// from the message header and this extension to a final extended `RCODE` value.
///
/// [RFC 6891 section 6.1.3](https://www.rfc-editor.org/rfc/rfc6891.html#section-6.1.3)
///
/// [`RCodeValue::extended`]: crate::message::RCodeValue::extended
#[inline]
pub fn rcode_extension(&self) -> u8 {
self.rcode_extension
}
/// Returns the `OPT` version.
#[inline]
pub fn version(&self) -> u8 {
self.version
}
/// Returns the `DNSSEC OK` bit.
///
/// [RFC3225](https://www.rfc-editor.org/rfc/rfc3225.html)
#[inline]
pub fn dnssec_ok(&self) -> bool {
(self.flags & 0b1000_0000_0000_0000) != 0
}
}
cfg_any_client! {
    impl crate::bytes::WCursor<'_> {
        // Serializes an OPT pseudo-RR with an empty RDATA section.
        pub(crate) fn write_opt(&mut self, opt: &Opt) -> Result<()> {
            self.u8(0)?; // DNAME: root (single zero-length label)
            self.u16_be(Type::Opt as u16)?; // TYPE: OPT
            self.u16_be(opt.udp_payload_size)?; // CLASS: requestor's UDP payload size
            self.u32_be(opt.ttl())?; // TTL: ext-RCODE | version | flags
            self.u16_be(0)?; // RDLEN: 0 — no EDNS options attached
            Ok(())
        }
    }
}
4af132389e2e0e3e6da3172411eeba45ccc3b532 | 2,644 | use std::{
pin::Pin,
task::{Context, Poll},
};
use bytes::Bytes;
use futures_core::{ready, Stream};
use pin_project_lite::pin_project;
use crate::error::Error;
use super::{BodySize, MessageBody};
pin_project! {
    /// Known sized streaming response wrapper.
    ///
    /// This body implementation should be used if total size of stream is known. Data get sent as is
    /// without using transfer encoding.
    pub struct SizedStream<S> {
        // Declared total payload size in bytes; reported via `MessageBody::size`.
        size: u64,
        #[pin]
        stream: S,
    }
}
impl<S> SizedStream<S>
where
    S: Stream<Item = Result<Bytes, Error>>,
{
    /// Wraps `stream` as a body of declared length `size`.
    ///
    /// NOTE(review): `size` is trusted as-is here; presumably the stream is
    /// expected to yield exactly that many bytes — confirm with callers.
    pub fn new(size: u64, stream: S) -> Self {
        SizedStream { size, stream }
    }
}
impl<S> MessageBody for SizedStream<S>
where
    S: Stream<Item = Result<Bytes, Error>>,
{
    type Error = Error;

    /// Reports the declared payload length so no transfer encoding is used.
    fn size(&self) -> BodySize {
        // `self.size` is already a `u64`; the previous `as u64` cast was a
        // no-op (clippy::unnecessary_cast).
        BodySize::Sized(self.size)
    }

    /// Attempts to pull out the next value of the underlying [`Stream`].
    ///
    /// Empty values are skipped to prevent [`SizedStream`]'s transmission being
    /// ended on a zero-length chunk, but rather proceed until the underlying
    /// [`Stream`] ends.
    fn poll_next(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<Option<Result<Bytes, Self::Error>>> {
        loop {
            let stream = self.as_mut().project().stream;

            // Skip zero-length `Bytes` chunks: an empty chunk must not be
            // mistaken for end-of-body by downstream consumers.
            let chunk = match ready!(stream.poll_next(cx)) {
                Some(Ok(ref bytes)) if bytes.is_empty() => continue,
                val => val,
            };

            return Poll::Ready(chunk);
        }
    }
}
#[cfg(test)]
mod tests {
    use actix_rt::pin;
    use actix_utils::future::poll_fn;
    use futures_util::stream;
    use super::*;
    use crate::body::to_bytes;
    // The middle "" chunk must be skipped, not treated as end-of-stream:
    // both "1" and "2" should still be yielded.
    #[actix_rt::test]
    async fn skips_empty_chunks() {
        let body = SizedStream::new(
            2,
            stream::iter(["1", "", "2"].iter().map(|&v| Ok(Bytes::from(v)))),
        );
        pin!(body);
        assert_eq!(
            poll_fn(|cx| body.as_mut().poll_next(cx))
                .await
                .unwrap()
                .ok(),
            Some(Bytes::from("1")),
        );
        assert_eq!(
            poll_fn(|cx| body.as_mut().poll_next(cx))
                .await
                .unwrap()
                .ok(),
            Some(Bytes::from("2")),
        );
    }
    // Collecting the whole body should concatenate the non-empty chunks.
    #[actix_rt::test]
    async fn read_to_bytes() {
        let body = SizedStream::new(
            2,
            stream::iter(["1", "", "2"].iter().map(|&v| Ok(Bytes::from(v)))),
        );
        assert_eq!(to_bytes(body).await.ok(), Some(Bytes::from("12")));
    }
}
| 23.607143 | 101 | 0.52534 |
ddf23a8dd28a67b977b7060003ccd4124c6fb861 | 1,379 | use helgoboss_midi::{ShortMessage, StructuredShortMessage};
use notation_model::prelude::*;
#[derive(Clone, Debug)]
pub struct MidiMessage {
    // How the originating entry is passed/played (copied in `of_entry`).
    pub pass_mode: EntryPassMode,
    // Bar-relative position of the originating entry.
    pub pos: BarPosition,
    // Tied duration of the entry, in `Units`.
    pub duration: Units,
    // When true, the message takes effect `duration` after `pos`
    // (see `effect_position`).
    pub delay: bool,
    // The actual MIDI short message payload.
    pub midi: StructuredShortMessage,
}
impl MidiMessage {
    /// Builds a message from explicit field values.
    pub fn new(pass_mode: EntryPassMode, pos: BarPosition, duration: Units, delay: bool, midi: StructuredShortMessage) -> Self {
        Self { pass_mode, pos, duration, delay, midi }
    }

    /// Builds a message by copying pass mode, position and tied duration
    /// from a lane entry.
    pub fn of_entry(entry: &LaneEntry, delay: bool, midi: StructuredShortMessage) -> Self {
        Self::new(
            entry.pass_mode(),
            entry.bar_position(),
            entry.tied_units(),
            delay,
            midi,
        )
    }

    /// Ordinal of the bar this message belongs to.
    pub fn bar_ordinal(&self) -> usize {
        self.pos.bar_ordinal
    }

    /// Position at which the message takes effect; a delayed message is
    /// shifted forward by its own duration.
    pub fn effect_position(&self) -> BarPosition {
        match self.delay {
            true => self.pos.with_delay(self.duration),
            false => self.pos,
        }
    }

    /// Effective position expressed in `Units`.
    pub fn effect_units(&self) -> Units {
        let effective = self.effect_position();
        effective.into()
    }

    /// Raw 3-byte MIDI encoding: status byte, data byte 1, data byte 2.
    pub fn to_midi(&self) -> [u8; 3] {
        let msg = &self.midi;
        [msg.status_byte(), msg.data_byte_1().into(), msg.data_byte_2().into()]
    }
}
| 26.519231 | 128 | 0.551849 |
71987d73e73ddb286ff4724a6c07b02ff3dcbbd0 | 3,948 |
use serde::{
Deserialize,
Serialize
};
use crate::{
error
};
use sciimg::cahvor::Cahvor;
use std::fs::File;
use std::io::Read;
/// Read-only accessors for per-image metadata, used to normalize
/// mission-specific metadata into the common [`Metadata`] struct
/// (see `convert_to_std_metadata`).
pub trait ImageMetadata {
    fn get_link(&self) -> String;
    fn get_credit(&self) -> String;
    fn get_sol(&self) -> u32;
    fn get_imageid(&self) -> String;
    fn get_caption(&self) -> String;
    fn get_date_taken_utc(&self) -> String;
    fn get_date_taken_mars(&self) -> Option<String>;
    fn get_subframe_rect(&self) -> Option<Vec<f64>>;
    // NOTE(review): disabled accessor; `Metadata` has no matching field either.
    // fn get_dimension(&self) -> Option<&[f64]>;
    fn get_scale_factor(&self) -> u32;
    fn get_instrument(&self) -> String;
    fn get_filter_name(&self) -> Option<String>;
    fn get_camera_vector(&self) -> Option<Vec<f64>>;
    fn get_camera_model_component_list(&self) -> Option<Cahvor>;
    fn get_camera_position(&self) -> Option<Vec<f64>>;
    fn get_camera_model_type(&self) -> Option<String>;
    fn get_site(&self) -> Option<u32>;
    fn get_drive(&self) -> Option<u32>;
    fn get_mast_az(&self) -> Option<f64>;
    fn get_mast_el(&self) -> Option<f64>;
    fn get_sclk(&self) -> Option<f64>;
}
/// Common image metadata record, (de)serialized to/from JSON.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct Metadata {
    pub link:String,
    pub credit:String,
    pub sol:u32,
    pub imageid:String,
    pub caption:String,
    pub date_taken_utc:String,
    pub date_taken_mars:Option<String>,
    pub subframe_rect:Option<Vec<f64>>,
    pub scale_factor:u32,
    pub instrument:String,
    pub filter_name: Option<String>,
    pub camera_vector:Option<Vec<f64>>,
    pub mast_az: Option<f64>,
    pub mast_el: Option<f64>,
    pub sclk: Option<f64>,
    #[serde(with = "crate::jsonfetch::tuple_format")]
    pub camera_position: Option<Vec<f64>>,
    pub camera_model_type: Option<String>,
    pub site:Option<u32>,
    pub drive:Option<u32>,
    #[serde(with = "crate::jsonfetch::cahvor_format")]
    pub camera_model_component_list: Option<Cahvor>,
    // Processing-step flags below default to `false` when absent from the
    // JSON (see `default_step_status`): each records whether that
    // calibration/processing step has been applied to the image.
    #[serde(default = "default_step_status")]
    pub decompand:bool,
    #[serde(default = "default_step_status")]
    pub debayer:bool,
    #[serde(default = "default_step_status")]
    pub flatfield:bool,
    #[serde(default = "default_step_status")]
    pub radiometric:bool,
    #[serde(default = "default_step_status")]
    pub inpaint:bool,
    #[serde(default = "default_step_status")]
    pub cropped:bool
}
// Serde default for the processing-step flags: a record loaded without the
// flag is treated as not yet processed.
fn default_step_status() -> bool {
    false
}
/// Converts any [`ImageMetadata`] implementor into the common [`Metadata`]
/// struct. All processing-step flags are initialized to their default
/// (`false`, i.e. not yet processed).
pub fn convert_to_std_metadata<T:ImageMetadata>(im:&T) -> Metadata {
    Metadata{
        link:im.get_link(),
        credit:im.get_credit(),
        sol:im.get_sol(),
        imageid:im.get_imageid(),
        caption:im.get_caption(),
        date_taken_utc:im.get_date_taken_utc(),
        date_taken_mars:im.get_date_taken_mars(),
        subframe_rect:im.get_subframe_rect(),
        scale_factor:im.get_scale_factor(),
        instrument:im.get_instrument(),
        filter_name:im.get_filter_name(),
        decompand:default_step_status(),
        debayer:default_step_status(),
        flatfield:default_step_status(),
        radiometric:default_step_status(),
        inpaint:default_step_status(),
        cropped:default_step_status(),
        camera_vector:im.get_camera_vector(),
        camera_model_component_list:im.get_camera_model_component_list(),
        camera_position:im.get_camera_position(),
        camera_model_type:im.get_camera_model_type(),
        site:im.get_site(),
        drive:im.get_drive(),
        mast_el:im.get_mast_el(),
        mast_az:im.get_mast_az(),
        sclk:im.get_sclk(),
    }
}
pub fn load_image_metadata(json_path:&String) -> error::Result<Metadata> {
let mut file = match File::open(&json_path) {
Err(why) => panic!("couldn't open {}", why),
Ok(file) => file,
};
let mut buf : Vec<u8> = Vec::default();
file.read_to_end(&mut buf).unwrap();
let json = String::from_utf8(buf).unwrap();
let metadata = serde_json::from_str(&json).unwrap();
Ok(metadata)
} | 29.244444 | 74 | 0.651976 |
6131c1a85e3536c99376dfb7bdd22e6858247803 | 23,900 | #[doc = "Register `INTENCLR` reader"]
pub struct R(crate::R<INTENCLR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<INTENCLR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<INTENCLR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<INTENCLR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `INTENCLR` writer"]
pub struct W(crate::W<INTENCLR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<INTENCLR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<INTENCLR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<INTENCLR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Write '1' to Disable interrupt for STOPPED event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STOPPED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<STOPPED_A> for bool {
#[inline(always)]
fn from(variant: STOPPED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `STOPPED` reader - Write '1' to Disable interrupt for STOPPED event"]
pub struct STOPPED_R(crate::FieldReader<bool, STOPPED_A>);
impl STOPPED_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
STOPPED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> STOPPED_A {
match self.bits {
false => STOPPED_A::DISABLED,
true => STOPPED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == STOPPED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == STOPPED_A::ENABLED
}
}
impl core::ops::Deref for STOPPED_R {
type Target = crate::FieldReader<bool, STOPPED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for STOPPED event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STOPPED_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<STOPPED_AW> for bool {
#[inline(always)]
fn from(variant: STOPPED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `STOPPED` writer - Write '1' to Disable interrupt for STOPPED event"]
pub struct STOPPED_W<'a> {
w: &'a mut W,
}
impl<'a> STOPPED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: STOPPED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(STOPPED_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQSTARTED0_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<SEQSTARTED0_A> for bool {
#[inline(always)]
fn from(variant: SEQSTARTED0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQSTARTED0` reader - Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event"]
pub struct SEQSTARTED0_R(crate::FieldReader<bool, SEQSTARTED0_A>);
impl SEQSTARTED0_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SEQSTARTED0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SEQSTARTED0_A {
match self.bits {
false => SEQSTARTED0_A::DISABLED,
true => SEQSTARTED0_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == SEQSTARTED0_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == SEQSTARTED0_A::ENABLED
}
}
impl core::ops::Deref for SEQSTARTED0_R {
type Target = crate::FieldReader<bool, SEQSTARTED0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQSTARTED0_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<SEQSTARTED0_AW> for bool {
#[inline(always)]
fn from(variant: SEQSTARTED0_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQSTARTED0` writer - Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event"]
pub struct SEQSTARTED0_W<'a> {
w: &'a mut W,
}
impl<'a> SEQSTARTED0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SEQSTARTED0_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(SEQSTARTED0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQSTARTED1_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<SEQSTARTED1_A> for bool {
#[inline(always)]
fn from(variant: SEQSTARTED1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQSTARTED1` reader - Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event"]
pub struct SEQSTARTED1_R(crate::FieldReader<bool, SEQSTARTED1_A>);
impl SEQSTARTED1_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SEQSTARTED1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SEQSTARTED1_A {
match self.bits {
false => SEQSTARTED1_A::DISABLED,
true => SEQSTARTED1_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == SEQSTARTED1_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == SEQSTARTED1_A::ENABLED
}
}
impl core::ops::Deref for SEQSTARTED1_R {
type Target = crate::FieldReader<bool, SEQSTARTED1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQSTARTED1_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<SEQSTARTED1_AW> for bool {
#[inline(always)]
fn from(variant: SEQSTARTED1_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQSTARTED1` writer - Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event"]
pub struct SEQSTARTED1_W<'a> {
w: &'a mut W,
}
impl<'a> SEQSTARTED1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SEQSTARTED1_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(SEQSTARTED1_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | ((value as u32 & 0x01) << 3);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for SEQEND\\[0\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQEND0_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<SEQEND0_A> for bool {
#[inline(always)]
fn from(variant: SEQEND0_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQEND0` reader - Write '1' to Disable interrupt for SEQEND\\[0\\]
event"]
pub struct SEQEND0_R(crate::FieldReader<bool, SEQEND0_A>);
impl SEQEND0_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SEQEND0_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SEQEND0_A {
match self.bits {
false => SEQEND0_A::DISABLED,
true => SEQEND0_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == SEQEND0_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == SEQEND0_A::ENABLED
}
}
impl core::ops::Deref for SEQEND0_R {
type Target = crate::FieldReader<bool, SEQEND0_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for SEQEND\\[0\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQEND0_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<SEQEND0_AW> for bool {
#[inline(always)]
fn from(variant: SEQEND0_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQEND0` writer - Write '1' to Disable interrupt for SEQEND\\[0\\]
event"]
pub struct SEQEND0_W<'a> {
w: &'a mut W,
}
impl<'a> SEQEND0_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SEQEND0_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(SEQEND0_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for SEQEND\\[1\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQEND1_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<SEQEND1_A> for bool {
#[inline(always)]
fn from(variant: SEQEND1_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQEND1` reader - Write '1' to Disable interrupt for SEQEND\\[1\\]
event"]
pub struct SEQEND1_R(crate::FieldReader<bool, SEQEND1_A>);
impl SEQEND1_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
SEQEND1_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SEQEND1_A {
match self.bits {
false => SEQEND1_A::DISABLED,
true => SEQEND1_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == SEQEND1_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == SEQEND1_A::ENABLED
}
}
impl core::ops::Deref for SEQEND1_R {
type Target = crate::FieldReader<bool, SEQEND1_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for SEQEND\\[1\\]
event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SEQEND1_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<SEQEND1_AW> for bool {
#[inline(always)]
fn from(variant: SEQEND1_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `SEQEND1` writer - Write '1' to Disable interrupt for SEQEND\\[1\\]
event"]
pub struct SEQEND1_W<'a> {
w: &'a mut W,
}
impl<'a> SEQEND1_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SEQEND1_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(SEQEND1_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | ((value as u32 & 0x01) << 5);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for PWMPERIODEND event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PWMPERIODEND_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<PWMPERIODEND_A> for bool {
#[inline(always)]
fn from(variant: PWMPERIODEND_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PWMPERIODEND` reader - Write '1' to Disable interrupt for PWMPERIODEND event"]
pub struct PWMPERIODEND_R(crate::FieldReader<bool, PWMPERIODEND_A>);
impl PWMPERIODEND_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
PWMPERIODEND_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> PWMPERIODEND_A {
match self.bits {
false => PWMPERIODEND_A::DISABLED,
true => PWMPERIODEND_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == PWMPERIODEND_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == PWMPERIODEND_A::ENABLED
}
}
impl core::ops::Deref for PWMPERIODEND_R {
type Target = crate::FieldReader<bool, PWMPERIODEND_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for PWMPERIODEND event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PWMPERIODEND_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<PWMPERIODEND_AW> for bool {
#[inline(always)]
fn from(variant: PWMPERIODEND_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PWMPERIODEND` writer - Write '1' to Disable interrupt for PWMPERIODEND event"]
pub struct PWMPERIODEND_W<'a> {
w: &'a mut W,
}
impl<'a> PWMPERIODEND_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: PWMPERIODEND_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(PWMPERIODEND_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | ((value as u32 & 0x01) << 6);
self.w
}
}
#[doc = "Write '1' to Disable interrupt for LOOPSDONE event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOOPSDONE_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<LOOPSDONE_A> for bool {
#[inline(always)]
fn from(variant: LOOPSDONE_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LOOPSDONE` reader - Write '1' to Disable interrupt for LOOPSDONE event"]
pub struct LOOPSDONE_R(crate::FieldReader<bool, LOOPSDONE_A>);
impl LOOPSDONE_R {
#[inline(always)]
pub(crate) fn new(bits: bool) -> Self {
LOOPSDONE_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> LOOPSDONE_A {
match self.bits {
false => LOOPSDONE_A::DISABLED,
true => LOOPSDONE_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == LOOPSDONE_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == LOOPSDONE_A::ENABLED
}
}
impl core::ops::Deref for LOOPSDONE_R {
type Target = crate::FieldReader<bool, LOOPSDONE_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to Disable interrupt for LOOPSDONE event\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LOOPSDONE_AW {
#[doc = "1: Disable"]
CLEAR = 1,
}
impl From<LOOPSDONE_AW> for bool {
#[inline(always)]
fn from(variant: LOOPSDONE_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `LOOPSDONE` writer - Write '1' to Disable interrupt for LOOPSDONE event"]
pub struct LOOPSDONE_W<'a> {
w: &'a mut W,
}
impl<'a> LOOPSDONE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: LOOPSDONE_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Disable"]
#[inline(always)]
pub fn clear(self) -> &'a mut W {
self.variant(LOOPSDONE_AW::CLEAR)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | ((value as u32 & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bit 1 - Write '1' to Disable interrupt for STOPPED event"]
#[inline(always)]
pub fn stopped(&self) -> STOPPED_R {
STOPPED_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event"]
#[inline(always)]
pub fn seqstarted0(&self) -> SEQSTARTED0_R {
SEQSTARTED0_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event"]
#[inline(always)]
pub fn seqstarted1(&self) -> SEQSTARTED1_R {
SEQSTARTED1_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Write '1' to Disable interrupt for SEQEND\\[0\\]
event"]
#[inline(always)]
pub fn seqend0(&self) -> SEQEND0_R {
SEQEND0_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Write '1' to Disable interrupt for SEQEND\\[1\\]
event"]
#[inline(always)]
pub fn seqend1(&self) -> SEQEND1_R {
SEQEND1_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Write '1' to Disable interrupt for PWMPERIODEND event"]
#[inline(always)]
pub fn pwmperiodend(&self) -> PWMPERIODEND_R {
PWMPERIODEND_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Write '1' to Disable interrupt for LOOPSDONE event"]
#[inline(always)]
pub fn loopsdone(&self) -> LOOPSDONE_R {
LOOPSDONE_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 1 - Write '1' to Disable interrupt for STOPPED event"]
#[inline(always)]
pub fn stopped(&mut self) -> STOPPED_W {
STOPPED_W { w: self }
}
#[doc = "Bit 2 - Write '1' to Disable interrupt for SEQSTARTED\\[0\\]
event"]
#[inline(always)]
pub fn seqstarted0(&mut self) -> SEQSTARTED0_W {
SEQSTARTED0_W { w: self }
}
#[doc = "Bit 3 - Write '1' to Disable interrupt for SEQSTARTED\\[1\\]
event"]
#[inline(always)]
pub fn seqstarted1(&mut self) -> SEQSTARTED1_W {
SEQSTARTED1_W { w: self }
}
#[doc = "Bit 4 - Write '1' to Disable interrupt for SEQEND\\[0\\]
event"]
#[inline(always)]
pub fn seqend0(&mut self) -> SEQEND0_W {
SEQEND0_W { w: self }
}
#[doc = "Bit 5 - Write '1' to Disable interrupt for SEQEND\\[1\\]
event"]
#[inline(always)]
pub fn seqend1(&mut self) -> SEQEND1_W {
SEQEND1_W { w: self }
}
#[doc = "Bit 6 - Write '1' to Disable interrupt for PWMPERIODEND event"]
#[inline(always)]
pub fn pwmperiodend(&mut self) -> PWMPERIODEND_W {
PWMPERIODEND_W { w: self }
}
#[doc = "Bit 7 - Write '1' to Disable interrupt for LOOPSDONE event"]
#[inline(always)]
pub fn loopsdone(&mut self) -> LOOPSDONE_W {
LOOPSDONE_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Disable interrupt\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intenclr](index.html) module"]
pub struct INTENCLR_SPEC;
impl crate::RegisterSpec for INTENCLR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [intenclr::R](R) reader structure"]
impl crate::Readable for INTENCLR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [intenclr::W](W) writer structure"]
impl crate::Writable for INTENCLR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets INTENCLR to value 0"]
impl crate::Resettable for INTENCLR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 29.949875 | 406 | 0.583096 |
611676e67298877664389956c8b446da8cc6530f | 4,174 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::common_utils::common::macros::{fx_err_and_bail, with_line};
use anyhow::Error;
use fidl_fuchsia_hardware_power_statecontrol::{AdminMarker, AdminProxy, RebootReason};
use fuchsia_component as app;
use fuchsia_syslog::macros::{fx_log_err, fx_log_info};
/// Perform Fuchsia Device Manager fidl operations.
///
/// Note this object is shared among all threads created by server.
///
#[derive(Debug)]
pub struct HardwarePowerStatecontrolFacade {}
impl HardwarePowerStatecontrolFacade {
pub fn new() -> HardwarePowerStatecontrolFacade {
HardwarePowerStatecontrolFacade {}
}
fn get_admin_proxy(&self) -> Result<AdminProxy, Error> {
let tag = "HardwarePowerStatecontrolFacade";
match app::client::connect_to_protocol::<AdminMarker>() {
Ok(p) => Ok(p),
Err(err) => fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to create device admin proxy: {:?}", err)
),
}
}
/// Reboot the Fuchsia device
pub async fn suspend_reboot(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_reboot";
fx_log_info!("Executing Suspend: REBOOT");
if let Err(err) = self.get_admin_proxy()?.reboot(RebootReason::UserRequest).await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
/// Reboot the Fuchsia device into the bootloader
pub async fn suspend_reboot_bootloader(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_reboot_bootloader";
fx_log_info!("Executing Suspend: REBOOT_BOOTLOADER");
if let Err(err) = self.get_admin_proxy()?.reboot_to_bootloader().await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
/// Reboot the Fuchsia device into recovery
pub async fn suspend_reboot_recovery(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_reboot_recovery";
fx_log_info!("Executing Suspend: REBOOT_RECOVERY");
if let Err(err) = self.get_admin_proxy()?.reboot_to_recovery().await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
/// Power off the Fuchsia device
pub async fn suspend_poweroff(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_poweroff";
fx_log_info!("Executing Suspend: POWEROFF");
if let Err(err) = self.get_admin_proxy()?.poweroff().await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
/// Suspend MEXEC the Fuchsia device
pub async fn suspend_mexec(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_mexec";
fx_log_info!("Executing Suspend: MEXEC");
if let Err(err) = self.get_admin_proxy()?.mexec().await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
/// RSuspend RAM on the Fuchsia device
pub async fn suspend_ram(&self) -> Result<(), Error> {
let tag = "HardwarePowerStatecontrolFacade::suspend_ram";
fx_log_info!("Executing Suspend: SUSPEND_RAM");
if let Err(err) = self.get_admin_proxy()?.suspend_to_ram().await? {
fx_err_and_bail!(
&with_line!(tag),
format_err!("Failed to change power control state: {:?}", err)
)
}
Ok(())
}
}
| 36.295652 | 92 | 0.601581 |
f52bfaef70f2cdd298cb212dc2096d92f03e50a0 | 546 | use crate::core::{InternalVM, VMInstruction};
use crate::core::{RuntimeError, VirtualMachine};
use algebra::Field;
use r1cs_core::ConstraintSystem;
use zinc_bytecode::instructions::StoreSequence;
impl<F, CS> VMInstruction<F, CS> for StoreSequence
where
F: Field,
CS: ConstraintSystem<F>,
{
fn execute(&self, vm: &mut VirtualMachine<F, CS>) -> Result<(), RuntimeError> {
for i in 0..self.len {
let value = vm.pop()?;
vm.store(self.address + self.len - i - 1, value)?;
}
Ok(())
}
}
| 26 | 83 | 0.622711 |
ac7f091ed86a958e69fdf54594d164bc3c7c1de7 | 340 | pub mod bbs;
pub mod message;
pub mod thread;
pub use bbs::{get_id_by_bbs_path_name, Bbs};
pub use message::{get_message_count, Message};
pub use thread::{get_thread_title, Thread};
pub mod sql {
pub use super::bbs::CREATE_BBS_TABLE;
pub use super::message::CREATE_MESSAGE_TABLE;
pub use super::thread::CREATE_THREAD_TABLE;
}
| 24.285714 | 49 | 0.744118 |
4b1566e77298310a7edd068cc83befebdf727890 | 4,369 | #[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
#[doc = "0x00 - no description available"]
pub lut0: LUT,
_reserved1: [u8; 12usize],
#[doc = "0x20 - no description available"]
pub lut1: LUT,
_reserved2: [u8; 12usize],
#[doc = "0x40 - no description available"]
pub lut2: LUT,
_reserved3: [u8; 12usize],
#[doc = "0x60 - no description available"]
pub lut3: LUT,
_reserved4: [u8; 12usize],
#[doc = "0x80 - no description available"]
pub lut4: LUT,
_reserved5: [u8; 12usize],
#[doc = "0xa0 - no description available"]
pub lut5: LUT,
_reserved6: [u8; 12usize],
#[doc = "0xc0 - no description available"]
pub lut6: LUT,
_reserved7: [u8; 12usize],
#[doc = "0xe0 - no description available"]
pub lut7: LUT,
_reserved8: [u8; 12usize],
#[doc = "0x100 - no description available"]
pub lut8: LUT,
_reserved9: [u8; 12usize],
#[doc = "0x120 - no description available"]
pub lut9: LUT,
_reserved10: [u8; 12usize],
#[doc = "0x140 - no description available"]
pub lut10: LUT,
_reserved11: [u8; 12usize],
#[doc = "0x160 - no description available"]
pub lut11: LUT,
_reserved12: [u8; 12usize],
#[doc = "0x180 - no description available"]
pub lut12: LUT,
_reserved13: [u8; 12usize],
#[doc = "0x1a0 - no description available"]
pub lut13: LUT,
_reserved14: [u8; 12usize],
#[doc = "0x1c0 - no description available"]
pub lut14: LUT,
_reserved15: [u8; 12usize],
#[doc = "0x1e0 - no description available"]
pub lut15: LUT,
_reserved16: [u8; 12usize],
#[doc = "0x200 - no description available"]
pub lut16: LUT,
_reserved17: [u8; 12usize],
#[doc = "0x220 - no description available"]
pub lut17: LUT,
_reserved18: [u8; 12usize],
#[doc = "0x240 - no description available"]
pub lut18: LUT,
_reserved19: [u8; 12usize],
#[doc = "0x260 - no description available"]
pub lut19: LUT,
_reserved20: [u8; 12usize],
#[doc = "0x280 - no description available"]
pub lut20: LUT,
_reserved21: [u8; 12usize],
#[doc = "0x2a0 - no description available"]
pub lut21: LUT,
_reserved22: [u8; 12usize],
#[doc = "0x2c0 - no description available"]
pub lut22: LUT,
_reserved23: [u8; 12usize],
#[doc = "0x2e0 - no description available"]
pub lut23: LUT,
_reserved24: [u8; 12usize],
#[doc = "0x300 - no description available"]
pub lut24: LUT,
_reserved25: [u8; 12usize],
#[doc = "0x320 - no description available"]
pub lut25: LUT,
_reserved26: [u8; 1228usize],
#[doc = "0x800 - Specifies the Truth Table contents for LUTLUTn"]
pub lut_truth: [crate::Reg<lut_truth::LUT_TRUTH_SPEC>; 26],
_reserved27: [u8; 152usize],
#[doc = "0x900 - Provides the current state of the 8 designated PLU Outputs."]
pub outputs: crate::Reg<outputs::OUTPUTS_SPEC>,
#[doc = "0x904 - Wakeup interrupt control for PLU"]
pub wakeint_ctrl: crate::Reg<wakeint_ctrl::WAKEINT_CTRL_SPEC>,
_reserved29: [u8; 760usize],
#[doc = "0xc00 - Selects the source to be connected to PLU Output OUTPUT_n"]
pub output_mux: [crate::Reg<output_mux::OUTPUT_MUX_SPEC>; 8],
}
#[doc = r"Register block"]
#[repr(C)]
pub struct LUT {
#[doc = "0x00 - LUTn input x MUX"]
pub lut_inp_mux: [crate::Reg<self::lut::lut_inp_mux::LUT_INP_MUX_SPEC>; 5],
}
#[doc = r"Register block"]
#[doc = "no description available"]
pub mod lut;
#[doc = "LUT_TRUTH register accessor: an alias for `Reg<LUT_TRUTH_SPEC>`"]
pub type LUT_TRUTH = crate::Reg<lut_truth::LUT_TRUTH_SPEC>;
#[doc = "Specifies the Truth Table contents for LUTLUTn"]
pub mod lut_truth;
#[doc = "OUTPUTS register accessor: an alias for `Reg<OUTPUTS_SPEC>`"]
pub type OUTPUTS = crate::Reg<outputs::OUTPUTS_SPEC>;
#[doc = "Provides the current state of the 8 designated PLU Outputs."]
pub mod outputs;
#[doc = "WAKEINT_CTRL register accessor: an alias for `Reg<WAKEINT_CTRL_SPEC>`"]
pub type WAKEINT_CTRL = crate::Reg<wakeint_ctrl::WAKEINT_CTRL_SPEC>;
#[doc = "Wakeup interrupt control for PLU"]
pub mod wakeint_ctrl;
#[doc = "OUTPUT_MUX register accessor: an alias for `Reg<OUTPUT_MUX_SPEC>`"]
pub type OUTPUT_MUX = crate::Reg<output_mux::OUTPUT_MUX_SPEC>;
#[doc = "Selects the source to be connected to PLU Output OUTPUT_n"]
pub mod output_mux;
| 37.025424 | 82 | 0.65095 |
5d7eb218901043ddb5c6d562e61ad7dcc3effd36 | 1,348 | //! Utilities for encoding and decoding frames.
//!
//! Contains adapters to go from streams of bytes, [`AsyncRead`] and
//! [`AsyncWrite`], to framed streams implementing [`Sink`] and [`Stream`].
//! Framed streams are also known as [transports].
//!
//! [`AsyncRead`]: #
//! [`AsyncWrite`]: #
//! [`Sink`]: #
//! [`Stream`]: #
//! [transports]: #
#![deny(missing_docs, missing_debug_implementations)]
#![doc(hidden, html_root_url = "https://docs.rs/tokio-codec/0.1.0")]
// _tokio_codec are the items that belong in the `tokio_codec` crate. However, because we need to
// maintain backward compatibility until the next major breaking change, they are defined here.
// When the next breaking change comes, they should be moved to the `tokio_codec` crate and become
// independent.
//
// The primary reason we can't move these to `tokio-codec` now is because, again for backward
// compatibility reasons, we need to keep `Decoder` and `Encoder` in tokio_io::codec. And `Decoder`
// and `Encoder` needs to reference `Framed`. So they all still need to still be in the same
// module.
mod decoder;
mod encoder;
mod framed;
mod framed_read;
mod framed_write;
pub use self::decoder::Decoder;
pub use self::encoder::Encoder;
pub use self::framed::{Framed, FramedParts};
pub use self::framed_read::FramedRead;
pub use self::framed_write::FramedWrite;
| 36.432432 | 99 | 0.718101 |
9c9270e642c4a57c2bc1e78113cc4b687a31ab0e | 4,476 | // Copyright (c) 2020 Xu Shaohua <[email protected]>. All rights reserved.
// Use of this source is governed by Apache-2.0 License that can be found
// in the LICENSE file.
#![allow(non_snake_case)]
#![allow(non_camel_case_types)]
pub type c_char = u8;
// First import architecture specific types.
#[cfg(target_arch = "aarch64")]
#[path = "aarch64/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "arm")]
#[path = "arm/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "mips")]
#[path = "mips/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "mips64")]
#[path = "mips64/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "powerpc64")]
#[path = "ppc64/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "s390x")]
#[path = "s390x/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "x86")]
#[path = "x86/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(target_arch = "x86_64")]
#[path = "x86_64/mod.rs"]
mod arch;
pub use arch::*;
#[cfg(not(any(
target_arch = "arm",
target_arch = "aarch64",
target_arch = "x86",
target_arch = "x86_64"
)))]
mod page;
#[cfg(not(any(
target_arch = "arm",
target_arch = "aarch64",
target_arch = "x86",
target_arch = "x86_64"
)))]
pub use page::*;
#[cfg(target_arch = "aarch64")]
mod signal;
#[cfg(target_arch = "aarch64")]
pub use signal::*;
mod aio;
mod aio_abi;
mod bitsperlong;
mod bpf;
mod capability;
mod compat;
mod dqblk_xfs;
mod eventpoll;
mod fcntl;
mod fs;
mod fs_readdir;
mod futex;
mod getcpu;
mod hugetlb_encode;
mod io_uring;
mod ioctl;
mod ioctls;
mod ioprio;
mod ipc;
mod ipcbuf;
mod kcmp;
mod key;
mod limits;
mod linux_dirent;
mod linux_fs;
mod linux_fs_types;
mod linux_net;
mod linux_quota;
mod linux_signal;
mod linux_socket;
mod linux_time64;
mod linux_timex;
mod membarrier;
mod memfd;
mod mempolicy;
mod mman;
mod mount;
mod mqueue;
mod msg;
mod msgbuf;
mod perf_event;
mod personality;
mod poll;
mod posix_types;
mod prctl;
mod ptrace;
mod quota;
mod resource;
mod rseq;
mod sched;
mod sched_types;
mod seccomp;
mod sem;
mod shm;
mod shmbuf;
mod siginfo;
mod signal_defs;
mod socket;
mod sockios;
mod splice;
mod statfs;
mod swap;
mod sysctl;
mod sysinfo;
mod termbits;
mod termios;
mod time;
mod time_types;
mod times;
mod timex;
mod types;
mod uapi_fadvise;
mod uapi_fcntl;
mod uapi_in;
mod uapi_in6;
mod uapi_inotify;
mod uapi_kexec;
mod uapi_mman;
mod uapi_mman_common;
mod uapi_net;
mod uapi_reboot;
mod uapi_resource;
mod uapi_serial;
mod uapi_socket;
mod uapi_stat;
mod uapi_timerfd;
mod uapi_wait;
mod uapi_xattr;
mod uio;
mod utime;
mod utsname;
pub use aio::*;
pub use aio_abi::*;
pub use bitsperlong::*;
pub use bpf::*;
pub use capability::*;
pub use compat::*;
pub use dqblk_xfs::*;
pub use eventpoll::*;
pub use fcntl::*;
pub use fs::*;
pub use fs_readdir::*;
pub use futex::*;
pub use getcpu::*;
pub use hugetlb_encode::*;
pub use io_uring::*;
pub use ioctl::*;
pub use ioctls::*;
pub use ioprio::*;
pub use ipc::*;
pub use ipcbuf::*;
pub use kcmp::*;
pub use key::*;
pub use limits::*;
pub use linux_dirent::*;
pub use linux_fs::*;
pub use linux_fs_types::*;
pub use linux_net::*;
pub use linux_quota::*;
pub use linux_signal::*;
pub use linux_socket::*;
pub use linux_time64::*;
pub use linux_timex::*;
pub use membarrier::*;
pub use memfd::*;
pub use mempolicy::*;
// NOTE: `pub use mman::*;` used to appear twice in a row here; the duplicate
// glob re-export was redundant (it re-exported the same items) and is removed.
pub use mman::*;
pub use mount::*;
pub use mqueue::*;
pub use msg::*;
pub use msgbuf::*;
pub use perf_event::*;
pub use personality::*;
pub use poll::*;
pub use posix_types::*;
pub use prctl::*;
pub use ptrace::*;
pub use quota::*;
pub use resource::*;
pub use rseq::*;
pub use sched::*;
pub use sched_types::*;
pub use seccomp::*;
pub use sem::*;
pub use shm::*;
pub use shmbuf::*;
pub use siginfo::*;
pub use signal_defs::*;
pub use socket::*;
pub use sockios::*;
pub use splice::*;
pub use statfs::*;
pub use swap::*;
pub use sysctl::*;
pub use sysinfo::*;
pub use termbits::*;
pub use termios::*;
pub use time::*;
pub use time_types::*;
pub use times::*;
pub use timex::*;
pub use types::*;
pub use uapi_fadvise::*;
pub use uapi_fcntl::*;
pub use uapi_in::*;
pub use uapi_in6::*;
pub use uapi_inotify::*;
pub use uapi_kexec::*;
pub use uapi_mman::*;
pub use uapi_mman_common::*;
pub use uapi_net::*;
pub use uapi_reboot::*;
pub use uapi_resource::*;
pub use uapi_serial::*;
pub use uapi_socket::*;
pub use uapi_stat::*;
pub use uapi_timerfd::*;
pub use uapi_wait::*;
pub use uapi_xattr::*;
pub use uio::*;
pub use utime::*;
pub use utsname::*;
| 17.484375 | 75 | 0.684093 |
500c2e994d8774e9df3296afba07f3aedf8c29fa | 393 | //! Alternative implementations to control the application via stdin.
use super::Event;
use std::sync::mpsc::Sender;

/// Stub for a stdin-driven input source.
///
/// Reading from stdin is not implemented yet. Unlike the previous stub, the
/// event channel is now retained for the lifetime of this value: `new` used
/// to drop the `sender` immediately, which disconnected the channel before
/// any events could ever be reported.
pub struct ConsoleInput {
    // Channel used to forward input events to the application.
    // Underscore-prefixed until the stdin reader loop that uses it exists.
    _sender: Sender<Event>,
}

impl ConsoleInput {
    /// Creates a new console input source that will report events on `sender`.
    pub fn new(sender: Sender<Event>) -> ConsoleInput {
        // TODO: spawn a thread that reads stdin and forwards parsed events.
        ConsoleInput { _sender: sender }
    }
}

impl Drop for ConsoleInput {
    fn drop(&mut self) {
        // TODO: signal the reader thread (once it exists) to shut down.
        // Dropping `_sender` disconnects the channel as a side effect.
    }
}
| 15.72 | 69 | 0.580153 |
7a82bdcd556afcf7a6eabe7bb78af462c7cea57e | 5,726 | //! Canonical definitions of `home_dir`, `cargo_home`, and `rustup_home`.
//!
//! This provides the definition of `home_dir` used by Cargo and
//! rustup, as well as functions to find the correct value of
//! `CARGO_HOME` and `RUSTUP_HOME`.
//!
//! See also the [`dirs`](https://docs.rs/dirs) crate.
//!
//! _Note that as of 2019/08/06 it appears that cargo uses this crate. And
//! rustup has used this crate since 2019/08/21._
//!
//! The definition of `home_dir` provided by the standard library is
//! incorrect because it considers the `HOME` environment variable on
//! Windows. This causes surprising situations where a Rust program
//! will behave differently depending on whether it is run under a
//! Unix emulation environment like Cygwin or MinGW. Neither Cargo nor
//! rustup use the standard library's definition - they use the
//! definition here.
//!
//! This crate further provides two functions, `cargo_home` and
//! `rustup_home`, which are the canonical way to determine the
//! location that Cargo and rustup store their data.
//!
//! See also this [discussion].
//!
//! [discussion]: https://github.com/rust-lang/rust/pull/46799#issuecomment-361156935
#![doc(html_root_url = "https://docs.rs/home/0.5.3")]
#![deny(rust_2018_idioms)]
#[cfg(windows)]
mod windows;
use std::env;
use std::io;
use std::path::{Path, PathBuf};
/// Returns the path of the current user's home directory, if it can be
/// determined.
///
/// # Unix
///
/// The `HOME` environment variable is used when set and non-empty; otherwise
/// the platform's user database is consulted via `getpwuid_r` for the
/// current UID.
///
/// # Windows
///
/// The `USERPROFILE` environment variable is used when set and non-empty,
/// falling back to [`SHGetFolderPathW`][msdn].
///
/// [msdn]: https://docs.microsoft.com/en-us/windows/win32/api/shlobj_core/nf-shlobj_core-shgetfolderpathw
///
/// # Examples
///
/// ```
/// match home::home_dir() {
///     Some(path) => println!("{}", path.display()),
///     None => println!("Impossible to get your home dir!"),
/// }
/// ```
pub fn home_dir() -> Option<PathBuf> {
    home_dir_inner()
}
#[cfg(windows)]
use windows::home_dir_inner;
#[cfg(any(unix, target_os = "redox"))]
fn home_dir_inner() -> Option<PathBuf> {
    // The standard library's definition is only problematic on Windows, so on
    // Unix-like targets we can simply delegate to it.
    #[allow(deprecated)]
    env::home_dir()
}
/// Returns the storage directory used by Cargo, often known as `.cargo` or
/// `CARGO_HOME`.
///
/// The lookup order is:
///
/// 1. `CARGO_HOME`, when set to an absolute path.
/// 2. The current working directory joined with `CARGO_HOME`, when it is
///    set to a relative path.
/// 3. `.cargo` inside the directory reported by [`home_dir`].
///
/// # Errors
///
/// Fails when the current directory cannot be retrieved, or when the home
/// directory cannot be determined.
///
/// # Examples
///
/// ```
/// match home::cargo_home() {
///     Ok(path) => println!("{}", path.display()),
///     Err(err) => eprintln!("Cannot get your cargo home dir: {:?}", err),
/// }
/// ```
pub fn cargo_home() -> io::Result<PathBuf> {
    let cwd = env::current_dir()?;
    cargo_home_with_cwd(&cwd)
}
/// Returns the storage directory used by Cargo, resolving a relative
/// `CARGO_HOME` against `cwd`.
/// For more details, see [`cargo_home`](fn.cargo_home.html).
pub fn cargo_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
    // An empty CARGO_HOME is treated the same as an unset one.
    if let Some(raw) = env::var_os("CARGO_HOME").filter(|v| !v.is_empty()) {
        let configured = PathBuf::from(raw);
        let resolved = if configured.is_absolute() {
            configured
        } else {
            cwd.join(configured)
        };
        return Ok(resolved);
    }
    match home_dir() {
        Some(base) => Ok(base.join(".cargo")),
        None => Err(io::Error::new(io::ErrorKind::Other, "could not find cargo home dir")),
    }
}
/// Returns the storage directory used by rustup, often known as `.rustup` or
/// `RUSTUP_HOME`.
///
/// The lookup order is:
///
/// 1. `RUSTUP_HOME`, when set to an absolute path.
/// 2. The current working directory joined with `RUSTUP_HOME`, when it is
///    set to a relative path.
/// 3. `.rustup` inside the directory reported by [`home_dir`].
///
/// # Errors
///
/// Fails when the current directory cannot be retrieved, or when the home
/// directory cannot be determined.
///
/// # Examples
///
/// ```
/// match home::rustup_home() {
///     Ok(path) => println!("{}", path.display()),
///     Err(err) => eprintln!("Cannot get your rustup home dir: {:?}", err),
/// }
/// ```
pub fn rustup_home() -> io::Result<PathBuf> {
    let cwd = env::current_dir()?;
    rustup_home_with_cwd(&cwd)
}
/// Returns the storage directory used by rustup, resolving a relative
/// `RUSTUP_HOME` against `cwd`.
/// For more details, see [`rustup_home`](fn.rustup_home.html).
pub fn rustup_home_with_cwd(cwd: &Path) -> io::Result<PathBuf> {
    // An empty RUSTUP_HOME is treated the same as an unset one.
    if let Some(raw) = env::var_os("RUSTUP_HOME").filter(|v| !v.is_empty()) {
        let configured = PathBuf::from(raw);
        let resolved = if configured.is_absolute() {
            configured
        } else {
            cwd.join(configured)
        };
        return Ok(resolved);
    }
    match home_dir() {
        Some(base) => Ok(base.join(".rustup")),
        None => Err(io::Error::new(io::ErrorKind::Other, "could not find rustup home dir")),
    }
}
| 32.72 | 106 | 0.634824 |
210005b6c66ca1f51386b55db4f4da109ee86438 | 10,855 | // Copyright 2017 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Representation and calculation of cursor movement within a text view,
//! expressed in terms of the `LineOffset` trait.
use std::cmp::max;
use crate::line_offset::LineOffset;
use crate::selection::{HorizPos, SelRegion, Selection};
use crate::word_boundaries::WordCursor;
use xi_rope::{Cursor, LinesMetric, Rope};
/// The specification of a movement.
///
/// Consumed by `region_movement` / `selection_movement`; the variants carry
/// no payload, so quantities such as the page height are supplied separately
/// by the caller.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Movement {
    /// Move to the left by one grapheme cluster.
    Left,
    /// Move to the right by one grapheme cluster.
    Right,
    /// Move to the left by one word.
    LeftWord,
    /// Move to the right by one word.
    RightWord,
    /// Move to left end of visible line.
    LeftOfLine,
    /// Move to right end of visible line.
    RightOfLine,
    /// Move up one visible line.
    Up,
    /// Move down one visible line.
    Down,
    /// Move up one viewport height.
    UpPage,
    /// Move down one viewport height.
    DownPage,
    /// Move up to the next line that can preserve the cursor position.
    UpExactPosition,
    /// Move down to the next line that can preserve the cursor position.
    DownExactPosition,
    /// Move to the start of the text line.
    StartOfParagraph,
    /// Move to the end of the text line.
    EndOfParagraph,
    /// Move to the end of the text line, or next line if already at end.
    EndOfParagraphKill,
    /// Move to the start of the document.
    StartOfDocument,
    /// Move to the end of the document
    EndOfDocument,
}
/// Compute movement based on vertical motion by the given number of lines.
///
/// Returns the new caret offset together with the horizontal position
/// (column) to remember on the resulting region.
///
/// Note: in non-exceptional cases, this function preserves the `horiz`
/// field of the selection region.
fn vertical_motion(
    r: SelRegion,
    lo: &dyn LineOffset,
    text: &Rope,
    line_delta: isize,
    modify: bool,
) -> (usize, Option<HorizPos>) {
    let (col, line) = selection_position(r, lo, text, line_delta < 0, modify);
    let n_lines = lo.line_of_offset(text, text.len());
    // This code is quite careful to avoid integer overflow.
    // TODO: write tests to verify
    // Moving further up than there are lines above: clamp to offset 0.
    if line_delta < 0 && (-line_delta as usize) > line {
        return (0, Some(col));
    }
    let line = if line_delta < 0 {
        line - (-line_delta as usize)
    } else {
        line.saturating_add(line_delta as usize)
    };
    // Moving past the last line: clamp to the end of the text.
    if line > n_lines {
        return (text.len(), Some(col));
    }
    let new_offset = lo.line_col_to_offset(text, line, col);
    (new_offset, Some(col))
}
/// Compute movement based on vertical motion by the given number of lines skipping
/// any line that is shorter than the current cursor position.
///
/// Used for `UpExactPosition` / `DownExactPosition`: scans away from the
/// current line until it finds one long enough to hold the column.
fn vertical_motion_exact_pos(
    r: SelRegion,
    lo: &dyn LineOffset,
    text: &Rope,
    move_up: bool,
    modify: bool,
) -> (usize, Option<HorizPos>) {
    let (col, init_line) = selection_position(r, lo, text, move_up, modify);
    let n_lines = lo.line_of_offset(text, text.len());
    let mut line_length =
        lo.offset_of_line(text, init_line.saturating_add(1)) - lo.offset_of_line(text, init_line);
    // Already on the first line and moving up: stay where we are.
    if move_up && init_line == 0 {
        return (lo.line_col_to_offset(text, init_line, col), Some(col));
    }
    let mut line = if move_up { init_line - 1 } else { init_line.saturating_add(1) };
    // If the active column is past the end of the current line, clamp it to the line length.
    let col = if line_length < col { line_length - 1 } else { col };
    loop {
        line_length = lo.offset_of_line(text, line + 1) - lo.offset_of_line(text, line);
        // If the line is longer than the current cursor position, break.
        // We use > instead of >= because line_length includes newline.
        if line_length > col {
            break;
        }
        // Scanned past the end of the file (or before the first line when
        // moving up) without finding a long-enough line: keep the original line.
        if line >= n_lines || (line == 0 && move_up) {
            line = init_line;
            break;
        }
        line = if move_up { line - 1 } else { line.saturating_add(1) };
    }
    (lo.line_col_to_offset(text, line, col), Some(col))
}
/// Returns the column to use for vertical motion and the line containing the
/// active point of the selection.
///
/// (The previous doc claimed this also returned the total number of lines;
/// it never did.)
///
/// The active point is `r.end` when extending a selection (`modify`),
/// otherwise the edge of the region in the direction of travel (`r.min()`
/// when moving up, `r.max()` when moving down). The column is the remembered
/// horizontal position when one exists, else the actual column of the active
/// point.
fn selection_position(
    r: SelRegion,
    lo: &dyn LineOffset,
    text: &Rope,
    move_up: bool,
    modify: bool,
) -> (HorizPos, usize) {
    // The active point of the selection
    let active = if modify {
        r.end
    } else if move_up {
        r.min()
    } else {
        r.max()
    };
    // `unwrap_or_else` keeps the column computation lazy: it only runs when
    // no horizontal position is remembered on the region.
    let col = r.horiz.unwrap_or_else(|| lo.offset_to_line_col(text, active).1);
    let line = lo.line_of_offset(text, active);
    (col, line)
}
/// When paging through a file, the number of lines from the previous page
/// that will also be visible in the next.
const SCROLL_OVERLAP: isize = 2;
/// Computes the actual desired amount of scrolling (generally slightly
/// less than the height of the viewport, to allow overlap).
fn scroll_height(height: usize) -> isize {
    let advance = height as isize - SCROLL_OVERLAP;
    // Always advance by at least one line, even for tiny viewports.
    max(advance, 1)
}
/// Compute the result of movement on one selection region.
///
/// Returns a new region whose active point is the moved offset; when
/// `modify` is set the original `start` is kept so the selection extends,
/// otherwise the result is a caret. The second element of each match arm is
/// the horizontal position to remember (only vertical motions preserve it).
///
/// # Arguments
///
/// * `height` - viewport height
pub fn region_movement(
    m: Movement,
    r: SelRegion,
    lo: &dyn LineOffset,
    height: usize,
    text: &Rope,
    modify: bool,
) -> SelRegion {
    let (offset, horiz) = match m {
        Movement::Left => {
            // A caret (or an extending selection) moves by one grapheme;
            // a plain selection collapses to its left edge instead.
            if r.is_caret() || modify {
                if let Some(offset) = text.prev_grapheme_offset(r.end) {
                    (offset, None)
                } else {
                    // Already at the start of the text; keep horiz.
                    (0, r.horiz)
                }
            } else {
                (r.min(), None)
            }
        }
        Movement::Right => {
            // Mirror image of Movement::Left.
            if r.is_caret() || modify {
                if let Some(offset) = text.next_grapheme_offset(r.end) {
                    (offset, None)
                } else {
                    // Already at the end of the text; keep horiz.
                    (r.end, r.horiz)
                }
            } else {
                (r.max(), None)
            }
        }
        Movement::LeftWord => {
            let mut word_cursor = WordCursor::new(text, r.end);
            let offset = word_cursor.prev_boundary().unwrap_or(0);
            (offset, None)
        }
        Movement::RightWord => {
            let mut word_cursor = WordCursor::new(text, r.end);
            let offset = word_cursor.next_boundary().unwrap_or_else(|| text.len());
            (offset, None)
        }
        Movement::LeftOfLine => {
            let line = lo.line_of_offset(text, r.end);
            let offset = lo.offset_of_line(text, line);
            (offset, None)
        }
        Movement::RightOfLine => {
            let line = lo.line_of_offset(text, r.end);
            let mut offset = text.len();
            // calculate end of line: one grapheme before the start of the
            // next line, except on the last line where it is the text end.
            let next_line_offset = lo.offset_of_line(text, line + 1);
            if line < lo.line_of_offset(text, offset) {
                if let Some(prev) = text.prev_grapheme_offset(next_line_offset) {
                    offset = prev;
                }
            }
            (offset, None)
        }
        Movement::Up => vertical_motion(r, lo, text, -1, modify),
        Movement::Down => vertical_motion(r, lo, text, 1, modify),
        Movement::UpExactPosition => vertical_motion_exact_pos(r, lo, text, true, modify),
        Movement::DownExactPosition => vertical_motion_exact_pos(r, lo, text, false, modify),
        Movement::StartOfParagraph => {
            // Note: TextEdit would start at modify ? r.end : r.min()
            let mut cursor = Cursor::new(&text, r.end);
            let offset = cursor.prev::<LinesMetric>().unwrap_or(0);
            (offset, None)
        }
        Movement::EndOfParagraph => {
            // Note: TextEdit would start at modify ? r.end : r.max()
            let mut offset = r.end;
            let mut cursor = Cursor::new(&text, offset);
            if let Some(next_para_offset) = cursor.next::<LinesMetric>() {
                if cursor.is_boundary::<LinesMetric>() {
                    // Stop just before the newline that ends this line.
                    if let Some(eol) = text.prev_grapheme_offset(next_para_offset) {
                        offset = eol;
                    }
                } else if cursor.pos() == text.len() {
                    offset = text.len();
                }
                (offset, None)
            } else {
                //in this case we are already on a last line so just moving to EOL
                (text.len(), None)
            }
        }
        Movement::EndOfParagraphKill => {
            // Note: TextEdit would start at modify ? r.end : r.max()
            // Like EndOfParagraph, but when already at the end of a line the
            // newline itself is included (so a kill swallows it).
            let mut offset = r.end;
            let mut cursor = Cursor::new(&text, offset);
            if let Some(next_para_offset) = cursor.next::<LinesMetric>() {
                offset = next_para_offset;
                if cursor.is_boundary::<LinesMetric>() {
                    if let Some(eol) = text.prev_grapheme_offset(next_para_offset) {
                        if eol != r.end {
                            offset = eol;
                        }
                    }
                }
            }
            (offset, None)
        }
        Movement::UpPage => vertical_motion(r, lo, text, -scroll_height(height), modify),
        Movement::DownPage => vertical_motion(r, lo, text, scroll_height(height), modify),
        Movement::StartOfDocument => (0, None),
        Movement::EndOfDocument => (text.len(), None),
    };
    SelRegion::new(if modify { r.start } else { offset }, offset).with_horiz(horiz)
}
/// Applies `m` to every region of `s` and returns the union of the results.
///
/// When `modify` is `true` each region is extended by the movement;
/// otherwise every moved region collapses to a caret.
///
/// # Arguments
///
/// * `height` - viewport height
pub fn selection_movement(
    m: Movement,
    s: &Selection,
    lo: &dyn LineOffset,
    height: usize,
    text: &Rope,
    modify: bool,
) -> Selection {
    s.iter().fold(Selection::new(), |mut moved, &region| {
        moved.add_region(region_movement(m, region, lo, height, text, modify));
        moved
    })
}
| 34.791667 | 124 | 0.588485 |
e4a05da8fede786ca7161b3b745990414f4b7d8b | 4,186 | use std::path::Path;
use std::env;
use clap::{
crate_authors, crate_description, crate_name, crate_version, App, Arg, ArgMatches, SubCommand,
};
use mask::command::Command;
/// Entry point: resolve the maskfile path, parse it into a command tree,
/// build the matching clap CLI and execute the chosen subcommand.
fn main() {
    // `--maskfile <path>` must be the first two CLI arguments to override the
    // default maskfile location (a matching dummy clap arg is registered in
    // `build_subcommands` so clap does not reject it).
    let args: Vec<String> = env::args().collect();
    let maskfile_path = match (args.get(1), args.get(2)) {
        (Some(flag), Some(path)) if flag == "--maskfile" => Path::new(path),
        _ => Path::new("./maskfile.md"),
    };
    // Match directly on the Result instead of the is_err()/unwrap_err() pair.
    let maskfile = match mask::loader::read_maskfile(maskfile_path) {
        Ok(contents) => contents,
        Err(err) => return eprintln!("ERROR: {}", err),
    };
    let root_command = mask::parser::build_command_structure(maskfile);
    let cli_app = App::new(crate_name!())
        .version(crate_version!())
        .author(crate_authors!())
        .about(crate_description!());
    let matches = build_subcommands(cli_app, &root_command.subcommands).get_matches();
    // Match on the Option instead of the is_none()/unwrap() pair.
    match find_command(&matches, &root_command.subcommands) {
        Some(cmd) => {
            // The exit status of the executed command is currently ignored.
            let _ = mask::executor::execute_command(cmd);
        }
        None => {
            // TODO: echo --help for root command
            println!("Missing SUBCOMMAND");
        }
    }
}
/// Recursively registers every maskfile `Command` — including its nested
/// subcommands, required positional args and option flags — on the clap
/// `App`, and returns the augmented `App`.
fn build_subcommands<'a, 'b>(
    mut cli_app: App<'a, 'b>,
    subcommands: &'a Vec<Command>,
) -> App<'a, 'b> {
    for c in subcommands {
        let mut subcmd = SubCommand::with_name(&c.name).about(c.desc.as_ref());
        // Recurse for nested subcommands first, then attach args/flags.
        if !c.subcommands.is_empty() {
            subcmd = build_subcommands(subcmd, &c.subcommands);
        }
        // Add all required arguments
        for a in &c.required_args {
            let arg = Arg::with_name(&a.name).required(true);
            subcmd = subcmd.arg(arg);
        }
        // Add all optional flags
        for f in &c.option_flags {
            let arg = Arg::with_name(&f.name)
                .help(&f.desc)
                .short(&f.short)
                .long(&f.long)
                .takes_value(f.takes_value)
                .multiple(f.multiple);
            subcmd = subcmd.arg(arg);
        }
        cli_app = cli_app.subcommand(subcmd);
    }
    // This is needed to prevent clap from complaining. It should be removed once
    // clap 3.x is released. See https://github.com/clap-rs/clap/issues/748
    let custom_maskfile_path = Arg::with_name("maskfile")
        .help("Path to a different maskfile you want to use")
        .short("m")
        .long("maskfile")
        .takes_value(true)
        .multiple(false);
    cli_app.arg(custom_maskfile_path)
}
/// Walks the chain of subcommands that clap matched and returns the deepest
/// `Command` that was invoked, with its args and flags populated from
/// `matches`. Returns `None` when no subcommand was given.
///
/// (`&[Command]` is a backward-compatible widening of the old `&Vec<Command>`
/// parameter; the unused `<'a>` lifetime parameter was dropped.)
fn find_command(matches: &ArgMatches, subcommands: &[Command]) -> Option<Command> {
    let mut command = None;
    // The child subcommand that was used
    if let Some(subcommand_name) = matches.subcommand_name() {
        if let Some(matches) = matches.subcommand_matches(subcommand_name) {
            for c in subcommands {
                if c.name == subcommand_name {
                    // Prefer a deeper match from the recursion; only clone and
                    // populate this command when nothing deeper matched.
                    // `or_else` keeps that clone lazy — the old `.or(...)`
                    // built it unconditionally on every level.
                    command = find_command(matches, &c.subcommands)
                        .or_else(|| Some(get_command_options(c.clone(), &matches)));
                }
            }
        }
    }
    command
}
/// Populates `cmd`'s required args and option flags with the values clap
/// parsed into `matches`, returning the updated command.
fn get_command_options(mut cmd: Command, matches: &ArgMatches) -> Command {
    // Copy all required positional argument values. clap guarantees required
    // args are present once parsing succeeded, so the expect cannot fire.
    // Passing `&arg.name` avoids the String clone the old code made.
    for arg in &mut cmd.required_args {
        arg.val = matches
            .value_of(&arg.name)
            .expect("clap guarantees required args are present")
            .to_string();
    }
    // Copy all optional flag values.
    for flag in &mut cmd.option_flags {
        flag.val = if flag.takes_value {
            // Value-taking flag: extract the value, defaulting to "".
            matches.value_of(&flag.name).unwrap_or("").to_string()
        } else if matches.is_present(&flag.name) {
            // Boolean flag: encoded as a string because it is later exposed
            // as an environment variable.
            "true".to_string()
        } else {
            "".to_string()
        };
    }
    cmd
}
| 31.007407 | 98 | 0.571906 |
d50c987c85ea105bbf51e68963b09b6c7d62c4e2 | 779 | #[doc = "Reader of register ENTRY0"]
// NOTE(review): this file looks svd2rust-generated; if so, regenerate from
// the SVD rather than hand-editing — confirm before modifying.
pub type R = crate::R<u32, super::ENTRY0>;
#[doc = "Reader of field `EPRES`"]
pub type EPRES_R = crate::R<bool, bool>;
#[doc = "Reader of field `FMT`"]
pub type FMT_R = crate::R<bool, bool>;
#[doc = "Reader of field `ADDOFF`"]
pub type ADDOFF_R = crate::R<u32, u32>;
// Field accessors decode the raw 32-bit register value into typed readers.
impl R {
    #[doc = "Bit 0 - Entry Present"]
    #[inline(always)]
    pub fn epres(&self) -> EPRES_R {
        // Bit 0: mask with 0x01.
        EPRES_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - Format"]
    #[inline(always)]
    pub fn fmt(&self) -> FMT_R {
        // Bit 1: shift right once, then mask.
        FMT_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bits 12:31 - Address Offset"]
    #[inline(always)]
    pub fn addoff(&self) -> ADDOFF_R {
        // Bits 12..=31: a 20-bit field, hence the 0x000f_ffff mask.
        ADDOFF_R::new(((self.bits >> 12) & 0x000f_ffff) as u32)
    }
}
| 29.961538 | 63 | 0.559692 |
f53617f3741065008bf3ceee63129924f8ecb893 | 3,004 | use bencher::{benchmark_group, benchmark_main, Bencher};
fn fill_aa_tiny_skia(bencher: &mut Bencher) {
use tiny_skia::*;
let mut paint = Paint::default();
paint.set_color_rgba8(50, 127, 150, 200);
paint.anti_alias = true;
let mut pb = PathBuilder::new();
pb.move_to(500.0, 20.0);
pb.cubic_to(650.0, 320.0, 770.0, 650.0, 800.0, 980.0);
pb.line_to(20.0, 380.0);
pb.line_to(200.0, 980.0);
pb.cubic_to(230.0, 650.0, 350.0, 320.0, 500.0, 20.0);
pb.close();
let path = pb.finish().unwrap();
let mut pixmap = Pixmap::new(1000, 1000).unwrap();
bencher.iter(|| {
pixmap.fill_path(&path, &paint, FillRule::EvenOdd, Transform::identity(), None);
});
}
fn fill_aa_skia(bencher: &mut Bencher) {
use skia_rs::*;
let mut surface = Surface::new_rgba_premultiplied(1000, 1000).unwrap();
let mut paint = Paint::new();
paint.set_color(50, 127, 150, 200);
paint.set_blend_mode(BlendMode::SourceOver);
paint.set_anti_alias(true);
let mut path = Path::new();
path.move_to(500.0, 20.0);
path.cubic_to(650.0, 320.0, 770.0, 650.0, 800.0, 980.0);
path.line_to(20.0, 380.0);
path.line_to(200.0, 980.0);
path.cubic_to(230.0, 650.0, 350.0, 320.0, 500.0, 20.0);
path.close();
path.set_fill_type(FillType::EvenOdd);
bencher.iter(|| {
surface.draw_path(&path, &paint);
});
}
fn fill_aa_raqote(bencher: &mut Bencher) {
use raqote::*;
let mut dt = DrawTarget::new(1000, 1000);
let mut path = {
let mut pb = PathBuilder::new();
pb.move_to(500.0, 20.0);
pb.cubic_to(650.0, 320.0, 770.0, 650.0, 800.0, 980.0);
pb.line_to(20.0, 380.0);
pb.line_to(200.0, 980.0);
pb.cubic_to(230.0, 650.0, 350.0, 320.0, 500.0, 20.0);
pb.close();
pb.finish()
};
path.winding = Winding::EvenOdd;
// raqote uses ARGB order.
let src = Source::from(Color::new(200, 50, 127, 150));
let draw_opt = DrawOptions {
blend_mode: BlendMode::SrcOver,
alpha: 1.0,
antialias: AntialiasMode::Gray,
};
bencher.iter(|| {
dt.fill(&path, &src, &draw_opt);
});
}
fn fill_aa_cairo(bencher: &mut Bencher) {
use cairo::*;
let surface = ImageSurface::create(Format::ARgb32, 1000, 1000).unwrap();
let cr = Context::new(&surface);
cr.move_to(500.0, 20.0);
cr.curve_to(650.0, 320.0, 770.0, 650.0, 800.0, 980.0);
cr.line_to(20.0, 380.0);
cr.line_to(200.0, 980.0);
cr.curve_to(230.0, 650.0, 350.0, 320.0, 500.0, 20.0);
cr.close_path();
cr.set_source_rgba(50.0 / 255.0, 127.0 / 255.0, 150.0 / 255.0, 200.0 / 255.0);
cr.set_antialias(Antialias::Subpixel); // TODO: or Gray?
cr.set_fill_rule(FillRule::EvenOdd);
cr.set_operator(Operator::Over);
bencher.iter(|| {
cr.fill_preserve();
});
}
benchmark_group!(fill_aa,
fill_aa_tiny_skia,
fill_aa_skia,
fill_aa_raqote,
fill_aa_cairo
);
benchmark_main!(fill_aa);
| 26.821429 | 88 | 0.599867 |
ff2e431a6ee942cf2ba735b3804e072a2c632b07 | 62,012 | use super::{TypedReceiver, WorkerState};
use phala_crypto::{
aead, ecdh,
sr25519::{Persistence, KDF},
};
use phala_mq::MessageDispatcher;
use phala_types::{
messaging::{
GatekeeperEvent, KeyDistribution, MessageOrigin, MiningInfoUpdateEvent, MiningReportEvent,
RandomNumber, RandomNumberEvent, SettleInfo, SystemEvent, WorkerEvent, WorkerEventWithKey,
},
EcdhPublicKey, WorkerPublicKey,
};
use sp_core::{hashing, sr25519};
use crate::types::BlockInfo;
use std::{
collections::{BTreeMap, VecDeque},
convert::TryInto,
};
use fixed_macro::types::U64F64 as fp;
use log::debug;
use msg_trait::MessageChannel;
use phactory_api::prpc as pb;
use tokenomic::{FixedPoint, TokenomicInfo};
/// Block interval to generate pseudo-random on chain
///
/// WARNING: this interval needs to be large enough to cover the latency of the mq
const VRF_INTERVAL: u32 = 5;
// pseudo_random_number = blake2_256(last_random_number, block_number, derived_master_key)
//
// NOTICE: we abandon the random number involving master key signature, since the malleability of sr25519 signature
// refer to: https://github.com/w3f/schnorrkel/blob/34cdb371c14a73cbe86dfd613ff67d61662b4434/old/README.md#a-note-on-signature-malleability
/// Derives the on-chain pseudo-random number for `block_number` from the
/// previous random number and a sub-key of the gatekeeper master key.
fn next_random_number(
    master_key: &sr25519::Pair,
    block_number: chain::BlockNumber,
    last_random_number: RandomNumber,
) -> RandomNumber {
    // Derive a dedicated sub-key so the master key itself never feeds the hash.
    let randomness_key = master_key
        .derive_sr25519_pair(&[b"random_number"])
        .expect("should not fail with valid info");
    // Hash preimage: last_random_number || block_number (big-endian) || derived secret.
    let mut preimage = Vec::new();
    preimage.extend_from_slice(&last_random_number);
    preimage.extend_from_slice(&block_number.to_be_bytes());
    preimage.extend_from_slice(&randomness_key.dump_secret_key());
    hashing::blake2_256(&preimage)
}
/// Per-worker bookkeeping kept by the gatekeeper.
struct WorkerInfo {
    // State machine tracking the worker's registration/bench/mining status.
    state: WorkerState,
    // Block numbers of heartbeat challenges not yet answered.
    // NOTE(review): ordering assumed oldest-first — confirm at the push site.
    waiting_heartbeats: VecDeque<chain::BlockNumber>,
    // True when the worker is considered unresponsive; such workers are
    // excluded from the sum_share computation (see `process_messages`).
    unresponsive: bool,
    // Tokenomic accumulators; `tokenomic.share()` feeds sum_share.
    tokenomic: TokenomicInfo,
    // Presumably set when a heartbeat arrives within the current block and
    // cleared per block — the consuming code is outside this view; confirm.
    heartbeat_flag: bool,
    // Block the last heartbeat was challenged for / received at.
    last_heartbeat_for_block: chain::BlockNumber,
    last_heartbeat_at_block: chain::BlockNumber,
    // Last responsive/unresponsive event reported to chain, and the block it
    // was reported at (surfaced via `Gatekeeper::worker_state`).
    last_gk_responsive_event: i32,
    last_gk_responsive_event_at_block: chain::BlockNumber,
}
impl WorkerInfo {
    /// Creates a fresh record for `pubkey` with all counters and flags zeroed.
    fn new(pubkey: WorkerPublicKey) -> Self {
        Self {
            state: WorkerState::new(pubkey),
            waiting_heartbeats: Default::default(),
            unresponsive: false,
            tokenomic: Default::default(),
            heartbeat_flag: false,
            last_heartbeat_for_block: 0,
            last_heartbeat_at_block: 0,
            last_gk_responsive_event: 0,
            last_gk_responsive_event_at_block: 0,
        }
    }
}
/// The gatekeeper role of a worker: holds the master key, distributes it to
/// other workers, processes mining/system events and emits on-chain
/// randomness.
pub(crate) struct Gatekeeper<MsgChan> {
    master_key: sr25519::Pair,
    // Message processing is suspended until the master pubkey is on chain
    // (checked at the top of `process_messages`).
    master_pubkey_on_chain: bool,
    // Mirrors whether egress is live; toggled by (un)register_on_chain.
    registered_on_chain: bool,
    egress: MsgChan, // TODO.kevin: syncing the egress state while migrating.
    // Bounded subscriptions to the three inbound message topics.
    gatekeeper_events: TypedReceiver<GatekeeperEvent>,
    mining_events: TypedReceiver<MiningReportEvent>,
    system_events: TypedReceiver<SystemEvent>,
    // Per-worker bookkeeping, keyed by worker public key.
    workers: BTreeMap<WorkerPublicKey, WorkerInfo>,
    // Randomness
    // Previous VRF output, chained into the next one; starts at all zeroes.
    last_random_number: RandomNumber,
    // Monotonic counter making every generated AEAD IV unique.
    iv_seq: u64,
    // Tokenomic
    tokenomic_params: tokenomic::Params,
}
impl<MsgChan> Gatekeeper<MsgChan>
where
MsgChan: MessageChannel,
{
pub fn new(
master_key: sr25519::Pair,
recv_mq: &mut MessageDispatcher,
egress: MsgChan,
) -> Self {
egress.set_dummy(true);
Self {
master_key,
master_pubkey_on_chain: false,
registered_on_chain: false,
egress,
gatekeeper_events: recv_mq.subscribe_bound(),
mining_events: recv_mq.subscribe_bound(),
system_events: recv_mq.subscribe_bound(),
workers: Default::default(),
last_random_number: [0_u8; 32],
iv_seq: 0,
tokenomic_params: tokenomic::test_params(),
}
}
fn generate_iv(&mut self, block_number: chain::BlockNumber) -> aead::IV {
let derived_key = self
.master_key
.derive_sr25519_pair(&[b"iv_generator"])
.expect("should not fail with valid info");
let mut buf: Vec<u8> = Vec::new();
buf.extend(derived_key.dump_secret_key().iter().copied());
buf.extend(block_number.to_be_bytes().iter().copied());
buf.extend(self.iv_seq.to_be_bytes().iter().copied());
self.iv_seq += 1;
let hash = hashing::blake2_256(buf.as_ref());
hash[0..12]
.try_into()
.expect("should never fail given correct length; qed;")
}
pub fn register_on_chain(&mut self) {
info!("Gatekeeper: register on chain");
self.egress.set_dummy(false);
self.registered_on_chain = true;
}
#[allow(unused)]
pub fn unregister_on_chain(&mut self) {
info!("Gatekeeper: unregister on chain");
self.egress.set_dummy(true);
self.registered_on_chain = false;
}
pub fn registered_on_chain(&self) -> bool {
self.registered_on_chain
}
pub fn master_pubkey_uploaded(&mut self) {
self.master_pubkey_on_chain = true;
}
pub fn share_master_key(
&mut self,
pubkey: &WorkerPublicKey,
ecdh_pubkey: &EcdhPublicKey,
block_number: chain::BlockNumber,
) {
info!("Gatekeeper: try dispatch master key");
let derived_key = self
.master_key
.derive_sr25519_pair(&[&crate::generate_random_info()])
.expect("should not fail with valid info; qed.");
let my_ecdh_key = derived_key
.derive_ecdh_key()
.expect("ecdh key derivation should never failed with valid master key; qed.");
let secret = ecdh::agree(&my_ecdh_key, &ecdh_pubkey.0)
.expect("should never fail with valid ecdh key; qed.");
let iv = self.generate_iv(block_number);
let mut data = self.master_key.dump_secret_key().to_vec();
aead::encrypt(&iv, &secret, &mut data).expect("Failed to encrypt master key");
self.egress
.push_message(KeyDistribution::master_key_distribution(
*pubkey,
my_ecdh_key
.public()
.as_ref()
.try_into()
.expect("should never fail given pubkey with correct length; qed;"),
data,
iv,
));
}
pub fn process_messages(&mut self, block: &BlockInfo<'_>) {
if !self.master_pubkey_on_chain {
info!("Gatekeeper: not handling the messages because Gatekeeper has not launched on chain");
return;
}
let sum_share: FixedPoint = self
.workers
.values()
.filter(|info| !info.unresponsive)
.map(|info| info.tokenomic.share())
.sum();
let mut processor = GKMessageProcesser {
state: self,
block,
report: MiningInfoUpdateEvent::new(block.block_number, block.now_ms),
sum_share,
};
processor.process();
let report = processor.report;
if !report.is_empty() {
self.egress.push_message(report);
}
}
/// Derives and egresses the VRF random number for `block_number`, chaining
/// it to the previously emitted one. Only fires on `VRF_INTERVAL`
/// boundaries; other blocks are ignored.
pub fn emit_random_number(&mut self, block_number: chain::BlockNumber) {
    // Only emit on interval boundaries.
    if block_number % VRF_INTERVAL != 0 {
        return;
    }
    let fresh_random =
        next_random_number(&self.master_key, block_number, self.last_random_number);
    info!(
        "Gatekeeper: emit random number {} in block {}",
        hex::encode(&fresh_random),
        block_number
    );
    let event = GatekeeperEvent::new_random_number(
        block_number,
        fresh_random,
        self.last_random_number,
    );
    self.egress.push_message(event);
    self.last_random_number = fresh_random;
}
/// Snapshots the GK-side view of one worker as a protobuf `WorkerState`.
/// Returns `None` if the worker is unknown to this gatekeeper.
pub fn worker_state(&self, pubkey: &WorkerPublicKey) -> Option<pb::WorkerState> {
    let worker = self.workers.get(pubkey)?;
    let bench_state = worker.state.bench_state.as_ref().map(|bench| pb::BenchState {
        start_block: bench.start_block,
        start_time: bench.start_time,
        duration: bench.duration,
    });
    let mining_state = worker.state.mining_state.as_ref().map(|mining| pb::MiningState {
        session_id: mining.session_id,
        paused: matches!(mining.state, super::MiningState::Paused),
        start_time: mining.start_time,
    });
    // Tokenomic info is only meaningful while a mining session is active.
    let tokenomic_info: Option<pb::TokenomicInfo> = if worker.state.mining_state.is_some() {
        Some(worker.tokenomic.clone().into())
    } else {
        None
    };
    Some(pb::WorkerState {
        registered: worker.state.registered,
        unresponsive: worker.unresponsive,
        bench_state,
        mining_state,
        waiting_heartbeats: worker.waiting_heartbeats.iter().copied().collect(),
        last_heartbeat_for_block: worker.last_heartbeat_for_block,
        last_heartbeat_at_block: worker.last_heartbeat_at_block,
        last_gk_responsive_event: worker.last_gk_responsive_event,
        last_gk_responsive_event_at_block: worker.last_gk_responsive_event_at_block,
        tokenomic_info,
    })
}
}
/// Per-block message-processing context: borrows the gatekeeper state and
/// the block being processed, and accumulates the report that will be
/// egressed once all of the block's messages are handled.
struct GKMessageProcesser<'a, MsgChan> {
    /// Mutable gatekeeper state being updated.
    state: &'a mut Gatekeeper<MsgChan>,
    /// The block whose messages are being replayed.
    block: &'a BlockInfo<'a>,
    /// Accumulated offline/recovered/settle report for this block.
    report: MiningInfoUpdateEvent<chain::BlockNumber>,
    /// Total share of responsive workers, computed before processing.
    sum_share: FixedPoint,
}
impl<MsgChan> GKMessageProcesser<'_, MsgChan>
where
MsgChan: MessageChannel,
{
/// Drains and dispatches every queued mining / system / gatekeeper message
/// for the current block, then runs the per-block settlement pass
/// (`block_post_process`).
fn process(&mut self) {
    debug!("Gatekeeper: processing block {}", self.block.block_number);
    self.prepare();
    // Pull messages from the three subscriptions until all are exhausted.
    loop {
        let ok = phala_mq::select! {
            message = self.state.mining_events => match message {
                Ok((_, event, origin)) => {
                    debug!("Processing mining report: {:?}, origin: {}", event, origin);
                    self.process_mining_report(origin, event);
                }
                Err(e) => {
                    error!("Read message failed: {:?}", e);
                }
            },
            message = self.state.system_events => match message {
                Ok((_, event, origin)) => {
                    debug!("Processing system event: {:?}, origin: {}", event, origin);
                    self.process_system_event(origin, event);
                }
                Err(e) => {
                    error!("Read message failed: {:?}", e);
                }
            },
            message = self.state.gatekeeper_events => match message {
                Ok((_, event, origin)) => {
                    self.process_gatekeeper_event(origin, event);
                }
                Err(e) => {
                    error!("Read message failed: {:?}", e);
                }
            },
        };
        if ok.is_none() {
            // All messages processed
            break;
        }
    }
    self.block_post_process();
    debug!("Gatekeeper: processed block {}", self.block.block_number);
}
/// Clears the per-block heartbeat flag on every worker before this block's
/// messages are replayed.
fn prepare(&mut self) {
    self.state
        .workers
        .values_mut()
        .for_each(|worker| worker.heartbeat_flag = false);
}
/// Per-block settlement pass over every worker, run after all of the
/// block's messages were replayed:
/// - advances each worker's state machine for the processed block;
/// - case5: flips unresponsive workers back online when a heartbeat
///   arrived this block, reporting the recovery;
/// - case3: marks workers offline when their oldest pending heartbeat
///   exceeded the heartbeat window, reporting them;
/// - applies the V slash while unresponsive, or idle V accrual otherwise.
fn block_post_process(&mut self) {
    for worker_info in self.state.workers.values_mut() {
        debug!(
            "[{}] block_post_process",
            hex::encode(&worker_info.state.pubkey)
        );
        let mut tracker = WorkerSMTracker {
            waiting_heartbeats: &mut worker_info.waiting_heartbeats,
        };
        worker_info
            .state
            .on_block_processed(self.block, &mut tracker);
        if worker_info.state.mining_state.is_none() {
            debug!(
                "[{}] Mining already stopped, do nothing.",
                hex::encode(&worker_info.state.pubkey)
            );
            continue;
        }
        if worker_info.unresponsive {
            // heartbeat_flag was set by process_mining_report this block.
            if worker_info.heartbeat_flag {
                debug!(
                    "[{}] case5: Unresponsive, successful heartbeat.",
                    hex::encode(&worker_info.state.pubkey)
                );
                worker_info.unresponsive = false;
                self.report
                    .recovered_to_online
                    .push(worker_info.state.pubkey);
                worker_info.last_gk_responsive_event =
                    pb::ResponsiveEvent::ExitUnresponsive as _;
                worker_info.last_gk_responsive_event_at_block = self.block.block_number;
            }
        } else if let Some(&hb_sent_at) = worker_info.waiting_heartbeats.get(0) {
            // Only the oldest pending challenge is checked against the window.
            if self.block.block_number - hb_sent_at
                > self.state.tokenomic_params.heartbeat_window
            {
                debug!(
                    "[{}] case3: Idle, heartbeat failed, current={} waiting for {}.",
                    hex::encode(&worker_info.state.pubkey),
                    self.block.block_number,
                    hb_sent_at
                );
                self.report.offline.push(worker_info.state.pubkey);
                worker_info.unresponsive = true;
                worker_info.last_gk_responsive_event =
                    pb::ResponsiveEvent::EnterUnresponsive as _;
                worker_info.last_gk_responsive_event_at_block = self.block.block_number;
            }
        }
        let params = &self.state.tokenomic_params;
        if worker_info.unresponsive {
            debug!(
                "[{}] case3/case4: Idle, heartbeat failed or Unresponsive, no event",
                hex::encode(&worker_info.state.pubkey)
            );
            worker_info.tokenomic.update_v_slash(params, self.block.block_number);
        } else if !worker_info.heartbeat_flag {
            debug!(
                "[{}] case1: Idle, no event",
                hex::encode(&worker_info.state.pubkey)
            );
            worker_info.tokenomic.update_v_idle(params);
        }
    }
}
/// Handles a worker-originated mining report (currently only heartbeats):
/// validates the origin and session, confirms the oldest pending
/// heartbeat challenge, refreshes performance stats, and settles the
/// payout unless the worker is currently marked unresponsive.
///
/// Panics if a heartbeat arrives out of FIFO order — that indicates
/// poisoned GK/worker state and continuing would corrupt accounting.
fn process_mining_report(&mut self, origin: MessageOrigin, event: MiningReportEvent) {
    let worker_pubkey = if let MessageOrigin::Worker(pubkey) = origin {
        pubkey
    } else {
        error!("Invalid origin {:?} sent a {:?}", origin, event);
        return;
    };
    match event {
        MiningReportEvent::Heartbeat {
            session_id,
            challenge_block,
            challenge_time,
            iterations,
        } => {
            let worker_info = match self.state.workers.get_mut(&worker_pubkey) {
                Some(info) => info,
                None => {
                    error!(
                        "Unknown worker {} sent a {:?}",
                        hex::encode(worker_pubkey),
                        event
                    );
                    return;
                }
            };
            worker_info.last_heartbeat_at_block = self.block.block_number;
            worker_info.last_heartbeat_for_block = challenge_block;
            // Heartbeats must answer challenges in FIFO order.
            if Some(&challenge_block) != worker_info.waiting_heartbeats.get(0) {
                error!("Fatal error: Unexpected heartbeat {:?}", event);
                error!("Sent from worker {}", hex::encode(worker_pubkey));
                error!("Waiting heartbeats {:#?}", worker_info.waiting_heartbeats);
                // The state has been poisoned. Makes no sense to keep moving on.
                panic!("GK or Worker state poisoned");
            }
            // The oldest one confirmed.
            let _ = worker_info.waiting_heartbeats.pop_front();
            let mining_state = if let Some(state) = &worker_info.state.mining_state {
                state
            } else {
                debug!(
                    "[{}] Mining already stopped, ignore the heartbeat.",
                    hex::encode(&worker_info.state.pubkey)
                );
                return;
            };
            // Stale responses from an earlier session are dropped.
            if session_id != mining_state.session_id {
                debug!(
                    "[{}] Heartbeat response to previous mining sessions, ignore it.",
                    hex::encode(&worker_info.state.pubkey)
                );
                return;
            }
            worker_info.heartbeat_flag = true;
            let tokenomic = &mut worker_info.tokenomic;
            tokenomic.update_p_instant(self.block.now_ms, iterations);
            tokenomic.challenge_time_last = challenge_time;
            tokenomic.iteration_last = iterations;
            if worker_info.unresponsive {
                // case5 recovery itself is reported in block_post_process.
                debug!(
                    "[{}] heartbeat handling case5: Unresponsive, successful heartbeat.",
                    hex::encode(&worker_info.state.pubkey)
                );
            } else {
                debug!("[{}] heartbeat handling case2: Idle, successful heartbeat, report to pallet", hex::encode(&worker_info.state.pubkey));
                let (payout, treasury) = worker_info.tokenomic.update_v_heartbeat(
                    &self.state.tokenomic_params,
                    self.sum_share,
                    self.block.now_ms,
                    self.block.block_number,
                );
                // NOTE: keep the reporting order (vs the one while mining stop).
                self.report.settle.push(SettleInfo {
                    pubkey: worker_pubkey,
                    v: worker_info.tokenomic.v.to_bits(),
                    payout: payout.to_bits(),
                    treasury: treasury.to_bits(),
                });
            }
        }
    }
}
/// Handles a pallet-originated system event: lazily creates worker
/// records on registration, replays the event through every worker's
/// state machine, and updates tokenomic bookkeeping for worker lifecycle
/// transitions (registration, mining start/stop).
fn process_system_event(&mut self, origin: MessageOrigin, event: SystemEvent) {
    if !origin.is_pallet() {
        error!("Invalid origin {:?} sent a {:?}", origin, event);
        return;
    }
    // Create the worker info on its first time registered
    if let SystemEvent::WorkerEvent(WorkerEventWithKey {
        pubkey,
        event: WorkerEvent::Registered(_),
    }) = &event
    {
        let _ = self
            .state
            .workers
            .entry(*pubkey)
            .or_insert_with(|| WorkerInfo::new(*pubkey));
    }
    let log_on = log::log_enabled!(log::Level::Debug);
    // TODO.kevin: Avoid unnecessary iteration for WorkerEvents.
    for worker_info in self.state.workers.values_mut() {
        // Replay the event on worker state, and collect the egressed heartbeat into waiting_heartbeats.
        let mut tracker = WorkerSMTracker {
            waiting_heartbeats: &mut worker_info.waiting_heartbeats,
        };
        debug!("for worker {}", hex::encode(&worker_info.state.pubkey));
        worker_info
            .state
            .process_event(self.block, &event, &mut tracker, log_on);
    }
    match &event {
        SystemEvent::WorkerEvent(e) => {
            if let Some(worker) = self.state.workers.get_mut(&e.pubkey) {
                match &e.event {
                    WorkerEvent::Registered(info) => {
                        worker.tokenomic.confidence_level = info.confidence_level;
                    }
                    WorkerEvent::BenchStart { .. } => {}
                    WorkerEvent::BenchScore(_) => {}
                    WorkerEvent::MiningStart {
                        session_id: _, // Already recorded by the state machine.
                        init_v,
                        init_p,
                    } => {
                        let v = FixedPoint::from_bits(*init_v);
                        let prev = worker.tokenomic;
                        // NOTE.kevin: To track the heartbeats by global timeline, don't clear the waiting_heartbeats.
                        // worker.waiting_heartbeats.clear();
                        worker.unresponsive = false;
                        // Reset tokenomic state for the new session, keeping
                        // only the confidence level from the previous one.
                        worker.tokenomic = TokenomicInfo {
                            v,
                            v_init: v,
                            payable: fp!(0),
                            v_update_at: self.block.now_ms,
                            v_update_block: self.block.block_number,
                            iteration_last: 0,
                            challenge_time_last: self.block.now_ms,
                            p_bench: FixedPoint::from_num(*init_p),
                            p_instant: FixedPoint::from_num(*init_p),
                            confidence_level: prev.confidence_level,
                            last_payout: fp!(0),
                            last_payout_at_block: 0,
                            total_payout: fp!(0),
                            total_payout_count: 0,
                            last_slash: fp!(0),
                            last_slash_at_block: 0,
                            total_slash: fp!(0),
                            total_slash_count: 0,
                        };
                    }
                    WorkerEvent::MiningStop => {
                        // TODO.kevin: report the final V?
                        // We may need to report a Stop event in worker.
                        // Then GK report the final V to pallet, when observed the Stop event from worker.
                        // The pallet wait for the final V report in CoolingDown state.
                        // Pallet ---------(Stop)--------> Worker
                        // Worker ----(Rest Heartbeats)--> *
                        // Worker --------(Stopped)------> *
                        // GK --------(Final V)------> Pallet
                        // Just report the final V ATM.
                        // NOTE: keep the reporting order (vs the one while heartbeat).
                        self.report.settle.push(SettleInfo {
                            pubkey: worker.state.pubkey,
                            v: worker.tokenomic.v.to_bits(),
                            payout: 0,
                            treasury: 0,
                        })
                    }
                    WorkerEvent::MiningEnterUnresponsive => {}
                    WorkerEvent::MiningExitUnresponsive => {}
                }
            }
        }
        SystemEvent::HeartbeatChallenge(_) => {}
    }
}
/// Handles a gatekeeper-level event: verifies peer-emitted random numbers,
/// and applies pallet-issued tokenomic parameter updates or V repairs.
fn process_gatekeeper_event(&mut self, origin: MessageOrigin, event: GatekeeperEvent) {
    info!("Incoming gatekeeper event: {:?}", event);
    match event {
        GatekeeperEvent::NewRandomNumber(random_number_event) => {
            self.process_random_number_event(origin, random_number_event);
        }
        GatekeeperEvent::TokenomicParametersChanged(params) => {
            // Only the pallet is allowed to change the parameters.
            if !origin.is_pallet() {
                return;
            }
            self.state.tokenomic_params = params.into();
            info!(
                "Tokenomic parameter updated: {:#?}",
                &self.state.tokenomic_params
            );
        }
        GatekeeperEvent::RepairV => {
            // Only the pallet is allowed to trigger the repair.
            if !origin.is_pallet() {
                return;
            }
            info!("Repairing V");
            // Fixup the V for those workers that have been slashed due to the initial tokenomic parameters
            // not being applied.
            //
            // See below links for more detail:
            // https://github.com/Phala-Network/phala-blockchain/issues/489
            // https://github.com/Phala-Network/phala-blockchain/issues/495
            // https://forum.phala.network/t/topic/2753#timeline
            // https://forum.phala.network/t/topic/2909
            for worker in self.state.workers.values_mut() {
                if worker.state.mining_state.is_some()
                    && worker.tokenomic.v < worker.tokenomic.v_init
                {
                    worker.tokenomic.v = worker.tokenomic.v_init;
                }
            }
        }
    }
}
/// Verify on-chain random number
///
/// Recomputes the expected random number from the local master key and
/// panics if the peer-reported one disagrees — a mismatch means the GK
/// state is poisoned.
fn process_random_number_event(&mut self, origin: MessageOrigin, event: RandomNumberEvent) {
    if !origin.is_gatekeeper() {
        error!("Invalid origin {:?} sent a {:?}", origin, event);
        return;
    }
    // Verifying the value directly avoids a storage access to check the
    // sender's identity.
    let expect_random = next_random_number(
        &self.state.master_key,
        event.block_number,
        event.last_random_number,
    );
    if event.random_number != expect_random {
        error!("Fatal error: Expect random number {:?}", expect_random);
        panic!("GK state poisoned");
    }
}
}
/// Collects heartbeat challenges emitted by the worker state machine into
/// the worker's pending-heartbeat queue.
struct WorkerSMTracker<'a> {
    waiting_heartbeats: &'a mut VecDeque<chain::BlockNumber>,
}
impl super::WorkerStateMachineCallback for WorkerSMTracker<'_> {
    /// Records that this worker is expected to answer a heartbeat for
    /// `challenge_block`; confirmed in FIFO order by the GK.
    fn heartbeat(
        &mut self,
        _session_id: u32,
        challenge_block: runtime::BlockNumber,
        _challenge_time: u64,
        _iterations: u64,
    ) {
        debug!("Worker should emit heartbeat for {}", challenge_block);
        self.waiting_heartbeats.push_back(challenge_block);
    }
}
mod tokenomic {
pub use fixed::types::U64F64 as FixedPoint;
use fixed_macro::types::U64F64 as fp;
use fixed_sqrt::FixedSqrt as _;
use phala_types::messaging::TokenomicParameters;
/// `v` squared.
fn square(v: FixedPoint) -> FixedPoint {
    v * v
}
/// Confidence-level multiplier used in the share computation: levels 1–3
/// and 128 score 1.0, level 4 scores 0.8, level 5 scores 0.7, and any
/// other level scores 0.
fn conf_score(level: u8) -> FixedPoint {
    match level {
        1..=3 | 128 => fp!(1),
        4 => fp!(0.8),
        5 => fp!(0.7),
        _ => fp!(0),
    }
}
/// Per-worker tokenomic accounting state. All monetary/performance values
/// are U64F64 fixed-point.
#[derive(Default, Clone, Copy)]
pub struct TokenomicInfo {
    /// Current value V of the worker.
    pub v: FixedPoint,
    /// V at the start of the current mining session.
    pub v_init: FixedPoint,
    /// Accrued, not-yet-settled reward since the last payout.
    pub payable: FixedPoint,
    /// Timestamp (ms) of the last settlement.
    pub v_update_at: u64,
    /// Block number of the last settlement.
    pub v_update_block: u32,
    /// Iteration counter reported by the last heartbeat.
    pub iteration_last: u64,
    /// Challenge timestamp (ms) of the last heartbeat.
    pub challenge_time_last: u64,
    /// Benchmarked performance score (same scale as `p_instant`).
    pub p_bench: FixedPoint,
    /// Instantaneous performance; capped at 120% of `p_bench`.
    pub p_instant: FixedPoint,
    /// Confidence level from worker registration (feeds `conf_score`).
    pub confidence_level: u8,
    // --- payout statistics ---
    pub last_payout: FixedPoint,
    pub last_payout_at_block: chain::BlockNumber,
    pub total_payout: FixedPoint,
    pub total_payout_count: chain::BlockNumber,
    // --- slash statistics ---
    pub last_slash: FixedPoint,
    pub last_slash_at_block: chain::BlockNumber,
    pub total_slash: FixedPoint,
    pub total_slash_count: chain::BlockNumber,
}
/// Conversion to the protobuf representation; fixed-point values are
/// rendered as decimal strings.
impl From<TokenomicInfo> for super::pb::TokenomicInfo {
    fn from(info: TokenomicInfo) -> Self {
        Self {
            v: info.v.to_string(),
            v_init: info.v_init.to_string(),
            payable: info.payable.to_string(),
            v_update_at: info.v_update_at,
            v_update_block: info.v_update_block,
            iteration_last: info.iteration_last,
            challenge_time_last: info.challenge_time_last,
            p_bench: info.p_bench.to_string(),
            p_instant: info.p_instant.to_string(),
            confidence_level: info.confidence_level as _,
            last_payout: info.last_payout.to_string(),
            last_payout_at_block: info.last_payout_at_block,
            last_slash: info.last_slash.to_string(),
            last_slash_at_block: info.last_slash_at_block,
            total_payout: info.total_payout.to_string(),
            total_payout_count: info.total_payout_count,
            total_slash: info.total_slash.to_string(),
            total_slash_count: info.total_slash_count,
        }
    }
}
/// Tokenomic parameters decoded from on-chain `TokenomicParameters`.
///
/// NOTE(review): "ration" in the field names looks like a historical
/// misspelling of "ratio"; kept as-is since the names are used throughout
/// this module.
#[derive(Debug)]
pub struct Params {
    /// Per-block V growth factor used in idle accrual (see `update_v_idle`).
    rho: FixedPoint,
    /// Fraction of V slashed per block while unresponsive.
    slash_rate: FixedPoint,
    /// Total reward budget distributed per block.
    budget_per_block: FixedPoint,
    /// Upper bound of V.
    v_max: FixedPoint,
    /// Cost coefficient multiplying `p_bench` in the idle cost function.
    cost_k: FixedPoint,
    /// Constant term of the idle cost function.
    cost_b: FixedPoint,
    /// Fraction of the budget sent to the treasury.
    treasury_ration: FixedPoint,
    /// Fraction of the budget paid to workers (1 - treasury_ration).
    payout_ration: FixedPoint,
    /// Max blocks to wait for a heartbeat before marking a worker offline.
    pub heartbeat_window: u32,
}
impl From<TokenomicParameters> for Params {
    /// Decodes the on-chain parameters, which are transmitted as raw
    /// U64F64 bit patterns. The payout ratio is derived as the complement
    /// of the treasury ratio.
    fn from(params: TokenomicParameters) -> Self {
        let treasury_ration = FixedPoint::from_bits(params.treasury_ratio);
        let payout_ration = fp!(1) - treasury_ration;
        Params {
            rho: FixedPoint::from_bits(params.rho),
            slash_rate: FixedPoint::from_bits(params.slash_rate),
            budget_per_block: FixedPoint::from_bits(params.budget_per_block),
            v_max: FixedPoint::from_bits(params.v_max),
            cost_k: FixedPoint::from_bits(params.cost_k),
            cost_b: FixedPoint::from_bits(params.cost_b),
            treasury_ration,
            payout_ration,
            heartbeat_window: params.heartbeat_window,
        }
    }
}
/// Fixed parameter set used by the unit tests below; the exact constants
/// are what the expected V values in `check_tokenomic_numerics` were
/// computed against.
pub fn test_params() -> Params {
    Params {
        rho: fp!(1.000000666600231),
        slash_rate: fp!(0.0000033333333333333240063),
        budget_per_block: fp!(100),
        v_max: fp!(30000),
        cost_k: fp!(0.000000015815258751856933056),
        cost_b: fp!(0.000033711472602739674283),
        treasury_ration: fp!(0.2),
        payout_ration: fp!(0.8),
        heartbeat_window: 10,
    }
}
impl TokenomicInfo {
/// case1: Idle, no event
///
/// Accrues V for an idle block:
/// `delta = perf_multiplier * ((rho - 1) * V + cost_idle)`.
/// V is capped at `v_max`, while the uncapped delta still accrues to
/// `payable` for the next settlement.
pub fn update_v_idle(&mut self, params: &Params) {
    let cost_idle = params.cost_k * self.p_bench + params.cost_b;
    // Workers performing above their benchmark accrue proportionally more;
    // a zero benchmark falls back to a neutral multiplier.
    let perf_multiplier = if self.p_bench == fp!(0) {
        fp!(1)
    } else {
        self.p_instant / self.p_bench
    };
    let delta_v = perf_multiplier * ((params.rho - fp!(1)) * self.v + cost_idle);
    let v = self.v + delta_v;
    self.v = v.min(params.v_max);
    self.payable += delta_v;
}
/// case2: Idle, successful heartbeat
/// return payout
///
/// Settles the accrued `payable` against this worker's proportional share
/// of the block budget accumulated since the last settlement. Returns
/// `(actual_payout, actual_treasury)`; both zero if nothing is settled.
pub fn update_v_heartbeat(
    &mut self,
    params: &Params,
    sum_share: FixedPoint,
    now_ms: u64,
    block_number: u32,
) -> (FixedPoint, FixedPoint) {
    const NO_UPDATE: (FixedPoint, FixedPoint) = (fp!(0), fp!(0));
    if sum_share == fp!(0) {
        return NO_UPDATE;
    }
    if self.payable == fp!(0) {
        return NO_UPDATE;
    }
    if block_number <= self.v_update_block {
        // May receive more than one heartbeat for a single worker in a single block.
        return NO_UPDATE;
    }
    let share = self.share();
    if share == fp!(0) {
        return NO_UPDATE;
    }
    // Budget accumulated over the blocks since the previous settlement,
    // scaled by this worker's fraction of the total share.
    let blocks = FixedPoint::from_num(block_number - self.v_update_block);
    let budget = share / sum_share * params.budget_per_block * blocks;
    let to_payout = budget * params.payout_ration;
    let to_treasury = budget * params.treasury_ration;
    // Payout is capped by both the accrued payable and the budgeted payout.
    let actual_payout = self.payable.max(fp!(0)).min(to_payout); // w
    let actual_treasury = (actual_payout / to_payout) * to_treasury; // to_payout > 0
    self.v -= actual_payout;
    self.payable = fp!(0);
    self.v_update_at = now_ms;
    self.v_update_block = block_number;
    // stats
    self.last_payout = actual_payout;
    self.last_payout_at_block = block_number;
    self.total_payout += actual_payout;
    self.total_payout_count += 1;
    (actual_payout, actual_treasury)
}
/// case3/case4: unresponsive worker.
///
/// Slashes `slash_rate` of the current V and forfeits any accrued payable.
pub fn update_v_slash(&mut self, params: &Params, block_number: chain::BlockNumber) {
    let slash = self.v * params.slash_rate;
    self.v -= slash;
    self.payable = fp!(0);
    // stats
    self.last_slash = slash;
    self.last_slash_at_block = block_number;
    self.total_slash += slash;
    self.total_slash_count += 1;
}
/// Worker share used to split the per-block budget:
/// `sqrt(V^2 + (2 * p_instant * conf_score)^2)`.
pub fn share(&self) -> FixedPoint {
    (square(self.v) + square(fp!(2) * self.p_instant * conf_score(self.confidence_level)))
        .sqrt()
}
/// Updates the instantaneous performance from a heartbeat report.
pub fn update_p_instant(&mut self, now: u64, iterations: u64) {
    // Ignore out-of-order reports; this also guarantees dt > 0 below.
    if now <= self.challenge_time_last {
        return;
    }
    // The worker's counter went backwards (e.g. after a restart); resync
    // the baseline to avoid an underflow in the subtraction below.
    if iterations < self.iteration_last {
        self.iteration_last = iterations;
    }
    let dt = FixedPoint::from_num(now - self.challenge_time_last) / 1000;
    let p = FixedPoint::from_num(iterations - self.iteration_last) / dt * 6; // 6s iterations
    // Cap at 120% of the benchmarked performance.
    self.p_instant = p.min(self.p_bench * fp!(1.2));
}
}
}
/// Minimal abstraction over the GK egress channel so tests can substitute
/// an in-memory collector for the real signed mq channel.
mod msg_trait {
    use parity_scale_codec::Encode;
    use phala_mq::{BindTopic, MessageSigner};
    /// Outgoing message channel used by the gatekeeper.
    pub trait MessageChannel {
        /// Pushes a SCALE-encodable message to the type's bound topic.
        fn push_message<M: Encode + BindTopic>(&self, message: M);
        /// Toggles dummy mode (presumably suppressing real egress — see
        /// `phala_mq::MessageChannel::set_dummy` for the exact semantics).
        fn set_dummy(&self, dummy: bool);
    }
    impl<T: MessageSigner> MessageChannel for phala_mq::MessageChannel<T> {
        fn push_message<M: Encode + BindTopic>(&self, message: M) {
            self.send(&message);
        }
        fn set_dummy(&self, dummy: bool) {
            // Delegates to the inherent `set_dummy` on the concrete channel;
            // inherent methods take precedence over trait methods in method
            // resolution, so this is not a recursive call.
            self.set_dummy(dummy);
        }
    }
}
#[cfg(test)]
pub mod tests {
use super::{msg_trait::MessageChannel, BlockInfo, FixedPoint, Gatekeeper};
use fixed_macro::types::U64F64 as fp;
use parity_scale_codec::{Decode, Encode};
use phala_mq::{BindTopic, Message, MessageDispatcher, MessageOrigin};
use phala_types::{messaging as msg, WorkerPublicKey};
use std::cell::RefCell;
type MiningInfoUpdateEvent = super::MiningInfoUpdateEvent<chain::BlockNumber>;
/// Test convenience: dispatch a typed message to its bound topic.
trait DispatcherExt {
    fn dispatch_bound<M: Encode + BindTopic>(&mut self, sender: &MessageOrigin, msg: M);
}
impl DispatcherExt for MessageDispatcher {
    fn dispatch_bound<M: Encode + BindTopic>(&mut self, sender: &MessageOrigin, msg: M) {
        // The dispatch result is irrelevant for tests.
        let _ = self.dispatch(mk_msg(sender, msg));
    }
}
/// Builds a raw `Message` from `sender` to `M`'s bound topic carrying the
/// SCALE-encoded payload.
fn mk_msg<M: Encode + BindTopic>(sender: &MessageOrigin, msg: M) -> Message {
    let payload = msg.encode();
    let destination = M::topic().into();
    Message {
        sender: sender.clone(),
        destination,
        payload,
    }
}
/// Test egress channel that records pushed messages in memory instead of
/// sending them.
#[derive(Default)]
struct CollectChannel {
    messages: RefCell<Vec<Message>>,
}
impl CollectChannel {
    /// Takes all collected messages, leaving the buffer empty.
    fn drain(&self) -> Vec<Message> {
        self.messages.borrow_mut().drain(..).collect()
    }
    /// Drains and decodes only the messages addressed to `M`'s topic;
    /// undecodable payloads are silently dropped.
    fn drain_decode<M: Decode + BindTopic>(&self) -> Vec<M> {
        self.drain()
            .into_iter()
            .filter_map(|m| {
                if &m.destination.path()[..] == &M::topic() {
                    Decode::decode(&mut &m.payload[..]).ok()
                } else {
                    None
                }
            })
            .collect()
    }
    /// Drains the mining info update reports egressed by the GK.
    fn drain_mining_info_update_event(&self) -> Vec<MiningInfoUpdateEvent> {
        self.drain_decode()
    }
    /// Discards all collected messages.
    fn clear(&self) {
        self.messages.borrow_mut().clear();
    }
}
impl MessageChannel for CollectChannel {
    fn push_message<M: Encode + BindTopic>(&self, message: M) {
        // Record the would-be egress message instead of sending it.
        let message = Message {
            sender: MessageOrigin::Gatekeeper,
            destination: M::topic().into(),
            payload: message.encode(),
        };
        self.messages.borrow_mut().push(message);
    }
    fn set_dummy(&self, _dummy: bool) {}
}
/// Test harness bundling the message dispatcher, the gatekeeper under
/// test, and two well-known worker identities.
struct Roles {
    mq: MessageDispatcher,
    gk: Gatekeeper<CollectChannel>,
    workers: [WorkerPublicKey; 2],
}
impl Roles {
    /// Builds a dispatcher and a GK wired to an in-memory egress, with the
    /// master pubkey flagged as on-chain so message processing is enabled,
    /// plus two fixed worker identities.
    fn test_roles() -> Roles {
        use sp_core::crypto::Pair;
        let mut mq = MessageDispatcher::new();
        let egress = CollectChannel::default();
        let key = sp_core::sr25519::Pair::from_seed(&[1u8; 32]);
        let mut gk = Gatekeeper::new(key, &mut mq, egress);
        gk.master_pubkey_on_chain = true;
        Roles {
            mq,
            gk,
            workers: [
                WorkerPublicKey::from_raw([0x01u8; 32]),
                WorkerPublicKey::from_raw([0x02u8; 32]),
            ],
        }
    }
    /// Helper scoped to worker `n` for injecting messages.
    fn for_worker(&mut self, n: usize) -> ForWorker {
        ForWorker {
            mq: &mut self.mq,
            pubkey: &self.workers[n],
        }
    }
    /// GK-side record of worker `n`; panics if not yet registered.
    fn get_worker(&self, n: usize) -> &super::WorkerInfo {
        &self.gk.workers[&self.workers[n]]
    }
    /// Mutable GK-side record of worker `n`; panics if not yet registered.
    fn get_worker_mut(&mut self, n: usize) -> &mut super::WorkerInfo {
        self.gk.workers.get_mut(&self.workers[n]).unwrap()
    }
}
/// Borrowed view for crafting messages from/about a single worker.
struct ForWorker<'a> {
    mq: &'a mut MessageDispatcher,
    pubkey: &'a WorkerPublicKey,
}
impl ForWorker<'_> {
    /// Dispatches a pallet-originated `WorkerEvent` targeting this worker.
    fn pallet_say(&mut self, event: msg::WorkerEvent) {
        let sender = MessageOrigin::Pallet(b"Pallet".to_vec());
        let message = msg::SystemEvent::new_worker_event(self.pubkey.clone(), event);
        self.mq.dispatch_bound(&sender, message);
    }
    /// Dispatches a message as if sent by this worker itself.
    fn say<M: Encode + BindTopic>(&mut self, message: M) {
        let sender = MessageOrigin::Worker(self.pubkey.clone());
        self.mq.dispatch_bound(&sender, message);
    }
    /// Emits a heartbeat challenge that only this worker will answer.
    fn challenge(&mut self) {
        use sp_core::U256;
        let sender = MessageOrigin::Pallet(b"Pallet".to_vec());
        // Use the same hash algorithm as the worker to produce the seed, so that only this worker will
        // respond to the challenge
        let pkh = sp_core::blake2_256(self.pubkey.as_ref());
        let hashed_id: U256 = pkh.into();
        let challenge = msg::HeartbeatChallenge {
            seed: hashed_id,
            online_target: U256::zero(),
        };
        let message = msg::SystemEvent::HeartbeatChallenge(challenge);
        self.mq.dispatch_bound(&sender, message);
    }
    /// Sends a heartbeat response for the challenge emitted at `block`.
    fn heartbeat(&mut self, session_id: u32, block: chain::BlockNumber, iterations: u64) {
        let message = msg::MiningReportEvent::Heartbeat {
            session_id,
            challenge_block: block,
            challenge_time: block_ts(block),
            iterations,
        };
        self.say(message)
    }
}
/// Runs `call` with a synthetic `BlockInfo` for `block_number`, backed by
/// empty storage and a fresh message dispatcher.
fn with_block(block_number: chain::BlockNumber, call: impl FnOnce(&BlockInfo)) {
    // GK never use the storage ATM.
    let storage = crate::Storage::default();
    let mut mq = phala_mq::MessageDispatcher::new();
    let block = BlockInfo {
        block_number,
        now_ms: block_ts(block_number),
        storage: &storage,
        recv_mq: &mut mq,
        side_task_man: &mut Default::default(),
    };
    call(&block);
}
/// Mock block timestamp in milliseconds, assuming a 12-second block time.
fn block_ts(block_number: chain::BlockNumber) -> u64 {
    u64::from(block_number) * 12_000
}
/// A `Registered` event must create a GK-side record for the worker, while
/// a `MiningStart` for a never-registered worker must be ignored.
#[test]
fn gk_should_be_able_to_observe_worker_states() {
    let mut r = Roles::test_roles();
    with_block(1, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    assert_eq!(r.gk.workers.len(), 1);
    assert!(r.get_worker(0).state.registered);
    with_block(2, |block| {
        // Worker 1 was never registered; its MiningStart must be dropped.
        let mut worker1 = r.for_worker(1);
        worker1.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: 1,
            init_p: 100,
        });
        r.gk.process_messages(block);
    });
    assert_eq!(
        r.gk.workers.len(),
        1,
        "Unregistered worker should not start mining."
    );
}
/// Pending heartbeats from a previous mining session must still be
/// confirmed after a stop/restart cycle, and recovery from unresponsive
/// only happens once all outstanding challenges are answered.
#[test]
fn gk_should_not_miss_any_heartbeats_cross_session() {
    let mut r = Roles::test_roles();
    with_block(1, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    assert_eq!(r.gk.workers.len(), 1);
    assert!(r.get_worker(0).state.registered);
    with_block(2, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: 1,
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    // Stop mining before the heartbeat response.
    with_block(3, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStop);
        r.gk.process_messages(block);
    });
    with_block(4, |block| {
        r.gk.process_messages(block);
    });
    with_block(5, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 2,
            init_v: 1,
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    // Force enter unresponsive
    with_block(100, |block| {
        r.gk.process_messages(block);
    });
    assert_eq!(
        r.get_worker(0).waiting_heartbeats.len(),
        2,
        "There should be 2 waiting HBs"
    );
    assert!(
        r.get_worker(0).unresponsive,
        "The worker should be unresponsive now"
    );
    with_block(101, |block| {
        let mut worker = r.for_worker(0);
        // Response the first challenge.
        worker.heartbeat(1, 2, 10000000);
        r.gk.process_messages(block);
    });
    assert_eq!(
        r.get_worker(0).waiting_heartbeats.len(),
        1,
        "There should be only one waiting HBs"
    );
    assert!(
        r.get_worker(0).unresponsive,
        "The worker should still be unresponsive now"
    );
    with_block(102, |block| {
        let mut worker = r.for_worker(0);
        // Response the second challenge.
        worker.heartbeat(2, 5, 10000000);
        r.gk.process_messages(block);
    });
    assert!(
        !r.get_worker(0).unresponsive,
        "The worker should be mining idle now"
    );
}
/// case1: an idle mining worker with no events accrues V every processed
/// block, with no report egressed.
#[test]
fn gk_should_reward_normal_workers_do_not_hit_the_seed_case1() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(1).to_bits(),
            init_p: 100,
        });
        r.gk.process_messages(block);
    });
    block_number += 1;
    // Normal Idle state, no event
    let v_snap = r.get_worker(0).tokenomic.v;
    r.gk.egress.clear();
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert!(!r.get_worker(0).unresponsive, "Worker should be online");
    assert_eq!(
        r.gk.egress.drain_mining_info_update_event().len(),
        0,
        "Should not report any event"
    );
    assert!(
        v_snap < r.get_worker(0).tokenomic.v,
        "Worker should be rewarded"
    );
    // Once again.
    let v_snap = r.get_worker(0).tokenomic.v;
    r.gk.egress.clear();
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert!(!r.get_worker(0).unresponsive, "Worker should be online");
    assert_eq!(
        r.gk.egress.drain_mining_info_update_event().len(),
        0,
        "Should not report any event"
    );
    assert!(
        v_snap < r.get_worker(0).tokenomic.v,
        "Worker should be rewarded"
    );
}
/// case2: a heartbeat arriving within the window settles the payout (V
/// decreases) and produces exactly one settle entry in the report.
#[test]
fn gk_should_report_payout_for_normal_heartbeats_case2() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(1).to_bits(),
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    let challenge_block = block_number;
    block_number += r.gk.tokenomic_params.heartbeat_window;
    // About to timeout then A heartbeat received, report payout event.
    let v_snap = r.get_worker(0).tokenomic.v;
    r.gk.egress.clear();
    with_block(block_number, |block| {
        let mut worker = r.for_worker(0);
        worker.heartbeat(1, challenge_block, 10000000);
        r.gk.process_messages(block);
    });
    assert!(!r.get_worker(0).unresponsive, "Worker should be online");
    assert!(
        v_snap > r.get_worker(0).tokenomic.v,
        "Worker should be paid out"
    );
    {
        let messages = r.gk.egress.drain_mining_info_update_event();
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0].offline.len(), 0);
        assert_eq!(messages[0].recovered_to_online.len(), 0);
        assert_eq!(messages[0].settle.len(), 1);
    }
}
/// case3: once the heartbeat window elapses without a response the worker
/// is reported offline exactly once and then slashed every block.
#[test]
fn gk_should_slash_and_report_offline_workers_case3() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(1).to_bits(),
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    assert!(r.get_worker(0).state.mining_state.is_some());
    block_number += r.gk.tokenomic_params.heartbeat_window;
    // About to timeout
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert!(!r.get_worker(0).unresponsive);
    let v_snap = r.get_worker(0).tokenomic.v;
    block_number += 1;
    // Heartbeat timed out
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert!(r.get_worker(0).unresponsive);
    {
        let offline = [r.workers[0].clone()].to_vec();
        let expected_message = MiningInfoUpdateEvent {
            block_number,
            timestamp_ms: block_ts(block_number),
            offline,
            recovered_to_online: Vec::new(),
            settle: Vec::new(),
        };
        let messages = r.gk.egress.drain_mining_info_update_event();
        assert_eq!(messages.len(), 1);
        assert_eq!(messages[0], expected_message);
    }
    assert!(
        v_snap > r.get_worker(0).tokenomic.v,
        "Worker should be slashed"
    );
    r.gk.egress.clear();
    let v_snap = r.get_worker(0).tokenomic.v;
    block_number += 1;
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    // No duplicate offline report while still unresponsive, but slashing
    // continues every block.
    assert_eq!(
        r.gk.egress.drain_mining_info_update_event().len(),
        0,
        "Should not report offline workers"
    );
    assert!(
        v_snap > r.get_worker(0).tokenomic.v,
        "Worker should be slashed again"
    );
}
/// case4: a worker already marked offline keeps being slashed each block
/// with no further offline reports until a heartbeat is received.
#[test]
fn gk_should_slash_offline_workers_sliently_case4() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(1).to_bits(),
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    block_number += r.gk.tokenomic_params.heartbeat_window;
    // About to timeout
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    block_number += 1;
    // Heartbeat timed out
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    r.gk.egress.clear();
    // Worker already offline, don't report again until one more heartbeat received.
    let v_snap = r.get_worker(0).tokenomic.v;
    block_number += 1;
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert_eq!(
        r.gk.egress.drain_mining_info_update_event().len(),
        0,
        "Should not report offline workers"
    );
    assert!(
        v_snap > r.get_worker(0).tokenomic.v,
        "Worker should be slashed"
    );
    let v_snap = r.get_worker(0).tokenomic.v;
    block_number += 1;
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    assert_eq!(
        r.gk.egress.drain_mining_info_update_event().len(),
        0,
        "Should not report offline workers"
    );
    assert!(
        v_snap > r.get_worker(0).tokenomic.v,
        "Worker should be slashed again"
    );
}
/// case5: an offline worker answering its pending challenge is reported
/// recovered, with no slash and no reward in that block.
#[test]
fn gk_should_report_recovered_workers_case5() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(1).to_bits(),
            init_p: 100,
        });
        worker0.challenge();
        r.gk.process_messages(block);
    });
    let challenge_block = block_number;
    block_number += r.gk.tokenomic_params.heartbeat_window;
    // About to timeout
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    block_number += 1;
    // Heartbeat timed out
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    r.gk.egress.clear();
    // Worker offline, report recover event on the next heartbeat received.
    let v_snap = r.get_worker(0).tokenomic.v;
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker = r.for_worker(0);
        worker.heartbeat(1, challenge_block, 10000000);
        r.gk.process_messages(block);
    });
    assert_eq!(
        v_snap,
        r.get_worker(0).tokenomic.v,
        "Worker should not be slashed or rewarded"
    );
    {
        let recovered_to_online = [r.workers[0].clone()].to_vec();
        let expected_message = MiningInfoUpdateEvent {
            block_number,
            timestamp_ms: block_ts(block_number),
            offline: Vec::new(),
            recovered_to_online,
            settle: Vec::new(),
        };
        let messages = r.gk.egress.drain_mining_info_update_event();
        assert_eq!(messages.len(), 1, "Should report recover event");
        assert_eq!(messages[0], expected_message);
    }
}
#[test]
// End-to-end check of the tokenomic numbers: V growth over one day, payout on
// heartbeat, the payout/treasury split, and the slash after an unanswered
// challenge. Expected values are exact fixed-point constants; any change to
// the tokenomic formulas is meant to break this test.
fn check_tokenomic_numerics() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::BenchScore(3000));
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(3000).to_bits(),
            init_p: 100,
        });
        r.gk.process_messages(block);
    });
    assert!(r.get_worker(0).state.mining_state.is_some());
    assert_eq!(r.get_worker(0).tokenomic.p_bench, fp!(100));
    // V already grew slightly within the starting block.
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(3000.00203509369147797934));
    // V increment for one day
    // (the loop count divides a day by 12, i.e. 12-second blocks => 7200 blocks)
    for _ in 0..3600 * 24 / 12 {
        block_number += 1;
        with_block(block_number, |block| {
            r.gk.process_messages(block);
        });
    }
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(3014.6899337932040476463));
    // Payout
    block_number += 1;
    r.for_worker(0).challenge();
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    // Check heartbeat updates
    assert_eq!(r.get_worker(0).tokenomic.challenge_time_last, 24000);
    assert_eq!(r.get_worker(0).tokenomic.iteration_last, 0);
    r.for_worker(0)
        .heartbeat(1, block_number, (110 * 7200 * 12 / 6) as u64);
    block_number += 1;
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    // After settlement V is back at the 3000 baseline, and p_instant reflects
    // the reported iteration count (lands just below 110 — confirm the exact
    // unit conversion against TokenomicInfo::update_p_instant).
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(3000));
    assert_eq!(
        r.get_worker(0).tokenomic.p_instant,
        fp!(109.96945292974173840575)
    );
    // Payout settlement has correct treasury split
    let report = r.gk.egress.drain_mining_info_update_event();
    assert_eq!(
        FixedPoint::from_bits(report[0].settle[0].payout),
        fp!(14.69197867920878555043)
    );
    assert_eq!(
        FixedPoint::from_bits(report[0].settle[0].treasury),
        fp!(3.6729946698021946595)
    );
    // Slash 0.1% (1hr + 10 blocks challenge window)
    let _ = r.gk.egress.drain_mining_info_update_event();
    r.for_worker(0).challenge();
    for _ in 0..=3600 / 12 + 10 {
        block_number += 1;
        with_block(block_number, |block| {
            r.gk.process_messages(block);
        });
    }
    assert!(r.get_worker(0).unresponsive);
    let report = r.gk.egress.drain_mining_info_update_event();
    assert_eq!(report[0].offline, vec![r.workers[0].clone()]);
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(2997.0260877851113935014));
    // TODO(hangyin): also check miner reconnection and V recovery
}
#[test]
// Starts a worker at init_v = 30000 (the V cap, per the test name) and mines
// for a day: V cannot accumulate above the cap, so the whole surplus is paid
// out at the heartbeat. Note v_after + payout == 30000 exactly
// (29855.38985958... + 144.61014041... = 30000).
fn should_payout_at_v_max() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
            confidence_level: 2,
        }));
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        let mut worker0 = r.for_worker(0);
        worker0.pallet_say(msg::WorkerEvent::BenchScore(3000));
        worker0.pallet_say(msg::WorkerEvent::MiningStart {
            session_id: 1,
            init_v: fp!(30000).to_bits(),
            init_p: 3000,
        });
        r.gk.process_messages(block);
    });
    // Mine for 24h
    for _ in 0..7200 {
        block_number += 1;
        with_block(block_number, |block| {
            r.gk.process_messages(block);
        });
    }
    // Trigger payout
    block_number += 1;
    with_block(block_number, |block| {
        r.for_worker(0).challenge();
        r.gk.process_messages(block);
    });
    r.for_worker(0).heartbeat(1, block_number, 1000000 as u64);
    block_number += 1;
    with_block(block_number, |block| {
        r.gk.process_messages(block);
    });
    // Check payout
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(29855.38985958385856094607));
    assert_eq!(r.get_worker(0).tokenomic.payable, fp!(0));
    let report = r.gk.egress.drain_mining_info_update_event();
    assert_eq!(
        FixedPoint::from_bits(report[0].settle[0].payout),
        fp!(144.61014041614143905393)
    );
}
#[test]
// Unit-tests TokenomicInfo::update_p_instant: a normal forward step produces
// an instantaneous performance estimate; a decreased iteration counter resets
// p_instant to zero.
fn test_update_p_instant() {
    let mut info = super::TokenomicInfo {
        p_bench: fp!(100),
        ..Default::default()
    };
    // Normal
    // 1000 iterations over 100_000 ms from the zeroed defaults gives p = 60
    // (presumably iterations scaled to a per-minute rate — confirm against
    // the update_p_instant implementation).
    info.update_p_instant(100_000, 1000);
    info.challenge_time_last = 90_000;
    info.iteration_last = 1000;
    assert_eq!(info.p_instant, fp!(60));
    // Reset
    // The iteration count went backwards (999 < 1000): treated as a reset.
    info.update_p_instant(200_000, 999);
    assert_eq!(info.p_instant, fp!(0));
}
#[test]
// RepairV: after forcing both workers into the inconsistent state v < v_init,
// a pallet-sent `GatekeeperEvent::RepairV` must restore V to v_init (the
// final value is slightly above 200 because the same block also rewards).
fn test_repair_v() {
    let mut r = Roles::test_roles();
    let mut block_number = 1;
    // Register worker
    with_block(block_number, |block| {
        for i in 0..=1 {
            let mut worker = r.for_worker(i);
            worker.pallet_say(msg::WorkerEvent::Registered(msg::WorkerInfo {
                confidence_level: 2,
            }));
        }
        r.gk.process_messages(block);
    });
    // Start mining & send heartbeat challenge
    block_number += 1;
    with_block(block_number, |block| {
        for i in 0..=1 {
            let mut worker = r.for_worker(i);
            worker.pallet_say(msg::WorkerEvent::BenchScore(3000));
            worker.pallet_say(msg::WorkerEvent::MiningStart {
                session_id: 1,
                init_v: fp!(30000).to_bits(),
                init_p: 3000,
            });
        }
        r.gk.process_messages(block);
    });
    // Manually corrupt the workers' state: v below v_init.
    for i in 0..=1 {
        let worker = r.get_worker_mut(i);
        worker.tokenomic.v = fp!(100);
        worker.tokenomic.v_init = fp!(200);
        assert!(worker.tokenomic.v < worker.tokenomic.v_init);
    }
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(100));
    assert_eq!(r.get_worker(1).tokenomic.v, fp!(100));
    block_number += 1;
    with_block(block_number, |block| {
        let sender = MessageOrigin::Pallet(b"Pallet".to_vec());
        r.mq.dispatch_bound(&sender, msg::GatekeeperEvent::RepairV);
        r.gk.process_messages(block);
    });
    // Should repaired and rewarded
    assert_eq!(r.get_worker(0).tokenomic.v, fp!(200.00021447729505831407));
    assert_eq!(r.get_worker(1).tokenomic.v, fp!(200.00021447729505831407));
}
}
| 35.314351 | 146 | 0.531994 |
e94c568a2d06bbea236296e804c173d9aea15061 | 8,369 | extern crate gnuplot;
extern crate smoomars;
use smoomars::utils::*;
use smoomars::{StewartPotentialGrid, stewart, SmoothType, idw_interpolation, Bbox,
rbf_interpolation, stewart_interpolation};
use gnuplot::*;
fn example(c: Common) {
let obs_points = parse_geojson_points("/home/mz/Bureau/input_ra.geojson", "value").unwrap();
let bbox = Bbox::new(0.8, 4.2, 31.8, 35.2);
let (reso_lon, reso_lat) = (80, 80);
let conf1 = StewartPotentialGrid::new(30000.0,
3.0,
SmoothType::Exponential,
&bbox,
reso_lat,
reso_lon,
2);
let res_stew: Vec<SphericalPtValue> = stewart(&conf1, &obs_points).unwrap();
let mut z1 = Vec::with_capacity(res_stew.len());
for pt in res_stew {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Population potentials", &[])
.surface(z1.iter(),
reso_lon as usize,
reso_lat as usize,
Some((0.8, 31.8, 4.2, 35.2)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
// let conf2 = StewartPotentialGrid::new(30000.0, 3.0, SmoothType::Pareto, &bbox, reso_lat, reso_lon, true);
// let res_stew_pareto: Vec<SphericalPtValue> = stewart(&conf2, &obs_points).unwrap();
let res_stew_pareto: Vec<SphericalPtValue> = stewart_interpolation(reso_lon,
reso_lat,
&bbox,
&obs_points,
"pareto",
30000.0,
Some(3.0))
.unwrap();
let mut z1 = Vec::with_capacity(res_stew_pareto.len());
for pt in res_stew_pareto {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Population potentials", &[])
.surface(z1.iter(),
reso_lon as usize,
reso_lat as usize,
Some((0.8, 31.8, 4.2, 35.2)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
let result_idw: Vec<SphericalPtValue> =
idw_interpolation(reso_lon, reso_lat, &bbox, &obs_points, 2.0).unwrap();
let mut z1 = Vec::with_capacity(result_idw.len());
for pt in result_idw {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Idw (power 2)", &[])
.surface(z1.iter(),
reso_lon as usize,
reso_lat as usize,
Some((0.8, 31.8, 4.2, 35.2)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
let result_idw: Vec<SphericalPtValue> =
idw_interpolation(reso_lon, reso_lat, &bbox, &obs_points, 0.1).unwrap();
let mut z1 = Vec::with_capacity(result_idw.len());
for pt in result_idw {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Idw (power 0.1)", &[])
.surface(z1.iter(),
reso_lon as usize,
reso_lat as usize,
Some((0.8, 31.8, 4.2, 35.2)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
let obs_points_two_stocks = parse_csv_points::<CartesianPtValue>("examples/two_stocks.csv")
.unwrap();
let bbox = Bbox::new(0.0, 10.0, 0.0, 10.0);
let (reso_x, reso_y) = (100, 100);
let conf1 = StewartPotentialGrid::new(2.5,
2.0,
SmoothType::Exponential,
&bbox,
reso_x,
reso_y,
1);
let res_stew: Vec<CartesianPtValue> = stewart(&conf1, &obs_points_two_stocks).unwrap();
let mut z1 = Vec::with_capacity(res_stew.len());
for pt in res_stew {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Two stocks. Stewart Exponential (beta=2)", &[])
.surface(z1.iter(),
reso_x as usize,
reso_y as usize,
Some((0.0, 0.0, 10.0, 10.0)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
let conf1 =
StewartPotentialGrid::new(2.5, 2.0, SmoothType::Pareto, &bbox, reso_x, reso_y, 1);
let res_stew: Vec<CartesianPtValue> = stewart(&conf1, &obs_points_two_stocks).unwrap();
let mut z1 = Vec::with_capacity(res_stew.len());
for pt in res_stew {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Two stocks. Stewart Pareto (beta=2)", &[])
.surface(z1.iter(),
reso_x as usize,
reso_y as usize,
Some((0.0, 0.0, 10.0, 10.0)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
let (reso_x, reso_y) = (40, 40);
let res_rbf: Vec<CartesianPtValue> = rbf_interpolation(reso_x,
reso_y,
&bbox,
&obs_points_two_stocks,
"inverse_multiquadratic",
Some(1.66))
.unwrap();
let mut z1 = Vec::with_capacity(res_rbf.len());
for pt in res_rbf {
z1.push(pt.get_value());
}
let mut fg = Figure::new();
c.set_term(&mut fg);
fg.axes3d()
.set_title("Two stocks. Inverse multiquadratic RBF interpolation (epsilon: 1.66).",
&[])
.surface(z1.iter(),
reso_x as usize,
reso_y as usize,
Some((0.0, 0.0, 10.0, 10.0)),
&[])
.set_x_label("X", &[])
.set_y_label("Y", &[])
.set_z_label("Z", &[])
.set_z_range(Auto, Auto)
.set_palette(HELIX)
.set_view(45.0, 175.0);
c.show(&mut fg, None);
}
/// Shared harness for the gnuplot examples: controls whether figures are
/// actually displayed and which gnuplot terminal (if any) to use.
pub struct Common {
    // When true, `show` skips displaying the figure (useful for headless runs).
    pub no_show: bool,
    // Optional gnuplot terminal name passed to `Figure::set_terminal`.
    pub term: Option<String>,
}
impl Common {
    /// Builds the default harness. Always returns `Some` today; the `Option`
    /// is kept so callers (see `main`) stay source-compatible.
    pub fn new() -> Option<Common> {
        Some(Common {
            no_show: false,
            term: None,
        })
    }

    /// Displays `fg` (unless `no_show` is set) and optionally echoes the
    /// gnuplot script to `filename`.
    pub fn show(&self, fg: &mut Figure, filename: Option<&str>) {
        if !self.no_show {
            fg.show();
        }
        // `if let` instead of is_some()/unwrap(): same behavior, no panic path.
        if let Some(path) = filename {
            fg.echo_to_file(path);
        }
    }

    /// Applies the configured terminal to `fg`, if one was set.
    pub fn set_term(&self, fg: &mut Figure) {
        // `if let` instead of Option::map for a side effect (clippy:
        // option_map_unit_fn).
        if let Some(term) = self.term.as_ref() {
            fg.set_terminal(&term[..], "");
        }
    }
}
/// Entry point: runs the demo when a harness could be constructed.
fn main() {
    // `if let` instead of `.map(|c| example(c))`: Option::map for a side
    // effect plus a redundant closure (clippy: option_map_unit_fn,
    // redundant_closure).
    if let Some(c) = Common::new() {
        example(c);
    }
}
| 34.020325 | 112 | 0.454535 |
e2bd67574c34adb72da740c89d47a925afe6abbf | 5,261 | // Generated from definition io.k8s.api.rbac.v1beta1.Subject
/// Subject contains a reference to the object or user identities a role binding applies to. This can either hold a direct API object reference, or a value for non-objects such as user and group names.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct Subject {
/// APIGroup holds the API group of the referenced subject. Defaults to "" for ServiceAccount subjects. Defaults to "rbac.authorization.k8s.io" for User and Group subjects.
pub api_group: Option<String>,
/// Kind of object being referenced. Values defined by this API group are "User", "Group", and "ServiceAccount". If the Authorizer does not recognized the kind value, the Authorizer should report an error.
pub kind: String,
/// Name of the object being referenced.
pub name: String,
/// Namespace of the referenced object. If the object kind is non-namespace, such as "User" or "Group", and this value is not empty the Authorizer should report an error.
pub namespace: Option<String>,
}
// Hand-rolled (generated) serde deserializer: a key-discriminant enum plus a
// map visitor, so unknown JSON fields are tolerated (forward compatibility
// with newer API servers) instead of rejected as `#[derive]` strictness would.
impl<'de> serde::Deserialize<'de> for Subject {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
        // Discriminant for the four known JSON keys; anything else becomes
        // `Other` and is skipped below.
        #[allow(non_camel_case_types)]
        enum Field {
            Key_api_group,
            Key_kind,
            Key_name,
            Key_namespace,
            Other,
        }

        impl<'de> serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
                struct Visitor;

                impl<'de> serde::de::Visitor<'de> for Visitor {
                    type Value = Field;

                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        f.write_str("field identifier")
                    }

                    // Maps the wire (camelCase) key names onto the enum.
                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
                        Ok(match v {
                            "apiGroup" => Field::Key_api_group,
                            "kind" => Field::Key_kind,
                            "name" => Field::Key_name,
                            "namespace" => Field::Key_namespace,
                            _ => Field::Other,
                        })
                    }
                }

                deserializer.deserialize_identifier(Visitor)
            }
        }

        struct Visitor;

        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = Subject;

            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_str("Subject")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
                let mut value_api_group: Option<String> = None;
                let mut value_kind: Option<String> = None;
                let mut value_name: Option<String> = None;
                let mut value_namespace: Option<String> = None;

                while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_api_group => value_api_group = serde::de::MapAccess::next_value(&mut map)?,
                        Field::Key_kind => value_kind = Some(serde::de::MapAccess::next_value(&mut map)?),
                        Field::Key_name => value_name = Some(serde::de::MapAccess::next_value(&mut map)?),
                        Field::Key_namespace => value_namespace = serde::de::MapAccess::next_value(&mut map)?,
                        // Unknown field: consume and discard its value.
                        Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }

                // `kind` and `name` are required; `apiGroup`/`namespace` stay
                // `None` when absent.
                Ok(Subject {
                    api_group: value_api_group,
                    kind: value_kind.ok_or_else(|| serde::de::Error::missing_field("kind"))?,
                    name: value_name.ok_or_else(|| serde::de::Error::missing_field("name"))?,
                    namespace: value_namespace,
                })
            }
        }

        deserializer.deserialize_struct(
            "Subject",
            &[
                "apiGroup",
                "kind",
                "name",
                "namespace",
            ],
            Visitor,
        )
    }
}
impl serde::Serialize for Subject {
    /// Serializes as a struct named "Subject" with the two mandatory fields
    /// (`kind`, `name`) and each optional field only when present, in the
    /// fixed wire order: apiGroup, kind, name, namespace.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
        use serde::ser::SerializeStruct;

        // Two mandatory fields plus one slot per present optional field.
        let field_count = 2
            + usize::from(self.api_group.is_some())
            + usize::from(self.namespace.is_some());
        let mut state = serializer.serialize_struct("Subject", field_count)?;
        if let Some(api_group) = &self.api_group {
            state.serialize_field("apiGroup", api_group)?;
        }
        state.serialize_field("kind", &self.kind)?;
        state.serialize_field("name", &self.name)?;
        if let Some(namespace) = &self.namespace {
            state.serialize_field("namespace", namespace)?;
        }
        state.end()
    }
}
| 43.122951 | 209 | 0.540582 |
4b486a8ae6d8ad0c7607efa50ccd2da14debded7 | 1,970 | use crate::eval::Expr;
use gc::{Finalize, Gc, Trace};
use std::path::PathBuf;
/// A parent Expr's scope is used to provide tooling for its child Exprs.
/// This enum would provide four scope types:
/// - None: Used for calculated literals. For example, for the string
///   interpolation `"prefix${suffix}"`, the literal `prefix` by itself
///   cannot be referenced anywhere except at its definition, so we don't
///   need context-aware tooling for it. For `${suffix}`, however, we would
///   inherit the parent's scope.
/// - Root: Provided for each file. Used for providing global variables
///   and tracking the `import` dependency tree. Also used for detecting
///   which file an expression is defined in.
/// - Normal: Created by `let in` and `rec { }`. All the variable names
///   can be derived using static analysis with rnix-parser; we don't need
///   to evaluate anything to detect if a variable name is in this scope
/// - With: Created by `with $VAR` expressions. We need to evaluate $VAR
///   to determine whether a variable name is in scope.
///
/// Note: only the `Root` variant exists so far; the other scope kinds
/// described above are not yet implemented.
#[derive(Trace, Finalize)]
pub enum Scope {
    /// File-level scope, carrying the path of the file it belongs to.
    Root(PathBuf),
}
impl Scope {
    /// Finds the Expr of an identifier in the scope.
    ///
    /// This would do two passes up the tree:
    /// 1. Check Scope::Normal and Scope::Root
    /// 2. Check Scope::With, which requires evaluation
    ///
    /// See https://github.com/NixOS/nix/issues/490 for an explanation
    /// of why Nix works this way.
    ///
    /// Examples:
    /// ```plain
    /// nix-repl> let import = 1; in import
    /// 1 # found in Scope::Normal, which we reach before Scope::Root
    /// nix-repl> import
    /// «primop» # found in Scope::Root
    /// nix-repl> with { import = 1; }; import
    /// «primop» # found in Scope::Root, which we reach before Scope::With
    /// ```
    #[allow(dead_code)] // this function will be implemented later
    pub fn get(&self, _name: &str) -> Option<Gc<Expr>> {
        // Stub: name resolution is not implemented yet, so every lookup misses.
        None
    }
}
| 40.204082 | 75 | 0.651269 |
14d3089b1b269b2ea7cfe65954820e7ff4d1829d | 4,002 | #[derive(Debug, Clone, Eq, PartialEq)]
pub enum Literal {
/// Number like `10`, `'0'`
Number(i32),
/// Char like `'0'`
Char(u8),
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Variable {
/// Ident
Ident(String),
/// Ident set like `(a|b)`
Set(Vec<Variable>),
/// Attribute access like `a.b`
Attribute { target: Box<Variable>, attr: String },
/// Dereference like `*a`
Deref(Box<Variable>),
/// Index access like `a[1]`
Index { target: Box<Variable>, index: i32 },
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Term {
Literal(Literal),
Variable(Variable),
}
impl Literal {
pub fn term(self) -> Term {
Term::Literal(self)
}
}
impl Variable {
    /// Creates a bare identifier variable.
    pub fn ident<S: Into<String>>(name: S) -> Variable {
        Self::Ident(name.into())
    }

    /// Builds an attribute access (`self.attr`) on this variable.
    pub fn attr<S: Into<String>>(self, attr: S) -> Variable {
        Self::Attribute {
            target: Box::new(self),
            attr: attr.into(),
        }
    }

    /// Builds a dereference (`*self`) of this variable.
    pub fn dereference(self) -> Variable {
        Self::Deref(Box::new(self))
    }

    /// Builds an index access (`self[index]`) on this variable.
    pub fn index(self, index: i32) -> Variable {
        Self::Index {
            target: Box::new(self),
            index,
        }
    }

    /// Wraps this variable into a [`Term`].
    pub fn term(self) -> Term {
        Term::Variable(self)
    }

    /// Wraps this variable into an [`Expr`].
    pub fn expr(self) -> Expr {
        Expr::Variable(self)
    }
}
/// An expression: either a variable reference or a block-like region that
/// runs statements and yields a final value.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Expr {
    /// Variable reference
    Variable(Variable),
    /// Region like `{a += 1; b}`
    Region { body: Vec<Stmt>, ret: Box<Expr> },
}
impl Expr {
    /// Wraps this expression into a bare-expression statement.
    pub fn stmt(self) -> Stmt {
        Stmt::Expr(self)
    }
}
/// A statement of the language.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Stmt {
    /// Expression
    Expr(Expr),
    /// Call function like `f(a, b)`
    Call { name: String, args: Vec<Variable> },
    /// Assign add like `a += b;`
    /// (`factor` scales the added value)
    AssignAdd {
        target: Variable,
        value: Term,
        factor: i32,
    },
    /// Assign sub like `a -= b;`
    /// (`factor` scales the subtracted value)
    AssignSub {
        target: Variable,
        value: Term,
        factor: i32,
    },
    /// While statement like `while a { a += 1 }`
    While { condition: Expr, body: Vec<Stmt> },
    /// Bra-ket like `bra a { b += 1; c } ket (a|c);`
    Braket {
        bra: Variable,
        body: Vec<Stmt>,
        ret: Expr,
        ket: Variable,
    },
    /// Move like `move {a -> b; c -> d;}` — a list of (source, destination) pairs.
    Move(Vec<(Variable, Variable)>),
}
impl Stmt {
pub fn call<S: Into<String>>(name: S, args: Vec<Variable>) -> Stmt {
Stmt::Call {
name: name.into(),
args,
}
}
}
/// A top-level definition.
///
/// Derives added for consistency with every other AST node type in this file
/// (`FuncDef` and `StructDef` both already derive these traits).
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Def {
    /// Function definition
    FuncDef(FuncDef),
    /// Struct definition
    StructDef(StructDef),
}
/// A function definition: name, typed parameters, and statement body.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct FuncDef {
    pub name: String,
    pub args: Vec<TypedDeclaration>,
    pub body: Vec<Stmt>,
}
impl FuncDef {
pub fn new<S: Into<String>>(name: S, args: Vec<TypedDeclaration>, body: Vec<Stmt>) -> FuncDef {
FuncDef {
name: name.into(),
args,
body,
}
}
}
/// A struct definition: name and typed fields.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct StructDef {
    pub name: String,
    pub fields: Vec<TypedDeclaration>,
}
impl StructDef {
pub fn new<S: Into<String>>(name: S, fields: Vec<TypedDeclaration>) -> StructDef {
StructDef {
name: name.into(),
fields,
}
}
}
/// A `name: type` pair, used both for function parameters and struct fields.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TypedDeclaration {
    pub name: String,
    pub typ: Type,
}
impl TypedDeclaration {
pub fn new<S: Into<String>>(name: S, typ: Type) -> TypedDeclaration {
TypedDeclaration {
name: name.into(),
typ,
}
}
}
/// A type annotation: either a scalar value type or an array type, both
/// identified by the underlying type name.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum Type {
    Value(String),
    Array(String),
}

impl Type {
    /// Builds a `Type::Value` from anything convertible to `String`.
    pub fn value<N: Into<String>>(name: N) -> Type {
        Self::Value(name.into())
    }

    /// Builds a `Type::Array` from anything convertible to `String`.
    pub fn array<N: Into<String>>(name: N) -> Type {
        Self::Array(name.into())
    }
}
| 21.063158 | 99 | 0.533983 |
bb832533cb4691e47d71205f28c3efeaa9c513dd | 5,057 | use core::cell::UnsafeCell;
use core::future::Future;
use core::mem;
use core::ptr;
use core::task::{Context, Poll, Waker};
use cortex_m::peripheral::NVIC;
use cortex_m::peripheral::{scb, SCB};
use executor::raw::TaskHeader;
use ptr::NonNull;
use crate::executor;
use crate::interrupt::{Interrupt, InterruptExt};
/// Synchronization primitive. Allows creating awaitable signals that may be passed between tasks.
///
/// For more advanced use cases, please consider [futures-intrusive](https://crates.io/crates/futures-intrusive) channels or mutexes.
pub struct Signal<T> {
    // Interior-mutable state slot; every access goes through
    // `critical_section::with` in the methods below, never through `&mut self`.
    state: UnsafeCell<State<T>>,
}
// Internal state machine for `Signal`: no value yet, a task parked waiting,
// or a value ready to be taken.
enum State<T> {
    None,
    Waiting(Waker),
    Signaled(T),
}
// SAFETY: the inner `UnsafeCell` is only ever accessed from within
// `critical_section::with` (see the impl below), so all cross-context access
// is serialized. `T: Send` is still required because the stored value itself
// moves between tasks.
unsafe impl<T: Send> Send for Signal<T> {}
unsafe impl<T: Send> Sync for Signal<T> {}
impl<T: Send> Signal<T> {
    /// Creates a new, un-signaled `Signal`.
    pub const fn new() -> Self {
        Self {
            state: UnsafeCell::new(State::None),
        }
    }

    /// Mark this Signal as completed.
    ///
    /// Stores `val` and wakes the waiting task, if any. A previously stored
    /// but not yet consumed value is silently overwritten.
    pub fn signal(&self, val: T) {
        critical_section::with(|_| unsafe {
            // SAFETY: inside the critical section this is the only access to
            // the cell (all other accesses are likewise wrapped).
            let state = &mut *self.state.get();
            if let State::Waiting(waker) = mem::replace(state, State::Signaled(val)) {
                waker.wake();
            }
        })
    }

    /// Discards any stored value and any registered waker.
    pub fn reset(&self) {
        critical_section::with(|_| unsafe {
            // SAFETY: see `signal`.
            let state = &mut *self.state.get();
            *state = State::None
        })
    }

    /// Polls for a value, registering `cx`'s waker when none is ready yet.
    ///
    /// Only a single waiter is supported: polling from a second task while a
    /// different waker is already registered panics with "waker overflow".
    pub fn poll_wait(&self, cx: &mut Context<'_>) -> Poll<T> {
        critical_section::with(|_| unsafe {
            // SAFETY: see `signal`.
            let state = &mut *self.state.get();
            match state {
                State::None => {
                    *state = State::Waiting(cx.waker().clone());
                    Poll::Pending
                }
                State::Waiting(w) if w.will_wake(cx.waker()) => Poll::Pending,
                State::Waiting(_) => panic!("waker overflow"),
                // Take the value out, returning the state to `None`.
                State::Signaled(_) => match mem::replace(state, State::None) {
                    State::Signaled(res) => Poll::Ready(res),
                    _ => unreachable!(),
                },
            }
        })
    }

    /// Future that completes when this Signal has been signaled.
    pub fn wait(&self) -> impl Future<Output = T> + '_ {
        futures::future::poll_fn(move |cx| self.poll_wait(cx))
    }

    /// non-blocking method to check whether this signal has been signaled.
    pub fn signaled(&self) -> bool {
        critical_section::with(|_| matches!(unsafe { &*self.state.get() }, State::Signaled(_)))
    }
}
// ==========
/// Arranges for the task behind `waker` to be woken when `interrupt` fires.
///
/// The interrupt is kept disabled while the handler/context pair is replaced,
/// then re-enabled.
pub fn wake_on_interrupt(interrupt: &mut impl Interrupt, waker: &Waker) {
    interrupt.disable();
    interrupt.set_handler(irq_wake_handler);
    // NOTE(review): assumes `waker` was produced by this crate's executor so
    // that a task pointer can be recovered from it — confirm against
    // `executor::raw::task_from_waker`'s contract.
    interrupt.set_handler_context(unsafe { executor::raw::task_from_waker(waker) }.as_ptr() as _);
    interrupt.enable();
}
/// Shared IRQ handler installed by `wake_on_interrupt` / `InterruptFuture`.
///
/// Wakes the task stored in `ctx` (if non-null), then masks the
/// currently-active interrupt so it does not immediately re-fire.
///
/// # Safety
/// Must be invoked from an active interrupt (it reads `SCB::vect_active` and
/// treats anything else as unreachable), with `ctx` either null or a valid
/// `TaskHeader` pointer.
unsafe fn irq_wake_handler(ctx: *mut ()) {
    if let Some(task) = NonNull::new(ctx as *mut TaskHeader) {
        executor::raw::wake_task(task);
    }
    let irq = match SCB::vect_active() {
        scb::VectActive::Interrupt { irqn } => irqn,
        _ => unreachable!(),
    };
    NVIC::mask(crate::interrupt::NrWrap(irq as u16));
}
// ==========
// Local adapter giving a raw IRQ number a `cortex_m::interrupt::Nr` impl.
// NOTE(review): nothing visible in this file uses it — `irq_wake_handler`
// above uses `crate::interrupt::NrWrap` instead; confirm whether this local
// duplicate is dead code.
struct NrWrap(u8);

unsafe impl cortex_m::interrupt::Nr for NrWrap {
    fn nr(&self) -> u8 {
        self.0
    }
}
/// Creates a future that completes when the specified Interrupt is triggered.
///
/// The input handler is unregistered when this Future is dropped.
///
/// Example:
/// ``` no_compile
/// use embassy::traits::*;
/// use embassy::util::InterruptFuture;
/// use embassy_stm32::interrupt; // Adjust this to your MCU's embassy HAL.
/// #[embassy::task]
/// async fn demo_interrupt_future() {
///     // Using STM32f446 interrupt names, adjust this to your application as necessary.
///     // Wait for TIM2 to tick.
///     let mut tim2_interrupt = interrupt::take!(TIM2);
///     InterruptFuture::new(&mut tim2_interrupt).await;
///     // TIM2 interrupt went off, do something...
/// }
/// ```
pub struct InterruptFuture<'a, I: Interrupt> {
    // Exclusive borrow for the future's lifetime: nothing else can
    // reconfigure the interrupt's handler slot while this future owns it.
    interrupt: &'a mut I,
}
impl<'a, I: Interrupt> Drop for InterruptFuture<'a, I> {
    // Unregister on drop so a dangling task pointer is never left installed
    // as the interrupt's handler context.
    fn drop(&mut self) {
        self.interrupt.disable();
        self.interrupt.remove_handler();
    }
}
impl<'a, I: Interrupt> InterruptFuture<'a, I> {
    /// Registers the shared wake handler (initially with a null task pointer;
    /// `poll` installs the real one), clears any pending state, and enables
    /// the interrupt.
    pub fn new(interrupt: &'a mut I) -> Self {
        interrupt.disable();
        interrupt.set_handler(irq_wake_handler);
        interrupt.set_handler_context(ptr::null_mut());
        interrupt.unpend();
        interrupt.enable();
        Self { interrupt }
    }
}
// The future holds no self-referential data (just a borrow), so it is Unpin.
impl<'a, I: Interrupt> Unpin for InterruptFuture<'a, I> {}
impl<'a, I: Interrupt> Future for InterruptFuture<'a, I> {
    type Output = ();
    fn poll(self: core::pin::Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
        // SAFETY: `InterruptFuture` is `Unpin` (see the impl above), so
        // obtaining a plain `&mut Self` out of the pin is sound.
        let s = unsafe { self.get_unchecked_mut() };
        // Re-arm the handler context with the current task on every poll,
        // since the waker may change between polls.
        s.interrupt.set_handler_context(unsafe {
            executor::raw::task_from_waker(&cx.waker()).cast().as_ptr()
        });
        // `irq_wake_handler` masks (disables) the interrupt when it fires, so
        // "still enabled" means the interrupt has not triggered yet.
        if s.interrupt.is_enabled() {
            Poll::Pending
        } else {
            Poll::Ready(())
        }
    }
}
| 29.401163 | 133 | 0.584932 |
2244b17b822e70bf41b84b36ac430db74b5ec64a | 4,854 | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
use ::test::run_test;
#[test]
// VCVTPS2QQ xmm5{k2}{z}, xmm6 — EVEX encoding, 32-bit mode.
fn vcvtps2qq_1() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM6)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 125, 138, 123, 238], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ xmm0{k3}{z}, qword ptr [ebx] — 32-bit mode.
fn vcvtps2qq_2() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(XMM0)), operand2: Some(Indirect(EBX, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 139, 123, 3], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ xmm19{k2}{z}, xmm31 — extended registers, 64-bit mode.
fn vcvtps2qq_3() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(XMM19)), operand2: Some(Direct(XMM31)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 129, 125, 138, 123, 223], OperandSize::Qword)
}
#[test]
// VCVTPS2QQ xmm13{k3}{z}, qword ptr [rdx+rsi*2] — 64-bit mode.
fn vcvtps2qq_4() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(XMM13)), operand2: Some(IndirectScaledIndexed(RDX, RSI, Two, Some(OperandSize::Qword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 113, 125, 139, 123, 44, 114], OperandSize::Qword)
}
#[test]
// VCVTPS2QQ ymm7{k6}{z}, xmm2 — 256-bit destination, 32-bit mode.
fn vcvtps2qq_5() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(YMM7)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 125, 174, 123, 250], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ ymm2{k2}{z}, xmmword ptr [edi+esi*2] — 32-bit mode.
fn vcvtps2qq_6() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(YMM2)), operand2: Some(IndirectScaledIndexed(EDI, ESI, Two, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 125, 170, 123, 20, 119], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ ymm6{k3}{z}, xmm5 — 64-bit mode.
fn vcvtps2qq_7() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(YMM6)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 171, 123, 245], OperandSize::Qword)
}
#[test]
// VCVTPS2QQ ymm31{k4}{z}, xmmword ptr [rcx] — 64-bit mode.
fn vcvtps2qq_8() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(YMM31)), operand2: Some(Indirect(RCX, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 97, 125, 172, 123, 57], OperandSize::Qword)
}
#[test]
// VCVTPS2QQ zmm7{k3}{z}, ymm2, round-toward-zero — 32-bit mode.
fn vcvtps2qq_9() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(ZMM7)), operand2: Some(Direct(YMM2)), operand3: None, operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 125, 251, 123, 250], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ zmm6{k6}{z}, ymmword ptr [eax] — 32-bit mode.
fn vcvtps2qq_10() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(ZMM6)), operand2: Some(Indirect(EAX, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 125, 206, 123, 48], OperandSize::Dword)
}
#[test]
// VCVTPS2QQ zmm20{k7}{z}, ymm14, round-up — 64-bit mode.
fn vcvtps2qq_11() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(ZMM20)), operand2: Some(Direct(YMM14)), operand3: None, operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 193, 125, 223, 123, 230], OperandSize::Qword)
}
#[test]
// VCVTPS2QQ zmm7{k1}{z}, ymmword ptr [rsi+2028039502] — 64-bit mode,
// exercising a 32-bit displacement.
fn vcvtps2qq_12() {
    run_test(&Instruction { mnemonic: Mnemonic::VCVTPS2QQ, operand1: Some(Direct(ZMM7)), operand2: Some(IndirectDisplaced(RSI, 2028039502, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 241, 125, 201, 123, 190, 78, 109, 225, 120], OperandSize::Qword)
}
| 70.347826 | 402 | 0.710136 |
648ffe5654810ae5e0b1f59240a677759a319f3c | 24 | struct S (#[attr] f32);
| 12 | 23 | 0.583333 |
218a9b229e0df0fb2605606c0d609b9edd4d6e60 | 41,556 | //! Parsing and validation of builtin attributes
use rustc_ast::{self as ast, Attribute, Lit, LitKind, MetaItem, MetaItemKind, NestedMetaItem};
use rustc_ast_pretty::pprust;
use rustc_errors::{struct_span_err, Applicability};
use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg};
use rustc_macros::HashStable_Generic;
use rustc_session::parse::{feature_err, ParseSess};
use rustc_session::Session;
use rustc_span::hygiene::Transparency;
use rustc_span::{symbol::sym, symbol::Symbol, Span};
use std::num::NonZeroU32;
use version_check::Version;
/// Returns true for doc comments and for attributes whose (single-segment)
/// name is one of the compiler's built-in attribute names.
pub fn is_builtin_attr(attr: &Attribute) -> bool {
    if attr.is_doc_comment() {
        return true;
    }
    match attr.ident() {
        Some(ident) => is_builtin_attr_name(ident.name),
        None => false,
    }
}
// Parse/validation failures for builtin attributes. Each variant maps to a
// specific diagnostic emitted in `handle_errors` below.
enum AttrError {
    /// E0538: the named meta item appeared more than once.
    MultipleItem(String),
    /// E0541: unknown meta item; carries the list of expected item names.
    UnknownMetaItem(String, &'static [&'static str]),
    /// E0542: `since` is missing.
    MissingSince,
    /// E0546: `feature` value is not an identifier.
    NonIdentFeature,
    /// E0546: `feature` is missing entirely.
    MissingFeature,
    /// E0544: more than one stability level specified.
    MultipleStabilityLevels,
    /// E0565: unsupported literal; the bool records whether it was a byte string.
    UnsupportedLiteral(&'static str, /* is_bytestr */ bool),
}
/// Emits the diagnostic corresponding to `error` at `span`.
fn handle_errors(sess: &ParseSess, span: Span, error: AttrError) {
    let diag = &sess.span_diagnostic;
    match error {
        AttrError::MultipleItem(item) => {
            struct_span_err!(diag, span, E0538, "multiple '{}' items", item).emit();
        }
        AttrError::UnknownMetaItem(item, expected) => {
            let expected = expected.iter().map(|name| format!("`{}`", name)).collect::<Vec<_>>();
            struct_span_err!(diag, span, E0541, "unknown meta item '{}'", item)
                .span_label(span, format!("expected one of {}", expected.join(", ")))
                .emit();
        }
        AttrError::MissingSince => {
            struct_span_err!(diag, span, E0542, "missing 'since'").emit();
        }
        AttrError::NonIdentFeature => {
            struct_span_err!(diag, span, E0546, "'feature' is not an identifier").emit();
        }
        AttrError::MissingFeature => {
            struct_span_err!(diag, span, E0546, "missing 'feature'").emit();
        }
        AttrError::MultipleStabilityLevels => {
            struct_span_err!(diag, span, E0544, "multiple stability levels").emit();
        }
        AttrError::UnsupportedLiteral(msg, is_bytestr) => {
            let mut err = struct_span_err!(diag, span, E0565, "{}", msg);
            if is_bytestr {
                // For byte strings, suggest dropping the leading `b` prefix
                // by slicing off the snippet's first character.
                if let Ok(lint_str) = sess.source_map().span_to_snippet(span) {
                    err.span_suggestion(
                        span,
                        "consider removing the prefix",
                        lint_str[1..].to_string(),
                        Applicability::MaybeIncorrect,
                    );
                }
            }
            err.emit();
        }
    }
}
/// The parsed form of an `#[inline]` attribute.
#[derive(Clone, PartialEq, Encodable, Decodable)]
pub enum InlineAttr {
    None,
    Hint,
    Always,
    Never,
}
/// The parsed form of an instruction-set attribute (Arm A32/T32 encodings).
#[derive(Clone, Encodable, Decodable)]
pub enum InstructionSetAttr {
    ArmA32,
    ArmT32,
}
/// The parsed form of an `#[optimize(...)]` attribute.
#[derive(Clone, Encodable, Decodable)]
pub enum OptimizeAttr {
    None,
    Speed,
    Size,
}
/// The parsed form of an `#[unwind(...)]` attribute.
#[derive(Copy, Clone, PartialEq)]
pub enum UnwindAttr {
    Allowed,
    Aborts,
}
/// Determine what `#[unwind]` attribute is present in `attrs`, if any.
pub fn find_unwind_attr(sess: &Session, attrs: &[Attribute]) -> Option<UnwindAttr> {
    // The fold means the *last* well-formed `#[unwind(..)]` attribute wins.
    attrs.iter().fold(None, |ia, attr| {
        if sess.check_name(attr, sym::unwind) {
            if let Some(meta) = attr.meta() {
                if let MetaItemKind::List(items) = meta.kind {
                    if items.len() == 1 {
                        if items[0].has_name(sym::allowed) {
                            return Some(UnwindAttr::Allowed);
                        } else if items[0].has_name(sym::aborts) {
                            return Some(UnwindAttr::Aborts);
                        }
                    }
                    // Anything other than exactly one of `allowed`/`aborts` is malformed.
                    struct_span_err!(
                        sess.diagnostic(),
                        attr.span,
                        E0633,
                        "malformed `unwind` attribute input"
                    )
                    .span_label(attr.span, "invalid argument")
                    .span_suggestions(
                        attr.span,
                        "the allowed arguments are `allowed` and `aborts`",
                        (vec!["allowed", "aborts"])
                            .into_iter()
                            .map(|s| format!("#[unwind({})]", s)),
                        Applicability::MachineApplicable,
                    )
                    .emit();
                }
            }
        }
        ia
    })
}
/// Represents the following attributes:
///
/// - `#[stable]`
/// - `#[unstable]`
#[derive(Encodable, Decodable, Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(HashStable_Generic)]
pub struct Stability {
    pub level: StabilityLevel,
    /// The feature-gate name given via `feature = "..."`.
    pub feature: Symbol,
}
/// Represents the `#[rustc_const_unstable]` and `#[rustc_const_stable]` attributes.
#[derive(Encodable, Decodable, Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[derive(HashStable_Generic)]
pub struct ConstStability {
    pub level: StabilityLevel,
    /// The feature-gate name given via `feature = "..."`.
    pub feature: Symbol,
    /// whether the function has a `#[rustc_promotable]` attribute
    pub promotable: bool,
}
/// The available stability levels.
#[derive(Encodable, Decodable, PartialEq, Copy, Clone, Debug, Eq, Hash)]
#[derive(HashStable_Generic)]
pub enum StabilityLevel {
    // Reason for the current stability level and the relevant rust-lang issue
    Unstable { reason: Option<Symbol>, issue: Option<NonZeroU32>, is_soft: bool },
    /// Stable since the contained version string (`since = "..."`).
    Stable { since: Symbol },
}
impl StabilityLevel {
    /// Returns `true` for [`StabilityLevel::Unstable`].
    pub fn is_unstable(&self) -> bool {
        match self {
            StabilityLevel::Unstable { .. } => true,
            StabilityLevel::Stable { .. } => false,
        }
    }
    /// Returns `true` for [`StabilityLevel::Stable`].
    pub fn is_stable(&self) -> bool {
        match self {
            StabilityLevel::Stable { .. } => true,
            StabilityLevel::Unstable { .. } => false,
        }
    }
}
/// Collects stability info from all stability attributes in `attrs`.
/// Each element of the returned pair is `None` if the corresponding
/// (const-)stability attribute is absent.
pub fn find_stability(
    sess: &Session,
    attrs: &[Attribute],
    item_sp: Span,
) -> (Option<Stability>, Option<ConstStability>) {
    find_stability_generic(sess, attrs.iter(), item_sp)
}
/// Shared implementation of [`find_stability`]: walks `attrs_iter` collecting
/// `#[stable]`/`#[unstable]`, `#[rustc_const_stable]`/`#[rustc_const_unstable]`
/// and `#[rustc_promotable]`, emitting diagnostics for any malformed input.
fn find_stability_generic<'a, I>(
    sess: &Session,
    attrs_iter: I,
    item_sp: Span,
) -> (Option<Stability>, Option<ConstStability>)
where
    I: Iterator<Item = &'a Attribute>,
{
    use StabilityLevel::*;
    let mut stab: Option<Stability> = None;
    let mut const_stab: Option<ConstStability> = None;
    let mut promotable = false;
    let diagnostic = &sess.parse_sess.span_diagnostic;
    'outer: for attr in attrs_iter {
        if ![
            sym::rustc_const_unstable,
            sym::rustc_const_stable,
            sym::unstable,
            sym::stable,
            sym::rustc_promotable,
        ]
        .iter()
        .any(|&s| attr.has_name(s))
        {
            continue; // not a stability level
        }
        sess.mark_attr_used(attr);
        let meta = attr.meta();
        if attr.has_name(sym::rustc_promotable) {
            promotable = true;
        }
        // attributes with data
        else if let Some(MetaItem { kind: MetaItemKind::List(ref metas), .. }) = meta {
            let meta = meta.as_ref().unwrap();
            // Reads a `name = "value"` pair into `item`; reports duplicates
            // (E0538) and non-string values (E0539), returning `false` on error.
            let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                if item.is_some() {
                    handle_errors(
                        &sess.parse_sess,
                        meta.span,
                        AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
                    );
                    return false;
                }
                if let Some(v) = meta.value_str() {
                    *item = Some(v);
                    true
                } else {
                    struct_span_err!(diagnostic, meta.span, E0539, "incorrect meta item").emit();
                    false
                }
            };
            let meta_name = meta.name_or_empty();
            match meta_name {
                sym::rustc_const_unstable | sym::unstable => {
                    if meta_name == sym::unstable && stab.is_some() {
                        handle_errors(
                            &sess.parse_sess,
                            attr.span,
                            AttrError::MultipleStabilityLevels,
                        );
                        break;
                    } else if meta_name == sym::rustc_const_unstable && const_stab.is_some() {
                        handle_errors(
                            &sess.parse_sess,
                            attr.span,
                            AttrError::MultipleStabilityLevels,
                        );
                        break;
                    }
                    let mut feature = None;
                    let mut reason = None;
                    let mut issue = None;
                    let mut issue_num = None;
                    let mut is_soft = false;
                    for meta in metas {
                        if let Some(mi) = meta.meta_item() {
                            match mi.name_or_empty() {
                                sym::feature => {
                                    if !get(mi, &mut feature) {
                                        continue 'outer;
                                    }
                                }
                                sym::reason => {
                                    if !get(mi, &mut reason) {
                                        continue 'outer;
                                    }
                                }
                                sym::issue => {
                                    if !get(mi, &mut issue) {
                                        continue 'outer;
                                    }
                                    // These unwraps are safe because `get` ensures the meta item
                                    // is a name/value pair string literal.
                                    issue_num = match &*issue.unwrap().as_str() {
                                        "none" => None,
                                        issue => {
                                            let emit_diag = |msg: &str| {
                                                struct_span_err!(
                                                    diagnostic,
                                                    mi.span,
                                                    E0545,
                                                    "`issue` must be a non-zero numeric string \
                                                    or \"none\"",
                                                )
                                                .span_label(
                                                    mi.name_value_literal().unwrap().span,
                                                    msg,
                                                )
                                                .emit();
                                            };
                                            match issue.parse() {
                                                Ok(0) => {
                                                    emit_diag(
                                                        "`issue` must not be \"0\", \
                                                        use \"none\" instead",
                                                    );
                                                    continue 'outer;
                                                }
                                                Ok(num) => NonZeroU32::new(num),
                                                Err(err) => {
                                                    emit_diag(&err.to_string());
                                                    continue 'outer;
                                                }
                                            }
                                        }
                                    };
                                }
                                sym::soft => {
                                    if !mi.is_word() {
                                        let msg = "`soft` should not have any arguments";
                                        sess.parse_sess.span_diagnostic.span_err(mi.span, msg);
                                    }
                                    is_soft = true;
                                }
                                _ => {
                                    handle_errors(
                                        &sess.parse_sess,
                                        meta.span(),
                                        AttrError::UnknownMetaItem(
                                            pprust::path_to_string(&mi.path),
                                            &["feature", "reason", "issue", "soft"],
                                        ),
                                    );
                                    continue 'outer;
                                }
                            }
                        } else {
                            handle_errors(
                                &sess.parse_sess,
                                meta.span(),
                                AttrError::UnsupportedLiteral("unsupported literal", false),
                            );
                            continue 'outer;
                        }
                    }
                    // `feature` and `issue` are mandatory; `reason` is optional.
                    match (feature, reason, issue) {
                        (Some(feature), reason, Some(_)) => {
                            if !rustc_lexer::is_ident(&feature.as_str()) {
                                handle_errors(
                                    &sess.parse_sess,
                                    attr.span,
                                    AttrError::NonIdentFeature,
                                );
                                continue;
                            }
                            let level = Unstable { reason, issue: issue_num, is_soft };
                            if sym::unstable == meta_name {
                                stab = Some(Stability { level, feature });
                            } else {
                                const_stab =
                                    Some(ConstStability { level, feature, promotable: false });
                            }
                        }
                        (None, _, _) => {
                            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature);
                            continue;
                        }
                        _ => {
                            struct_span_err!(diagnostic, attr.span, E0547, "missing 'issue'")
                                .emit();
                            continue;
                        }
                    }
                }
                sym::rustc_const_stable | sym::stable => {
                    if meta_name == sym::stable && stab.is_some() {
                        handle_errors(
                            &sess.parse_sess,
                            attr.span,
                            AttrError::MultipleStabilityLevels,
                        );
                        break;
                    } else if meta_name == sym::rustc_const_stable && const_stab.is_some() {
                        handle_errors(
                            &sess.parse_sess,
                            attr.span,
                            AttrError::MultipleStabilityLevels,
                        );
                        break;
                    }
                    let mut feature = None;
                    let mut since = None;
                    for meta in metas {
                        match meta {
                            NestedMetaItem::MetaItem(mi) => match mi.name_or_empty() {
                                sym::feature => {
                                    if !get(mi, &mut feature) {
                                        continue 'outer;
                                    }
                                }
                                sym::since => {
                                    if !get(mi, &mut since) {
                                        continue 'outer;
                                    }
                                }
                                _ => {
                                    handle_errors(
                                        &sess.parse_sess,
                                        meta.span(),
                                        AttrError::UnknownMetaItem(
                                            pprust::path_to_string(&mi.path),
                                            &["since", "note"],
                                        ),
                                    );
                                    continue 'outer;
                                }
                            },
                            NestedMetaItem::Literal(lit) => {
                                handle_errors(
                                    &sess.parse_sess,
                                    lit.span,
                                    AttrError::UnsupportedLiteral("unsupported literal", false),
                                );
                                continue 'outer;
                            }
                        }
                    }
                    // Both `feature` and `since` are mandatory for stable attrs.
                    match (feature, since) {
                        (Some(feature), Some(since)) => {
                            let level = Stable { since };
                            if sym::stable == meta_name {
                                stab = Some(Stability { level, feature });
                            } else {
                                const_stab =
                                    Some(ConstStability { level, feature, promotable: false });
                            }
                        }
                        (None, _) => {
                            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingFeature);
                            continue;
                        }
                        _ => {
                            handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince);
                            continue;
                        }
                    }
                }
                _ => unreachable!(),
            }
        }
    }
    // Merge the const-unstable info into the stability info
    if promotable {
        if let Some(ref mut stab) = const_stab {
            stab.promotable = promotable;
        } else {
            struct_span_err!(
                diagnostic,
                item_sp,
                E0717,
                "`rustc_promotable` attribute must be paired with either a `rustc_const_unstable` \
                or a `rustc_const_stable` attribute"
            )
            .emit();
        }
    }
    (stab, const_stab)
}
/// Returns the value of the first `#[crate_name = "..."]` attribute, if any.
pub fn find_crate_name(sess: &Session, attrs: &[Attribute]) -> Option<Symbol> {
    sess.first_attr_value_str_by_name(attrs, sym::crate_name)
}
/// Tests if a cfg-pattern matches the cfg set
pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool {
    eval_condition(cfg, sess, features, &mut |cfg| {
        try_gate_cfg(cfg, sess, features);
        // Reports `msg` and treats the malformed predicate as matching (`true`)
        // so evaluation can continue past the error.
        let error = |span, msg| {
            sess.span_diagnostic.span_err(span, msg);
            true
        };
        if cfg.path.segments.len() != 1 {
            return error(cfg.path.span, "`cfg` predicate key must be an identifier");
        }
        match &cfg.kind {
            MetaItemKind::List(..) => {
                error(cfg.span, "unexpected parentheses after `cfg` predicate key")
            }
            MetaItemKind::NameValue(lit) if !lit.kind.is_str() => {
                handle_errors(
                    sess,
                    lit.span,
                    AttrError::UnsupportedLiteral(
                        "literal in `cfg` predicate value must be a string",
                        lit.kind.is_bytestr(),
                    ),
                );
                true
            }
            MetaItemKind::NameValue(..) | MetaItemKind::Word => {
                // A predicate matches iff its `(name, value)` pair is in the
                // session's configured cfg set.
                let ident = cfg.ident().expect("multi-segment cfg predicate");
                sess.config.contains(&(ident.name, cfg.value_str()))
            }
        }
    })
}
/// Feature-gates `cfg` if its name corresponds to a gated cfg and feature
/// information is available.
fn try_gate_cfg(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) {
    let gate = find_gated_cfg(|sym| cfg.has_name(sym));
    if let Some(feats) = features {
        if let Some(gated_cfg) = gate {
            gate_cfg(&gated_cfg, cfg.span, sess, feats);
        }
    }
}
/// Emits a feature-gate error for `gated_cfg` unless the feature is enabled or
/// the span is allowed to use the unstable feature.
fn gate_cfg(gated_cfg: &GatedCfg, cfg_span: Span, sess: &ParseSess, features: &Features) {
    let (cfg, feature, has_feature) = gated_cfg;
    if !has_feature(features) && !cfg_span.allows_unstable(*feature) {
        let explain = format!("`cfg({})` is experimental and subject to change", cfg);
        feature_err(sess, *feature, cfg_span, &explain).emit();
    }
}
/// Evaluate a cfg-like condition (with `any` and `all`), using `eval` to
/// evaluate individual items.
pub fn eval_condition(
    cfg: &ast::MetaItem,
    sess: &ParseSess,
    features: Option<&Features>,
    eval: &mut impl FnMut(&ast::MetaItem) -> bool,
) -> bool {
    match cfg.kind {
        // `cfg(version("x.y"))`: compare against the compiling rustc's version.
        ast::MetaItemKind::List(ref mis) if cfg.name_or_empty() == sym::version => {
            try_gate_cfg(cfg, sess, features);
            let (min_version, span) = match &mis[..] {
                [NestedMetaItem::Literal(Lit { kind: LitKind::Str(sym, ..), span, .. })] => {
                    (sym, span)
                }
                [NestedMetaItem::Literal(Lit { span, .. })
                | NestedMetaItem::MetaItem(MetaItem { span, .. })] => {
                    sess.span_diagnostic
                        .struct_span_err(*span, "expected a version literal")
                        .emit();
                    return false;
                }
                [..] => {
                    sess.span_diagnostic
                        .struct_span_err(cfg.span, "expected single version literal")
                        .emit();
                    return false;
                }
            };
            let min_version = match Version::parse(&min_version.as_str()) {
                Some(ver) => ver,
                None => {
                    sess.span_diagnostic.struct_span_err(*span, "invalid version literal").emit();
                    return false;
                }
            };
            let channel = env!("CFG_RELEASE_CHANNEL");
            let nightly = channel == "nightly" || channel == "dev";
            let rustc_version = Version::parse(env!("CFG_RELEASE")).unwrap();
            // See https://github.com/rust-lang/rust/issues/64796#issuecomment-625474439 for details
            if nightly { rustc_version > min_version } else { rustc_version >= min_version }
        }
        // `cfg(any(...))`, `cfg(all(...))`, `cfg(not(...))`.
        ast::MetaItemKind::List(ref mis) => {
            for mi in mis.iter() {
                if !mi.is_meta_item() {
                    handle_errors(
                        sess,
                        mi.span(),
                        AttrError::UnsupportedLiteral("unsupported literal", false),
                    );
                    return false;
                }
            }
            // The unwraps below may look dangerous, but we've already asserted
            // that they won't fail with the loop above.
            match cfg.name_or_empty() {
                sym::any => mis
                    .iter()
                    .any(|mi| eval_condition(mi.meta_item().unwrap(), sess, features, eval)),
                sym::all => mis
                    .iter()
                    .all(|mi| eval_condition(mi.meta_item().unwrap(), sess, features, eval)),
                sym::not => {
                    if mis.len() != 1 {
                        struct_span_err!(
                            sess.span_diagnostic,
                            cfg.span,
                            E0536,
                            "expected 1 cfg-pattern"
                        )
                        .emit();
                        return false;
                    }
                    !eval_condition(mis[0].meta_item().unwrap(), sess, features, eval)
                }
                _ => {
                    struct_span_err!(
                        sess.span_diagnostic,
                        cfg.span,
                        E0537,
                        "invalid predicate `{}`",
                        pprust::path_to_string(&cfg.path)
                    )
                    .emit();
                    false
                }
            }
        }
        // Leaf predicates are delegated to the caller-provided `eval`.
        ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => eval(cfg),
    }
}
/// The parsed form of a `#[deprecated]` / `#[rustc_deprecated]` attribute.
#[derive(Encodable, Decodable, Clone, HashStable_Generic)]
pub struct Deprecation {
    /// The `since = "..."` value, if given.
    pub since: Option<Symbol>,
    /// The note to issue a reason.
    pub note: Option<Symbol>,
    /// A text snippet used to completely replace any use of the deprecated item in an expression.
    ///
    /// This is currently unstable.
    pub suggestion: Option<Symbol>,
    /// Whether to treat the since attribute as being a Rust version identifier
    /// (rather than an opaque string).
    pub is_since_rustc_version: bool,
}
/// Finds the deprecation attribute. `None` if none exists.
pub fn find_deprecation(sess: &Session, attrs: &[Attribute], item_sp: Span) -> Option<Deprecation> {
    find_deprecation_generic(sess, attrs.iter(), item_sp)
}
/// Shared implementation of [`find_deprecation`]: parses `#[deprecated]` and
/// `#[rustc_deprecated]` (which accept different meta items), emitting
/// diagnostics for duplicates and malformed input.
fn find_deprecation_generic<'a, I>(
    sess: &Session,
    attrs_iter: I,
    item_sp: Span,
) -> Option<Deprecation>
where
    I: Iterator<Item = &'a Attribute>,
{
    let mut depr: Option<Deprecation> = None;
    let diagnostic = &sess.parse_sess.span_diagnostic;
    'outer: for attr in attrs_iter {
        if !(sess.check_name(attr, sym::deprecated) || sess.check_name(attr, sym::rustc_deprecated))
        {
            continue;
        }
        if depr.is_some() {
            struct_span_err!(diagnostic, item_sp, E0550, "multiple deprecated attributes").emit();
            break;
        }
        let meta = match attr.meta() {
            Some(meta) => meta,
            None => continue,
        };
        let mut since = None;
        let mut note = None;
        let mut suggestion = None;
        match &meta.kind {
            // Bare `#[deprecated]`.
            MetaItemKind::Word => {}
            // `#[deprecated = "note"]`.
            MetaItemKind::NameValue(..) => note = meta.value_str(),
            MetaItemKind::List(list) => {
                // Reads a `name = "value"` pair into `item`; reports duplicates
                // and non-string values, returning `false` on error.
                let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
                    if item.is_some() {
                        handle_errors(
                            &sess.parse_sess,
                            meta.span,
                            AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
                        );
                        return false;
                    }
                    if let Some(v) = meta.value_str() {
                        *item = Some(v);
                        true
                    } else {
                        if let Some(lit) = meta.name_value_literal() {
                            handle_errors(
                                &sess.parse_sess,
                                lit.span,
                                AttrError::UnsupportedLiteral(
                                    "literal in `deprecated` \
                                    value must be a string",
                                    lit.kind.is_bytestr(),
                                ),
                            );
                        } else {
                            struct_span_err!(diagnostic, meta.span, E0551, "incorrect meta item")
                                .emit();
                        }
                        false
                    }
                };
                for meta in list {
                    match meta {
                        NestedMetaItem::MetaItem(mi) => match mi.name_or_empty() {
                            sym::since => {
                                if !get(mi, &mut since) {
                                    continue 'outer;
                                }
                            }
                            sym::note if sess.check_name(attr, sym::deprecated) => {
                                if !get(mi, &mut note) {
                                    continue 'outer;
                                }
                            }
                            sym::reason if sess.check_name(attr, sym::rustc_deprecated) => {
                                if !get(mi, &mut note) {
                                    continue 'outer;
                                }
                            }
                            sym::suggestion if sess.check_name(attr, sym::rustc_deprecated) => {
                                if !get(mi, &mut suggestion) {
                                    continue 'outer;
                                }
                            }
                            _ => {
                                handle_errors(
                                    &sess.parse_sess,
                                    meta.span(),
                                    AttrError::UnknownMetaItem(
                                        pprust::path_to_string(&mi.path),
                                        if sess.check_name(attr, sym::deprecated) {
                                            &["since", "note"]
                                        } else {
                                            &["since", "reason", "suggestion"]
                                        },
                                    ),
                                );
                                continue 'outer;
                            }
                        },
                        NestedMetaItem::Literal(lit) => {
                            handle_errors(
                                &sess.parse_sess,
                                lit.span,
                                AttrError::UnsupportedLiteral(
                                    "item in `deprecated` must be a key/value pair",
                                    false,
                                ),
                            );
                            continue 'outer;
                        }
                    }
                }
            }
        }
        if suggestion.is_some() && sess.check_name(attr, sym::deprecated) {
            unreachable!("only allowed on rustc_deprecated")
        }
        // `#[rustc_deprecated]` requires both `since` and `reason`.
        if sess.check_name(attr, sym::rustc_deprecated) {
            if since.is_none() {
                handle_errors(&sess.parse_sess, attr.span, AttrError::MissingSince);
                continue;
            }
            if note.is_none() {
                struct_span_err!(diagnostic, attr.span, E0543, "missing 'reason'").emit();
                continue;
            }
        }
        sess.mark_attr_used(&attr);
        let is_since_rustc_version = sess.check_name(attr, sym::rustc_deprecated);
        depr = Some(Deprecation { since, note, suggestion, is_since_rustc_version });
    }
    depr
}
/// A single parsed `#[repr(...)]` hint.
#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone)]
pub enum ReprAttr {
    /// `#[repr(<integer type>)]` — explicit discriminant type.
    ReprInt(IntType),
    ReprC,
    /// `#[repr(packed)]` / `#[repr(packed(N))]`; the `u32` is the alignment (1 for bare `packed`).
    ReprPacked(u32),
    ReprSimd,
    ReprTransparent,
    /// `#[repr(align(N))]`; `N` is validated as a power of two `<= 2^29`.
    ReprAlign(u32),
    ReprNoNiche,
}
/// A primitive integer type named inside `#[repr(...)]`.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
#[derive(Encodable, Decodable, HashStable_Generic)]
pub enum IntType {
    SignedInt(ast::IntTy),
    UnsignedInt(ast::UintTy),
}
impl IntType {
#[inline]
pub fn is_signed(self) -> bool {
use IntType::*;
match self {
SignedInt(..) => true,
UnsignedInt(..) => false,
}
}
}
/// Parse #[repr(...)] forms.
///
/// Valid repr contents: any of the primitive integral type names (see
/// `int_type_of_word`, below) to specify enum discriminant type; `C`, to use
/// the same discriminant size that the corresponding C enum would or C
/// structure layout, `packed` to remove padding, and `transparent` to delegate representation
/// concerns to the only non-ZST field.
pub fn find_repr_attrs(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
    use ReprAttr::*;
    let mut acc = Vec::new();
    let diagnostic = &sess.parse_sess.span_diagnostic;
    if attr.has_name(sym::repr) {
        if let Some(items) = attr.meta_item_list() {
            sess.mark_attr_used(attr);
            for item in items {
                if !item.is_meta_item() {
                    handle_errors(
                        &sess.parse_sess,
                        item.span(),
                        AttrError::UnsupportedLiteral(
                            "meta item in `repr` must be an identifier",
                            false,
                        ),
                    );
                    continue;
                }
                let mut recognised = false;
                if item.is_word() {
                    // Bare-word hints: `C`, `packed`, `simd`, `transparent`,
                    // `no_niche`, or a primitive integer type name.
                    let hint = match item.name_or_empty() {
                        sym::C => Some(ReprC),
                        sym::packed => Some(ReprPacked(1)),
                        sym::simd => Some(ReprSimd),
                        sym::transparent => Some(ReprTransparent),
                        sym::no_niche => Some(ReprNoNiche),
                        name => int_type_of_word(name).map(ReprInt),
                    };
                    if let Some(h) = hint {
                        recognised = true;
                        acc.push(h);
                    }
                } else if let Some((name, value)) = item.name_value_literal() {
                    // Validates the `N` in `align(N)` / `packed(N)`: an
                    // unsuffixed integer that is a power of two, at most 2^29.
                    let parse_alignment = |node: &ast::LitKind| -> Result<u32, &'static str> {
                        if let ast::LitKind::Int(literal, ast::LitIntType::Unsuffixed) = node {
                            if literal.is_power_of_two() {
                                // rustc_middle::ty::layout::Align restricts align to <= 2^29
                                if *literal <= 1 << 29 {
                                    Ok(*literal as u32)
                                } else {
                                    Err("larger than 2^29")
                                }
                            } else {
                                Err("not a power of two")
                            }
                        } else {
                            Err("not an unsuffixed integer")
                        }
                    };
                    let mut literal_error = None;
                    if name == sym::align {
                        recognised = true;
                        match parse_alignment(&value.kind) {
                            Ok(literal) => acc.push(ReprAlign(literal)),
                            Err(message) => literal_error = Some(message),
                        };
                    } else if name == sym::packed {
                        recognised = true;
                        match parse_alignment(&value.kind) {
                            Ok(literal) => acc.push(ReprPacked(literal)),
                            Err(message) => literal_error = Some(message),
                        };
                    }
                    if let Some(literal_error) = literal_error {
                        struct_span_err!(
                            diagnostic,
                            item.span(),
                            E0589,
                            "invalid `repr(align)` attribute: {}",
                            literal_error
                        )
                        .emit();
                    }
                } else {
                    if let Some(meta_item) = item.meta_item() {
                        if meta_item.has_name(sym::align) {
                            // `#[repr(align = N)]` is the wrong syntax; suggest
                            // `align(N)` instead.
                            if let MetaItemKind::NameValue(ref value) = meta_item.kind {
                                recognised = true;
                                let mut err = struct_span_err!(
                                    diagnostic,
                                    item.span(),
                                    E0693,
                                    "incorrect `repr(align)` attribute format"
                                );
                                match value.kind {
                                    ast::LitKind::Int(int, ast::LitIntType::Unsuffixed) => {
                                        err.span_suggestion(
                                            item.span(),
                                            "use parentheses instead",
                                            format!("align({})", int),
                                            Applicability::MachineApplicable,
                                        );
                                    }
                                    ast::LitKind::Str(s, _) => {
                                        err.span_suggestion(
                                            item.span(),
                                            "use parentheses instead",
                                            format!("align({})", s),
                                            Applicability::MachineApplicable,
                                        );
                                    }
                                    _ => {}
                                }
                                err.emit();
                            }
                        }
                    }
                }
                if !recognised {
                    // Not a word we recognize
                    struct_span_err!(
                        diagnostic,
                        item.span(),
                        E0552,
                        "unrecognized representation hint"
                    )
                    .emit();
                }
            }
        }
    }
    acc
}
/// Maps a primitive integer type name (`i8`..`u128`, `isize`, `usize`) to the
/// corresponding [`IntType`], or `None` for any other symbol.
fn int_type_of_word(s: Symbol) -> Option<IntType> {
    use IntType::*;
    match s {
        sym::i8 => Some(SignedInt(ast::IntTy::I8)),
        sym::u8 => Some(UnsignedInt(ast::UintTy::U8)),
        sym::i16 => Some(SignedInt(ast::IntTy::I16)),
        sym::u16 => Some(UnsignedInt(ast::UintTy::U16)),
        sym::i32 => Some(SignedInt(ast::IntTy::I32)),
        sym::u32 => Some(UnsignedInt(ast::UintTy::U32)),
        sym::i64 => Some(SignedInt(ast::IntTy::I64)),
        sym::u64 => Some(UnsignedInt(ast::UintTy::U64)),
        sym::i128 => Some(SignedInt(ast::IntTy::I128)),
        sym::u128 => Some(UnsignedInt(ast::UintTy::U128)),
        sym::isize => Some(SignedInt(ast::IntTy::Isize)),
        sym::usize => Some(UnsignedInt(ast::UintTy::Usize)),
        _ => None,
    }
}
/// Errors produced while parsing `#[rustc_macro_transparency]`.
pub enum TransparencyError {
    /// The attribute value was not `transparent`/`semitransparent`/`opaque`.
    UnknownTransparency(Symbol, Span),
    /// More than one transparency attribute was found (spans of the first and second).
    MultipleTransparencyAttrs(Span, Span),
}
/// Determines the macro [`Transparency`] from `#[rustc_macro_transparency]`,
/// defaulting to semi-transparent for `macro_rules!` macros and opaque otherwise.
pub fn find_transparency(
    sess: &Session,
    attrs: &[Attribute],
    macro_rules: bool,
) -> (Transparency, Option<TransparencyError>) {
    let mut transparency = None;
    let mut error = None;
    for attr in attrs {
        if sess.check_name(attr, sym::rustc_macro_transparency) {
            if let Some((_, old_span)) = transparency {
                error = Some(TransparencyError::MultipleTransparencyAttrs(old_span, attr.span));
                break;
            } else if let Some(value) = attr.value_str() {
                transparency = Some((
                    match value {
                        sym::transparent => Transparency::Transparent,
                        sym::semitransparent => Transparency::SemiTransparent,
                        sym::opaque => Transparency::Opaque,
                        _ => {
                            error = Some(TransparencyError::UnknownTransparency(value, attr.span));
                            continue;
                        }
                    },
                    attr.span,
                ));
            }
        }
    }
    let fallback = if macro_rules { Transparency::SemiTransparent } else { Transparency::Opaque };
    (transparency.map_or(fallback, |t| t.0), error)
}
/// Collects the feature names listed in `#[allow_internal_unstable(...)]` attributes.
pub fn allow_internal_unstable<'a>(
    sess: &'a Session,
    attrs: &'a [Attribute],
) -> Option<impl Iterator<Item = Symbol> + 'a> {
    allow_unstable(sess, attrs, sym::allow_internal_unstable)
}
/// Collects the feature names listed in `#[rustc_allow_const_fn_unstable(...)]` attributes.
pub fn rustc_allow_const_fn_unstable<'a>(
    sess: &'a Session,
    attrs: &'a [Attribute],
) -> Option<impl Iterator<Item = Symbol> + 'a> {
    allow_unstable(sess, attrs, sym::rustc_allow_const_fn_unstable)
}
/// Collects the feature names listed by all attributes in `attrs` named
/// `symbol` (e.g. `#[allow_internal_unstable(feat_a, feat_b)]`), emitting
/// errors for attributes without a list and for list entries that are not
/// plain identifiers (the latter are skipped).
fn allow_unstable<'a>(
    sess: &'a Session,
    attrs: &'a [Attribute],
    symbol: Symbol,
) -> Option<impl Iterator<Item = Symbol> + 'a> {
    let attrs = sess.filter_by_name(attrs, symbol);
    let list = attrs
        .filter_map(move |attr| {
            attr.meta_item_list().or_else(|| {
                sess.diagnostic().span_err(
                    attr.span,
                    &format!("`{}` expects a list of feature names", symbol.to_ident_string()),
                );
                None
            })
        })
        .flatten();
    // `list` is already an `Iterator`, so the previous `.into_iter()` call here
    // was a no-op identity conversion (clippy::useless_conversion) and has been
    // removed.
    Some(list.filter_map(move |it| {
        let name = it.ident().map(|ident| ident.name);
        if name.is_none() {
            sess.diagnostic().span_err(
                it.span(),
                &format!("`{}` expects feature names", symbol.to_ident_string()),
            );
        }
        name
    }))
}
| 39.389573 | 100 | 0.409784 |
50e4900c31a7ee7a13df387d1d594db047f3fad5 | 4,485 | //! FIXME: write short doc here
use super::*;
/// Parses a generic parameter list only when the parser is positioned at `<`;
/// otherwise does nothing.
pub(super) fn opt_type_param_list(p: &mut Parser) {
    if p.at(T![<]) {
        type_param_list(p);
    }
}
/// Parses a `<...>` generic parameter list (the leading `<` must already be
/// verified by the caller): a comma-separated list of lifetime, type, and
/// `const` parameters, each with optional outer attributes, closed by `>`.
fn type_param_list(p: &mut Parser) {
    assert!(p.at(T![<]));
    let m = p.start();
    p.bump(T![<]);
    while !p.at(EOF) && !p.at(T![>]) {
        let m = p.start();
        // test generic_lifetime_type_attribute
        // fn foo<#[derive(Lifetime)] 'a, #[derive(Type)] T>(_: &'a T) {
        // }
        attributes::outer_attributes(p);
        match p.current() {
            LIFETIME => lifetime_param(p, m),
            IDENT => type_param(p, m),
            CONST_KW => type_const_param(p, m),
            _ => {
                m.abandon(p);
                p.err_and_bump("expected type parameter")
            }
        }
        // A comma is required between parameters but optional before `>`.
        if !p.at(T![>]) && !p.expect(T![,]) {
            break;
        }
    }
    p.expect(T![>]);
    m.complete(p, TYPE_PARAM_LIST);
}
/// Parses a single lifetime parameter (`'a`, optionally with `: 'b + 'c` bounds).
fn lifetime_param(p: &mut Parser, m: Marker) {
    assert!(p.at(LIFETIME));
    p.bump(LIFETIME);
    if p.at(T![:]) {
        lifetime_bounds(p);
    }
    m.complete(p, LIFETIME_PARAM);
}
/// Parses a single type parameter: name, optional `: Bound` list, and an
/// optional `= DefaultType`.
fn type_param(p: &mut Parser, m: Marker) {
    assert!(p.at(IDENT));
    name(p);
    if p.at(T![:]) {
        bounds(p);
    }
    // test type_param_default
    // struct S<T = i32>;
    if p.at(T![=]) {
        p.bump(T![=]);
        types::type_(p)
    }
    m.complete(p, TYPE_PARAM);
}
// test const_param
// struct S<const N: u32>;
/// Parses a const generic parameter: `const NAME: Type`.
fn type_const_param(p: &mut Parser, m: Marker) {
    assert!(p.at(CONST_KW));
    p.bump(T![const]);
    name(p);
    types::ascription(p);
    m.complete(p, CONST_PARAM);
}
// test type_param_bounds
// struct S<T: 'a + ?Sized + (Copy)>;
/// Parses a `:`-introduced bound list (the `:` must already be verified).
pub(super) fn bounds(p: &mut Parser) {
    assert!(p.at(T![:]));
    p.bump(T![:]);
    bounds_without_colon(p);
}
/// Parses a `:`-introduced list of lifetime bounds (`'a + 'b + ...`),
/// stopping at the first non-lifetime token.
fn lifetime_bounds(p: &mut Parser) {
    assert!(p.at(T![:]));
    p.bump(T![:]);
    while p.at(LIFETIME) {
        p.bump(LIFETIME);
        if !p.eat(T![+]) {
            break;
        }
    }
}
/// Parses a `+`-separated bound list into the caller-supplied `marker`,
/// completing it as `TYPE_BOUND_LIST`.
pub(super) fn bounds_without_colon_m(p: &mut Parser, marker: Marker) -> CompletedMarker {
    while type_bound(p) {
        if !p.eat(T![+]) {
            break;
        }
    }
    marker.complete(p, TYPE_BOUND_LIST)
}
/// Convenience wrapper around [`bounds_without_colon_m`] that starts its own marker.
pub(super) fn bounds_without_colon(p: &mut Parser) {
    let m = p.start();
    bounds_without_colon_m(p, m);
}
/// Parses a single bound: an optionally-parenthesized, optionally-`?`-prefixed
/// lifetime, `for<...>` type, or path. Returns `false` (abandoning the marker)
/// when no bound is present.
fn type_bound(p: &mut Parser) -> bool {
    let m = p.start();
    let has_paren = p.eat(T!['(']);
    p.eat(T![?]);
    match p.current() {
        LIFETIME => p.bump(LIFETIME),
        T![for] => types::for_type(p),
        _ if paths::is_use_path_start(p) => types::path_type_(p, false),
        _ => {
            m.abandon(p);
            return false;
        }
    }
    if has_paren {
        p.expect(T![')']);
    }
    m.complete(p, TYPE_BOUND);
    true
}
// test where_clause
// fn foo()
// where
//    'a: 'b + 'c,
//    T: Clone + Copy + 'static,
//    Iterator::Item: 'a,
//    <T as Iterator>::Item: 'a
// {}
/// Parses an optional `where` clause: zero or more comma-separated predicates.
pub(super) fn opt_where_clause(p: &mut Parser) {
    if !p.at(T![where]) {
        return;
    }
    let m = p.start();
    p.bump(T![where]);
    while is_where_predicate(p) {
        where_predicate(p);
        let comma = p.eat(T![,]);
        // A trailing comma is optional only directly before the clause end.
        if is_where_clause_end(p) {
            break;
        }
        if !comma {
            p.error("expected comma");
        }
    }
    m.complete(p, WHERE_CLAUSE);
}
/// Returns `true` if the current token can begin a `where` predicate.
/// `impl` is excluded explicitly even though it can start a type.
fn is_where_predicate(p: &mut Parser) -> bool {
    match p.current() {
        LIFETIME => true,
        T![impl] => false,
        token => types::TYPE_FIRST.contains(token),
    }
}
/// Returns `true` if the parser is at a token that terminates a `where`
/// clause: the opening `{` of a body, `;`, or `=`.
fn is_where_clause_end(p: &mut Parser) -> bool {
    // Equivalent to the former explicit `match` returning `true`/`false`
    // (clippy::match_like_matches_macro).
    matches!(p.current(), T!['{'] | T![;] | T![=])
}
/// Parses a single `where` predicate: either `'a: 'b + ...` or `Type: Bound + ...`.
fn where_predicate(p: &mut Parser) {
    let m = p.start();
    match p.current() {
        LIFETIME => {
            p.bump(LIFETIME);
            if p.at(T![:]) {
                bounds(p);
            } else {
                p.error("expected colon");
            }
        }
        T![impl] => {
            p.error("expected lifetime or type");
        }
        _ => {
            // test where_pred_for
            // fn test<F>()
            // where
            //    for<'a> F: Fn(&'a str)
            // { }
            types::type_(p);
            if p.at(T![:]) {
                bounds(p);
            } else {
                p.error("expected colon");
            }
        }
    }
    m.complete(p, WHERE_PRED);
}
| 21.45933 | 89 | 0.473802 |
79b7bd639a94827d2d8f7045a887976f71697545 | 5,310 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
#[cfg(any(feature = "v2_2", feature = "dox"))]
use AuthenticationScheme;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use Credential;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use glib::GString;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use glib::object::Cast;
use glib::object::IsA;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use glib::signal::SignalHandlerId;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use glib::signal::connect_raw;
use glib::translate::*;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use glib_sys;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use std::boxed::Box as Box_;
use std::fmt;
#[cfg(any(feature = "v2_2", feature = "dox"))]
use std::mem::transmute;
use webkit2_sys;
// Wraps the C `WebKitAuthenticationRequest` GObject in a reference-counted
// Rust type with automatic memory management (boilerplate generated by `gir`).
glib_wrapper! {
    pub struct AuthenticationRequest(Object<webkit2_sys::WebKitAuthenticationRequest, webkit2_sys::WebKitAuthenticationRequestClass, AuthenticationRequestClass>);
    match fn {
        get_type => || webkit2_sys::webkit_authentication_request_get_type(),
    }
}
// Convenience `None` constant for APIs taking `Option<&AuthenticationRequest>`.
pub const NONE_AUTHENTICATION_REQUEST: Option<&AuthenticationRequest> = None;
/// Methods of [`AuthenticationRequest`], available on any type that is
/// `IsA<AuthenticationRequest>`. Every method forwards to the corresponding
/// `webkit2_sys` C function and is gated on the `v2_2` (or doc-build `dox`)
/// feature.
pub trait AuthenticationRequestExt: 'static {
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn can_save_credentials(&self) -> bool;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn cancel(&self);
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_host(&self) -> Option<GString>;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_port(&self) -> u32;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_proposed_credential(&self) -> Option<Credential>;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_realm(&self) -> Option<GString>;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_scheme(&self) -> AuthenticationScheme;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn is_for_proxy(&self) -> bool;
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn is_retry(&self) -> bool;
    /// Connects `f` to the `cancelled` signal and returns the handler id.
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn connect_cancelled<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
// Generated forwarding impl: each method converts the receiver to the raw C
// pointer (`to_glib_none`) and translates the C result back with the matching
// ownership rule (`from_glib` / `from_glib_none` / `from_glib_full`).
impl<O: IsA<AuthenticationRequest>> AuthenticationRequestExt for O {
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn can_save_credentials(&self) -> bool {
        unsafe {
            from_glib(webkit2_sys::webkit_authentication_request_can_save_credentials(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn cancel(&self) {
        unsafe {
            webkit2_sys::webkit_authentication_request_cancel(self.as_ref().to_glib_none().0);
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_host(&self) -> Option<GString> {
        unsafe {
            from_glib_none(webkit2_sys::webkit_authentication_request_get_host(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_port(&self) -> u32 {
        unsafe {
            webkit2_sys::webkit_authentication_request_get_port(self.as_ref().to_glib_none().0)
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_proposed_credential(&self) -> Option<Credential> {
        unsafe {
            from_glib_full(webkit2_sys::webkit_authentication_request_get_proposed_credential(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_realm(&self) -> Option<GString> {
        unsafe {
            from_glib_none(webkit2_sys::webkit_authentication_request_get_realm(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn get_scheme(&self) -> AuthenticationScheme {
        unsafe {
            from_glib(webkit2_sys::webkit_authentication_request_get_scheme(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn is_for_proxy(&self) -> bool {
        unsafe {
            from_glib(webkit2_sys::webkit_authentication_request_is_for_proxy(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn is_retry(&self) -> bool {
        unsafe {
            from_glib(webkit2_sys::webkit_authentication_request_is_retry(self.as_ref().to_glib_none().0))
        }
    }
    #[cfg(any(feature = "v2_2", feature = "dox"))]
    fn connect_cancelled<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        unsafe {
            // The closure is boxed and its ownership handed to GLib alongside
            // the C-ABI trampoline that will invoke it.
            let f: Box_<F> = Box_::new(f);
            connect_raw(self.as_ptr() as *mut _, b"cancelled\0".as_ptr() as *const _,
                Some(transmute(cancelled_trampoline::<Self, F> as usize)), Box_::into_raw(f))
        }
    }
}
/// C-ABI callback handed to `connect_raw`: recovers the boxed Rust closure from
/// the `gpointer` and invokes it with a borrowed, safely-cast wrapper.
#[cfg(any(feature = "v2_2", feature = "dox"))]
unsafe extern "C" fn cancelled_trampoline<P, F: Fn(&P) + 'static>(this: *mut webkit2_sys::WebKitAuthenticationRequest, f: glib_sys::gpointer)
where P: IsA<AuthenticationRequest> {
    let f: &F = &*(f as *const F);
    f(&AuthenticationRequest::from_glib_borrow(this).unsafe_cast())
}
impl fmt::Display for AuthenticationRequest {
    /// Displays only the type name; the request has no other textual form.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("AuthenticationRequest")
    }
}
| 34.038462 | 162 | 0.627684 |
e94c8fb2ef620f1d8512f589d31929f9e5f6be70 | 1,202 | #![cfg(unix)]
#![cfg(feature = "signal")]
#![warn(rust_2018_idioms)]
mod support;
use support::*;
use std::sync::mpsc::channel;
use std::thread;
// Spawns several current-thread runtimes in separate threads, each waiting on
// SIGHUP, then raises one process-wide SIGHUP and checks that every thread
// completes (i.e. each runtime observed the signal).
#[test]
fn multi_loop() {
    // An "ordinary" (non-future) channel
    let (sender, receiver) = channel();
    // Run multiple times, to make sure there are no race conditions
    for _ in 0..10 {
        // Run multiple event loops, each one in its own thread
        let threads: Vec<_> = (0..4)
            .map(|_| {
                let sender = sender.clone();
                thread::spawn(move || {
                    let mut rt = CurrentThreadRuntime::new().unwrap();
                    let signal = signal(SignalKind::hangup()).unwrap();
                    // Readiness is reported only after `signal` exists, so the
                    // handler is registered before the signal is raised.
                    sender.send(()).unwrap();
                    let _ = run_with_timeout(&mut rt, signal.into_future());
                })
            })
            .collect();
        // Wait for them to declare they're ready
        for &_ in threads.iter() {
            receiver.recv().unwrap();
        }
        // Send a signal
        send_signal(libc::SIGHUP);
        // Make sure the threads terminated correctly
        for t in threads {
            t.join().unwrap();
        }
    }
}
| 29.317073 | 76 | 0.512479 |
87a1b3ca5a6cd72ae9d57be6f1cde5045949c551 | 1,114 | pub struct Advent;
impl super::common::Advent for Advent {
fn advent_number() -> u8 {
2
}
fn main1(input: &String) -> String {
format!(
"{}",
input
.lines()
.filter(|line| {
let (n1, n2, letter, password): (usize, usize, char, String) =
codegen_stuff::scan!("{}-{} {}: {}", line);
let count = password.chars().filter(|x| *x == letter).count();
n1 <= count && count <= n2
})
.count()
)
}
fn main2(input: &String) -> String {
format!(
"{}",
input
.lines()
.filter(|line| {
let (n1, n2, letter, password): (usize, usize, char, String) =
codegen_stuff::scan!("{}-{} {}: {}", line);
(password.chars().nth(n1 - 1).unwrap() == letter)
!= (password.chars().nth(n2 - 1).unwrap() == letter)
})
.count()
)
}
}
| 29.315789 | 82 | 0.370736 |
33ac3a818be3d86b257374366cca2b1543b88398 | 21,575 | use std::{
borrow::{Borrow, BorrowMut},
convert::TryFrom,
fmt::{Debug, Display, Formatter},
ops::{Deref, DerefMut},
str::FromStr,
};
use itertools::Itertools;
use crate::{Bell, InvalidRowError, Stage};
// Imports used solely for doc comments
#[allow(unused_imports)]
use crate::Block;
use super::{borrowed::Row, BellIter};
/// An owned row (the `String` to [`Row`]'s `str`).
#[derive(Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
pub struct RowBuf {
    /// The [`Bell`]s in the order that they would be rung. Because of the 'valid row' invariant,
    /// this can't contain duplicate [`Bell`]s or any [`Bell`]s with number greater than the
    /// [`Stage`] of this `RowBuf`.
    // This is `pub(super)` so that `super::borrowed::Row` can access it.
    pub(super) bell_vec: Vec<Bell>,
}
impl RowBuf {
/* CONSTRUCTORS */
/// Parse a string into a `RowBuf`, skipping any [`char`]s that aren't valid [`Bell`] names.
/// This returns a [`InvalidRowError`] if the `RowBuf` would be invalid.
///
/// # Example
/// ```
/// use bellframe::{Bell, RowBuf, Stage, InvalidRowError};
///
/// // Parsing a valid Row is fine
/// assert_eq!(RowBuf::parse("12543")?.to_string(), "12543");
/// // Parsing valid rows with invalid characters is also fine
/// assert_eq!(RowBuf::parse("4321\t[65 78]")?.to_string(), "43216578");
/// assert_eq!(RowBuf::parse("3|2|1 6|5|4 9|8|7")?.to_string(), "321654987");
/// // Parsing an invalid `Row` returns an error describing the problem
/// assert_eq!(
/// RowBuf::parse("112345"),
/// Err(InvalidRowError::DuplicateBell(Bell::from_number(1).unwrap()))
/// );
/// assert_eq!(
/// RowBuf::parse("12745"),
/// Err(InvalidRowError::BellOutOfStage(
/// Bell::from_number(7).unwrap(),
/// Stage::DOUBLES
/// ))
/// );
/// # Ok::<(), InvalidRowError>(())
/// ```
pub fn parse(s: &str) -> Result<Self, InvalidRowError> {
Self::from_bell_iter(s.chars().filter_map(Bell::from_name))
}
    /// Parse a string into a `RowBuf`, extending to the given [`Stage`] if required and skipping
    /// any [`char`]s that aren't valid [`Bell`] names. This returns [`InvalidRowError`] if the
    /// `RowBuf` would be invalid, and this will produce better error messages than
    /// [`RowBuf::parse`] because of the extra information provided by the [`Stage`].
    ///
    /// # Example
    /// ```
    /// use bellframe::{Bell, RowBuf, Stage, InvalidRowError};
    ///
    /// // Short rows are extended with 'cover' bells up to the stage
    /// assert_eq!(RowBuf::parse_with_stage("21", Stage::DOUBLES)?.to_string(), "21345");
    /// // Parsing an invalid row returns an error describing the problem
    /// assert_eq!(
    ///     RowBuf::parse_with_stage("112345", Stage::MINOR),
    ///     Err(InvalidRowError::DuplicateBell(Bell::from_number(1).unwrap()))
    /// );
    /// assert_eq!(
    ///     RowBuf::parse_with_stage("12745", Stage::DOUBLES),
    ///     Err(InvalidRowError::BellOutOfStage(
    ///         Bell::from_name('7').unwrap(),
    ///         Stage::DOUBLES
    ///     ))
    /// );
    /// # Ok::<(), InvalidRowError>(())
    /// ```
    pub fn parse_with_stage(s: &str, stage: Stage) -> Result<Self, InvalidRowError> {
        Self::from_vec_with_stage(s.chars().filter_map(Bell::from_name).collect_vec(), stage)
    }
/// Creates rounds on a given [`Stage`].
///
/// # Example
/// ```
/// use bellframe::{RowBuf, Stage};
///
/// assert_eq!(RowBuf::rounds(Stage::MINIMUS).to_string(), "1234");
/// assert_eq!(RowBuf::rounds(Stage::CATERS).to_string(), "123456789");
/// ```
pub fn rounds(stage: Stage) -> Self {
// This unsafety is OK, because rounds is always a valid `Row`
unsafe { Self::from_bell_iter_unchecked(stage.bells()) }
}
/// Creates backrounds on a given [`Stage`].
///
/// # Example
/// ```
/// use bellframe::{RowBuf, Stage};
///
/// assert_eq!(RowBuf::backrounds(Stage::MINIMUS).to_string(), "4321");
/// assert_eq!(RowBuf::backrounds(Stage::CATERS).to_string(), "987654321");
/// ```
pub fn backrounds(stage: Stage) -> Self {
// This unsafety is OK, because backrounds is always a valid `Row`
unsafe { Self::from_bell_iter_unchecked(stage.bells().rev()) }
}
/// Creates Queens on a given [`Stage`].
///
/// # Example
/// ```
/// use bellframe::{RowBuf, Stage};
///
/// assert_eq!(RowBuf::queens(Stage::MINIMUS).to_string(), "1324");
/// assert_eq!(RowBuf::queens(Stage::CATERS).to_string(), "135792468");
/// ```
pub fn queens(stage: Stage) -> Self {
let odds = stage.bells().step_by(2);
let evens = stage.bells().skip(1).step_by(2);
// This unsafety is OK, because Queens is always a valid `Row`
unsafe { Self::from_bell_iter_unchecked(odds.chain(evens)) }
}
/* UTILITY CONSTRUCTORS */
/// Creates a `RowBuf` from a [`Vec`] of [`Bell`]s, checking that the resulting `RowBuf` is
/// valid.
///
/// # Example
/// ```
/// use bellframe::{Bell, InvalidRowError, RowBuf};
///
/// // Converting a `Row` from a valid `Vec` of `Bell`s is fine
/// assert_eq!(
/// RowBuf::from_vec(vec![
/// Bell::from_name('4').unwrap(),
/// Bell::from_name('2').unwrap(),
/// Bell::from_name('1').unwrap(),
/// Bell::from_name('3').unwrap(),
/// ])?.to_string(),
/// "4213"
/// );
/// // Converting a `Row` from an invalid `Vec` of `Bell`s is not so fine
/// assert_eq!(
/// RowBuf::from_vec(vec![
/// Bell::from_name('4').unwrap(),
/// Bell::from_name('2').unwrap(),
/// Bell::from_name('1').unwrap(),
/// Bell::from_name('4').unwrap(),
/// ]),
/// Err(InvalidRowError::DuplicateBell(Bell::from_name('4').unwrap()))
/// );
/// # Ok::<(), InvalidRowError>(())
/// ```
pub fn from_vec(bells: Vec<Bell>) -> Result<RowBuf, InvalidRowError> {
let stage = Stage::try_from(bells.len() as u8)?;
// We check validity by keeping a checklist of which `Bell`s we've seen, and checking off
// each bell as we go. PERF: use a bitmap here
let mut checklist = vec![false; bells.len()];
// Loop over all the bells to check them off in the checklist. We do not need to check for
// empty spaces in the checklist once we've done because (by the Pigeon Hole Principle),
// fitting `n` bells into `n` slots with some gaps will always require that a bell is
// either out of range or two bells share a slot.
for &b in &bells {
match checklist.get_mut(b.index()) {
// If the `Bell` is out of range of the checklist, it can't belong within the
// `Stage` of this `Row`
None => return Err(InvalidRowError::BellOutOfStage(b, stage)),
// If the `Bell` has already been seen before, then it must be a duplicate
Some(&mut true) => return Err(InvalidRowError::DuplicateBell(b)),
// If the `Bell` has not been seen before, check off the checklist entry and
// continue
Some(x) => *x = true,
}
}
// If none of the `Bell`s caused errors, the row must be valid
Ok(Self { bell_vec: bells })
}
/// Creates a `RowBuf` from a [`Vec`] of [`Bell`]s, **without** checking that the resulting
/// `RowBuf` is valid. This is the unsafe version of [`RowBuf::from_vec`].
///
/// # Safety
///
/// This function is safe if `bells` corresponds to a valid `Row` according to the CC's
/// Framework. This means that each [`Bell`] is unique, and has [`index`](Bell::index) smaller
/// than the `bells.len()`.
///
/// # Example
/// ```
/// use bellframe::{Bell, InvalidRowError, RowBuf};
///
/// # fn test() -> Option<()> {
/// // Converting a `RowBuf` from a valid `Vec` of `Bell`s is fine, but still unsafe
/// assert_eq!(
/// unsafe {
/// RowBuf::from_vec_unchecked(vec![
/// Bell::from_name('4')?,
/// Bell::from_name('2')?,
/// Bell::from_name('1')?,
/// Bell::from_name('3')?,
/// ])
/// }.to_string(),
/// "4213"
/// );
/// // Converting a `Row` from an invalid `Vec` of `Bell`s compiles and runs,
/// // but silently creates an invalid `Row` and, by extension, silently causes
/// // undefined behaviour
/// assert_eq!(
/// unsafe {
/// RowBuf::from_vec_unchecked(vec![
/// Bell::from_name('4')?,
/// Bell::from_name('2')?,
/// Bell::from_name('1')?,
/// Bell::from_name('4')?,
/// ])
/// }.to_string(),
/// "4214"
/// );
/// # Some(())
/// # }
/// # fn main() { test().unwrap() }
/// ```
#[inline]
pub unsafe fn from_vec_unchecked(bells: Vec<Bell>) -> RowBuf {
RowBuf { bell_vec: bells }
}
/// Utility function that creates a `RowBuf` from an [`Iterator`] of [`Bell`]s, checking that
/// the resulting `RowBuf` is valid.
///
/// # Example
/// ```
/// use bellframe::{Bell, RowBuf, Stage, InvalidRowError};
///
/// // Create a valid row from an iterator over `Bell`s
/// let iter = [0, 3, 4, 2, 1].iter().copied().map(Bell::from_index);
/// let row = RowBuf::from_bell_iter(iter)?;
/// assert_eq!(row.to_string(), "14532");
/// // Attempt to create an invalid row from an iterator over `Bell`s
/// // (we get an error)
/// let iter = [0, 3, 7, 2, 1].iter().copied().map(Bell::from_index);
/// assert_eq!(
/// RowBuf::from_bell_iter(iter),
/// Err(InvalidRowError::BellOutOfStage(
/// Bell::from_name('8').unwrap(),
/// Stage::DOUBLES,
/// ))
/// );
///
/// # Ok::<(), InvalidRowError>(())
/// ```
pub fn from_bell_iter(iter: impl Iterator<Item = Bell>) -> Result<Self, InvalidRowError> {
Self::from_vec(iter.collect_vec())
}
/// Creates a `RowBuf` from a [`Vec`] of [`Bell`]s, **without** checking that the resulting
/// `RowBuf` is valid. This is the unsafe version of [`RowBuf::from_bell_iter`].
///
/// # Safety
///
/// This function is safe if `iter` yields a valid `Row` according to the CC's Framework. This
/// means that each [`Bell`] is unique, and has [`index`](Bell::index) smaller than the number
/// of items yeilded by `iter`.
/// # Example
/// ```
/// use bellframe::{Bell, RowBuf, Stage, InvalidRowError};
///
/// // Create a valid row from an iterator over `Bell`s
/// let iter = [0, 3, 4, 2, 1].iter().copied().map(Bell::from_index);
/// let row = unsafe { RowBuf::from_bell_iter_unchecked(iter) };
/// assert_eq!(row.to_string(), "14532");
/// // Create an invalid row from an iterator over `Bell`s. We get no error,
/// // but doing anything with the resulting `Row` is undefined behaviour
/// let iter = [0, 3, 7, 2, 1].iter().copied().map(Bell::from_index);
/// let row = unsafe { RowBuf::from_bell_iter_unchecked(iter) };
/// assert_eq!(row.to_string(), "14832");
/// ```
pub unsafe fn from_bell_iter_unchecked(iter: impl Iterator<Item = Bell>) -> Self {
Self::from_vec_unchecked(iter.collect())
}
/// Checks the validity of a potential `RowBuf`, extending it to the given [`Stage`] if valid
/// and returning an [`InvalidRowError`] otherwise (consuming the potential `RowBuf` so it
/// can't be used). This will provide nicer errors than [`RowBuf::from_vec`] since this
/// has extra information about the desired [`Stage`] of the potential `RowBuf`.
pub fn from_vec_with_stage(
mut bells: Vec<Bell>,
stage: Stage,
) -> Result<Self, InvalidRowError> {
// We check validity by keeping a checklist of which `Bell`s we've seen, and checking off
// each bell as we go.
let mut checklist = vec![false; stage.num_bells()];
// It's OK to initialise this with the `TREBLE` (and not handle the case where there are no
// bells),
let mut biggest_bell_found = Bell::TREBLE;
// Loop over all the bells to check them off in the checklist
for &b in &bells {
match checklist.get_mut(b.index()) {
// If the `Bell` is out of range of the checklist, it can't belong within the `Stage`
// of this `Row`
None => return Err(InvalidRowError::BellOutOfStage(b, stage)),
// If the `Bell` has already been seen before, then it must be a duplicate
Some(&mut true) => return Err(InvalidRowError::DuplicateBell(b)),
// If the `Bell` has not been seen before, check off the checklist entry and continue
Some(x) => *x = true,
}
biggest_bell_found = b.max(biggest_bell_found);
}
// The Pigeon Hole Principle argument from `check_validity` doesn't apply here, because
// there could be fewer `Bell`s than the `stage` specified. However, this does allow us to
// accurately say when bells are missing so we do another pass over the `checklist` to
// check for missing bells. If this check also passes, then `self` must be a valid `Row`
// of some stage <= `stage`.
//
// The iterator chain runs a linear search the first instance of `false` up to
// `biggest_bell_found`, which is the index of our missing bell. There looks like there is
// an off-by-one error here since we skip checking `biggest_bell_found` which is
// technically within the specified range, but this is OK because (by definition) we know
// that a bell of `biggest_bell_found` has been found, so it cannot be missing.
if let Some((index, _)) = checklist[..biggest_bell_found.index()]
.iter()
.enumerate()
.find(|&(_i, x)| !*x)
{
return Err(InvalidRowError::MissingBell(Bell::from_index(index as u8)));
}
// If no errors were generated so far, then `bells` must represent a [`Row`] (or be empty)
// so we should extend it with 'cover' bells up to the stage. We can't just create a `Row`
// immediately then call `Row::extend_to_stage` because parsing `""` would temporarily
// create a 0-length `Row`
assert!(bells.len() <= stage.num_bells());
let cover_bells = stage.bells().skip(bells.len());
bells.extend(cover_bells);
// This unsafety is OK because we have verified that `bells` corresponds to a `Row`, while
// the no-zero-stage invariant makes sure that `bells` is non-empty
let mut row = unsafe { Self::from_vec_unchecked(bells) };
row.extend_to_stage(stage);
Ok(row)
}
/// Consumes this `RowBuf` and returns the underlying [`Vec`] of [`Bell`]s
#[inline]
pub fn into_bell_vec(self) -> Vec<Bell> {
self.bell_vec
}
    /// Converts a [`RowBuf`] into a borrowed [`Row`]. Equivalent to `&*self`, but doesn't rely
    /// on type inference.
    #[inline]
    pub fn as_row(&self) -> &Row {
        // This unsafety is OK, because `RowBuf` requires its bells to form a valid row according
        // to the Framework
        unsafe { Row::from_slice_unchecked(&self.bell_vec) }
    }
    /// Converts a [`RowBuf`] into a mutably borrowed [`Row`]. Equivalent to `&mut *self`, but
    /// doesn't rely on type inference.
    #[inline]
    pub fn as_mut_row(&mut self) -> &mut Row {
        // This unsafety is OK, because `RowBuf` requires its bells to form a valid row according
        // to the Framework
        unsafe { Row::from_mut_slice_unchecked(&mut self.bell_vec) }
    }
/* MUTATING OPERATIONS */
    /// Extend this `RowBuf` in-place with cover bells until it has a given [`Stage`].
    ///
    /// # Panics
    ///
    /// Panics if `stage` is smaller than this row's current [`Stage`].
    pub fn extend_to_stage(&mut self, stage: Stage) {
        assert!(self.stage() <= stage);
        self.bell_vec
            .extend(stage.bells().skip(self.bell_vec.len()));
    }
    /// Overwrites this with the contents of a [`Row`], reusing the existing heap allocation
    /// where possible.
    pub fn overwrite_from(&mut self, row: &Row) {
        self.bell_vec.clear();
        self.bell_vec.extend(row.bell_iter());
    }
}
/* CONVERSIONS BETWEEN `Row` AND `RowBuf` */
// `RowBuf` dereferences to the borrowed `Row`, mirroring `String`/`str`.
impl Deref for RowBuf {
    type Target = Row;

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_row()
    }
}
// Mutable counterpart of the `Deref` impl above.
impl DerefMut for RowBuf {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // This unsafety is OK because the slice of `Bell`s comes from a `RowBuf`, which must
        // represent a valid row
        unsafe { Row::from_mut_slice_unchecked(&mut self.bell_vec) }
    }
}
// Lets collections keyed by `Row` accept a `RowBuf` lookup, like
// `String: Borrow<str>`.
impl Borrow<Row> for RowBuf {
    #[inline]
    fn borrow(&self) -> &Row {
        self.as_row()
    }
}
// Mutable counterpart of the `Borrow` impl above.
impl BorrowMut<Row> for RowBuf {
    #[inline]
    fn borrow_mut(&mut self) -> &mut Row {
        self.as_mut_row()
    }
}
// Cheap reference-to-reference conversion for generic `impl AsRef<Row>` APIs.
impl AsRef<Row> for RowBuf {
    #[inline]
    fn as_ref(&self) -> &Row {
        self.as_row()
    }
}
// Mutable counterpart of the `AsRef` impl above.
impl AsMut<Row> for RowBuf {
    #[inline]
    fn as_mut(&mut self) -> &mut Row {
        self.as_mut_row()
    }
}
// Enables `Cow<Row>` and `row.to_owned() -> RowBuf`, like `str`/`String`.
impl ToOwned for Row {
    type Owned = RowBuf;

    #[inline]
    fn to_owned(&self) -> Self::Owned {
        // We can skip the validity checks here because `Row` is valid by invariant
        unsafe { RowBuf::from_bell_iter_unchecked(self.bell_iter()) }
    }
}
/* FORMATTING */
// Debug output wraps the `Display` form, e.g. `RowBuf(1234)`.
impl Debug for RowBuf {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "RowBuf({})", self)
    }
}
impl Display for RowBuf {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Delegate to `Row`'s implementation (one char per bell, in ringing order)
        Display::fmt(self.deref(), f)
    }
}
/* OTHER TRAITS */
// Allows `for bell in &row_buf { … }`.
impl<'row> IntoIterator for &'row RowBuf {
    type Item = Bell;
    type IntoIter = BellIter<'row>;

    fn into_iter(self) -> Self::IntoIter {
        self.bell_iter()
    }
}
// Allows `"1234".parse::<RowBuf>()`, delegating to `RowBuf::parse`.
impl FromStr for RowBuf {
    type Err = InvalidRowError;

    #[inline]
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Self::parse(s)
    }
}
// Allow comparing an owned row against a borrowed one without converting
impl PartialEq<Row> for RowBuf {
    fn eq(&self, other: &Row) -> bool {
        self.as_row() == other
    }
}
// Symmetric counterpart of `PartialEq<Row> for RowBuf`
impl PartialEq<RowBuf> for Row {
    fn eq(&self, other: &RowBuf) -> bool {
        self == other.as_row()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use quickcheck_macros::quickcheck;

    // `&Row` is a fat pointer (data pointer + length), so two words on 64-bit
    #[test]
    fn row_ref_size() {
        assert_eq!(std::mem::size_of::<&Row>(), 16);
    }

    // Valid inputs parse, and short rows get padded with cover bells
    #[test]
    fn parse_with_stage_ok() {
        fn check(inp_str: &str, stage: Stage, exp_row: &str) {
            assert_eq!(
                RowBuf::parse_with_stage(inp_str, stage).unwrap(),
                RowBuf::parse(exp_row).unwrap()
            );
        }

        check("321", Stage::SINGLES, "321");
        check("321", Stage::MINOR, "321456");
        check("1342", Stage::MAJOR, "13425678");
        check("123564", Stage::ROYAL, "1235647890");
        check("21", Stage::DOUBLES, "21345");
        check("", Stage::MINIMUS, "1234");
    }

    // A bell that appears twice is reported as a duplicate
    #[test]
    fn parse_with_stage_err_dup_bell() {
        fn check(inp_str: &str, stage: Stage, dup_bell: char) {
            assert_eq!(
                RowBuf::parse_with_stage(inp_str, stage),
                Err(InvalidRowError::DuplicateBell(
                    Bell::from_name(dup_bell).unwrap()
                ))
            );
        }

        check("322", Stage::SINGLES, '2');
        check("11", Stage::MAXIMUS, '1');
        check("512435", Stage::MINOR, '5');
        check("331212", Stage::MINOR, '3');
    }

    // A bell bigger than the stage is reported as out-of-stage
    #[test]
    fn parse_with_stage_out_of_stage() {
        fn check(inp_str: &str, stage: Stage, bell_out_of_range: char) {
            assert_eq!(
                RowBuf::parse_with_stage(inp_str, stage),
                Err(InvalidRowError::BellOutOfStage(
                    Bell::from_name(bell_out_of_range).unwrap(),
                    stage
                ))
            );
        }

        check("0", Stage::SINGLES, '0');
        check("3218", Stage::MINOR, '8');
        check("12345678", Stage::SINGLES, '4');
    }

    // A gap below the biggest bell present is reported as a missing bell
    #[test]
    fn parse_with_stage_missing_bell() {
        fn check(inp_str: &str, stage: Stage, missing_bell: char) {
            assert_eq!(
                RowBuf::parse_with_stage(inp_str, stage),
                Err(InvalidRowError::MissingBell(
                    Bell::from_name(missing_bell).unwrap(),
                ))
            );
        }

        check("13", Stage::SINGLES, '2');
        check("14", Stage::MINOR, '2');
        check("14567892", Stage::CATERS, '3');
    }

    #[quickcheck]
    fn parse_doesnt_panic(v: String) -> bool {
        let _ = v.parse::<RowBuf>();
        true // the only way for this test to fail is if `RowBuf::parse` panics
    }

    #[quickcheck]
    fn parse_with_stage_doesnt_panic(s: String, stage: Stage) -> bool {
        let _ = RowBuf::parse_with_stage(&s, stage);
        true // the only way for this test to fail is if `RowBuf::parse_with_stage` panics
    }
}
| 36.444257 | 101 | 0.565747 |
5da20d5a9d3a821bbf4d800f6add7d437d03801a | 1,233 | // The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 8-3: Simple Recursion
use nannou::prelude::*;
fn main() {
    // Hand control to nannou; `model` builds the app state before the loop starts
    nannou::app(model).run();
}
// Empty app state: this sketch is static, so there is nothing to store
struct Model;
// Builds the (empty) model and creates the window.
fn model(app: &App) -> Model {
    // Draw a single frame and stop: the sketch is not animated
    app.set_loop_mode(LoopMode::loop_once());
    let _window = app
        .new_window()
        .with_dimensions(640, 360)
        .view(view)
        .build()
        .unwrap();
    Model
}
// Renders one frame: a white background plus the recursive circle pattern.
fn view(app: &App, _model: &Model, frame: &Frame) {
    // Begin drawing
    let draw = app.draw();
    draw.background().color(WHITE);

    // Kick off the recursion with a large circle at the window centre
    draw_circle(&draw, 0.0, 0.0, 200.0);

    // Write the result of our drawing to the window's frame.
    draw.to_frame(app, &frame).unwrap();
}
// Recursive function
fn draw_circle(draw: &app::Draw, x: f32, y: f32, r: f32) {
let norm_radius = map_range(r, 2.0, 360.0, 0.0, 1.0);
draw.ellipse()
.x_y(x, y)
.radius(r)
.hsva(norm_radius, 0.75, 1.0, norm_radius)
.stroke(BLACK);
if r > 8.0 {
// Four circles! left right, up and down
draw_circle(&draw, x + r, y, r / 2.0);
draw_circle(&draw, x - r, y, r / 2.0);
draw_circle(&draw, x, y + r, r / 2.0);
draw_circle(&draw, x, y - r, r / 2.0);
}
}
| 23.264151 | 61 | 0.556367 |
01e7be4c65e6db3c40ff144c17607b5ea82328d8 | 673 | pub mod back_of_the_house {
pub struct Breakfast {
pub toast: String, // 公有
#[allow(unused)]
seasonal_fruit: String, // 私有
}
pub enum Appetizer {
Soup,
Salad,
}
impl Breakfast {
pub fn summer(toast: &str) -> Breakfast {
Breakfast {
toast: String::from(toast),
seasonal_fruit: String::from("peaches"),
}
}
#[allow(unused)]
fn winter() {
println!("winter");
}
}
}
// In the example above you cannot construct a `Breakfast` instance directly:
// `seasonal_fruit` is private, so there is no way to initialise that field from outside.
// `summer` initialises `seasonal_fruit` for you, leaving only `toast` to fill in yourself.
| 21.03125 | 56 | 0.526003 |
38f086c9221fea82c0a39ea71e140b0f2c143e17 | 8,413 | use rustc_hir::{BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use crate::utils::{get_trait_def_id, higher, implements_trait, match_qpath, match_type, paths, span_lint};
declare_clippy_lint! {
    /// **What it does:** Checks for iteration that is guaranteed to be infinite.
    ///
    /// **Why is this bad?** While there may be places where this is acceptable
    /// (e.g., in event streams), in most cases this is simply an error.
    ///
    /// **Known problems:** None.
    ///
    /// **Example:**
    /// ```no_run
    /// use std::iter;
    ///
    /// // `repeat` never terminates, so this `collect` loops forever
    /// iter::repeat(1_u8).collect::<Vec<_>>();
    /// ```
    pub INFINITE_ITER,
    correctness,
    "infinite iteration"
}
declare_clippy_lint! {
    /// **What it does:** Checks for iteration that may be infinite.
    ///
    /// **Why is this bad?** While there may be places where this is acceptable
    /// (e.g., in event streams), in most cases this is simply an error.
    ///
    /// **Known problems:** The code may have a condition to stop iteration, but
    /// this lint is not clever enough to analyze it.
    ///
    /// **Example:**
    /// ```rust
    /// let infinite_iter = 0..;
    /// // `take_while` *might* stop this, so it is only "maybe" infinite
    /// [0..].iter().zip(infinite_iter.take_while(|x| *x > 5));
    /// ```
    pub MAYBE_INFINITE_ITER,
    pedantic,
    "possible infinite iteration"
}
// Register both lints under a single lint pass
declare_lint_pass!(InfiniteIter => [INFINITE_ITER, MAYBE_INFINITE_ITER]);
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for InfiniteIter {
    fn check_expr(&mut self, cx: &LateContext<'a, 'tcx>, expr: &'tcx Expr<'_>) {
        // Classify the expression; `Finite` means there is nothing to report
        let (lint, msg) = match complete_infinite_iter(cx, expr) {
            Infinite => (INFINITE_ITER, "infinite iteration detected"),
            MaybeInfinite => (MAYBE_INFINITE_ITER, "possible infinite iteration detected"),
            Finite => {
                return;
            },
        };

        span_lint(cx, lint, expr.span, msg)
    }
}
/// How confident the lint is that an expression iterates forever.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Finiteness {
    Infinite,
    MaybeInfinite,
    Finite,
}

use self::Finiteness::{Finite, Infinite, MaybeInfinite};

impl Finiteness {
    /// Combines two values for an iteration that stops as soon as *either*
    /// source stops: `Finite` dominates, then `MaybeInfinite`.
    #[must_use]
    fn and(self, b: Self) -> Self {
        if self == Finite || b == Finite {
            Finite
        } else if self == MaybeInfinite || b == MaybeInfinite {
            MaybeInfinite
        } else {
            Infinite
        }
    }

    /// Combines two values for an iteration that continues as long as
    /// *either* source does: `Infinite` dominates, then `MaybeInfinite`.
    #[must_use]
    fn or(self, b: Self) -> Self {
        if self == Infinite || b == Infinite {
            Infinite
        } else if self == MaybeInfinite || b == MaybeInfinite {
            MaybeInfinite
        } else {
            Finite
        }
    }
}

impl From<bool> for Finiteness {
    /// `true` maps to `Infinite` and `false` to `Finite`.
    #[must_use]
    fn from(b: bool) -> Self {
        match b {
            true => Infinite,
            false => Finite,
        }
    }
}
/// This tells us what to look for to know if the iterator returned by
/// this method is infinite.  Used as the propagation rule in [`HEURISTICS`].
#[derive(Copy, Clone)]
enum Heuristic {
    /// infinite no matter what
    Always,
    /// infinite if the first argument (the receiver) is
    First,
    /// infinite if any of the supplied arguments is
    Any,
    /// infinite if all of the supplied arguments are
    All,
}

use self::Heuristic::{All, Always, Any, First};
/// a slice of (method name, number of args, heuristic, bounds) tuples
/// that will be used to determine whether the method in question
/// returns an infinite or possibly infinite iterator. The finiteness
/// is an upper bound, e.g., some methods can return a possibly
/// infinite iterator at worst, e.g., `take_while`.
///
/// NOTE: the arg count includes the receiver (`args[0]` in `is_infinite`),
/// so e.g. `("zip", 2, ..)` matches `a.zip(b)`.
const HEURISTICS: [(&str, usize, Heuristic, Finiteness); 19] = [
    ("zip", 2, All, Infinite),
    ("chain", 2, Any, Infinite),
    ("cycle", 1, Always, Infinite),
    ("map", 2, First, Infinite),
    ("by_ref", 1, First, Infinite),
    ("cloned", 1, First, Infinite),
    ("rev", 1, First, Infinite),
    ("inspect", 1, First, Infinite),
    ("enumerate", 1, First, Infinite),
    ("peekable", 2, First, Infinite),
    ("fuse", 1, First, Infinite),
    ("skip", 2, First, Infinite),
    ("skip_while", 1, First, Infinite),
    ("filter", 2, First, Infinite),
    ("filter_map", 2, First, Infinite),
    ("flat_map", 2, First, Infinite),
    ("unzip", 1, First, Infinite),
    ("take_while", 2, First, MaybeInfinite),
    ("scan", 3, First, MaybeInfinite),
];
/// Determines whether `expr` evaluates to an iterator that is infinite,
/// possibly infinite, or definitely finite.
fn is_infinite(cx: &LateContext<'_, '_>, expr: &Expr<'_>) -> Finiteness {
    match expr.kind {
        ExprKind::MethodCall(ref method, _, ref args, _) => {
            // Method calls: look the method up in the heuristics table.
            // Note that `args[0]` is the receiver of the call.
            for &(name, len, heuristic, cap) in &HEURISTICS {
                if method.ident.name.as_str() == name && args.len() == len {
                    return (match heuristic {
                        Always => Infinite,
                        First => is_infinite(cx, &args[0]),
                        Any => is_infinite(cx, &args[0]).or(is_infinite(cx, &args[1])),
                        All => is_infinite(cx, &args[0]).and(is_infinite(cx, &args[1])),
                    })
                    .and(cap);
                }
            }
            // `flat_map` with a closure: also infinite when the closure body
            // itself produces an infinite iterator
            if method.ident.name == sym!(flat_map) && args.len() == 2 {
                if let ExprKind::Closure(_, _, body_id, _, _) = args[1].kind {
                    let body = cx.tcx.hir().body(body_id);
                    return is_infinite(cx, &body.value);
                }
            }
            Finite
        },
        // A block is as infinite as its trailing expression, if it has one
        ExprKind::Block(ref block, _) => block.expr.as_ref().map_or(Finite, |e| is_infinite(cx, e)),
        // Boxing or borrowing doesn't change finiteness
        ExprKind::Box(ref e) | ExprKind::AddrOf(BorrowKind::Ref, _, ref e) => is_infinite(cx, e),
        // `iter::repeat(..)` is the one infinite free function we recognise
        ExprKind::Call(ref path, _) => {
            if let ExprKind::Path(ref qpath) = path.kind {
                match_qpath(qpath, &paths::REPEAT).into()
            } else {
                Finite
            }
        },
        // Range literals desugar to struct expressions; a range with no end
        // (`start..`) is infinite
        ExprKind::Struct(..) => higher::range(cx, expr).map_or(false, |r| r.end.is_none()).into(),
        _ => Finite,
    }
}
/// the names and argument lengths of methods that *may* exhaust their
/// iterators (they can stop early when the predicate is satisfied)
const POSSIBLY_COMPLETING_METHODS: [(&str, usize); 6] = [
    ("find", 2),
    ("rfind", 2),
    ("position", 2),
    ("rposition", 2),
    ("any", 2),
    ("all", 2),
];
/// the names and argument lengths of methods that *always* exhaust
/// their iterators (argument counts include the receiver)
const COMPLETING_METHODS: [(&str, usize); 12] = [
    ("count", 1),
    ("fold", 3),
    ("for_each", 2),
    ("partition", 2),
    ("max", 1),
    ("max_by", 2),
    ("max_by_key", 2),
    ("min", 1),
    ("min_by", 2),
    ("min_by_key", 2),
    ("sum", 1),
    ("product", 1),
];
/// the paths of types that are known to be infinitely allocating when
/// `collect`ed from an infinite iterator
const INFINITE_COLLECTORS: [&[&str]; 8] = [
    &paths::BINARY_HEAP,
    &paths::BTREEMAP,
    &paths::BTREESET,
    &paths::HASHMAP,
    &paths::HASHSET,
    &paths::LINKED_LIST,
    &paths::VEC,
    &paths::VEC_DEQUE,
];
/// Determines whether `expr` is an expression that fully consumes an
/// iterator, and whether the iterator it consumes is (maybe) infinite.
fn complete_infinite_iter(cx: &LateContext<'_, '_>, expr: &Expr<'_>) -> Finiteness {
    match expr.kind {
        ExprKind::MethodCall(ref method, _, ref args, _) => {
            // Methods that always exhaust their receiver
            for &(name, len) in &COMPLETING_METHODS {
                if method.ident.name.as_str() == name && args.len() == len {
                    return is_infinite(cx, &args[0]);
                }
            }
            // Methods that may stop early, so cap the result at `MaybeInfinite`
            for &(name, len) in &POSSIBLY_COMPLETING_METHODS {
                if method.ident.name.as_str() == name && args.len() == len {
                    return MaybeInfinite.and(is_infinite(cx, &args[0]));
                }
            }
            if method.ident.name == sym!(last) && args.len() == 1 {
                // `last` only exhausts the iterator when it can't step back
                // from the end, i.e. when the receiver isn't double-ended
                let not_double_ended = get_trait_def_id(cx, &paths::DOUBLE_ENDED_ITERATOR).map_or(false, |id| {
                    !implements_trait(cx, cx.tables().expr_ty(&args[0]), id, &[])
                });
                if not_double_ended {
                    return is_infinite(cx, &args[0]);
                }
            } else if method.ident.name == sym!(collect) {
                // `collect` diverges when targeting an unbounded container
                let ty = cx.tables().expr_ty(expr);
                if INFINITE_COLLECTORS.iter().any(|path| match_type(cx, ty, path)) {
                    return is_infinite(cx, &args[0]);
                }
            }
        },
        ExprKind::Binary(op, ref l, ref r) => {
            // Comparisons of lazy iterators may short-circuit, so they are at
            // most `MaybeInfinite`
            if op.node.is_comparison() {
                return is_infinite(cx, l).and(is_infinite(cx, r)).and(MaybeInfinite);
            }
        }, // TODO: ExprKind::Loop + Match
        _ => (),
    }
    Finite
}
5dc5d33c687eede6c1c93ec18252859b0c04644b | 4,742 | //! Handle incoming events from Discord.
//!
//! The handler layer takes [`Context`][ctx] and [`Message`][msg] information as
//! input, and inspects them to determine if they should trigger a
//! [`Task`][task].
//!
//! - First attempting to write response information back to Discord, including
//! user-facing errors.
//! - If that fails, logging information about the communication failure.
//! - If non-user-facing errors occurred, logging them.
//!
//! What response information should actually be written is determined by
//! parsing the input to determine its intent, and in the event of the input
//! forming a command, running it with [`execute()`][execute].
//!
//! [ctx]: serenity::client::Context
//! [msg]: serenity::model::channel::Message
//! [task]: crate::task::Task
//! [execute]: iota_orionis::command::Command::execute
use chrono::{DateTime, Utc};
use serenity::{
async_trait,
client::{Context, EventHandler},
model::{channel::Message, gateway::Activity, gateway::Ready},
};
use tracing::{Instrument, Level};
use iota_orionis::command::Command;
use crate::task::Task;
/// Hatysa event handler.
///
/// This is the outermost entrypoint for command execution. Messages passed to
/// [`message()`][message] are parsed to determine if they match a command, and
/// if they do, the parsed command is handed to [`command::execute()`][execute].
///
/// [message]: #method.message
/// [execute]: ../command/fn.execute.html
pub struct Handler {
/// The string that must come before all commands' names.
pub prefix: String,
/// The date and time when this handler started running.
pub start_time: DateTime<Utc>,
}
#[async_trait]
impl EventHandler for Handler {
    // Called once the gateway connection is ready: log it and advertise the
    // `react` command as the bot's activity status.
    async fn ready(&self, ctx: Context, ready: Ready) {
        info!("{} is connected!", ready.user.name);

        ctx.set_activity(Activity::playing(&*format!("{}react", self.prefix)))
            .await;
    }

    // Called for every incoming message: parse it, and run it as a task if it
    // turns out to be a command.
    async fn message(&self, ctx: Context, msg: Message) {
        let span = trace_span!("handler");
        async move {
            if let Some(command) = self.interpret_command(&msg).await {
                event!(
                    Level::DEBUG,
                    id = msg.id.0,
                    "message is a command, executing",
                );

                Task::new(command, ctx, msg).execute().await;
            } else {
                event!(Level::DEBUG, id = msg.id.0, "message is not a command");
            }
        }
        .instrument(span)
        .await;
    }
}
impl Handler {
/// Attempt to parse a message as a command. If the message does not contain
/// a valid command, `None` is returned.
async fn interpret_command(&self, msg: &Message) -> Option<Command> {
// Non-private messages must have a prefix on them, but it's optional
// for private messages, so if we don't find a prefix, check if it was a
// private message and allow it if it was.
let tail = msg.content.strip_prefix(&self.prefix).or_else(|| {
if msg.is_private() {
Some(&*msg.content)
} else {
None
}
});
if let Some(tail) = tail {
if let Some(tail) = tail.strip_prefix("clap").map(|tail| tail.trim()) {
Some(Command::Clap {
input: tail.to_string(),
})
} else if tail.starts_with("info") {
Some(Command::Info {
start_time: self.start_time,
})
} else if tail.starts_with("ping") {
Some(Command::Ping)
} else if let Some(tail) = tail.strip_prefix("react").map(|tail| tail.trim()) {
Some(Command::React {
input: tail.to_owned(),
})
} else if let Some(tail) = tail.strip_prefix("sketchify").map(|tail| tail.trim()) {
Some(Command::Sketchify {
url_raw: tail.to_owned(),
})
} else if let Some(tail) = tail.strip_prefix("spongebob").map(|tail| tail.trim()) {
Some(Command::Spongebob {
input: tail.to_string(),
})
} else if let Some(tail) = tail.strip_prefix("wavy").map(|tail| tail.trim()) {
Some(Command::Wavy {
input: tail.to_string(),
})
} else if let Some(tail) = tail.strip_prefix("zalgo").map(|tail| tail.trim()) {
Some(Command::Zalgo {
input: tail.to_string(),
max_chars: None,
})
} else {
None
}
} else {
None
}
}
}
| 35.924242 | 95 | 0.551666 |
878c734890bccd4f251ebc2acb65b030b9e0b4d9 | 6,877 | // Generated from definition io.k8s.api.autoscaling.v2beta2.MetricIdentifier
// NOTE: generated from the Kubernetes OpenAPI definition
// io.k8s.api.autoscaling.v2beta2.MetricIdentifier — change the generator,
// not this file, and keep field docs in sync with the upstream spec.
/// MetricIdentifier defines the name and optionally selector for a metric
#[derive(Clone, Debug, Default, PartialEq)]
pub struct MetricIdentifier {
    /// name is the name of the given metric
    pub name: String,
    /// selector is the string-encoded form of a standard kubernetes label selector for the given metric When set, it is passed as an additional parameter to the metrics server for more specific metrics scoping. When unset, just the metricName will be used to gather metrics.
    pub selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector>,
}
impl<'de> crate::serde::Deserialize<'de> for MetricIdentifier {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
        // Map keys recognized while deserializing; anything else falls into
        // `Other` and is skipped (forward compatibility with newer schemas).
        #[allow(non_camel_case_types)]
        enum Field {
            Key_name,
            Key_selector,
            Other,
        }
        impl<'de> crate::serde::Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: crate::serde::Deserializer<'de> {
                struct Visitor;
                impl<'de> crate::serde::de::Visitor<'de> for Visitor {
                    type Value = Field;
                    fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                        f.write_str("field identifier")
                    }
                    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: crate::serde::de::Error {
                        Ok(match v {
                            "name" => Field::Key_name,
                            "selector" => Field::Key_selector,
                            _ => Field::Other,
                        })
                    }
                }
                deserializer.deserialize_identifier(Visitor)
            }
        }
        struct Visitor;
        impl<'de> crate::serde::de::Visitor<'de> for Visitor {
            type Value = MetricIdentifier;
            fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_str("MetricIdentifier")
            }
            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: crate::serde::de::MapAccess<'de> {
                let mut value_name: Option<String> = None;
                let mut value_selector: Option<crate::apimachinery::pkg::apis::meta::v1::LabelSelector> = None;
                while let Some(key) = crate::serde::de::MapAccess::next_key::<Field>(&mut map)? {
                    match key {
                        Field::Key_name => value_name = Some(crate::serde::de::MapAccess::next_value(&mut map)?),
                        // `selector` is optional: `next_value` deserializes directly
                        // into the `Option`, so no `Some(..)` wrapper here.
                        Field::Key_selector => value_selector = crate::serde::de::MapAccess::next_value(&mut map)?,
                        // Unknown keys are consumed and dropped.
                        Field::Other => { let _: crate::serde::de::IgnoredAny = crate::serde::de::MapAccess::next_value(&mut map)?; },
                    }
                }
                Ok(MetricIdentifier {
                    // `name` is required; error out if the map never supplied it.
                    name: value_name.ok_or_else(|| crate::serde::de::Error::missing_field("name"))?,
                    selector: value_selector,
                })
            }
        }
        deserializer.deserialize_struct(
            "MetricIdentifier",
            &[
                "name",
                "selector",
            ],
            Visitor,
        )
    }
}
impl crate::serde::Serialize for MetricIdentifier {
    /// Serializes as a struct with the mandatory `name` field and, when
    /// present, the optional `selector` field.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: crate::serde::Serializer {
        // Field count announced up front: `name` always, `selector` if set.
        let field_count = if self.selector.is_some() { 2 } else { 1 };
        let mut state = serializer.serialize_struct("MetricIdentifier", field_count)?;
        crate::serde::ser::SerializeStruct::serialize_field(&mut state, "name", &self.name)?;
        match self.selector.as_ref() {
            Some(selector) => crate::serde::ser::SerializeStruct::serialize_field(&mut state, "selector", selector)?,
            None => {}
        }
        crate::serde::ser::SerializeStruct::end(state)
    }
}
#[cfg(feature = "schemars")]
impl crate::schemars::JsonSchema for MetricIdentifier {
    // Schema name mirrors the upstream OpenAPI definition id.
    fn schema_name() -> String {
        "io.k8s.api.autoscaling.v2beta2.MetricIdentifier".to_owned()
    }
    // Builds an object schema with a required string `name` and an optional
    // `selector` subschema referencing `LabelSelector`.
    fn json_schema(__gen: &mut crate::schemars::gen::SchemaGenerator) -> crate::schemars::schema::Schema {
        crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
            metadata: Some(Box::new(crate::schemars::schema::Metadata {
                description: Some("MetricIdentifier defines the name and optionally selector for a metric".to_owned()),
                ..Default::default()
            })),
            instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::Object))),
            object: Some(Box::new(crate::schemars::schema::ObjectValidation {
                properties: std::array::IntoIter::new([
                    (
                        "name".to_owned(),
                        crate::schemars::schema::Schema::Object(crate::schemars::schema::SchemaObject {
                            metadata: Some(Box::new(crate::schemars::schema::Metadata {
                                description: Some("name is the name of the given metric".to_owned()),
                                ..Default::default()
                            })),
                            instance_type: Some(crate::schemars::schema::SingleOrVec::Single(Box::new(crate::schemars::schema::InstanceType::String))),
                            ..Default::default()
                        }),
                    ),
                    (
                        "selector".to_owned(),
                        {
                            // Pull in the LabelSelector subschema, then attach the
                            // field-specific description.
                            let mut schema_obj = __gen.subschema_for::<crate::apimachinery::pkg::apis::meta::v1::LabelSelector>().into_object();
                            schema_obj.metadata = Some(Box::new(crate::schemars::schema::Metadata {
                                description: Some("selector is the string-encoded form of a standard kubernetes label selector for the given metric When set, it is passed as an additional parameter to the metrics server for more specific metrics scoping. When unset, just the metricName will be used to gather metrics.".to_owned()),
                                ..Default::default()
                            }));
                            crate::schemars::schema::Schema::Object(schema_obj)
                        },
                    ),
                ]).collect(),
                required: std::array::IntoIter::new([
                    "name",
                ]).map(std::borrow::ToOwned::to_owned).collect(),
                ..Default::default()
            })),
            ..Default::default()
        })
    }
}
| 46.782313 | 332 | 0.533663 |
72131627aa5d4dd3303d2ba5a7b307d8de50174e | 28,088 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::probe;
use check::{self, FnCtxt, callee, demand};
use check::UnresolvedTypeAction;
use middle::def_id::DefId;
use middle::subst::{self};
use middle::traits;
use middle::ty::{self, NoPreference, PreferMutLvalue, Ty};
use middle::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr};
use middle::ty::fold::TypeFoldable;
use middle::infer;
use middle::infer::InferCtxt;
use syntax::codemap::Span;
use rustc_front::hir;
/// State shared by the method-confirmation steps: the function-checking
/// context plus the span and the receiver/call expressions being checked.
struct ConfirmContext<'a, 'tcx:'a> {
    fcx: &'a FnCtxt<'a, 'tcx>,
    span: Span,
    self_expr: &'tcx hir::Expr,
    call_expr: &'tcx hir::Expr,
}
/// Result of instantiating a picked method's signature with the final
/// substitutions (see `instantiate_method_sig`).
struct InstantiatedMethodSig<'tcx> {
    /// Function signature of the method being invoked. The 0th
    /// argument is the receiver.
    method_sig: ty::FnSig<'tcx>,
    /// Substitutions for all types/early-bound-regions declared on
    /// the method.
    all_substs: subst::Substs<'tcx>,
    /// Generic bounds on the method's parameters which must be added
    /// as pending obligations.
    method_predicates: ty::InstantiatedPredicates<'tcx>,
}
/// Entry point for method confirmation: builds a `ConfirmContext` for the
/// receiver/call expressions and resolves the final `MethodCallee` for the
/// method chosen during probing.
pub fn confirm<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
                         span: Span,
                         self_expr: &'tcx hir::Expr,
                         call_expr: &'tcx hir::Expr,
                         unadjusted_self_ty: Ty<'tcx>,
                         pick: probe::Pick<'tcx>,
                         supplied_method_types: Vec<Ty<'tcx>>)
                         -> ty::MethodCallee<'tcx>
{
    debug!("confirm(unadjusted_self_ty={:?}, pick={:?}, supplied_method_types={:?})",
           unadjusted_self_ty, pick, supplied_method_types);
    let mut cx = ConfirmContext::new(fcx, span, self_expr, call_expr);
    cx.confirm(unadjusted_self_ty, pick, supplied_method_types)
}
impl<'a,'tcx> ConfirmContext<'a,'tcx> {
    /// Bundles the checking context and the receiver/call expressions so the
    /// confirmation steps below don't have to thread them individually.
    fn new(fcx: &'a FnCtxt<'a, 'tcx>,
           span: Span,
           self_expr: &'tcx hir::Expr,
           call_expr: &'tcx hir::Expr)
           -> ConfirmContext<'a, 'tcx>
    {
        ConfirmContext { fcx: fcx, span: span, self_expr: self_expr, call_expr: call_expr }
    }
    /// Drives the full confirmation pipeline: adjust the receiver, build the
    /// receiver and method substitutions, instantiate the method signature,
    /// register the resulting obligations, and produce the final
    /// `MethodCallee`.
    fn confirm(&mut self,
               unadjusted_self_ty: Ty<'tcx>,
               pick: probe::Pick<'tcx>,
               supplied_method_types: Vec<Ty<'tcx>>)
               -> ty::MethodCallee<'tcx>
    {
        // Adjust the self expression the user provided and obtain the adjusted type.
        let self_ty = self.adjust_self_ty(unadjusted_self_ty, &pick);
        // Make sure nobody calls `drop()` explicitly.
        self.enforce_illegal_method_limitations(&pick);
        // Create substitutions for the method's type parameters.
        let rcvr_substs = self.fresh_receiver_substs(self_ty, &pick);
        let all_substs =
            self.instantiate_method_substs(
                &pick,
                supplied_method_types,
                rcvr_substs);
        debug!("all_substs={:?}", all_substs);
        // Create the final signature for the method, replacing late-bound regions.
        let InstantiatedMethodSig {
            method_sig, all_substs, method_predicates
        } = self.instantiate_method_sig(&pick, all_substs);
        let method_self_ty = method_sig.inputs[0];
        // Unify the (adjusted) self type with what the method expects.
        self.unify_receivers(self_ty, method_self_ty);
        // Create the method type (a bare fn whose 0th input is the receiver).
        let method_ty = pick.item.as_opt_method().unwrap();
        let fty = self.tcx().mk_fn(None, self.tcx().mk_bare_fn(ty::BareFnTy {
            sig: ty::Binder(method_sig),
            unsafety: method_ty.fty.unsafety,
            abi: method_ty.fty.abi.clone(),
        }));
        // Add any trait/regions obligations specified on the method's type parameters.
        self.add_obligations(fty, &all_substs, &method_predicates);
        // Create the final `MethodCallee`.
        let callee = ty::MethodCallee {
            def_id: pick.item.def_id(),
            ty: fty,
            substs: self.tcx().mk_substs(all_substs)
        };
        // If this is an `&mut self` method, bias the receiver
        // expression towards mutability (this will switch
        // e.g. `Deref` to `DerefMut` in overloaded derefs and so on).
        self.fixup_derefs_on_method_receiver_if_necessary(&callee);
        callee
    }
    ///////////////////////////////////////////////////////////////////////////
    // ADJUSTMENTS
    /// Applies the autoderefs/autoref/unsizing recorded in `pick` to the
    /// receiver expression, records the adjustment in the tables, and returns
    /// the adjusted receiver type.
    fn adjust_self_ty(&mut self,
                      unadjusted_self_ty: Ty<'tcx>,
                      pick: &probe::Pick<'tcx>)
                      -> Ty<'tcx>
    {
        let (autoref, unsize) = if let Some(mutbl) = pick.autoref {
            let region = self.infcx().next_region_var(infer::Autoref(self.span));
            let autoref = AutoPtr(self.tcx().mk_region(region), mutbl);
            (Some(autoref), pick.unsize.map(|target| {
                target.adjust_for_autoref(self.tcx(), Some(autoref))
            }))
        } else {
            // No unsizing should be performed without autoref (at
            // least during method dispatch). This is because we
            // currently only unsize `[T;N]` to `[T]`, and naturally
            // that must occur behind a reference.
            assert!(pick.unsize.is_none());
            (None, None)
        };
        // Commit the autoderefs by calling `autoderef` again, but this
        // time writing the results into the various tables.
        let (autoderefd_ty, n, result) = check::autoderef(self.fcx,
                                                          self.span,
                                                          unadjusted_self_ty,
                                                          Some(self.self_expr),
                                                          UnresolvedTypeAction::Error,
                                                          NoPreference,
                                                          |_, n| {
            if n == pick.autoderefs {
                Some(())
            } else {
                None
            }
        });
        assert_eq!(n, pick.autoderefs);
        assert_eq!(result, Some(()));
        // Write out the final adjustment.
        self.fcx.write_adjustment(self.self_expr.id,
                                  AdjustDerefRef(AutoDerefRef {
                                      autoderefs: pick.autoderefs,
                                      autoref: autoref,
                                      unsize: unsize
                                  }));
        if let Some(target) = unsize {
            target
        } else {
            autoderefd_ty.adjust_for_autoref(self.tcx(), autoref)
        }
    }
    ///////////////////////////////////////////////////////////////////////////
    //
    /// Returns a set of substitutions for the method *receiver* where all type and region
    /// parameters are instantiated with fresh variables. This substitution does not include any
    /// parameters declared on the method itself.
    ///
    /// Note that this substitution may include late-bound regions from the impl level. If so,
    /// these are instantiated later in the `instantiate_method_sig` routine.
    fn fresh_receiver_substs(&mut self,
                             self_ty: Ty<'tcx>,
                             pick: &probe::Pick<'tcx>)
                             -> subst::Substs<'tcx>
    {
        // How the substitutions are built depends on how the method was picked.
        match pick.kind {
            probe::InherentImplPick => {
                let impl_def_id = pick.item.container().id();
                assert!(self.tcx().impl_trait_ref(impl_def_id).is_none(),
                        "impl {:?} is not an inherent impl", impl_def_id);
                check::impl_self_ty(self.fcx, self.span, impl_def_id).substs
            }
            probe::ObjectPick => {
                let trait_def_id = pick.item.container().id();
                self.extract_trait_ref(self_ty, |this, object_ty, data| {
                    // The object data has no entry for the Self
                    // Type. For the purposes of this method call, we
                    // substitute the object type itself. This
                    // wouldn't be a sound substitution in all cases,
                    // since each instance of the object type is a
                    // different existential and hence could match
                    // distinct types (e.g., if `Self` appeared as an
                    // argument type), but those cases have already
                    // been ruled out when we deemed the trait to be
                    // "object safe".
                    let original_poly_trait_ref =
                        data.principal_trait_ref_with_self_ty(this.tcx(), object_ty);
                    let upcast_poly_trait_ref =
                        this.upcast(original_poly_trait_ref.clone(), trait_def_id);
                    let upcast_trait_ref =
                        this.replace_late_bound_regions_with_fresh_var(&upcast_poly_trait_ref);
                    debug!("original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}",
                           original_poly_trait_ref,
                           upcast_trait_ref,
                           trait_def_id);
                    upcast_trait_ref.substs.clone()
                })
            }
            probe::ExtensionImplPick(impl_def_id) => {
                // The method being invoked is the method as defined on the trait,
                // so return the substitutions from the trait. Consider:
                //
                //     impl<A,B,C> Trait<A,B> for Foo<C> { ... }
                //
                // If we instantiate A, B, and C with $A, $B, and $C
                // respectively, then we want to return the type
                // parameters from the trait ([$A,$B]), not those from
                // the impl ([$A,$B,$C]) not the receiver type ([$C]).
                let impl_polytype = check::impl_self_ty(self.fcx, self.span, impl_def_id);
                let impl_trait_ref =
                    self.fcx.instantiate_type_scheme(
                        self.span,
                        &impl_polytype.substs,
                        &self.tcx().impl_trait_ref(impl_def_id).unwrap());
                impl_trait_ref.substs.clone()
            }
            probe::TraitPick => {
                let trait_def_id = pick.item.container().id();
                let trait_def = self.tcx().lookup_trait_def(trait_def_id);
                // Make a trait reference `$0 : Trait<$1...$n>`
                // consisting entirely of type variables. Later on in
                // the process we will unify the transformed-self-type
                // of the method with the actual type in order to
                // unify some of these variables.
                self.infcx().fresh_substs_for_trait(self.span,
                                                    &trait_def.generics,
                                                    self.infcx().next_ty_var())
            }
            probe::WhereClausePick(ref poly_trait_ref) => {
                // Where clauses can have bound regions in them. We need to instantiate
                // those to convert from a poly-trait-ref to a trait-ref.
                self.replace_late_bound_regions_with_fresh_var(&*poly_trait_ref).substs.clone()
            }
        }
    }
    /// Autoderefs `self_ty` until a trait-object type is found and invokes
    /// `closure` with it; reports an internal compiler error (`span_bug`) if
    /// no object type is ever reached.
    fn extract_trait_ref<R, F>(&mut self, self_ty: Ty<'tcx>, mut closure: F) -> R where
        F: FnMut(&mut ConfirmContext<'a, 'tcx>, Ty<'tcx>, &ty::TraitTy<'tcx>) -> R,
    {
        // If we specified that this is an object method, then the
        // self-type ought to be something that can be dereferenced to
        // yield an object-type (e.g., `&Object` or `Box<Object>`
        // etc).
        let (_, _, result) = check::autoderef(self.fcx,
                                              self.span,
                                              self_ty,
                                              None,
                                              UnresolvedTypeAction::Error,
                                              NoPreference,
                                              |ty, _| {
            match ty.sty {
                ty::TyTrait(ref data) => Some(closure(self, ty, &**data)),
                _ => None,
            }
        });
        match result {
            Some(r) => r,
            None => {
                self.tcx().sess.span_bug(
                    self.span,
                    &format!("self-type `{}` for ObjectPick never dereferenced to an object",
                             self_ty))
            }
        }
    }
    /// Extends the receiver substitutions with values for the method's own
    /// generic parameters: fresh inference variables when none were supplied,
    /// the user-supplied types when the count matches, and error types
    /// (plus `E0035`/`E0036` diagnostics) otherwise.
    fn instantiate_method_substs(&mut self,
                                 pick: &probe::Pick<'tcx>,
                                 supplied_method_types: Vec<Ty<'tcx>>,
                                 substs: subst::Substs<'tcx>)
                                 -> subst::Substs<'tcx>
    {
        // Determine the values for the generic parameters of the method.
        // If they were not explicitly supplied, just construct fresh
        // variables.
        let num_supplied_types = supplied_method_types.len();
        let method = pick.item.as_opt_method().unwrap();
        let method_types = method.generics.types.get_slice(subst::FnSpace);
        let num_method_types = method_types.len();
        // Create subst for early-bound lifetime parameters, combining
        // parameters from the type and those from the method.
        //
        // FIXME -- permit users to manually specify lifetimes
        let method_regions =
            self.fcx.infcx().region_vars_for_defs(
                self.span,
                pick.item.as_opt_method().unwrap()
                    .generics.regions.get_slice(subst::FnSpace));
        let subst::Substs { types, regions } = substs;
        let regions = regions.map(|r| r.with_vec(subst::FnSpace, method_regions));
        let mut final_substs = subst::Substs { types: types, regions: regions };
        if num_supplied_types == 0 {
            self.fcx.infcx().type_vars_for_defs(
                self.span,
                subst::FnSpace,
                &mut final_substs,
                method_types);
        } else if num_method_types == 0 {
            span_err!(self.tcx().sess, self.span, E0035,
                      "does not take type parameters");
            self.fcx.infcx().type_vars_for_defs(
                self.span,
                subst::FnSpace,
                &mut final_substs,
                method_types);
        } else if num_supplied_types != num_method_types {
            span_err!(self.tcx().sess, self.span, E0036,
                      "incorrect number of type parameters given for this method");
            final_substs.types.replace(
                subst::FnSpace,
                vec![self.tcx().types.err; num_method_types]);
        } else {
            final_substs.types.replace(subst::FnSpace, supplied_method_types);
        }
        return final_substs;
    }
    /// Requires the adjusted receiver type to be a subtype of the method's
    /// declared self type; ICEs if the relation established during probing no
    /// longer holds.
    fn unify_receivers(&mut self,
                       self_ty: Ty<'tcx>,
                       method_self_ty: Ty<'tcx>)
    {
        match self.fcx.mk_subty(false, infer::Misc(self.span), self_ty, method_self_ty) {
            Ok(_) => {}
            Err(_) => {
                self.tcx().sess.span_bug(
                    self.span,
                    &format!("{} was a subtype of {} but now is not?",
                             self_ty, method_self_ty));
            }
        }
    }
    ///////////////////////////////////////////////////////////////////////////
    //
    /// Instantiates the picked method's predicates and signature with
    /// `all_substs`, replacing late-bound regions with fresh variables and
    /// normalizing associated types along the way.
    fn instantiate_method_sig(&mut self,
                              pick: &probe::Pick<'tcx>,
                              all_substs: subst::Substs<'tcx>)
                              -> InstantiatedMethodSig<'tcx>
    {
        debug!("instantiate_method_sig(pick={:?}, all_substs={:?})",
               pick,
               all_substs);
        // Instantiate the bounds on the method with the
        // type/early-bound-regions substitutions performed. There can
        // be no late-bound regions appearing here.
        let method_predicates = pick.item.as_opt_method().unwrap()
                                    .predicates.instantiate(self.tcx(), &all_substs);
        let method_predicates = self.fcx.normalize_associated_types_in(self.span,
                                                                       &method_predicates);
        debug!("method_predicates after subst = {:?}",
               method_predicates);
        // Instantiate late-bound regions and substitute the trait
        // parameters into the method type to get the actual method type.
        //
        // NB: Instantiate late-bound regions first so that
        // `instantiate_type_scheme` can normalize associated types that
        // may reference those regions.
        let method_sig = self.replace_late_bound_regions_with_fresh_var(
            &pick.item.as_opt_method().unwrap().fty.sig);
        debug!("late-bound lifetimes from method instantiated, method_sig={:?}",
               method_sig);
        let method_sig = self.fcx.instantiate_type_scheme(self.span, &all_substs, &method_sig);
        debug!("type scheme substituted, method_sig={:?}",
               method_sig);
        InstantiatedMethodSig {
            method_sig: method_sig,
            all_substs: all_substs,
            method_predicates: method_predicates,
        }
    }
    /// Registers the trait/region obligations implied by the method's
    /// predicates, plus well-formedness obligations for the substitutions and
    /// for the function type itself.
    fn add_obligations(&mut self,
                       fty: Ty<'tcx>,
                       all_substs: &subst::Substs<'tcx>,
                       method_predicates: &ty::InstantiatedPredicates<'tcx>) {
        debug!("add_obligations: fty={:?} all_substs={:?} method_predicates={:?}",
               fty,
               all_substs,
               method_predicates);
        self.fcx.add_obligations_for_parameters(
            traits::ObligationCause::misc(self.span, self.fcx.body_id),
            method_predicates);
        // this is a projection from a trait reference, so we have to
        // make sure that the trait reference inputs are well-formed.
        self.fcx.add_wf_bounds(
            all_substs,
            self.call_expr);
        // the function type must also be well-formed (this is not
        // implied by the substs being well-formed because of inherent
        // impls and late-bound regions - see issue #28609).
        self.fcx.register_wf_obligation(fty, self.span, traits::MiscObligation);
    }
    ///////////////////////////////////////////////////////////////////////////
    // RECONCILIATION
    /// When we select a method with an `&mut self` receiver, we have to go convert any
    /// auto-derefs, indices, etc from `Deref` and `Index` into `DerefMut` and `IndexMut`
    /// respectively.
    fn fixup_derefs_on_method_receiver_if_necessary(&self,
                                                    method_callee: &ty::MethodCallee) {
        // Only applies to methods whose receiver is `&mut _`; bail otherwise.
        let sig = match method_callee.ty.sty {
            ty::TyBareFn(_, ref f) => f.sig.clone(),
            _ => return,
        };
        match sig.0.inputs[0].sty {
            ty::TyRef(_, ty::TypeAndMut {
                ty: _,
                mutbl: hir::MutMutable,
            }) => {}
            _ => return,
        }
        // Gather up expressions we want to munge: walk down the chain of
        // field accesses, indexing, and derefs under the receiver.
        let mut exprs = Vec::new();
        exprs.push(self.self_expr);
        loop {
            let last = exprs[exprs.len() - 1];
            match last.node {
                hir::ExprField(ref expr, _) |
                hir::ExprTupField(ref expr, _) |
                hir::ExprIndex(ref expr, _) |
                hir::ExprUnary(hir::UnDeref, ref expr) => exprs.push(&**expr),
                _ => break,
            }
        }
        debug!("fixup_derefs_on_method_receiver_if_necessary: exprs={:?}",
               exprs);
        // Fix up autoderefs and derefs, innermost expression first.
        for (i, &expr) in exprs.iter().rev().enumerate() {
            // Count autoderefs.
            let autoderef_count = match self.fcx
                                            .inh
                                            .tables
                                            .borrow()
                                            .adjustments
                                            .get(&expr.id) {
                Some(&AdjustDerefRef(ref adj)) => adj.autoderefs,
                Some(_) | None => 0,
            };
            debug!("fixup_derefs_on_method_receiver_if_necessary: i={} expr={:?} \
                    autoderef_count={}",
                   i, expr, autoderef_count);
            if autoderef_count > 0 {
                // Re-run autoderef with a preference for mutable lvalues so
                // `Deref` impls get re-selected as `DerefMut`.
                check::autoderef(self.fcx,
                                 expr.span,
                                 self.fcx.expr_ty(expr),
                                 Some(expr),
                                 UnresolvedTypeAction::Error,
                                 PreferMutLvalue,
                                 |_, autoderefs| {
                                     if autoderefs == autoderef_count + 1 {
                                         Some(())
                                     } else {
                                         None
                                     }
                                 });
            }
            // Don't retry the first one or we might infinite loop!
            if i != 0 {
                match expr.node {
                    hir::ExprIndex(ref base_expr, ref index_expr) => {
                        // If this is an overloaded index, the
                        // adjustment will include an extra layer of
                        // autoref because the method is an &self/&mut
                        // self method. We have to peel it off to get
                        // the raw adjustment that `try_index_step`
                        // expects. This is annoying and horrible. We
                        // ought to recode this routine so it doesn't
                        // (ab)use the normal type checking paths.
                        let adj = self.fcx.inh.tables.borrow().adjustments.get(&base_expr.id)
                                                                          .cloned();
                        let (autoderefs, unsize) = match adj {
                            Some(AdjustDerefRef(adr)) => match adr.autoref {
                                None => {
                                    assert!(adr.unsize.is_none());
                                    (adr.autoderefs, None)
                                }
                                Some(AutoPtr(_, _)) => {
                                    (adr.autoderefs, adr.unsize.map(|target| {
                                        target.builtin_deref(false, NoPreference)
                                              .expect("fixup: AutoPtr is not &T").ty
                                    }))
                                }
                                Some(_) => {
                                    self.tcx().sess.span_bug(
                                        base_expr.span,
                                        &format!("unexpected adjustment autoref {:?}",
                                                 adr));
                                }
                            },
                            None => (0, None),
                            Some(_) => {
                                self.tcx().sess.span_bug(
                                    base_expr.span,
                                    "unexpected adjustment type");
                            }
                        };
                        let (adjusted_base_ty, unsize) = if let Some(target) = unsize {
                            (target, true)
                        } else {
                            (self.fcx.adjust_expr_ty(base_expr,
                                                     Some(&AdjustDerefRef(AutoDerefRef {
                                                         autoderefs: autoderefs,
                                                         autoref: None,
                                                         unsize: None
                                                     }))), false)
                        };
                        let index_expr_ty = self.fcx.expr_ty(&**index_expr);
                        // Re-run index selection with `PreferMutLvalue` so
                        // `Index` becomes `IndexMut` where needed.
                        let result = check::try_index_step(
                            self.fcx,
                            ty::MethodCall::expr(expr.id),
                            expr,
                            &**base_expr,
                            adjusted_base_ty,
                            autoderefs,
                            unsize,
                            PreferMutLvalue,
                            index_expr_ty);
                        if let Some((input_ty, return_ty)) = result {
                            demand::suptype(self.fcx, index_expr.span, input_ty, index_expr_ty);
                            let expr_ty = self.fcx.expr_ty(&*expr);
                            demand::suptype(self.fcx, expr.span, expr_ty, return_ty);
                        }
                    }
                    hir::ExprUnary(hir::UnDeref, ref base_expr) => {
                        // if this is an overloaded deref, then re-evaluate with
                        // a preference for mut
                        let method_call = ty::MethodCall::expr(expr.id);
                        if self.fcx.inh.tables.borrow().method_map.contains_key(&method_call) {
                            check::try_overloaded_deref(
                                self.fcx,
                                expr.span,
                                Some(method_call),
                                Some(&**base_expr),
                                self.fcx.expr_ty(&**base_expr),
                                PreferMutLvalue);
                        }
                    }
                    _ => {}
                }
            }
        }
    }
    ///////////////////////////////////////////////////////////////////////////
    // MISCELLANY
    /// Shorthand for the type context.
    fn tcx(&self) -> &'a ty::ctxt<'tcx> {
        self.fcx.tcx()
    }
    /// Shorthand for the inference context.
    fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> {
        self.fcx.infcx()
    }
    /// Rejects calls to methods that may not be invoked directly (e.g.
    /// `Drop::drop`) when the picked item lives in a trait container.
    fn enforce_illegal_method_limitations(&self, pick: &probe::Pick) {
        // Disallow calls to the method `drop` defined in the `Drop` trait.
        match pick.item.container() {
            ty::TraitContainer(trait_def_id) => {
                callee::check_legal_trait_for_method_call(self.fcx.ccx, self.span, trait_def_id)
            }
            ty::ImplContainer(..) => {}
        }
    }
    /// Upcasts `source_trait_ref` to the (super)trait `target_trait_def_id`;
    /// ICEs unless exactly one upcast candidate exists.
    fn upcast(&mut self,
              source_trait_ref: ty::PolyTraitRef<'tcx>,
              target_trait_def_id: DefId)
              -> ty::PolyTraitRef<'tcx>
    {
        let upcast_trait_refs = traits::upcast(self.tcx(),
                                               source_trait_ref.clone(),
                                               target_trait_def_id);
        // must be exactly one trait ref or we'd get an ambig error etc
        if upcast_trait_refs.len() != 1 {
            self.tcx().sess.span_bug(
                self.span,
                &format!("cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`",
                         source_trait_ref,
                         target_trait_def_id,
                         upcast_trait_refs));
        }
        upcast_trait_refs.into_iter().next().unwrap()
    }
    /// Skolemizes a binder by replacing its late-bound regions with fresh
    /// inference variables for the current call site.
    fn replace_late_bound_regions_with_fresh_var<T>(&self, value: &ty::Binder<T>) -> T
        where T : TypeFoldable<'tcx>
    {
        self.infcx().replace_late_bound_regions_with_fresh_var(
            self.span, infer::FnCall, value).0
    }
}
| 42.429003 | 98 | 0.480525 |
bb3418c337b72bc367c381d538a9d7db180a2e44 | 16,036 | //! The Substrate Node Template runtime. This can be compiled with `#[no_std]`, ready for Wasm.
#![cfg_attr(not(feature = "std"), no_std)]
// `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256.
#![recursion_limit="256"]
// Make the WASM binary available.
#[cfg(feature = "std")]
include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs"));
use sp_std::prelude::*;
use sp_core::{crypto::KeyTypeId, OpaqueMetadata};
use sp_runtime::{
ApplyExtrinsicResult, generic, create_runtime_str, impl_opaque_keys, MultiSignature,
transaction_validity::{TransactionValidity, TransactionSource},
};
use sp_runtime::traits::{
BlakeTwo256, Block as BlockT, IdentityLookup, Verify, IdentifyAccount, NumberFor, Saturating,
};
use sp_api::impl_runtime_apis;
use sp_consensus_aura::sr25519::AuthorityId as AuraId;
use grandpa::{AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList};
use grandpa::fg_primitives;
use sp_version::RuntimeVersion;
#[cfg(feature = "std")]
use sp_version::NativeVersion;
use contracts_rpc_runtime_api::ContractExecResult;
// A few exports that help ease life for downstream crates.
#[cfg(any(feature = "std", test))]
pub use sp_runtime::BuildStorage;
pub use timestamp::Call as TimestampCall;
pub use balances::Call as BalancesCall;
pub use sp_runtime::{Permill, Perbill};
pub use frame_support::{
construct_runtime, parameter_types, StorageValue,
traits::{KeyOwnerProofSystem, Randomness},
weights::{
Weight, IdentityFee,
constants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND},
},
};
/// Importing the contracts Schedule type.
pub use contracts::Schedule as ContractsSchedule;
/// Importing a template pallet
pub use template;
// Core primitive type aliases shared by the pallet configurations below.
/// An index to a block.
pub type BlockNumber = u32;
/// Alias to 512-bit hash when used in the context of a transaction signature on the chain.
pub type Signature = MultiSignature;
/// Some way of identifying an account on the chain. We intentionally make it equivalent
/// to the public key of our transaction signing scheme.
pub type AccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;
/// The type for looking up accounts. We don't expect more than 4 billion of them, but you
/// never know...
pub type AccountIndex = u32;
/// Balance of an account.
pub type Balance = u128;
/// Index of a transaction in the chain.
pub type Index = u32;
/// A hash of some data used by the chain.
pub type Hash = sp_core::H256;
/// Digest item type.
pub type DigestItem = generic::DigestItem<Hash>;
/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know
/// the specifics of the runtime. They can then be made to be agnostic over specific formats
/// of data like extrinsics, allowing for them to continue syncing the network through upgrades
/// to even the core data structures.
pub mod opaque {
    use super::*;
    pub use sp_runtime::OpaqueExtrinsic as UncheckedExtrinsic;
    /// Opaque block header type.
    pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
    /// Opaque block type.
    pub type Block = generic::Block<Header, UncheckedExtrinsic>;
    /// Opaque block identifier type.
    pub type BlockId = generic::BlockId<Block>;
    // Session keys for the two consensus pallets configured in this runtime
    // (Aura block production and GRANDPA finality).
    impl_opaque_keys! {
        pub struct SessionKeys {
            pub aura: Aura,
            pub grandpa: Grandpa,
        }
    }
}
/// This runtime version.
// Also exposed to pallets via the `Version` parameter type below and to the
// node executable through `native_version()`.
pub const VERSION: RuntimeVersion = RuntimeVersion {
    spec_name: create_runtime_str!("node-template"),
    impl_name: create_runtime_str!("node-template"),
    authoring_version: 1,
    spec_version: 1,
    impl_version: 1,
    apis: RUNTIME_API_VERSIONS,
    transaction_version: 1,
};
// Target block time: one block every 6000 ms; the slot duration equals the
// block time.
pub const MILLISECS_PER_BLOCK: u64 = 6000;
pub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK;
// These time units are defined in number of blocks.
// (60_000 ms per minute / 6000 ms per block = 10 blocks per minute.)
pub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber);
pub const HOURS: BlockNumber = MINUTES * 60;
pub const DAYS: BlockNumber = HOURS * 24;
// Contracts price units.
pub const MILLICENTS: Balance = 1_000_000_000;
pub const CENTS: Balance = 1_000 * MILLICENTS;
pub const DOLLARS: Balance = 100 * CENTS;
/// The version information used to identify this runtime when compiled natively.
#[cfg(feature = "std")]
pub fn native_version() -> NativeVersion {
    NativeVersion {
        // Default authoring restrictions; the runtime version is the
        // `VERSION` constant declared above.
        can_author_with: Default::default(),
        runtime_version: VERSION,
    }
}
parameter_types! {
    // 2400 retained hashes × 6 s blocks ≈ 4 hours of history.
    pub const BlockHashCount: BlockNumber = 2400;
    /// We allow for 2 seconds of compute with a 6 second average block time.
    pub const MaximumBlockWeight: Weight = 2 * WEIGHT_PER_SECOND;
    pub const AvailableBlockRatio: Perbill = Perbill::from_percent(75);
    /// Assume 10% of weight for average on_initialize calls.
    pub MaximumExtrinsicWeight: Weight = AvailableBlockRatio::get()
        .saturating_sub(Perbill::from_percent(10)) * MaximumBlockWeight::get();
    // 5 MiB maximum encoded block length.
    pub const MaximumBlockLength: u32 = 5 * 1024 * 1024;
    pub const Version: RuntimeVersion = VERSION;
}
impl system::Trait for Runtime {
    /// The basic call filter to use in dispatchable.
    type BaseCallFilter = ();
    /// The identifier used to distinguish between accounts.
    type AccountId = AccountId;
    /// The aggregated dispatch type that is available for extrinsics.
    type Call = Call;
    /// The lookup mechanism to get account ID from whatever is passed in dispatchers.
    type Lookup = IdentityLookup<AccountId>;
    /// The index type for storing how many extrinsics an account has signed.
    type Index = Index;
    /// The index type for blocks.
    type BlockNumber = BlockNumber;
    /// The type for hashing blocks and tries.
    type Hash = Hash;
    /// The hashing algorithm used.
    type Hashing = BlakeTwo256;
    /// The header type.
    type Header = generic::Header<BlockNumber, BlakeTwo256>;
    /// The ubiquitous event type.
    type Event = Event;
    /// The ubiquitous origin type.
    type Origin = Origin;
    /// Maximum number of block number to block hash mappings to keep (oldest pruned first).
    type BlockHashCount = BlockHashCount;
    /// Maximum weight of each block.
    type MaximumBlockWeight = MaximumBlockWeight;
    /// The weight of database operations that the runtime can invoke.
    type DbWeight = RocksDbWeight;
    /// The weight of the overhead invoked on the block import process, independent of the
    /// extrinsics included in that block.
    type BlockExecutionWeight = BlockExecutionWeight;
    /// The base weight of any extrinsic processed by the runtime, independent of the
    /// logic of that extrinsic. (Signature verification, nonce increment, fee, etc...)
    type ExtrinsicBaseWeight = ExtrinsicBaseWeight;
    /// The maximum weight that a single extrinsic of `Normal` dispatch class can have,
    /// independent of the logic of that extrinsic. (Roughly max block weight - average on
    /// initialize cost).
    type MaximumExtrinsicWeight = MaximumExtrinsicWeight;
    /// Maximum size of all encoded transactions (in bytes) that are allowed in one block.
    type MaximumBlockLength = MaximumBlockLength;
    /// Portion of the block weight that is available to all normal transactions.
    type AvailableBlockRatio = AvailableBlockRatio;
    /// Version of the runtime.
    type Version = Version;
    /// Converts a module to the index of the module in `construct_runtime!`.
    ///
    /// This type is being generated by `construct_runtime!`.
    type ModuleToIndex = ModuleToIndex;
    /// What to do if a new account is created.
    type OnNewAccount = ();
    /// What to do if an account is fully reaped from the system.
    type OnKilledAccount = ();
    /// The data to be stored in an account.
    type AccountData = balances::AccountData<Balance>;
}
impl aura::Trait for Runtime {
    // Aura authorities are identified by sr25519 keys (`AuraId` alias above).
    type AuthorityId = AuraId;
}
impl grandpa::Trait for Runtime {
    type Event = Event;
    type Call = Call;
    // NOTE(review): `()` stubs out the key-ownership proof system and
    // equivocation handling — confirm this is intended outside of a
    // development chain.
    type KeyOwnerProofSystem = ();
    type KeyOwnerProof =
        <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(KeyTypeId, GrandpaId)>>::Proof;
    type KeyOwnerIdentification = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(
        KeyTypeId,
        GrandpaId,
    )>>::IdentificationTuple;
    type HandleEquivocation = ();
}
parameter_types! {
    // Half the slot duration (6000 ms / 2 = 3000 ms).
    pub const MinimumPeriod: u64 = SLOT_DURATION / 2;
}
impl timestamp::Trait for Runtime {
    /// A timestamp: milliseconds since the unix epoch.
    type Moment = u64;
    type OnTimestampSet = Aura;
    type MinimumPeriod = MinimumPeriod;
}
parameter_types! {
pub const TombstoneDeposit: Balance = 16 * MILLICENTS;
pub const RentByteFee: Balance = 4 * MILLICENTS;
pub const RentDepositOffset: Balance = 1000 * MILLICENTS;
pub const SurchargeReward: Balance = 150 * MILLICENTS;
}
/// Configure the contracts (smart-contract execution) pallet.
impl contracts::Trait for Runtime {
    type Time = Timestamp;
    type Randomness = RandomnessCollectiveFlip;
    type Currency = Balances;
    type Event = Event;
    type DetermineContractAddress = contracts::SimpleAddressDeterminer<Runtime>;
    type TrieIdGenerator = contracts::TrieIdFromParentCounter<Runtime>;
    // NOTE(review): `()` means collected rent is not forwarded anywhere —
    // presumably dropped/burned; confirm against the pallet documentation.
    type RentPayment = ();
    type SignedClaimHandicap = contracts::DefaultSignedClaimHandicap;
    type TombstoneDeposit = TombstoneDeposit;
    type StorageSizeOffset = contracts::DefaultStorageSizeOffset;
    type RentByteFee = RentByteFee;
    type RentDepositOffset = RentDepositOffset;
    type SurchargeReward = SurchargeReward;
    type MaxDepth = contracts::DefaultMaxDepth;
    type MaxValueSize = contracts::DefaultMaxValueSize;
    // Weight-to-fee conversion is delegated to the transaction-payment pallet.
    type WeightPrice = transaction_payment::Module<Self>;
}
parameter_types! {
    // Minimum balance an account must hold to exist at all.
    pub const ExistentialDeposit: u128 = 500;
}
/// Configure the balances pallet.
impl balances::Trait for Runtime {
    /// The type for recording an account's balance.
    type Balance = Balance;
    /// The ubiquitous event type.
    type Event = Event;
    // `()`: dust removed from accounts is not routed to any handler.
    type DustRemoval = ();
    type ExistentialDeposit = ExistentialDeposit;
    // Account balance data is stored via the system pallet.
    type AccountStore = System;
}
parameter_types! {
    // Fee charged per byte of an extrinsic's encoded length.
    pub const TransactionByteFee: Balance = 1;
}
/// Configure the transaction-payment (fee) pallet.
impl transaction_payment::Trait for Runtime {
    type Currency = balances::Module<Runtime>;
    // `()`: collected fees are not forwarded to any destination handler.
    type OnTransactionPayment = ();
    type TransactionByteFee = TransactionByteFee;
    // `IdentityFee`: one unit of weight maps to one unit of fee.
    type WeightToFee = IdentityFee<Balance>;
    // `()`: the fee multiplier is never adjusted between blocks.
    type FeeMultiplierUpdate = ();
}
/// Configure the sudo pallet (dispatch of privileged calls by the sudo key).
impl sudo::Trait for Runtime {
    type Event = Event;
    type Call = Call;
}
/// Used for the module template in `./template.rs`
impl template::Trait for Runtime {
    // The template pallet only needs the event type; no other configuration.
    type Event = Event;
}
// Assembles the final runtime from the pallets configured above. Each entry
// declares which parts of the pallet are exposed (Module, Call, Storage,
// Config, Event, Inherent, ...).
construct_runtime!(
    pub enum Runtime where
        Block = Block,
        NodeBlock = opaque::Block,
        UncheckedExtrinsic = UncheckedExtrinsic
    {
        System: system::{Module, Call, Config, Storage, Event<T>},
        RandomnessCollectiveFlip: randomness_collective_flip::{Module, Call, Storage},
        Timestamp: timestamp::{Module, Call, Storage, Inherent},
        // Aura's inherent is provided by (depends on) the timestamp pallet.
        Aura: aura::{Module, Config<T>, Inherent(Timestamp)},
        Grandpa: grandpa::{Module, Call, Storage, Config, Event},
        Balances: balances::{Module, Call, Storage, Config<T>, Event<T>},
        TransactionPayment: transaction_payment::{Module, Storage},
        Sudo: sudo::{Module, Call, Config<T>, Storage, Event<T>},
        // Used for the module template in `./template.rs`
        TemplateModule: template::{Module, Call, Storage, Event<T>},
        Contracts: contracts::{Module, Call, Config, Storage, Event<T>},
    }
);
/// The address format for describing accounts.
pub type Address = AccountId;
/// Block header type as expected by this runtime.
pub type Header = generic::Header<BlockNumber, BlakeTwo256>;
/// Block type as expected by this runtime.
pub type Block = generic::Block<Header, UncheckedExtrinsic>;
/// A Block signed with a Justification
pub type SignedBlock = generic::SignedBlock<Block>;
/// BlockId type as expected by this runtime.
pub type BlockId = generic::BlockId<Block>;
/// The SignedExtension to the basic transaction logic.
///
/// NOTE(review): each tuple entry adds a pre-dispatch check to every signed
/// extrinsic; the ordering looks significant — confirm before reordering.
pub type SignedExtra = (
    system::CheckSpecVersion<Runtime>,
    system::CheckTxVersion<Runtime>,
    system::CheckGenesis<Runtime>,
    system::CheckEra<Runtime>,
    system::CheckNonce<Runtime>,
    system::CheckWeight<Runtime>,
    transaction_payment::ChargeTransactionPayment<Runtime>
);
/// Unchecked extrinsic type as expected by this runtime.
pub type UncheckedExtrinsic = generic::UncheckedExtrinsic<Address, Call, Signature, SignedExtra>;
/// Extrinsic type that has already been checked.
pub type CheckedExtrinsic = generic::CheckedExtrinsic<AccountId, Call, SignedExtra>;
/// Executive: handles dispatch to the various modules.
pub type Executive = frame_executive::Executive<Runtime, Block, system::ChainContext<Runtime>, Runtime, AllModules>;
// Exposes the runtime APIs the node host calls into: core block execution,
// metadata, block authoring, tx-pool validation, consensus and contract RPCs.
impl_runtime_apis! {
    // Core block-execution entry points used by the node's executor.
    impl sp_api::Core<Block> for Runtime {
        fn version() -> RuntimeVersion {
            VERSION
        }
        fn execute_block(block: Block) {
            Executive::execute_block(block)
        }
        fn initialize_block(header: &<Block as BlockT>::Header) {
            Executive::initialize_block(header)
        }
    }
    impl sp_api::Metadata<Block> for Runtime {
        fn metadata() -> OpaqueMetadata {
            Runtime::metadata().into()
        }
    }
    // Hooks used while authoring a new block.
    impl sp_block_builder::BlockBuilder<Block> for Runtime {
        fn apply_extrinsic(extrinsic: <Block as BlockT>::Extrinsic) -> ApplyExtrinsicResult {
            Executive::apply_extrinsic(extrinsic)
        }
        fn finalize_block() -> <Block as BlockT>::Header {
            Executive::finalize_block()
        }
        fn inherent_extrinsics(data: sp_inherents::InherentData) -> Vec<<Block as BlockT>::Extrinsic> {
            data.create_extrinsics()
        }
        fn check_inherents(
            block: Block,
            data: sp_inherents::InherentData,
        ) -> sp_inherents::CheckInherentsResult {
            data.check_extrinsics(&block)
        }
        fn random_seed() -> <Block as BlockT>::Hash {
            RandomnessCollectiveFlip::random_seed()
        }
    }
    impl sp_transaction_pool::runtime_api::TaggedTransactionQueue<Block> for Runtime {
        fn validate_transaction(
            source: TransactionSource,
            tx: <Block as BlockT>::Extrinsic,
        ) -> TransactionValidity {
            Executive::validate_transaction(source, tx)
        }
    }
    impl sp_offchain::OffchainWorkerApi<Block> for Runtime {
        fn offchain_worker(header: &<Block as BlockT>::Header) {
            Executive::offchain_worker(header)
        }
    }
    impl sp_consensus_aura::AuraApi<Block, AuraId> for Runtime {
        fn slot_duration() -> u64 {
            Aura::slot_duration()
        }
        fn authorities() -> Vec<AuraId> {
            Aura::authorities()
        }
    }
    impl sp_session::SessionKeys<Block> for Runtime {
        fn generate_session_keys(seed: Option<Vec<u8>>) -> Vec<u8> {
            opaque::SessionKeys::generate(seed)
        }
        fn decode_session_keys(
            encoded: Vec<u8>,
        ) -> Option<Vec<(Vec<u8>, KeyTypeId)>> {
            opaque::SessionKeys::decode_into_raw_public_keys(&encoded)
        }
    }
    impl fg_primitives::GrandpaApi<Block> for Runtime {
        fn grandpa_authorities() -> GrandpaAuthorityList {
            Grandpa::grandpa_authorities()
        }
        // No-op: equivocation handling is disabled in the GRANDPA config
        // above (`HandleEquivocation = ()`).
        fn submit_report_equivocation_extrinsic(
            _equivocation_proof: fg_primitives::EquivocationProof<
                <Block as BlockT>::Hash,
                NumberFor<Block>,
            >,
            _key_owner_proof: fg_primitives::OpaqueKeyOwnershipProof,
        ) -> Option<()> {
            None
        }
        fn generate_key_ownership_proof(
            _set_id: fg_primitives::SetId,
            _authority_id: GrandpaId,
        ) -> Option<fg_primitives::OpaqueKeyOwnershipProof> {
            // NOTE: this is the only implementation possible since we've
            // defined our key owner proof type as a bottom type (i.e. a type
            // with no values).
            None
        }
    }
    // RPC-facing contract helpers: dry-run calls, storage reads, rent info.
    impl contracts_rpc_runtime_api::ContractsApi<Block, AccountId, Balance, BlockNumber>
    for Runtime
    {
        fn call(
            origin: AccountId,
            dest: AccountId,
            value: Balance,
            gas_limit: u64,
            input_data: Vec<u8>,
        ) -> ContractExecResult {
            let exec_result =
                Contracts::bare_call(origin, dest.into(), value, gas_limit, input_data);
            match exec_result {
                Ok(v) => ContractExecResult::Success {
                    status: v.status,
                    data: v.data,
                },
                Err(_) => ContractExecResult::Error,
            }
        }
        fn get_storage(
            address: AccountId,
            key: [u8; 32],
        ) -> contracts_primitives::GetStorageResult {
            Contracts::get_storage(address, key)
        }
        fn rent_projection(
            address: AccountId,
        ) -> contracts_primitives::RentProjectionResult<BlockNumber> {
            Contracts::rent_projection(address)
        }
    }
}
| 32.860656 | 116 | 0.727301 |
75cafa0998ee574c057e13488dce60ca718b7bb7 | 26,274 | //! This module contains free-standing functions for creating AST fragments out
//! of smaller pieces.
//!
//! Note that all functions here intended to be stupid constructors, which just
//! assemble a finish node from immediate children. If you want to do something
//! smarter than that, it belongs to the `ext` submodule.
//!
//! Keep in mind that `from_text` functions should be kept private. The public
//! API should require to assemble every node piecewise. The trick of
//! `parse(format!())` we use internally is an implementation detail -- long
//! term, it will be replaced with direct tree manipulation.
use itertools::Itertools;
use stdx::{format_to, never};
use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
/// While the parent module defines basic atomic "constructors", the `ext`
/// module defines shortcuts for common things.
///
/// It's named `ext` rather than `shortcuts` just to keep it short.
/// While the parent module defines basic atomic "constructors", the `ext`
/// module defines shortcuts for common things.
///
/// It's named `ext` rather than `shortcuts` just to keep it short.
pub mod ext {
    use super::*;

    /// A plain `name` binding pattern, with no `ref`/`mut` modifiers.
    pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
        let source = format!("fn f({}: ())", name.text());
        ast_from_text(&source)
    }
    /// A single-segment path, e.g. `foo`.
    pub fn ident_path(ident: &str) -> ast::Path {
        path_unqualified(path_segment(name_ref(ident)))
    }
    /// Builds `a::b::c` out of individual identifiers; `None` if `parts` is empty.
    pub fn path_from_idents<'a>(
        parts: impl std::iter::IntoIterator<Item = &'a str>,
    ) -> Option<ast::Path> {
        let mut idents = parts.into_iter();
        let first = ext::ident_path(idents.next()?);
        Some(idents.fold(first, |acc, ident| path_concat(acc, ext::ident_path(ident))))
    }
    /// Builds a field-access chain `a.b.c`; `None` if `parts` is empty.
    pub fn field_from_idents<'a>(
        parts: impl std::iter::IntoIterator<Item = &'a str>,
    ) -> Option<ast::Expr> {
        let mut idents = parts.into_iter();
        let first = expr_path(ext::ident_path(idents.next()?));
        Some(idents.fold(first, |acc, ident| expr_field(acc, ident)))
    }
    /// The `unreachable!()` macro-call expression.
    pub fn expr_unreachable() -> ast::Expr {
        expr_from_text("unreachable!()")
    }
    /// The `todo!()` macro-call expression.
    pub fn expr_todo() -> ast::Expr {
        expr_from_text("todo!()")
    }
    /// `{}` — a block with no statements and no tail expression.
    pub fn empty_block_expr() -> ast::BlockExpr {
        block_expr(None, None)
    }
    /// The `bool` type.
    pub fn ty_bool() -> ast::Type {
        ty_path(ident_path("bool"))
    }
    /// `Option<t>`.
    pub fn ty_option(t: ast::Type) -> ast::Type {
        ty_from_text(&format!("Option<{}>", t))
    }
    /// `Result<t, e>`.
    pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
        ty_from_text(&format!("Result<{}, {}>", t, e))
    }
}
pub fn name(text: &str) -> ast::Name {
ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text))
}
pub fn name_ref(text: &str) -> ast::NameRef {
ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text))
}
/// Returns the `r#` raw-identifier escape prefix when `ident` is a keyword
/// that needs escaping, and `""` otherwise. The path keywords `self`,
/// `crate`, `super` and `Self` are never escaped (`r#self` etc. are invalid).
fn raw_ident_esc(ident: &str) -> &'static str {
    match ident {
        "self" | "crate" | "super" | "Self" => "",
        _ if parser::SyntaxKind::from_keyword(ident).is_some() => "r#",
        _ => "",
    }
}
/// Creates a lifetime from `text`, prepending the leading `'` if the caller
/// forgot it (which is reported as a bug via `never!`).
pub fn lifetime(text: &str) -> ast::Lifetime {
    let mut text = text;
    // `tmp` is declared before the `if` so the fixed-up string outlives the
    // re-borrowed `text`.
    let tmp;
    if never!(!text.starts_with('\'')) {
        tmp = format!("'{}", text);
        text = &tmp;
    }
    ast_from_text(&format!("fn f<{}>() {{ }}", text))
}
// FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
// `expr_xxx`.
pub fn ty(text: &str) -> ast::Type {
ty_from_text(text)
}
pub fn ty_placeholder() -> ast::Type {
ty_from_text("_")
}
pub fn ty_unit() -> ast::Type {
ty_from_text("()")
}
/// Builds a tuple type; a single element gets the trailing comma (`(T,)`)
/// that distinguishes a one-tuple from a parenthesized type.
pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
    let types: Vec<ast::Type> = types.into_iter().collect();
    let mut body = types.iter().join(", ");
    if types.len() == 1 {
        body.push(',');
    }
    ty_from_text(&format!("({})", body))
}
pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
ty_from_text(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
}
pub fn ty_path(path: ast::Path) -> ast::Type {
ty_from_text(&path.to_string())
}
fn ty_from_text(text: &str) -> ast::Type {
ast_from_text(&format!("type _T = {};", text))
}
pub fn assoc_item_list() -> ast::AssocItemList {
ast_from_text("impl C for D {}")
}
pub fn impl_(
ty: ast::Path,
params: Option<ast::GenericParamList>,
ty_params: Option<ast::GenericParamList>,
) -> ast::Impl {
let params = match params {
Some(params) => params.to_string(),
None => String::new(),
};
let ty_params = match ty_params {
Some(params) => params.to_string(),
None => String::new(),
};
ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params))
}
pub fn impl_trait(
trait_: ast::Path,
ty: ast::Path,
ty_params: Option<ast::GenericParamList>,
) -> ast::Impl {
let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params))
}
pub(crate) fn generic_arg_list() -> ast::GenericArgList {
ast_from_text("const S: T<> = ();")
}
pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
ast_from_text(&format!("use {};", name_ref))
}
pub fn path_segment_self() -> ast::PathSegment {
ast_from_text("use self;")
}
pub fn path_segment_super() -> ast::PathSegment {
ast_from_text("use super;")
}
pub fn path_segment_crate() -> ast::PathSegment {
ast_from_text("use crate;")
}
pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
ast_from_text(&format!("use {}", segment))
}
pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
ast_from_text(&format!("{}::{}", qual, segment))
}
// FIXME: path concatenation operation doesn't make sense as AST op.
pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path {
ast_from_text(&format!("{}::{}", first, second))
}
pub fn path_from_segments(
segments: impl IntoIterator<Item = ast::PathSegment>,
is_abs: bool,
) -> ast::Path {
let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
ast_from_text(&if is_abs {
format!("use ::{};", segments)
} else {
format!("use {};", segments)
})
}
pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {
let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::");
ast_from_text(&format!("use {};", paths))
}
// FIXME: should not be pub
pub fn path_from_text(text: &str) -> ast::Path {
ast_from_text(&format!("fn main() {{ let test = {}; }}", text))
}
pub fn use_tree_glob() -> ast::UseTree {
ast_from_text("use *;")
}
/// Builds a use-tree: `path[::{list}][::*][ rename]`.
pub fn use_tree(
    path: ast::Path,
    use_tree_list: Option<ast::UseTreeList>,
    alias: Option<ast::Rename>,
    add_star: bool,
) -> ast::UseTree {
    let mut text = format!("use {}", path.syntax());
    if let Some(list) = use_tree_list {
        format_to!(text, "::{}", list);
    }
    if add_star {
        text.push_str("::*");
    }
    if let Some(rename) = alias {
        format_to!(text, " {}", rename);
    }
    ast_from_text(&text)
}
pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
ast_from_text(&format!("use {{{}}};", use_trees))
}
pub fn use_(visibility: Option<ast::Visibility>, use_tree: ast::UseTree) -> ast::Use {
let visibility = match visibility {
None => String::new(),
Some(it) => format!("{} ", it),
};
ast_from_text(&format!("{}use {};", visibility, use_tree))
}
pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr {
ast_from_text(&format!("fn f() {{ {} {} }}", path, fields))
}
pub fn record_expr_field_list(
fields: impl IntoIterator<Item = ast::RecordExprField>,
) -> ast::RecordExprFieldList {
let fields = fields.into_iter().join(", ");
ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields))
}
pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
return match expr {
Some(expr) => from_text(&format!("{}: {}", name, expr)),
None => from_text(&name.to_string()),
};
fn from_text(text: &str) -> ast::RecordExprField {
ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
}
}
pub fn record_field(
visibility: Option<ast::Visibility>,
name: ast::Name,
ty: ast::Type,
) -> ast::RecordField {
let visibility = match visibility {
None => String::new(),
Some(it) => format!("{} ", it),
};
ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty))
}
// TODO
pub fn block_expr(
stmts: impl IntoIterator<Item = ast::Stmt>,
tail_expr: Option<ast::Expr>,
) -> ast::BlockExpr {
let mut buf = "{\n".to_string();
for stmt in stmts.into_iter() {
format_to!(buf, " {}\n", stmt);
}
if let Some(tail_expr) = tail_expr {
format_to!(buf, " {}\n", tail_expr);
}
buf += "}";
ast_from_text(&format!("fn f() {}", buf))
}
pub fn expr_unit() -> ast::Expr {
expr_from_text("()")
}
pub fn expr_literal(text: &str) -> ast::Literal {
assert_eq!(text.trim(), text);
ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
}
pub fn expr_empty_block() -> ast::Expr {
expr_from_text("{}")
}
pub fn expr_path(path: ast::Path) -> ast::Expr {
expr_from_text(&path.to_string())
}
pub fn expr_continue() -> ast::Expr {
expr_from_text("continue")
}
// Consider `op: SyntaxKind` instead for nicer syntax at the call-site?
pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
expr_from_text(&format!("{} {} {}", lhs, op, rhs))
}
pub fn expr_break(expr: Option<ast::Expr>) -> ast::Expr {
match expr {
Some(expr) => expr_from_text(&format!("break {}", expr)),
None => expr_from_text("break"),
}
}
pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
match expr {
Some(expr) => expr_from_text(&format!("return {}", expr)),
None => expr_from_text("return"),
}
}
pub fn expr_try(expr: ast::Expr) -> ast::Expr {
expr_from_text(&format!("{}?", expr))
}
pub fn expr_await(expr: ast::Expr) -> ast::Expr {
expr_from_text(&format!("{}.await", expr))
}
pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
expr_from_text(&format!("match {} {}", expr, match_arm_list))
}
pub fn expr_if(
condition: ast::Condition,
then_branch: ast::BlockExpr,
else_branch: Option<ast::ElseBranch>,
) -> ast::Expr {
let else_branch = match else_branch {
Some(ast::ElseBranch::Block(block)) => format!("else {}", block),
Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr),
None => String::new(),
};
expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch))
}
pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
expr_from_text(&format!("for {} in {} {}", pat, expr, block))
}
pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
expr_from_text(&format!("loop {}", block))
}
pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
let token = token(op);
expr_from_text(&format!("{}{}", token, expr))
}
pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
expr_from_text(&format!("{}{}", f, arg_list))
}
pub fn expr_method_call(
receiver: ast::Expr,
method: ast::NameRef,
arg_list: ast::ArgList,
) -> ast::Expr {
expr_from_text(&format!("{}.{}{}", receiver, method, arg_list))
}
pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
expr_from_text(&format!("{}!{}", f, arg_list))
}
pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) })
}
pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
let params = pats.into_iter().join(", ");
expr_from_text(&format!("|{}| {}", params, expr))
}
pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
expr_from_text(&format!("{}.{}", receiver, field))
}
pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
expr_from_text(&format!("({})", expr))
}
pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
let expr = elements.into_iter().format(", ");
expr_from_text(&format!("({})", expr))
}
pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
expr_from_text(&format!("{} = {}", lhs, rhs))
}
fn expr_from_text(text: &str) -> ast::Expr {
ast_from_text(&format!("const C: () = {};", text))
}
pub fn condition(expr: ast::Expr, pattern: Option<ast::Pat>) -> ast::Condition {
match pattern {
None => ast_from_text(&format!("const _: () = while {} {{}};", expr)),
Some(pattern) => {
ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
}
}
}
pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", ")))
}
pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
let mut s = String::from("fn f(");
if ref_ {
s.push_str("ref ");
}
if mut_ {
s.push_str("mut ");
}
format_to!(s, "{}", name);
s.push_str(": ())");
ast_from_text(&s)
}
pub fn wildcard_pat() -> ast::WildcardPat {
return from_text("_");
fn from_text(text: &str) -> ast::WildcardPat {
ast_from_text(&format!("fn f({}: ())", text))
}
}
pub fn literal_pat(lit: &str) -> ast::LiteralPat {
return from_text(lit);
fn from_text(text: &str) -> ast::LiteralPat {
ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text))
}
}
/// Creates a tuple of patterns from an iterator of patterns.
///
/// Invariant: `pats` must be length > 0
/// Creates a tuple of patterns from an iterator of patterns.
///
/// A one-element tuple keeps its trailing comma (`(p,)`).
///
/// Invariant: `pats` must be length > 0
pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
    let pats: Vec<ast::Pat> = pats.into_iter().collect();
    let mut body = pats.iter().join(", ");
    if pats.len() == 1 {
        body.push(',');
    }
    ast_from_text(&format!("fn f(({}): ())", body))
}
pub fn tuple_struct_pat(
path: ast::Path,
pats: impl IntoIterator<Item = ast::Pat>,
) -> ast::TupleStructPat {
let pats_str = pats.into_iter().join(", ");
return from_text(&format!("{}({})", path, pats_str));
fn from_text(text: &str) -> ast::TupleStructPat {
ast_from_text(&format!("fn f({}: ())", text))
}
}
pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
let pats_str = pats.into_iter().join(", ");
return from_text(&format!("{} {{ {} }}", path, pats_str));
fn from_text(text: &str) -> ast::RecordPat {
ast_from_text(&format!("fn f({}: ())", text))
}
}
pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat {
ast_from_text(&format!("fn f({} {}: ()))", path, fields))
}
pub fn record_pat_field_list(
fields: impl IntoIterator<Item = ast::RecordPatField>,
) -> ast::RecordPatFieldList {
let fields = fields.into_iter().join(", ");
ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields))
}
pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat))
}
/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise.
pub fn path_pat(path: ast::Path) -> ast::Pat {
return from_text(&path.to_string());
fn from_text(text: &str) -> ast::Pat {
ast_from_text(&format!("fn f({}: ())", text))
}
}
pub fn match_arm(
pats: impl IntoIterator<Item = ast::Pat>,
guard: Option<ast::Expr>,
expr: ast::Expr,
) -> ast::MatchArm {
let pats_str = pats.into_iter().join(" | ");
return match guard {
Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)),
None => from_text(&format!("{} => {}", pats_str, expr)),
};
fn from_text(text: &str) -> ast::MatchArm {
ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
}
}
pub fn match_arm_with_guard(
pats: impl IntoIterator<Item = ast::Pat>,
guard: ast::Expr,
expr: ast::Expr,
) -> ast::MatchArm {
let pats_str = pats.into_iter().join(" | ");
return from_text(&format!("{} if {} => {}", pats_str, guard, expr));
fn from_text(text: &str) -> ast::MatchArm {
ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
}
}
pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
let arms_str = arms
.into_iter()
.map(|arm| {
let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
let comma = if needs_comma { "," } else { "" };
format!(" {}{}\n", arm.syntax(), comma)
})
.collect::<String>();
return from_text(&arms_str);
fn from_text(text: &str) -> ast::MatchArmList {
ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
}
}
pub fn where_pred(
path: ast::Path,
bounds: impl IntoIterator<Item = ast::TypeBound>,
) -> ast::WherePred {
let bounds = bounds.into_iter().join(" + ");
return from_text(&format!("{}: {}", path, bounds));
fn from_text(text: &str) -> ast::WherePred {
ast_from_text(&format!("fn f() where {} {{ }}", text))
}
}
pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
let preds = preds.into_iter().join(", ");
return from_text(preds.as_str());
fn from_text(text: &str) -> ast::WhereClause {
ast_from_text(&format!("fn f() where {} {{ }}", text))
}
}
pub fn let_stmt(
pattern: ast::Pat,
ty: Option<ast::Type>,
initializer: Option<ast::Expr>,
) -> ast::LetStmt {
let mut text = String::new();
format_to!(text, "let {}", pattern);
if let Some(ty) = ty {
format_to!(text, ": {}", ty);
}
match initializer {
Some(it) => format_to!(text, " = {};", it),
None => format_to!(text, ";"),
};
ast_from_text(&format!("fn f() {{ {} }}", text))
}
pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
let semi = if expr.is_block_like() { "" } else { ";" };
ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
}
pub fn item_const(
visibility: Option<ast::Visibility>,
name: ast::Name,
ty: ast::Type,
expr: ast::Expr,
) -> ast::Const {
let visibility = match visibility {
None => String::new(),
Some(it) => format!("{} ", it),
};
ast_from_text(&format!("{} const {}: {} = {};", visibility, name, ty, expr))
}
pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
}
pub fn self_param() -> ast::SelfParam {
ast_from_text("fn f(&self) { }")
}
pub fn ret_type(ty: ast::Type) -> ast::RetType {
ast_from_text(&format!("fn f() -> {} {{ }}", ty))
}
pub fn param_list(
self_param: Option<ast::SelfParam>,
pats: impl IntoIterator<Item = ast::Param>,
) -> ast::ParamList {
let args = pats.into_iter().join(", ");
let list = match self_param {
Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
None => format!("fn f({}) {{ }}", args),
};
ast_from_text(&list)
}
pub fn type_param(name: ast::Name, ty: Option<ast::TypeBoundList>) -> ast::TypeParam {
let bound = match ty {
Some(it) => format!(": {}", it),
None => String::new(),
};
ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound))
}
pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
ast_from_text(&format!("fn f<{}>() {{ }}", lifetime))
}
pub fn generic_param_list(
pats: impl IntoIterator<Item = ast::GenericParam>,
) -> ast::GenericParamList {
let args = pats.into_iter().join(", ");
ast_from_text(&format!("fn f<{}>() {{ }}", args))
}
pub fn visibility_pub_crate() -> ast::Visibility {
ast_from_text("pub(crate) struct S")
}
pub fn visibility_pub() -> ast::Visibility {
ast_from_text("pub struct S")
}
pub fn tuple_field_list(fields: impl IntoIterator<Item = ast::TupleField>) -> ast::TupleFieldList {
let fields = fields.into_iter().join(", ");
ast_from_text(&format!("struct f({});", fields))
}
pub fn record_field_list(
fields: impl IntoIterator<Item = ast::RecordField>,
) -> ast::RecordFieldList {
let fields = fields.into_iter().join(", ");
ast_from_text(&format!("struct f {{ {} }}", fields))
}
pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::TupleField {
let visibility = match visibility {
None => String::new(),
Some(it) => format!("{} ", it),
};
ast_from_text(&format!("struct f({}{});", visibility, ty))
}
pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
let field_list = match field_list {
None => String::new(),
Some(it) => format!("{}", it),
};
ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
}
/// Builds a free function item from its parts. Optional pieces render as
/// empty strings when absent.
pub fn fn_(
    visibility: Option<ast::Visibility>,
    fn_name: ast::Name,
    type_params: Option<ast::GenericParamList>,
    params: ast::ParamList,
    body: ast::BlockExpr,
    ret_type: Option<ast::RetType>,
    is_async: bool,
) -> ast::Fn {
    let type_params = type_params.map_or_else(String::new, |it| it.to_string());
    // Present pieces carry their own trailing space so absent ones leave no gap.
    let ret_type = ret_type.map_or_else(String::new, |it| format!("{} ", it));
    let visibility = visibility.map_or_else(String::new, |it| format!("{} ", it));
    let async_literal = if is_async { "async " } else { "" };
    ast_from_text(&format!(
        "{}{}fn {}{}{} {}{}",
        visibility, async_literal, fn_name, type_params, params, ret_type, body
    ))
}
/// Builds a struct item. Tuple structs (`struct S(..)`) get the mandatory
/// terminating semicolon; record structs (`struct S { .. }`) do not.
pub fn struct_(
    visibility: Option<ast::Visibility>,
    strukt_name: ast::Name,
    generic_param_list: Option<ast::GenericParamList>,
    field_list: ast::FieldList,
) -> ast::Struct {
    let semicolon = match &field_list {
        ast::FieldList::TupleFieldList(_) => ";",
        _ => "",
    };
    let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string());
    let visibility = visibility.map_or_else(String::new, |it| format!("{} ", it));
    ast_from_text(&format!(
        "{}struct {}{}{}{}",
        visibility, strukt_name, type_params, field_list, semicolon
    ))
}
/// Parses `text` as a source file and returns the first descendant node that
/// casts to `N`.
///
/// # Panics
/// Panics when no such node exists — callers are expected to pass templates
/// known to contain the node they ask for.
fn ast_from_text<N: AstNode>(text: &str) -> N {
    let parse = SourceFile::parse(text);
    let node = match parse.tree().syntax().descendants().find_map(N::cast) {
        Some(it) => it,
        None => {
            panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
        }
    };
    // Detach the node from the parsed file so it can be inserted into other
    // trees; a fresh subtree's range always starts at offset 0.
    let node = node.clone_subtree();
    assert_eq!(node.syntax().text_range().start(), 0.into());
    node
}
/// Fetches a mutable (`clone_for_update`) token of the given `kind` out of
/// the `tokens::SOURCE_FILE` fixture.
///
/// # Panics
/// Panics when the fixture does not contain a token of that kind.
pub fn token(kind: SyntaxKind) -> SyntaxToken {
    let file = tokens::SOURCE_FILE.tree().syntax().clone_for_update();
    let found = file
        .descendants_with_tokens()
        .filter_map(|element| element.into_token())
        .find(|token| token.kind() == kind);
    match found {
        Some(token) => token,
        None => panic!("unhandled token: {:?}", kind),
    }
}
pub mod tokens {
    use once_cell::sync::Lazy;
    use crate::{ast, AstNode, ParseResult, SourceFile, SyntaxKind::*, SyntaxToken};
    /// A lazily parsed fixture containing at least one occurrence of every
    /// token the helpers below hand out (operators, `\n`, `\n\n`, spaces…).
    pub(super) static SOURCE_FILE: Lazy<ParseResult<SourceFile>> = Lazy::new(|| {
        SourceFile::parse(
            "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
        )
    });
    /// A single ASCII-space whitespace token.
    pub fn single_space() -> SyntaxToken {
        SOURCE_FILE
            .tree()
            .syntax()
            .clone_for_update()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
            .unwrap()
    }
    /// A whitespace token with exactly `text` as its content; `text` must be
    /// all-whitespace.
    pub fn whitespace(text: &str) -> SyntaxToken {
        assert!(text.trim().is_empty());
        let sf = SourceFile::parse(text).ok().unwrap();
        sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
    }
    /// A doc-comment token parsed from `text`.
    pub fn doc_comment(text: &str) -> SyntaxToken {
        assert!(!text.trim().is_empty());
        let sf = SourceFile::parse(text).ok().unwrap();
        sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
    }
    /// A literal token (number, string, …) parsed from `text`, which must be
    /// pre-trimmed.
    pub fn literal(text: &str) -> SyntaxToken {
        assert_eq!(text.trim(), text);
        let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
        lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
    }
    /// A lone `\n` token, detached from the fixture so it can be inserted
    /// anywhere.
    pub fn single_newline() -> SyntaxToken {
        let res = SOURCE_FILE
            .tree()
            .syntax()
            .clone_for_update()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
            .unwrap();
        res.detach();
        res
    }
    /// A `\n\n` token, i.e. one blank line.
    pub fn blank_line() -> SyntaxToken {
        SOURCE_FILE
            .tree()
            .syntax()
            .clone_for_update()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
            .unwrap()
    }
    /// Owns a parsed file whose first token is the whitespace of interest.
    pub struct WsBuilder(SourceFile);
    impl WsBuilder {
        pub fn new(text: &str) -> WsBuilder {
            WsBuilder(SourceFile::parse(text).ok().unwrap())
        }
        /// The first token of the underlying file.
        pub fn ws(&self) -> SyntaxToken {
            self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
        }
    }
}
| 31.465868 | 101 | 0.580688 |
23182e78f9747b58e64b5a0fdb73e81114f3e1ab | 35,475 | use super::*;
use crate::{getters::Getter, parsers::Parser};
use bigdecimal::ToPrimitive;
use common::purge_dangling_foreign_keys;
use indoc::indoc;
use native_types::{MySqlType, NativeType};
use quaint::{prelude::Queryable, Value};
use serde_json::from_str;
use std::{
borrow::Cow,
collections::{BTreeMap, HashSet},
};
use tracing::trace;
/// Returns `true` when the server's reported version string identifies a
/// MariaDB server rather than stock MySQL.
fn is_mariadb(version: &str) -> bool {
    version.find("MariaDB").is_some()
}
// Which MySQL-compatible server we are talking to. Passed down to column
// introspection (`get_all_columns`), which needs to distinguish the two.
enum Flavour {
    Mysql,
    MariaDb,
}
impl Flavour {
    /// Derives the flavour from the raw version string reported by the server.
    fn from_version(version_string: &str) -> Self {
        match is_mariadb(version_string) {
            true => Self::MariaDb,
            false => Self::Mysql,
        }
    }
}
/// MySQL schema describer: introspects `information_schema` over an existing
/// connection to build a `SqlSchema`.
pub struct SqlSchemaDescriber<'a> {
    conn: &'a dyn Queryable,
}
#[async_trait::async_trait]
impl super::SqlSchemaDescriberBackend for SqlSchemaDescriber<'_> {
    async fn list_databases(&self) -> DescriberResult<Vec<String>> {
        self.get_databases().await
    }
    /// Table count plus on-disk size of the schema, gathered in two queries.
    async fn get_metadata(&self, schema: &str) -> DescriberResult<SqlMetadata> {
        let table_count = self.get_table_names(schema).await?.len();
        let size_in_bytes = self.get_size(schema).await?;
        Ok(SqlMetadata {
            table_count,
            size_in_bytes,
        })
    }
    #[tracing::instrument(skip(self))]
    async fn describe(&self, schema: &str) -> DescriberResult<SqlSchema> {
        // Sniff MariaDB vs. MySQL from the version string; a missing version
        // defaults to plain MySQL.
        let version = self.conn.version().await.ok().flatten();
        let flavour = version
            .as_ref()
            .map(|s| Flavour::from_version(s))
            .unwrap_or(Flavour::Mysql);
        let table_names = self.get_table_names(schema).await?;
        let mut tables = Vec::with_capacity(table_names.len());
        // Fetch columns, indexes and FKs for the whole schema up front, then
        // carve out each table's share while walking the table names.
        let mut columns = Self::get_all_columns(self.conn, schema, &flavour).await?;
        let mut indexes = self.get_all_indexes(schema).await?;
        let mut fks = Self::get_foreign_keys(self.conn, schema).await?;
        let mut enums = vec![];
        for table_name in &table_names {
            let (table, enms) = self.get_table(table_name, &mut columns, &mut indexes, &mut fks);
            tables.push(table);
            enums.extend(enms.into_iter());
        }
        // Presumably drops FKs whose referenced table was not introspected —
        // see `common::purge_dangling_foreign_keys` for the exact rule.
        purge_dangling_foreign_keys(&mut tables);
        let views = self.get_views(schema).await?;
        let procedures = self.get_procedures(schema).await?;
        // MySQL has no sequences or user-defined types, hence the empty vecs.
        Ok(SqlSchema {
            tables,
            enums,
            views,
            procedures,
            sequences: vec![],
            user_defined_types: vec![],
        })
    }
    #[tracing::instrument(skip(self))]
    async fn version(&self, _schema: &str) -> crate::DescriberResult<Option<String>> {
        Ok(self.conn.version().await?)
    }
}
// Pull in the shared default-value parsing helpers (parse_int, parse_float, ...).
impl Parser for SqlSchemaDescriber<'_> {}
impl<'a> SqlSchemaDescriber<'a> {
    /// Creates a describer that introspects through the given connection.
    pub fn new(conn: &'a dyn Queryable) -> SqlSchemaDescriber<'a> {
        SqlSchemaDescriber { conn }
    }
    /// Fetches every schema name from `information_schema.schemata`.
    #[tracing::instrument(skip(self))]
    async fn get_databases(&self) -> DescriberResult<Vec<String>> {
        let sql = "select schema_name as schema_name from information_schema.schemata;";
        let rows = self.conn.query_raw(sql, &[]).await?;
        let names = rows
            .into_iter()
            .map(|row| row.get_expect_string("schema_name"))
            .collect();
        trace!("Found schema names: {:?}", names);
        Ok(names)
    }
    /// Fetches all views of the schema. A view's definition can be NULL
    /// (e.g. insufficient privileges), hence `get_string` instead of
    /// `get_expect_string` for `view_sql`.
    #[tracing::instrument(skip(self))]
    async fn get_views(&self, schema: &str) -> DescriberResult<Vec<View>> {
        let sql = indoc! {r#"
            SELECT TABLE_NAME AS view_name, VIEW_DEFINITION AS view_sql
            FROM INFORMATION_SCHEMA.VIEWS
            WHERE TABLE_SCHEMA = ?;
        "#};
        let result_set = self.conn.query_raw(sql, &[schema.into()]).await?;
        let mut views = Vec::with_capacity(result_set.len());
        for row in result_set.into_iter() {
            views.push(View {
                name: row.get_expect_string("view_name"),
                definition: row.get_string("view_sql"),
            })
        }
        Ok(views)
    }
    /// Fetches all stored procedures (not functions) of the schema.
    /// The definition may be NULL, hence `get_string`.
    #[tracing::instrument(skip(self))]
    async fn get_procedures(&self, schema: &str) -> DescriberResult<Vec<Procedure>> {
        let sql = r#"
            SELECT routine_name AS name,
                routine_definition AS definition
            FROM information_schema.routines
            WHERE ROUTINE_SCHEMA = ?
            AND ROUTINE_TYPE = 'PROCEDURE'
        "#;
        let rows = self.conn.query_raw(sql, &[schema.into()]).await?;
        let mut procedures = Vec::with_capacity(rows.len());
        for row in rows.into_iter() {
            procedures.push(Procedure {
                name: row.get_expect_string("name"),
                definition: row.get_string("definition"),
            });
        }
        Ok(procedures)
    }
    /// Fetches all base-table names of the schema, ordered case-sensitively
    /// (`ORDER BY Binary`) for deterministic output.
    #[tracing::instrument(skip(self))]
    async fn get_table_names(&self, schema: &str) -> DescriberResult<Vec<String>> {
        let sql = "SELECT table_name as table_name FROM information_schema.tables
            WHERE table_schema = ?
            -- Views are not supported yet
            AND table_type = 'BASE TABLE'
            ORDER BY Binary table_name";
        let rows = self.conn.query_raw(sql, &[schema.into()]).await?;
        let names = rows
            .into_iter()
            .map(|row| row.get_expect_string("table_name"))
            .collect();
        trace!("Found table names: {:?}", names);
        Ok(names)
    }
    /// Returns the schema's total size in bytes (data + indexes), or 0 if
    /// the sum is unavailable.
    #[tracing::instrument(skip(self))]
    async fn get_size(&self, schema: &str) -> DescriberResult<usize> {
        let sql = r#"
            SELECT
            SUM(data_length + index_length) as size
            FROM information_schema.TABLES
            WHERE table_schema = ?
        "#;
        let result = self.conn.query_raw(sql, &[schema.into()]).await?;
        // SUM comes back as a decimal; round before converting to usize.
        let size = result
            .first()
            .and_then(|row| {
                row.get("size")
                    .and_then(|x| x.as_numeric())
                    .and_then(|decimal| decimal.round(0).to_usize())
            })
            .unwrap_or(0);
        trace!("Found db size: {:?}", size);
        Ok(size as usize)
    }
    /// Assembles one `Table` by removing this table's entries from the
    /// bulk-fetched column/index/foreign-key maps. Missing entries fall back
    /// to empty collections so a table with no columns/indexes still works.
    #[tracing::instrument(skip(self, columns, indexes, foreign_keys))]
    fn get_table(
        &self,
        name: &str,
        columns: &mut BTreeMap<String, (Vec<Column>, Vec<Enum>)>,
        indexes: &mut BTreeMap<String, (BTreeMap<String, Index>, Option<PrimaryKey>)>,
        foreign_keys: &mut BTreeMap<String, Vec<ForeignKey>>,
    ) -> (Table, Vec<Enum>) {
        let (columns, enums) = columns.remove(name).unwrap_or((vec![], vec![]));
        let (indices, primary_key) = indexes.remove(name).unwrap_or_else(|| (BTreeMap::new(), None));
        let foreign_keys = foreign_keys.remove(name).unwrap_or_default();
        (
            Table {
                name: name.to_string(),
                columns,
                foreign_keys,
                indices: indices.into_iter().map(|(_k, v)| v).collect(),
                primary_key,
            },
            enums,
        )
    }
    /// Fetches every column of every table in the schema in one query and
    /// groups them (plus any inline enum definitions) by table name.
    /// Default values are interpreted per column-type family; MariaDB and
    /// MySQL report defaults differently, hence the `flavour` parameter.
    async fn get_all_columns(
        conn: &dyn Queryable,
        schema_name: &str,
        flavour: &Flavour,
    ) -> DescriberResult<BTreeMap<String, (Vec<Column>, Vec<Enum>)>> {
        // We alias all the columns because MySQL column names are case-insensitive in queries, but the
        // information schema column names became upper-case in MySQL 8, causing the code fetching
        // the result values by column name below to fail.
        let sql = "
            SELECT
                column_name column_name,
                data_type data_type,
                column_type full_data_type,
                character_maximum_length character_maximum_length,
                numeric_precision numeric_precision,
                numeric_scale numeric_scale,
                datetime_precision datetime_precision,
                column_default column_default,
                is_nullable is_nullable,
                extra extra,
                table_name table_name
            FROM information_schema.columns
            WHERE table_schema = ?
            ORDER BY ordinal_position
        ";
        let mut map = BTreeMap::new();
        let rows = conn.query_raw(sql, &[schema_name.into()]).await?;
        for col in rows {
            trace!("Got column: {:?}", col);
            let table_name = col.get_expect_string("table_name");
            let name = col.get_expect_string("column_name");
            let data_type = col.get_expect_string("data_type");
            let full_data_type = col.get_expect_string("full_data_type");
            let is_nullable = col.get_expect_string("is_nullable").to_lowercase();
            let is_required = match is_nullable.as_ref() {
                "no" => true,
                "yes" => false,
                x => panic!("unrecognized is_nullable variant '{}'", x),
            };
            let arity = if is_required {
                ColumnArity::Required
            } else {
                ColumnArity::Nullable
            };
            let character_maximum_length = col.get_u32("character_maximum_length");
            let time_precision = col.get_u32("datetime_precision");
            let numeric_precision = col.get_u32("numeric_precision");
            let numeric_scale = col.get_u32("numeric_scale");
            let precision = Precision {
                character_maximum_length,
                numeric_precision,
                numeric_scale,
                time_precision,
            };
            let default_value = col.get("column_default");
            // Also yields the enum definition when the column type is an
            // inline `enum(...)`.
            let (tpe, enum_option) = Self::get_column_type_and_enum(
                &table_name,
                &name,
                &data_type,
                &full_data_type,
                precision,
                arity,
                default_value,
            );
            let extra = col.get_expect_string("extra").to_lowercase();
            let auto_increment = matches!(extra.as_str(), "auto_increment");
            let entry = map.entry(table_name).or_insert((Vec::new(), Vec::new()));
            if let Some(enm) = enum_option {
                entry.1.push(enm);
            }
            // Interpret the raw default string per type family. A literal
            // "NULL" (or no value at all) means no default.
            let default = match default_value {
                None => None,
                Some(param_value) => match param_value.to_string() {
                    None => None,
                    Some(x) if x == "NULL" => None,
                    Some(default_string) => {
                        // MySQL 8 marks expression defaults with DEFAULT_GENERATED;
                        // MariaDB reports every default as an expression-ish string.
                        let default_generated = matches!(extra.as_str(), "default_generated");
                        let maria_db = matches!(flavour, Flavour::MariaDb);
                        let default_expression = default_generated || maria_db;
                        Some(match &tpe.family {
                            ColumnTypeFamily::Int => match Self::parse_int(&default_string) {
                                Some(int_value) => DefaultValue::value(int_value),
                                None => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::BigInt => match Self::parse_big_int(&default_string) {
                                Some(int_value) => DefaultValue::value(int_value),
                                None => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::Float => match Self::parse_float(&default_string) {
                                Some(float_value) => DefaultValue::value(float_value),
                                None => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::Decimal => match Self::parse_float(&default_string) {
                                Some(float_value) => DefaultValue::value(float_value),
                                None => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::Boolean => match Self::parse_int(&default_string) {
                                Some(PrismaValue::Int(1)) => DefaultValue::value(true),
                                Some(PrismaValue::Int(0)) => DefaultValue::value(false),
                                _ => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::String => {
                                // See https://dev.mysql.com/doc/refman/8.0/en/information-schema-columns-table.html
                                // and https://mariadb.com/kb/en/information-schema-columns-table/
                                // On MariaDB, a default that does not start with a quote
                                // is an expression, not a string literal.
                                if default_generated
                                    || (maria_db && !matches!(default_string.chars().next(), Some('\'')))
                                {
                                    Self::dbgenerated_expression(&default_string)
                                } else {
                                    DefaultValue::value(PrismaValue::String(Self::unescape_and_unquote_default_string(
                                        default_string,
                                        flavour,
                                    )))
                                }
                            }
                            // TODO: check other now()-style definitions as well.
                            ColumnTypeFamily::DateTime => match Self::default_is_current_timestamp(&default_string) {
                                true => DefaultValue::now(),
                                _ => Self::db_generated(&default_string, default_expression),
                            },
                            ColumnTypeFamily::Binary => Self::db_generated(&default_string, default_expression),
                            ColumnTypeFamily::Json => Self::db_generated(&default_string, default_expression),
                            ColumnTypeFamily::Uuid => Self::db_generated(&default_string, default_expression),
                            ColumnTypeFamily::Enum(_) => {
                                if default_generated
                                    || (maria_db && !matches!(default_string.chars().next(), Some('\'')))
                                {
                                    Self::dbgenerated_expression(&default_string)
                                } else {
                                    DefaultValue::value(PrismaValue::Enum(Self::unquote_string(
                                        &default_string.replace("_utf8mb4", "").replace("\\\'", ""),
                                    )))
                                }
                            }
                            // NOTE(review): every other arm passes `default_expression`
                            // here, but this one passes `default_generated`, i.e. MariaDB
                            // expression defaults on unsupported types are NOT wrapped —
                            // looks inconsistent; confirm whether this is intentional.
                            ColumnTypeFamily::Unsupported(_) => Self::db_generated(&default_string, default_generated),
                        })
                    }
                },
            };
            let col = Column {
                name,
                tpe,
                default,
                auto_increment,
            };
            entry.0.push(col);
        }
        Ok(map)
    }
fn db_generated(default_string: &str, default_generated: bool) -> DefaultValue {
if default_generated {
Self::dbgenerated_expression(default_string)
} else {
DefaultValue::db_generated(default_string)
}
}
fn dbgenerated_expression(default_string: &str) -> DefaultValue {
if matches!(default_string.chars().next(), Some('(')) {
DefaultValue::db_generated(default_string)
} else {
let mut introspected_default = String::with_capacity(default_string.len());
introspected_default.push('(');
introspected_default.push_str(default_string);
introspected_default.push(')');
DefaultValue::db_generated(introspected_default)
}
}
    /// Fetches every index of every table in one query and groups them by
    /// table name, separating the primary key from ordinary indexes.
    /// Indexes containing expression columns (NULL column_name) are dropped
    /// entirely at the end, since they cannot be represented.
    async fn get_all_indexes(
        &self,
        schema_name: &str,
    ) -> DescriberResult<BTreeMap<String, (BTreeMap<String, Index>, Option<PrimaryKey>)>> {
        let mut map = BTreeMap::new();
        let mut indexes_with_expressions: HashSet<(String, String)> = HashSet::new();
        // We alias all the columns because MySQL column names are case-insensitive in queries, but the
        // information schema column names became upper-case in MySQL 8, causing the code fetching
        // the result values by column name below to fail.
        let sql = "
            SELECT DISTINCT
                index_name AS index_name,
                non_unique AS non_unique,
                Binary column_name AS column_name,
                seq_in_index AS seq_in_index,
                Binary table_name AS table_name,
                sub_part AS partial,
                Binary collation AS column_order,
                Binary index_type AS index_type
            FROM INFORMATION_SCHEMA.STATISTICS
            WHERE table_schema = ?
            ORDER BY index_name, seq_in_index
            ";
        let rows = self.conn.query_raw(sql, &[schema_name.into()]).await?;
        for row in rows {
            trace!("Got index row: {:#?}", row);
            let table_name = row.get_expect_string("table_name");
            let index_name = row.get_expect_string("index_name");
            // sub_part: prefix length for partial (prefix) indexes, if any.
            let length = row.get_u32("partial");
            let sort_order = row.get_string("column_order").map(|v| match v.as_ref() {
                "A" => SQLSortOrder::Asc,
                "D" => SQLSortOrder::Desc,
                misc => panic!("Unexpected sort order `{}`, collation should be A, D or Null", misc),
            });
            match row.get_string("column_name") {
                Some(column_name) => {
                    // seq_in_index is 1-based; convert to a 0-based position.
                    let seq_in_index = row.get_expect_i64("seq_in_index");
                    let pos = seq_in_index - 1;
                    let is_unique = !row.get_expect_bool("non_unique");
                    // Multi-column indices will return more than one row (with different column_name values).
                    // We cannot assume that one row corresponds to one index.
                    let (ref mut indexes_map, ref mut primary_key): &mut (_, Option<PrimaryKey>) = map
                        .entry(table_name)
                        .or_insert((BTreeMap::<String, Index>::new(), None));
                    let is_pk = index_name.to_lowercase() == "primary";
                    if is_pk {
                        trace!("Column '{}' is part of the primary key", column_name);
                        match primary_key {
                            Some(pk) => {
                                // Grow the column list so the column can be
                                // placed at its seq_in_index position.
                                if pk.columns.len() < (pos + 1) as usize {
                                    pk.columns.resize((pos + 1) as usize, PrimaryKeyColumn::default());
                                }
                                let mut column = PrimaryKeyColumn::new(column_name);
                                column.length = length;
                                pk.columns[pos as usize] = column;
                                trace!(
                                    "The primary key has already been created, added column to it: {:?}",
                                    pk.columns
                                );
                            }
                            None => {
                                trace!("Instantiating primary key");
                                let mut column = PrimaryKeyColumn::new(column_name);
                                column.length = length;
                                primary_key.replace(PrimaryKey {
                                    columns: vec![column],
                                    sequence: None,
                                    constraint_name: None,
                                });
                            }
                        };
                    } else if indexes_map.contains_key(&index_name) {
                        // Additional column of an index we already created.
                        if let Some(index) = indexes_map.get_mut(&index_name) {
                            let mut column = IndexColumn::new(column_name);
                            column.length = length;
                            column.sort_order = sort_order;
                            index.columns.push(column);
                        }
                    } else {
                        // First column of a new (non-primary) index.
                        let mut column = IndexColumn::new(column_name);
                        column.length = length;
                        column.sort_order = sort_order;
                        let tpe = match (is_unique, row.get_string("index_type").as_deref()) {
                            (true, _) => IndexType::Unique,
                            (_, Some("FULLTEXT")) => IndexType::Fulltext,
                            _ => IndexType::Normal,
                        };
                        indexes_map.insert(
                            index_name.clone(),
                            Index {
                                name: index_name,
                                columns: vec![column],
                                tpe,
                                algorithm: None,
                            },
                        );
                    }
                }
                None => {
                    // NULL column_name means the index column is an expression;
                    // remember it so the whole index can be discarded below.
                    indexes_with_expressions.insert((table_name, index_name));
                }
            }
        }
        for (table, (index_map, _)) in &mut map {
            for (tble, index_name) in &indexes_with_expressions {
                if tble == table {
                    index_map.remove(index_name);
                }
            }
        }
        Ok(map)
    }
    /// Fetches every foreign key in the schema and groups them by table name.
    /// Multi-column keys arrive as one row per column (same constraint_name)
    /// and are merged; per table, the keys are sorted by their column lists
    /// for deterministic output.
    async fn get_foreign_keys(
        conn: &dyn Queryable,
        schema_name: &str,
    ) -> DescriberResult<BTreeMap<String, Vec<ForeignKey>>> {
        // Foreign keys covering multiple columns will return multiple rows, which we need to
        // merge.
        let mut map: BTreeMap<String, BTreeMap<String, ForeignKey>> = BTreeMap::new();
        // XXX: Is constraint_name unique? Need a way to uniquely associate rows with foreign keys
        // One should think it's unique since it's used to join information_schema.key_column_usage
        // and information_schema.referential_constraints tables in this query lifted from
        // Stack Overflow
        //
        // We alias all the columns because MySQL column names are case-insensitive in queries, but the
        // information schema column names became upper-case in MySQL 8, causing the code fetching
        // the result values by column name below to fail.
        let sql = "
        SELECT
            kcu.constraint_name constraint_name,
            kcu.column_name column_name,
            kcu.referenced_table_name referenced_table_name,
            kcu.referenced_column_name referenced_column_name,
            kcu.ordinal_position ordinal_position,
            kcu.table_name table_name,
            rc.delete_rule delete_rule,
            rc.update_rule update_rule
        FROM information_schema.key_column_usage AS kcu
        INNER JOIN information_schema.referential_constraints AS rc ON
        kcu.constraint_name = rc.constraint_name
        WHERE
            kcu.table_schema = ?
            AND rc.constraint_schema = ?
            AND kcu.referenced_column_name IS NOT NULL
        ORDER BY ordinal_position
        ";
        let result_set = conn.query_raw(sql, &[schema_name.into(), schema_name.into()]).await?;
        for row in result_set.into_iter() {
            trace!("Got description FK row {:#?}", row);
            let table_name = row.get_expect_string("table_name");
            let constraint_name = row.get_expect_string("constraint_name");
            let column = row.get_expect_string("column_name");
            let referenced_table = row.get_expect_string("referenced_table_name");
            let referenced_column = row.get_expect_string("referenced_column_name");
            let ord_pos = row.get_expect_i64("ordinal_position");
            let on_delete_action = match row.get_expect_string("delete_rule").to_lowercase().as_str() {
                "cascade" => ForeignKeyAction::Cascade,
                "set null" => ForeignKeyAction::SetNull,
                "set default" => ForeignKeyAction::SetDefault,
                "restrict" => ForeignKeyAction::Restrict,
                "no action" => ForeignKeyAction::NoAction,
                s => panic!("Unrecognized on delete action '{}'", s),
            };
            let on_update_action = match row.get_expect_string("update_rule").to_lowercase().as_str() {
                "cascade" => ForeignKeyAction::Cascade,
                "set null" => ForeignKeyAction::SetNull,
                "set default" => ForeignKeyAction::SetDefault,
                "restrict" => ForeignKeyAction::Restrict,
                "no action" => ForeignKeyAction::NoAction,
                s => panic!("Unrecognized on update action '{}'", s),
            };
            let intermediate_fks = map.entry(table_name).or_default();
            match intermediate_fks.get_mut(&constraint_name) {
                Some(fk) => {
                    // Another column of an already-seen constraint; place it
                    // at its 1-based ordinal position, padding if needed.
                    let pos = ord_pos as usize - 1;
                    if fk.columns.len() <= pos {
                        fk.columns.resize(pos + 1, "".to_string());
                    }
                    fk.columns[pos] = column;
                    if fk.referenced_columns.len() <= pos {
                        fk.referenced_columns.resize(pos + 1, "".to_string());
                    }
                    fk.referenced_columns[pos] = referenced_column;
                }
                None => {
                    let fk = ForeignKey {
                        constraint_name: Some(constraint_name.clone()),
                        columns: vec![column],
                        referenced_table,
                        referenced_columns: vec![referenced_column],
                        on_delete_action,
                        on_update_action,
                    };
                    intermediate_fks.insert(constraint_name, fk);
                }
            };
        }
        let fks = map
            .into_iter()
            .map(|(k, v)| {
                let mut fks: Vec<ForeignKey> = v.into_iter().map(|(_k, v)| v).collect();
                fks.sort_unstable_by(|this, other| this.columns.cmp(&other.columns));
                (k, fks)
            })
            .collect();
        Ok(fks)
    }
    /// Maps a raw information-schema type to a `ColumnType`, plus the enum
    /// definition when the column is an inline `enum(...)`. `tinyint(1)` is
    /// treated as boolean unless its default makes that impossible.
    fn get_column_type_and_enum(
        table: &str,
        column_name: &str,
        data_type: &str,
        full_data_type: &str,
        precision: Precision,
        arity: ColumnArity,
        default: Option<&Value>,
    ) -> (ColumnType, Option<Enum>) {
        // Matches e.g. `int unsigned` in the full column type.
        static UNSIGNEDNESS_RE: Lazy<Regex> = Lazy::new(|| Regex::new("(?i)unsigned$").unwrap());
        let is_tinyint1 = || Self::extract_precision(full_data_type) == Some(1);
        // A tinyint(1) with a default other than 0/1 cannot be a boolean.
        let invalid_bool_default = || {
            default
                .and_then(|default| default.to_string())
                .filter(|default_string| default_string != "NULL")
                .and_then(|default_string| Self::parse_int(&default_string))
                .filter(|default_int| *default_int != PrismaValue::Int(0) && *default_int != PrismaValue::Int(1))
                .is_some()
        };
        let (family, native_type) = match data_type {
            "int" if UNSIGNEDNESS_RE.is_match(full_data_type) => (ColumnTypeFamily::Int, Some(MySqlType::UnsignedInt)),
            "int" => (ColumnTypeFamily::Int, Some(MySqlType::Int)),
            "smallint" if UNSIGNEDNESS_RE.is_match(full_data_type) => {
                (ColumnTypeFamily::Int, Some(MySqlType::UnsignedSmallInt))
            }
            "smallint" => (ColumnTypeFamily::Int, Some(MySqlType::SmallInt)),
            "tinyint" if is_tinyint1() && !invalid_bool_default() => {
                (ColumnTypeFamily::Boolean, Some(MySqlType::TinyInt))
            }
            "tinyint" if UNSIGNEDNESS_RE.is_match(full_data_type) => {
                (ColumnTypeFamily::Int, Some(MySqlType::UnsignedTinyInt))
            }
            "tinyint" => (ColumnTypeFamily::Int, Some(MySqlType::TinyInt)),
            "mediumint" if UNSIGNEDNESS_RE.is_match(full_data_type) => {
                (ColumnTypeFamily::Int, Some(MySqlType::UnsignedMediumInt))
            }
            "mediumint" => (ColumnTypeFamily::Int, Some(MySqlType::MediumInt)),
            "bigint" if UNSIGNEDNESS_RE.is_match(full_data_type) => {
                (ColumnTypeFamily::BigInt, Some(MySqlType::UnsignedBigInt))
            }
            "bigint" => (ColumnTypeFamily::BigInt, Some(MySqlType::BigInt)),
            "decimal" => (
                ColumnTypeFamily::Decimal,
                Some(MySqlType::Decimal(Some((
                    precision.numeric_precision.unwrap(),
                    precision.numeric_scale.unwrap(),
                )))),
            ),
            "float" => (ColumnTypeFamily::Float, Some(MySqlType::Float)),
            "double" => (ColumnTypeFamily::Float, Some(MySqlType::Double)),
            "char" => (
                ColumnTypeFamily::String,
                Some(MySqlType::Char(precision.character_maximum_length.unwrap())),
            ),
            "varchar" => (
                ColumnTypeFamily::String,
                Some(MySqlType::VarChar(precision.character_maximum_length.unwrap())),
            ),
            "text" => (ColumnTypeFamily::String, Some(MySqlType::Text)),
            "tinytext" => (ColumnTypeFamily::String, Some(MySqlType::TinyText)),
            "mediumtext" => (ColumnTypeFamily::String, Some(MySqlType::MediumText)),
            "longtext" => (ColumnTypeFamily::String, Some(MySqlType::LongText)),
            // The enum is named `<table>_<column>` since MySQL enums are inline.
            "enum" => (ColumnTypeFamily::Enum(format!("{}_{}", table, column_name)), None),
            "json" => (ColumnTypeFamily::Json, Some(MySqlType::Json)),
            "set" => (ColumnTypeFamily::String, None),
            //temporal
            "date" => (ColumnTypeFamily::DateTime, Some(MySqlType::Date)),
            "time" => (
                //Fixme this can either be a time or a duration -.-
                ColumnTypeFamily::DateTime,
                Some(MySqlType::Time(precision.time_precision)),
            ),
            "datetime" => (
                ColumnTypeFamily::DateTime,
                Some(MySqlType::DateTime(precision.time_precision)),
            ),
            "timestamp" => (
                ColumnTypeFamily::DateTime,
                Some(MySqlType::Timestamp(precision.time_precision)),
            ),
            "year" => (ColumnTypeFamily::Int, Some(MySqlType::Year)),
            // bit(1) maps to boolean, wider bit columns to binary.
            "bit" if precision.numeric_precision == Some(1) => (
                ColumnTypeFamily::Boolean,
                Some(MySqlType::Bit(precision.numeric_precision.unwrap())),
            ),
            "bit" => (
                ColumnTypeFamily::Binary,
                Some(MySqlType::Bit(precision.numeric_precision.unwrap())),
            ),
            "binary" => (
                ColumnTypeFamily::Binary,
                Some(MySqlType::Binary(precision.character_maximum_length.unwrap())),
            ),
            "varbinary" => (
                ColumnTypeFamily::Binary,
                Some(MySqlType::VarBinary(precision.character_maximum_length.unwrap())),
            ),
            "blob" => (ColumnTypeFamily::Binary, Some(MySqlType::Blob)),
            "tinyblob" => (ColumnTypeFamily::Binary, Some(MySqlType::TinyBlob)),
            "mediumblob" => (ColumnTypeFamily::Binary, Some(MySqlType::MediumBlob)),
            "longblob" => (ColumnTypeFamily::Binary, Some(MySqlType::LongBlob)),
            //spatial
            "geometry" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "point" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "linestring" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "polygon" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "multipoint" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "multilinestring" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "multipolygon" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            "geometrycollection" => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
            _ => (ColumnTypeFamily::Unsupported(full_data_type.into()), None),
        };
        let tpe = ColumnType {
            full_data_type: full_data_type.to_owned(),
            family: family.clone(),
            arity,
            native_type: native_type.map(|x| x.to_json()),
        };
        match &family {
            ColumnTypeFamily::Enum(name) => (
                tpe,
                Some(Enum {
                    name: name.clone(),
                    values: Self::extract_enum_values(&full_data_type),
                }),
            ),
            _ => (tpe, None),
        }
    }
    /// Extracts a single-digit display width from a full column type such as
    /// `tinyint(1)`. The regex deliberately matches only one digit 1-9, so
    /// multi-digit widths like `tinyint(10)` yield `None` — which is all the
    /// tinyint(1)-as-boolean check needs.
    fn extract_precision(input: &str) -> Option<u32> {
        static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#".*\(([1-9])\)"#).unwrap());
        RE.captures(input)
            .and_then(|cap| cap.get(1).map(|precision| from_str::<u32>(precision.as_str()).unwrap()))
    }
fn extract_enum_values(full_data_type: &&str) -> Vec<String> {
let len = &full_data_type.len() - 1;
let vals = &full_data_type[5..len];
vals.split(',').map(unquote_string).collect()
}
    // See https://dev.mysql.com/doc/refman/8.0/en/string-literals.html
    //
    // In addition, MariaDB will return string literals with the quotes and extra backslashes around
    // control characters like `\n`.
    /// Strips MariaDB's surrounding quotes and `\n` escapes, then undoes
    /// MySQL's backslash/doubled-quote escaping.
    fn unescape_and_unquote_default_string(default: String, flavour: &Flavour) -> String {
        static MYSQL_ESCAPING_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\\('|\\[^\\])|'(')"#).unwrap());
        static MARIADB_NEWLINE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"\\n"#).unwrap());
        static MARIADB_DEFAULT_QUOTE_UNESCAPE_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r#"'(.*)'"#).unwrap());
        let maybe_unquoted: Cow<str> = if matches!(flavour, Flavour::MariaDb) {
            let unquoted = MARIADB_DEFAULT_QUOTE_UNESCAPE_RE
                .captures(&default)
                .and_then(|cap| cap.get(1).map(|x| x.as_str()))
                .unwrap_or(&default);
            MARIADB_NEWLINE_RE.replace_all(unquoted, "\n")
        } else {
            default.into()
        };
        MYSQL_ESCAPING_RE.replace_all(&maybe_unquoted, "$1$2").into()
    }
    /// Tests whether an introspected default value should be categorized as current_timestamp.
    /// Matches `CURRENT_TIMESTAMP` case-insensitively, with or without a
    /// numeric precision such as `CURRENT_TIMESTAMP(3)`.
    fn default_is_current_timestamp(default_str: &str) -> bool {
        static MYSQL_CURRENT_TIMESTAMP_RE: Lazy<Regex> =
            Lazy::new(|| Regex::new(r#"(?i)^current_timestamp(\([0-9]*\))?$"#).unwrap());
        MYSQL_CURRENT_TIMESTAMP_RE.is_match(default_str)
    }
}
| 42.587035 | 119 | 0.526455 |
506e8e472cd699a72c1278f066f344c1f6471102 | 6,917 | use std::collections::{HashSet, HashMap};
use std::fs::File;
use std::io::{self, BufRead};
/// A grid coordinate; negative rows/columns are valid (the image is
/// conceptually infinite).
#[derive(Clone, PartialEq, Eq, Hash)]
struct Pixel {
    row: i64,
    col: i64,
}
impl Pixel {
    /// Convenience constructor.
    fn new(row: i64, col: i64) -> Self {
        Pixel { row, col }
    }
}
/// The 512-entry image-enhancement lookup table: `true` means lit.
struct Algorithm {
    samples: Vec<bool>
}
impl Algorithm {
    /// Decodes the enhancement string: '#' is lit, anything else is dark.
    fn parse(s: String) -> Self {
        let mut samples = Vec::with_capacity(s.len());
        for c in s.chars() {
            samples.push(c == '#');
        }
        Self { samples }
    }
}
/// Dimensions of the finite, stored part of an image.
#[derive(Clone)]
struct Size {
    rows: usize,
    cols: usize,
}
impl Size {
    /// Convenience constructor.
    fn new(rows: usize, cols: usize) -> Self {
        Size { rows, cols }
    }
}
/// A finite window onto the (conceptually infinite) image. Pixels outside
/// `size` belong to the background, whose current state the callers track
/// and pass in as `border_on`.
struct Image {
    matrix: HashMap<Pixel, bool>,
    size: Size,
}
impl Image {
    /// Parses a grid of '#' (lit) / '.' (dark) lines into an image.
    /// Panics on empty input (same as the original behavior).
    fn parse(data: &Vec<String>) -> Self {
        let size = Size::new(data.len(), data.first().unwrap().len());
        let mut matrix: HashMap<Pixel, bool> = HashMap::new();
        for (r_i, line) in data.iter().enumerate() {
            for (c_i, ch) in line.chars().enumerate() {
                matrix.insert(Pixel::new(r_i as i64, c_i as i64), ch == '#');
            }
        }
        Self { matrix, size }
    }
    /// Returns a copy grown by `offset` pixels on every side; the new border
    /// pixels take the current state of the infinite background.
    fn increase_size(&self, offset: i64, border_on: bool) -> Self {
        let size = Size::new(
            self.size.rows + (offset as usize) * 2,
            self.size.cols + (offset as usize) * 2,
        );
        let mut matrix: HashMap<Pixel, bool> = HashMap::new();
        // Fill the enlarged canvas with the background state first...
        for r_i in 0..size.rows {
            for c_i in 0..size.cols {
                matrix.insert(Pixel::new(r_i as i64, c_i as i64), border_on);
            }
        }
        // ...then copy the old pixels, shifted by the offset.
        for (pixel, is_on) in &self.matrix {
            matrix.insert(Pixel::new(pixel.row + offset, pixel.col + offset), *is_on);
        }
        Self { matrix, size }
    }
    /// Builds the 9-bit enhancement index for `target` by reading its 3x3
    /// neighbourhood row by row (most significant bit = top-left), exactly
    /// like the previous copy-pasted version did. Pixels outside the stored
    /// matrix contribute the background state `border_on`.
    fn get_algo_index(&self, target: &Pixel, border_on: bool) -> usize {
        let mut index = 0usize;
        for d_row in -1..=1 {
            for d_col in -1..=1 {
                let lit = self
                    .matrix
                    .get(&Pixel::new(target.row + d_row, target.col + d_col))
                    .copied()
                    .unwrap_or(border_on);
                index = (index << 1) | usize::from(lit);
            }
        }
        index
    }
    /// Runs one enhancement step over every stored pixel.
    fn apply_algo(&self, algo: &Algorithm, border_on: bool) -> Self {
        let mut matrix: HashMap<Pixel, bool> = HashMap::new();
        for r_i in 0..self.size.rows {
            for c_i in 0..self.size.cols {
                let target = Pixel::new(r_i as i64, c_i as i64);
                let algo_index = self.get_algo_index(&target, border_on);
                matrix.insert(target, algo.samples[algo_index]);
            }
        }
        Self {
            matrix,
            size: self.size.clone(),
        }
    }
}
/// Part 1: run two enhancement rounds, then count the lit pixels.
fn solution_1(data: &Vec<String>) -> usize {
    let algo = Algorithm::parse(data[0].clone());
    let mut image = Image::parse(&data[2..].to_vec());
    let mut border_on = false;
    for _ in 0..2 {
        image = image.increase_size(2, border_on);
        image = image.apply_algo(&algo, border_on);
        // The infinite background flips according to what the algorithm maps
        // an all-dark (index 0) / all-lit (index 511) neighbourhood to.
        border_on = if border_on {
            *algo.samples.last().unwrap()
        } else {
            *algo.samples.first().unwrap()
        };
    }
    image.matrix.values().filter(|lit| **lit).count()
}
/// Part 2: identical to part 1 but with fifty enhancement rounds.
fn solution_2(data: &Vec<String>) -> usize {
    let algo = Algorithm::parse(data[0].clone());
    let mut image = Image::parse(&data[2..].to_vec());
    let mut border_on = false;
    for _ in 0..50 {
        image = image.increase_size(2, border_on);
        image = image.apply_algo(&algo, border_on);
        // Background flips per the algorithm's mapping of index 0 / 511.
        border_on = if border_on {
            *algo.samples.last().unwrap()
        } else {
            *algo.samples.first().unwrap()
        };
    }
    image.matrix.values().filter(|lit| **lit).count()
}
/// Reads the puzzle input (real or test file, toggled by `test`) and prints
/// both answers.
fn main() {
    let test = false;
    let mut file_path: String = "inputs/day20".to_string();
    let mut emoji: String = "🎉".to_string();
    if test {
        file_path += ".test";
        emoji = "🧪".to_string();
    }
    file_path += ".txt";
    let file = File::open(file_path).unwrap();
    let data: Vec<String> = io::BufReader::new(file)
        .lines()
        .map(|line|line.unwrap())
        .collect();
    println!(r#"{:?} Part 1 result is {:?}"#, emoji, solution_1(&data));
    println!(r#"{:?} Part 2 test result is {:?}"#, emoji, solution_2(&data));
}
| 28.117886 | 101 | 0.601128 |
def828fe74619278fb83e98ace7958eccef8f7b9 | 20,219 | //! compatible with std::sync::rwlock except for both thread and coroutine
//! please ref the doc from std::sync::rwlock
use std::cell::UnsafeCell;
use std::fmt;
use std::ops::{Deref, DerefMut};
use std::panic::{RefUnwindSafe, UnwindSafe};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use std::sync::{LockResult, PoisonError, TryLockError, TryLockResult};
use crate::cancel::trigger_cancel_panic;
use crate::park::ParkError;
use may_queue::mpsc_list::Queue as WaitList;
use super::blocking::SyncBlocker;
use super::mutex::{self, Mutex};
use super::poison;
/// A reader-writer lock
///
/// The priority policy of the lock is that readers have weak priority
pub struct RwLock<T: ?Sized> {
    // below two variables consist a global mutex
    // we need to deal with the cancel logic differently
    // the waiting blocker list
    to_wake: WaitList<Arc<SyncBlocker>>,
    // track how many blockers are waiting on the mutex
    cnt: AtomicUsize,
    // the reader mutex that track the reader count
    rlock: Mutex<usize>,
    // whether a writer panicked while holding the lock
    poison: poison::Flag,
    // the protected value
    data: UnsafeCell<T>,
}
// SAFETY: access to `data` is serialized by the lock protocol above, so the
// wrapper is Send/Sync whenever T itself is (mirrors std::sync::RwLock).
unsafe impl<T: ?Sized + Send + Sync> Send for RwLock<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for RwLock<T> {}
// Panic safety is handled via the poison flag, as in std.
impl<T: ?Sized> UnwindSafe for RwLock<T> {}
impl<T: ?Sized> RefUnwindSafe for RwLock<T> {}
/// RAII guard returned by `read`/`try_read`; releases the shared lock on drop.
#[must_use]
pub struct RwLockReadGuard<'a, T: ?Sized + 'a> {
    __lock: &'a RwLock<T>,
}
// impl<'a, T: ?Sized> !marker::Send for RwLockReadGuard<'a, T> {}
/// RAII guard returned by `write`/`try_write`; releases the exclusive lock on drop.
#[must_use]
pub struct RwLockWriteGuard<'a, T: ?Sized + 'a> {
    __lock: &'a RwLock<T>,
    __poison: poison::Guard,
}
// impl<'a, T: ?Sized> !marker::Send for RwLockWriteGuard<'a, T> {}
impl<T> RwLock<T> {
    /// Creates a new, unlocked `RwLock` protecting `t`.
    pub fn new(t: T) -> RwLock<T> {
        RwLock {
            to_wake: WaitList::new(),
            cnt: AtomicUsize::new(0),
            rlock: Mutex::new(0),
            poison: poison::Flag::new(),
            data: UnsafeCell::new(t),
        }
    }
}
impl<T: ?Sized> RwLock<T> {
    // Acquires the internal global mutex (to_wake + cnt) without returning a
    // guard; the caller must pair it with `unlock`. On coroutine cancellation
    // it carefully undoes the acquisition to avoid leaking the lock.
    fn lock(&self) -> Result<(), ParkError> {
        // try lock first
        match self.try_lock() {
            Ok(_) => return Ok(()),
            Err(TryLockError::WouldBlock) => {}
            Err(TryLockError::Poisoned(_)) => return Err(ParkError::Timeout),
        }
        let cur = SyncBlocker::current();
        // register blocker first
        self.to_wake.push(cur.clone());
        // inc the cnt, if it's the first grab, unpark the first waiter
        if self.cnt.fetch_add(1, Ordering::SeqCst) == 0 {
            self.to_wake
                .pop()
                .map(|w| self.unpark_one(&w))
                .expect("got null blocker!");
        }
        match cur.park(None) {
            Ok(_) => Ok(()),
            // no timeout was requested, so this can't happen
            Err(ParkError::Timeout) => unreachable!("rwlock timeout"),
            Err(ParkError::Canceled) => {
                // We may have been granted the lock concurrently with the
                // cancellation; if so, give it back so others can proceed.
                if cur.is_unparked() {
                    self.unlock();
                } else {
                    // ask whoever unparks us later to release on our behalf
                    cur.set_release();
                    // re-check in case the unpark raced with set_release
                    if cur.is_unparked() && cur.take_release() {
                        self.unlock();
                    }
                }
                Err(ParkError::Canceled)
            }
        }
    }
fn try_lock(&self) -> TryLockResult<()> {
if self.cnt.load(Ordering::SeqCst) == 0 {
match self
.cnt
.compare_exchange(0, 1, Ordering::SeqCst, Ordering::SeqCst)
{
Ok(_) => Ok(()),
Err(_) => {
if self.poison.get() {
Err(TryLockError::Poisoned(PoisonError::new(())))
} else {
Err(TryLockError::WouldBlock)
}
}
}
} else {
Err(TryLockError::WouldBlock)
}
}
fn unlock(&self) {
if self.cnt.fetch_sub(1, Ordering::SeqCst) > 1 {
self.to_wake
.pop()
.map(|w| self.unpark_one(&w))
.expect("got null blocker!");
}
}
fn unpark_one(&self, w: &SyncBlocker) {
w.unpark();
if w.take_release() {
self.unlock();
}
}
pub fn read(&self) -> LockResult<RwLockReadGuard<T>> {
let mut r = self.rlock.lock().expect("rwlock read");
if *r == 0 {
if let Err(ParkError::Canceled) = self.lock() {
// don't set the poison flag
::std::mem::forget(r);
// release the mutex to let other run
mutex::unlock_mutex(&self.rlock);
// now we can safely go with the cancel panic
trigger_cancel_panic();
}
// else the Poisoned case would be covered by the RwLockReadGuard::new()
}
*r += 1;
RwLockReadGuard::new(self)
}
pub fn try_read(&self) -> TryLockResult<RwLockReadGuard<T>> {
let mut r = match self.rlock.try_lock() {
Ok(r) => r,
Err(TryLockError::Poisoned(_)) => {
return Err(TryLockError::Poisoned(PoisonError::new(RwLockReadGuard {
__lock: self,
})));
}
Err(TryLockError::WouldBlock) => return Err(TryLockError::WouldBlock),
};
if *r == 0 {
if let Err(TryLockError::WouldBlock) = self.try_lock() {
return Err(TryLockError::WouldBlock);
}
}
let g = RwLockReadGuard::new(self)?;
// finally we add rlock
*r += 1;
Ok(g)
}
fn read_unlock(&self) {
let mut r = self.rlock.lock().expect("rwlock read_unlock");
*r -= 1;
if *r == 0 {
self.unlock();
}
}
pub fn write(&self) -> LockResult<RwLockWriteGuard<T>> {
if let Err(ParkError::Canceled) = self.lock() {
// now we can safely go with the cancel panic
trigger_cancel_panic();
}
RwLockWriteGuard::new(self)
}
pub fn try_write(&self) -> TryLockResult<RwLockWriteGuard<T>> {
if let Err(TryLockError::WouldBlock) = self.try_lock() {
return Err(TryLockError::WouldBlock);
}
Ok(RwLockWriteGuard::new(self)?)
}
fn write_unlock(&self) {
self.unlock();
}
pub fn is_poisoned(&self) -> bool {
self.poison.get()
}
pub fn into_inner(self) -> LockResult<T>
where
T: Sized,
{
// We know statically that there are no outstanding references to
// `self` so there's no need to lock the inner lock.
let data = self.data.into_inner();
poison::map_result(self.poison.borrow(), |_| data)
}
pub fn get_mut(&mut self) -> LockResult<&mut T> {
// We know statically that there are no other references to `self`, so
// there's no need to lock the inner lock.
let data = unsafe { &mut *self.data.get() };
poison::map_result(self.poison.borrow(), |_| data)
}
}
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLock<T> {
    /// Best-effort, non-blocking debug output: shows the data when a read
    /// lock is immediately available, otherwise reports the lock state.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.try_read() {
            Ok(guard) => f.write_fmt(format_args!("RwLock {{ data: {:?} }}", &*guard)),
            Err(TryLockError::Poisoned(err)) => f.write_fmt(format_args!(
                "RwLock {{ data: Poisoned({:?}) }}",
                &**err.get_ref()
            )),
            Err(TryLockError::WouldBlock) => f.write_str("RwLock { <locked> }"),
        }
    }
}
impl<T: Default> Default for RwLock<T> {
    /// Builds a `RwLock<T>` around `T`'s default value.
    fn default() -> RwLock<T> {
        RwLock::new(T::default())
    }
}
impl<'rwlock, T: ?Sized> RwLockReadGuard<'rwlock, T> {
    // Wraps the lock in a read guard, converting a poisoned lock into a
    // PoisonError that still carries the guard.
    fn new(lock: &'rwlock RwLock<T>) -> LockResult<RwLockReadGuard<'rwlock, T>> {
        poison::map_result(lock.poison.borrow(), |_| RwLockReadGuard { __lock: lock })
    }
}
impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> {
    // Wraps the lock in a write guard; keeps the poison::Guard so Drop can
    // record whether the writer panicked.
    fn new(lock: &'rwlock RwLock<T>) -> LockResult<RwLockWriteGuard<'rwlock, T>> {
        poison::map_result(lock.poison.borrow(), |guard| RwLockWriteGuard {
            __lock: lock,
            __poison: guard,
        })
    }
}
// Debug for the guards prints the underlying lock, not the guarded value.
impl<'a, T: fmt::Debug> fmt::Debug for RwLockReadGuard<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RwLockReadGuard")
            .field("lock", &self.__lock)
            .finish()
    }
}
impl<'a, T: fmt::Debug> fmt::Debug for RwLockWriteGuard<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("RwLockWriteGuard")
            .field("lock", &self.__lock)
            .finish()
    }
}
impl<'rwlock, T: ?Sized> Deref for RwLockReadGuard<'rwlock, T> {
    type Target = T;
    // Shared access is sound while the read lock is held by this guard.
    fn deref(&self) -> &T {
        unsafe { &*self.__lock.data.get() }
    }
}
impl<'rwlock, T: ?Sized> Deref for RwLockWriteGuard<'rwlock, T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { &*self.__lock.data.get() }
    }
}
impl<'rwlock, T: ?Sized> DerefMut for RwLockWriteGuard<'rwlock, T> {
    // Exclusive access is sound while the write lock is held by this guard.
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.__lock.data.get() }
    }
}
impl<'a, T: ?Sized> Drop for RwLockReadGuard<'a, T> {
    // Decrement the reader count; the last reader releases the global lock.
    fn drop(&mut self) {
        self.__lock.read_unlock();
    }
}
impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> {
    // Record poison if we're unwinding, then release the write lock.
    fn drop(&mut self) {
        self.__lock.poison.done(&self.__poison);
        self.__lock.write_unlock();
    }
}
#[cfg(test)]
mod tests {
    // Largely mirrors std's RwLock test-suite, plus coroutine cancellation
    // cases specific to this implementation (the last two tests).
    use crate::sync::mpsc::channel;
    use crate::sync::{Condvar, Mutex, RwLock};
    use std::sync::atomic::{AtomicUsize, Ordering};
    use std::sync::{Arc, TryLockError};
    use std::thread;
    #[derive(Eq, PartialEq, Debug)]
    struct NonCopy(i32);
    #[test]
    fn smoke() {
        let l = RwLock::new(());
        drop(l.read().unwrap());
        drop(l.write().unwrap());
        drop((l.read().unwrap(), l.read().unwrap()));
        drop(l.write().unwrap());
    }
    // Hammer the lock from a mix of coroutines and OS threads.
    #[test]
    fn frob() {
        const N: usize = 10;
        const M: usize = 1000;
        let r = Arc::new(RwLock::new(()));
        let (tx, rx) = channel::<()>();
        for i in 0..N {
            let tx = tx.clone();
            let r = r.clone();
            let f = move || {
                for i in 0..M {
                    if i % 5 == 0 {
                        drop(r.write().unwrap());
                    } else {
                        drop(r.read().unwrap());
                    }
                }
                drop(tx);
            };
            if i % 2 == 0 {
                go!(f);
            } else {
                thread::spawn(f);
            }
        }
        drop(tx);
        let _ = rx.recv();
    }
    // A panicking writer must poison subsequent reads.
    #[test]
    fn test_rw_arc_poison_wr() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move || {
            let _lock = arc2.write().unwrap();
            panic!();
        })
        .join();
        assert!(arc.read().is_err());
    }
    // A panicking writer must poison subsequent writes too.
    #[test]
    fn test_rw_arc_poison_ww() {
        let arc = Arc::new(RwLock::new(1));
        assert!(!arc.is_poisoned());
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move || {
            let _lock = arc2.write().unwrap();
            panic!();
        })
        .join();
        assert!(arc.write().is_err());
        assert!(arc.is_poisoned());
    }
    // A panicking *reader* must NOT poison the lock.
    #[test]
    fn test_rw_arc_no_poison_rr() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move || {
            let _lock = arc2.read().unwrap();
            panic!();
        })
        .join();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 1);
    }
    #[test]
    fn test_rw_arc_no_poison_rw() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _: Result<(), _> = thread::spawn(move || {
            let _lock = arc2.read().unwrap();
            panic!()
        })
        .join();
        let lock = arc.write().unwrap();
        assert_eq!(*lock, 1);
    }
    // Readers must never observe the writer's transient -1 value.
    #[test]
    fn test_rw_arc() {
        let arc = Arc::new(RwLock::new(0));
        let arc2 = arc.clone();
        let (tx, rx) = channel();
        thread::spawn(move || {
            let mut lock = arc2.write().unwrap();
            for _ in 0..10 {
                let tmp = *lock;
                *lock = -1;
                thread::yield_now();
                *lock = tmp + 1;
            }
            tx.send(()).unwrap();
        });
        // Readers try to catch the writer in the act
        let mut children = Vec::new();
        for _ in 0..5 {
            let arc3 = arc.clone();
            children.push(thread::spawn(move || {
                let lock = arc3.read().unwrap();
                assert!(*lock >= 0);
            }));
        }
        // Wait for children to pass their asserts
        for r in children {
            assert!(r.join().is_ok());
        }
        // Wait for writer to finish
        rx.recv().unwrap();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 10);
    }
    // Write-locking inside a Drop that runs during unwinding must still work.
    #[test]
    fn test_rw_arc_access_in_unwind() {
        let arc = Arc::new(RwLock::new(1));
        let arc2 = arc.clone();
        let _ = thread::spawn(move || -> () {
            struct Unwinder {
                i: Arc<RwLock<isize>>,
            }
            impl Drop for Unwinder {
                fn drop(&mut self) {
                    let mut lock = self.i.write().unwrap();
                    *lock += 1;
                }
            }
            let _u = Unwinder { i: arc2 };
            panic!();
        })
        .join();
        let lock = arc.read().unwrap();
        assert_eq!(*lock, 2);
    }
    // Exercises RwLock<T> with T: ?Sized (a slice).
    #[test]
    fn test_rwlock_unsized() {
        let rw: &RwLock<[i32]> = &RwLock::new([1, 2, 3]);
        {
            let b = &mut *rw.write().unwrap();
            b[0] = 4;
            b[2] = 5;
        }
        let comp: &[i32] = &[4, 2, 5];
        assert_eq!(&*rw.read().unwrap(), comp);
    }
    #[test]
    fn test_rwlock_try_write() {
        let lock = RwLock::new(0isize);
        let read_guard = lock.read().unwrap();
        let write_result = lock.try_write();
        match write_result {
            Err(TryLockError::WouldBlock) => (),
            Ok(_) => assert!(
                false,
                "try_write should not succeed while read_guard is in scope"
            ),
            Err(_) => assert!(false, "unexpected error"),
        }
        drop(read_guard);
    }
    #[test]
    fn test_into_inner() {
        let m = RwLock::new(NonCopy(10));
        assert_eq!(m.into_inner().unwrap(), NonCopy(10));
    }
    // into_inner must move the value out exactly once (one Drop).
    #[test]
    fn test_into_inner_drop() {
        struct Foo(Arc<AtomicUsize>);
        impl Drop for Foo {
            fn drop(&mut self) {
                self.0.fetch_add(1, Ordering::SeqCst);
            }
        }
        let num_drops = Arc::new(AtomicUsize::new(0));
        let m = RwLock::new(Foo(num_drops.clone()));
        assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        {
            let _inner = m.into_inner().unwrap();
            assert_eq!(num_drops.load(Ordering::SeqCst), 0);
        }
        assert_eq!(num_drops.load(Ordering::SeqCst), 1);
    }
    // Poison must survive into_inner, still yielding the value via the error.
    #[test]
    fn test_into_inner_poison() {
        let m = Arc::new(RwLock::new(NonCopy(10)));
        let m2 = m.clone();
        let _ = thread::spawn(move || {
            let _lock = m2.write().unwrap();
            panic!("test panic in inner thread to poison RwLock");
        })
        .join();
        assert!(m.is_poisoned());
        match Arc::try_unwrap(m).unwrap().into_inner() {
            Err(e) => assert_eq!(e.into_inner(), NonCopy(10)),
            Ok(x) => panic!("into_inner of poisoned RwLock is Ok: {:?}", x),
        }
    }
    #[test]
    fn test_get_mut() {
        let mut m = RwLock::new(NonCopy(10));
        *m.get_mut().unwrap() = NonCopy(20);
        assert_eq!(m.into_inner().unwrap(), NonCopy(20));
    }
    #[test]
    fn test_get_mut_poison() {
        let m = Arc::new(RwLock::new(NonCopy(10)));
        let m2 = m.clone();
        let _ = thread::spawn(move || {
            let _lock = m2.write().unwrap();
            panic!("test panic in inner thread to poison RwLock");
        })
        .join();
        assert!(m.is_poisoned());
        match Arc::try_unwrap(m).unwrap().get_mut() {
            Err(e) => assert_eq!(*e.into_inner(), NonCopy(10)),
            Ok(x) => panic!("get_mut of poisoned RwLock is Ok: {:?}", x),
        }
    }
    // Cancel one coroutine that is queued on write(); the remaining N-1
    // must still make progress and drain the channel exactly once each.
    #[test]
    fn test_rwlock_write_canceled() {
        const N: usize = 10;
        let sync = Arc::new((Mutex::new(0), Condvar::new()));
        let (tx, rx) = channel();
        let mut vec = vec![];
        let rwlock = Arc::new(RwLock::new(0));
        for i in 1..N + 1 {
            // println!("create thread id={}", i);
            let sync = sync.clone();
            let tx = tx.clone();
            let rwlock = rwlock.clone();
            let h = go!(move || {
                // tell master that we started
                tx.send(0).unwrap();
                // first get the wlock
                let _wlock = rwlock.write().unwrap();
                tx.send(i).unwrap();
                // println!("got wlock, id={}", i);
                // wait the mater to let it go
                let &(ref lock, ref cond) = &*sync;
                let mut cnt = lock.lock().unwrap();
                while *cnt != i {
                    cnt = cond.wait(cnt).unwrap();
                }
                // println!("got cond id={}", i);
            });
            vec.push(h);
        }
        drop(tx);
        // wait for coroutine started
        let mut id = 0;
        for _ in 1..N + 2 {
            let a = rx.recv().unwrap();
            if a != 0 {
                id = a;
                // first recv one id
                // println!("recv id = {}", id);
            }
        }
        // cancel one coroutine that is waiting for the rwlock
        let mut cancel_id = id + 1;
        if cancel_id == N + 2 {
            cancel_id = 1;
        }
        // println!("cancel id = {}", cancel_id);
        unsafe { vec[cancel_id - 1].coroutine().cancel() };
        // let all coroutine to continue
        let &(ref lock, ref cond) = &*sync;
        for _ in 1..N {
            let mut cnt = lock.lock().unwrap();
            *cnt = id;
            cond.notify_all();
            drop(cnt);
            id = rx.recv().unwrap_or(0);
            // println!("recv id = {:?}", id);
        }
        assert_eq!(rx.try_recv().is_err(), true);
    }
    // Cancel a reader queued behind a writer; a later writer must still be
    // able to take the lock once the original write guard drops.
    #[test]
    fn test_rwlock_read_canceled() {
        let (tx, rx) = channel();
        let rwlock = Arc::new(RwLock::new(0));
        // lock the write lock so all reader lock would enqueue
        let wlock = rwlock.write().unwrap();
        // create a coroutine that use reader locks
        let h = {
            let tx = tx.clone();
            let rwlock = rwlock.clone();
            go!(move || {
                // tell master that we started
                tx.send(0).unwrap();
                // first get the rlock
                let _rlock = rwlock.read().unwrap();
                tx.send(1).unwrap();
            })
        };
        // wait for reader coroutine started
        let a = rx.recv().unwrap();
        assert_eq!(a, 0);
        // create another thread that wait for wlock
        let rwlock1 = rwlock.clone();
        let tx1 = tx.clone();
        thread::spawn(move || {
            let _wlock = rwlock1.write().unwrap();
            tx1.send(10).unwrap();
        });
        // cancel read coroutine that is waiting for the rwlock
        unsafe { h.coroutine().cancel() };
        h.join().unwrap_err();
        // release the write lock, so that other thread can got the lock
        drop(wlock);
        let a = rx.recv().unwrap();
        assert_eq!(a, 10);
        assert_eq!(rx.try_recv().is_err(), true);
    }
}
| 29.473761 | 86 | 0.48771 |
8f702e59d0a39c3e235d9c976a1ae7497c331d27 | 18,992 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
*
* # Lattice Variables
*
* This file contains generic code for operating on inference variables
* that are characterized by an upper- and lower-bound. The logic and
* reasoning is explained in detail in the large comment in `infer.rs`.
*
* The code in here is defined quite generically so that it can be
* applied both to type variables, which represent types being inferred,
* and fn variables, which represent function types being inferred.
* It may eventually be applied to ther types as well, who knows.
* In some cases, the functions are also generic with respect to the
* operation on the lattice (GLB vs LUB).
*
* Although all the functions are generic, we generally write the
* comments in a way that is specific to type variables and the LUB
* operation. It's just easier that way.
*
* In general all of the functions are defined parametrically
* over a `LatticeValue`, which is a value defined with respect to
* a lattice.
*/
use middle::ty::{RegionVid, TyVar, Vid};
use middle::ty;
use middle::typeck::infer::*;
use middle::typeck::infer::combine::*;
use middle::typeck::infer::glb::Glb;
use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::unify::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr;
use std::hashmap::HashMap;
use util::common::indenter;
// A value that lives in a lattice: it supports the subtype check plus the
// least-upper-bound and greatest-lower-bound operations used by inference.
pub trait LatticeValue {
    fn sub(cf: &CombineFields, a: &Self, b: &Self) -> ures;
    fn lub(cf: &CombineFields, a: &Self, b: &Self) -> cres<Self>;
    fn glb(cf: &CombineFields, a: &Self, b: &Self) -> cres<Self>;
}
// A borrowed closure computing a lattice operation (LUB or GLB) over T.
pub type LatticeOp<'a, T> =
    'a |cf: &CombineFields, a: &T, b: &T| -> cres<T>;
// Types form a lattice: sub/lub/glb delegate to the corresponding
// type-relation combiners.
impl LatticeValue for ty::t {
    fn sub(cf: &CombineFields, a: &ty::t, b: &ty::t) -> ures {
        Sub(*cf).tys(*a, *b).to_ures()
    }
    fn lub(cf: &CombineFields, a: &ty::t, b: &ty::t) -> cres<ty::t> {
        Lub(*cf).tys(*a, *b)
    }
    fn glb(cf: &CombineFields, a: &ty::t, b: &ty::t) -> cres<ty::t> {
        Glb(*cf).tys(*a, *b)
    }
}
// Lattice operations on inference variables with upper/lower bounds.
// T is the bounded value (e.g. a type); V is the variable kind.
pub trait CombineFieldsLatticeMethods {
    // make variable a_id a subtype of variable b_id
    fn var_sub_var<T:Clone + InferStr + LatticeValue,
                   V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(&self,
                                                                     a_id: V,
                                                                     b_id: V)
                                                                     -> ures;
    /// make variable a subtype of T
    fn var_sub_t<T:Clone + InferStr + LatticeValue,
                 V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
                 &self,
                 a_id: V,
                 b: T)
                 -> ures;
    // make the concrete value a a subtype of variable b_id
    fn t_sub_var<T:Clone + InferStr + LatticeValue,
                 V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
                 &self,
                 a: T,
                 b_id: V)
                 -> ures;
    // combine two optional bounds with the given LUB/GLB operation
    fn merge_bnd<T:Clone + InferStr + LatticeValue>(
                 &self,
                 a: &Bound<T>,
                 b: &Bound<T>,
                 lattice_op: LatticeOp<T>)
                 -> cres<Bound<T>>;
    // narrow v_id's bounds to the intersection of a and b
    fn set_var_to_merged_bounds<T:Clone + InferStr + LatticeValue,
                                V:Clone+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
                                &self,
                                v_id: V,
                                a: &Bounds<T>,
                                b: &Bounds<T>,
                                rank: uint)
                                -> ures;
    // check that bound a is a subtype of bound b (vacuously true when
    // either side is absent)
    fn bnds<T:Clone + InferStr + LatticeValue>(
            &self,
            a: &Bound<T>,
            b: &Bound<T>)
            -> ures;
}
impl CombineFieldsLatticeMethods for CombineFields {
    fn var_sub_var<T:Clone + InferStr + LatticeValue,
                   V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
                   &self,
                   a_id: V,
                   b_id: V)
                   -> ures {
        /*!
         *
         * Make one variable a subtype of another variable. This is a
         * subtle and tricky process, as described in detail at the
         * top of infer.rs*/

        // Need to make sub_id a subtype of sup_id.
        let node_a = self.infcx.get(a_id);
        let node_b = self.infcx.get(b_id);
        let a_id = node_a.root.clone();
        let b_id = node_b.root.clone();
        let a_bounds = node_a.possible_types.clone();
        let b_bounds = node_b.possible_types.clone();

        debug!("vars({}={} <: {}={})",
               a_id.to_str(), a_bounds.inf_str(self.infcx),
               b_id.to_str(), b_bounds.inf_str(self.infcx));

        // same root variable: nothing to do
        if a_id == b_id { return uok(); }

        // If both A's UB and B's LB have already been bound to types,
        // see if we can make those types subtypes.
        match (&a_bounds.ub, &b_bounds.lb) {
            (&Some(ref a_ub), &Some(ref b_lb)) => {
                let r = self.infcx.try(
                    || LatticeValue::sub(self, a_ub, b_lb));
                match r {
                    Ok(()) => {
                        return Ok(());
                    }
                    Err(_) => { /*fallthrough */ }
                }
            }
            _ => { /*fallthrough*/ }
        }

        // Otherwise, we need to merge A and B so as to guarantee that
        // A remains a subtype of B. Actually, there are other options,
        // but that's the route we choose to take.
        let (new_root, new_rank) = self.infcx.unify(&node_a, &node_b);
        self.set_var_to_merged_bounds(new_root,
                                      &a_bounds, &b_bounds,
                                      new_rank)
    }

    /// make variable a subtype of T
    fn var_sub_t<T:Clone + InferStr + LatticeValue,
                 V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
                 &self,
                 a_id: V,
                 b: T)
                 -> ures {
        /*!
         *
         * Make a variable (`a_id`) a subtype of the concrete type `b` */

        let node_a = self.infcx.get(a_id);
        let a_id = node_a.root.clone();
        let a_bounds = &node_a.possible_types;
        // encode "<: b" as the bounds {lb: none, ub: b} and intersect
        let b_bounds = &Bounds { lb: None, ub: Some(b.clone()) };

        debug!("var_sub_t({}={} <: {})",
               a_id.to_str(),
               a_bounds.inf_str(self.infcx),
               b.inf_str(self.infcx));

        self.set_var_to_merged_bounds(
            a_id, a_bounds, b_bounds, node_a.rank)
    }

    fn t_sub_var<T:Clone + InferStr + LatticeValue,
                 V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
                 &self,
                 a: T,
                 b_id: V)
                 -> ures {
        /*!
         *
         * Make a concrete type (`a`) a subtype of the variable `b_id` */

        // encode "a <:" as the bounds {lb: a, ub: none} and intersect
        let a_bounds = &Bounds { lb: Some(a.clone()), ub: None };
        let node_b = self.infcx.get(b_id);
        let b_id = node_b.root.clone();
        let b_bounds = &node_b.possible_types;

        debug!("t_sub_var({} <: {}={})",
               a.inf_str(self.infcx),
               b_id.to_str(),
               b_bounds.inf_str(self.infcx));

        self.set_var_to_merged_bounds(
            b_id, a_bounds, b_bounds, node_b.rank)
    }

    fn merge_bnd<T:Clone + InferStr + LatticeValue>(
                 &self,
                 a: &Bound<T>,
                 b: &Bound<T>,
                 lattice_op: LatticeOp<T>)
                 -> cres<Bound<T>> {
        /*!
         *
         * Combines two bounds into a more general bound. */

        debug!("merge_bnd({},{})",
               a.inf_str(self.infcx),
               b.inf_str(self.infcx));
        let _r = indenter();

        // an absent bound is "no constraint": the other side wins
        match (a, b) {
            (&None,          &None) => Ok(None),
            (&Some(_),       &None) => Ok((*a).clone()),
            (&None,          &Some(_)) => Ok((*b).clone()),
            (&Some(ref v_a), &Some(ref v_b)) => {
                lattice_op(self, v_a, v_b).and_then(|v| Ok(Some(v)))
            }
        }
    }

    fn set_var_to_merged_bounds<T:Clone + InferStr + LatticeValue,
                                V:Clone+Eq+ToStr+Vid+UnifyVid<Bounds<T>>>(
                                &self,
                                v_id: V,
                                a: &Bounds<T>,
                                b: &Bounds<T>,
                                rank: uint)
                                -> ures {
        /*!
         *
         * Updates the bounds for the variable `v_id` to be the intersection
         * of `a` and `b`.  That is, the new bounds for `v_id` will be
         * a bounds c such that:
         *    c.ub <: a.ub
         *    c.ub <: b.ub
         *    a.lb <: c.lb
         *    b.lb <: c.lb
         * If this cannot be achieved, the result is failure. */

        // Think of the two diamonds, we want to find the
        // intersection.  There are basically four possibilities (you
        // can swap A/B in these pictures):
        //
        //       A         A
        //      / \       / \
        //     / B \     / B \
        //    / / \ \   / / \ \
        //   * *   * * * /   * *
        //    \ \ / /   \   / /
        //     \ B /   / \ / /
        //      \ /   *   \ /
        //       A     \ / A
        //              B
        debug!("merge({},{},{})",
               v_id.to_str(),
               a.inf_str(self.infcx),
               b.inf_str(self.infcx));
        let _indent = indenter();

        // First, relate the lower/upper bounds of A and B.
        // Note that these relations *must* hold for us to
        // to be able to merge A and B at all, and relating
        // them explicitly gives the type inferencer more
        // information and helps to produce tighter bounds
        // when necessary.
        let () = if_ok!(self.bnds(&a.lb, &b.ub));
        let () = if_ok!(self.bnds(&b.lb, &a.ub));
        // new upper bound = GLB of the two UBs; new lower bound = LUB of LBs
        let ub = if_ok!(self.merge_bnd(&a.ub, &b.ub, LatticeValue::glb));
        let lb = if_ok!(self.merge_bnd(&a.lb, &b.lb, LatticeValue::lub));
        let bounds = Bounds { lb: lb, ub: ub };
        debug!("merge({}): bounds={}",
               v_id.to_str(),
               bounds.inf_str(self.infcx));

        // the new bounds must themselves
        // be relatable:
        let () = if_ok!(self.bnds(&bounds.lb, &bounds.ub));
        self.infcx.set(v_id, Root(bounds, rank));
        uok()
    }

    fn bnds<T:Clone + InferStr + LatticeValue>(&self,
                                               a: &Bound<T>,
                                               b: &Bound<T>)
                                               -> ures {
        debug!("bnds({} <: {})", a.inf_str(self.infcx),
               b.inf_str(self.infcx));
        let _r = indenter();

        // missing bounds impose no constraint, so only Some/Some is checked
        match (a, b) {
            (&None, &None) |
            (&Some(_), &None) |
            (&None, &Some(_)) => {
                uok()
            }
            (&Some(ref t_a), &Some(ref t_b)) => {
                LatticeValue::sub(self, t_a, t_b)
            }
        }
    }
}
// ______________________________________________________________________
// Lattice operations on variables
//
// This is common code used by both LUB and GLB to compute the LUB/GLB
// for pairs of variables or for variables and values.
// Abstracts over the direction of the lattice operation: LUB reads/writes
// upper bounds, GLB reads/writes lower bounds.
pub trait LatticeDir {
    fn combine_fields(&self) -> CombineFields;
    // the bound relevant to this direction (ub for LUB, lb for GLB)
    fn bnd<T:Clone>(&self, b: &Bounds<T>) -> Option<T>;
    // a copy of `b` with the direction's bound replaced by `t`
    fn with_bnd<T:Clone>(&self, b: &Bounds<T>, t: T) -> Bounds<T>;
}
// Direction-specific handling for the bottom type.
pub trait TyLatticeDir {
    fn ty_bot(&self, t: ty::t) -> cres<ty::t>;
}
// LUB works on upper bounds; LUB(bot, t) == t.
impl LatticeDir for Lub {
    fn combine_fields(&self) -> CombineFields { **self }
    fn bnd<T:Clone>(&self, b: &Bounds<T>) -> Option<T> { b.ub.clone() }
    fn with_bnd<T:Clone>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
        Bounds { ub: Some(t), ..(*b).clone() }
    }
}
impl TyLatticeDir for Lub {
    fn ty_bot(&self, t: ty::t) -> cres<ty::t> {
        Ok(t)
    }
}
// GLB works on lower bounds; GLB(bot, t) == bot.
impl LatticeDir for Glb {
    fn combine_fields(&self) -> CombineFields { **self }
    fn bnd<T:Clone>(&self, b: &Bounds<T>) -> Option<T> { b.lb.clone() }
    fn with_bnd<T:Clone>(&self, b: &Bounds<T>, t: T) -> Bounds<T> {
        Bounds { lb: Some(t), ..(*b).clone() }
    }
}
impl TyLatticeDir for Glb {
    fn ty_bot(&self, _t: ty::t) -> cres<ty::t> {
        Ok(ty::mk_bot())
    }
}
// Computes LUB/GLB of two types, dispatching on whether each side is the
// bottom type, an inference variable, or a concrete type.
pub fn super_lattice_tys<L:LatticeDir+TyLatticeDir+Combine>(this: &L,
                                                            a: ty::t,
                                                            b: ty::t)
                                                            -> cres<ty::t> {
    debug!("{}.lattice_tys({}, {})", this.tag(),
           a.inf_str(this.infcx()),
           b.inf_str(this.infcx()));
    // identical types: trivially their own LUB/GLB
    if a == b {
        return Ok(a);
    }
    let tcx = this.infcx().tcx;
    match (&ty::get(a).sty, &ty::get(b).sty) {
        // bottom is handled per-direction (identity for LUB, absorbing for GLB)
        (&ty::ty_bot, _) => { return this.ty_bot(b); }
        (_, &ty::ty_bot) => { return this.ty_bot(a); }

        (&ty::ty_infer(TyVar(a_id)), &ty::ty_infer(TyVar(b_id))) => {
            let r = if_ok!(lattice_vars(this, a_id, b_id,
                                        |x, y| this.tys(*x, *y)));
            return match r {
                VarResult(v) => Ok(ty::mk_var(tcx, v)),
                ValueResult(t) => Ok(t)
            };
        }

        (&ty::ty_infer(TyVar(a_id)), _) => {
            return lattice_var_and_t(this, a_id, &b,
                                     |x, y| this.tys(*x, *y));
        }

        (_, &ty::ty_infer(TyVar(b_id))) => {
            return lattice_var_and_t(this, b_id, &a,
                                     |x, y| this.tys(*x, *y));
        }

        _ => {
            // two concrete types: structural combination
            return super_tys(this, a, b);
        }
    }
}
// A borrowed closure performing the direction's operation on two values.
pub type LatticeDirOp<'a, T> = 'a |a: &T, b: &T| -> cres<T>;

// Result of combining two variables: either a (possibly unified) variable,
// or a concrete value computed from their bounds.
#[deriving(Clone)]
pub enum LatticeVarResult<V,T> {
    VarResult(V),
    ValueResult(T)
}
/**
 * Computes the LUB or GLB of two bounded variables. These could be any
 * sort of variables, but in the comments on this function I'll assume
 * we are doing an LUB on two type variables.
 *
 * This computation can be done in one of two ways:
 *
 * - If both variables have an upper bound, we may just compute the
 *   LUB of those bounds and return that, in which case we are
 *   returning a type. This is indicated with a `ValueResult` return.
 *
 * - If the variables do not both have an upper bound, we will unify
 *   the variables and return the unified variable, in which case the
 *   result is a variable. This is indicated with a `VarResult`
 *   return. */
pub fn lattice_vars<L:LatticeDir + Combine,
                    T:Clone + InferStr + LatticeValue,
                    V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
    this: &L,                           // defines whether we want LUB or GLB
    a_vid: V,                           // first variable
    b_vid: V,                           // second variable
    lattice_dir_op: LatticeDirOp<T>)    // LUB or GLB operation on types
    -> cres<LatticeVarResult<V,T>> {
    // resolve both variables to their root nodes first
    let nde_a = this.infcx().get(a_vid);
    let nde_b = this.infcx().get(b_vid);
    let a_vid = nde_a.root.clone();
    let b_vid = nde_b.root.clone();
    let a_bounds = &nde_a.possible_types;
    let b_bounds = &nde_b.possible_types;

    debug!("{}.lattice_vars({}={} <: {}={})",
           this.tag(),
           a_vid.to_str(), a_bounds.inf_str(this.infcx()),
           b_vid.to_str(), b_bounds.inf_str(this.infcx()));

    // Same variable: the easy case.
    if a_vid == b_vid {
        return Ok(VarResult(a_vid));
    }

    // If both A and B have an UB type, then we can just compute the
    // LUB of those types:
    let (a_bnd, b_bnd) = (this.bnd(a_bounds), this.bnd(b_bounds));
    match (a_bnd, b_bnd) {
        (Some(ref a_ty), Some(ref b_ty)) => {
            match this.infcx().try(|| lattice_dir_op(a_ty, b_ty) ) {
                Ok(t) => return Ok(ValueResult(t)),
                Err(_) => { /*fallthrough */ }
            }
        }
        _ => {/*fallthrough*/}
    }

    // Otherwise, we need to merge A and B into one variable.  We can
    // then use either variable as an upper bound:
    let cf = this.combine_fields();
    cf.var_sub_var(a_vid.clone(), b_vid.clone()).then(|| {
        Ok(VarResult(a_vid.clone()))
    })
}
// Computes the LUB/GLB of a variable and a concrete value.
pub fn lattice_var_and_t<L:LatticeDir + Combine,
                         T:Clone + InferStr + LatticeValue,
                         V:Clone + Eq + ToStr + Vid + UnifyVid<Bounds<T>>>(
    this: &L,
    a_id: V,
    b: &T,
    lattice_dir_op: LatticeDirOp<T>)
    -> cres<T> {
    let nde_a = this.infcx().get(a_id);
    let a_id = nde_a.root.clone();
    let a_bounds = &nde_a.possible_types;

    // The comments in this function are written for LUB, but they
    // apply equally well to GLB if you inverse upper/lower/sub/super/etc.
    debug!("{}.lattice_var_and_t({}={} <: {})",
           this.tag(),
           a_id.to_str(),
           a_bounds.inf_str(this.infcx()),
           b.inf_str(this.infcx()));

    match this.bnd(a_bounds) {
        Some(ref a_bnd) => {
            // If a has an upper bound, return the LUB(a.ub, b)
            debug!("bnd=Some({})", a_bnd.inf_str(this.infcx()));
            lattice_dir_op(a_bnd, b)
        }
        None => {
            // If a does not have an upper bound, make b the upper bound of a
            // and then return b.
            debug!("bnd=None");
            let a_bounds = this.with_bnd(a_bounds, (*b).clone());
            this.combine_fields().bnds(&a_bounds.lb, &a_bounds.ub).then(|| {
                // only record the new bounds if they are mutually relatable
                this.infcx().set(a_id.clone(),
                                 Root(a_bounds.clone(), nde_a.rank));
                Ok((*b).clone())
            })
        }
    }
}
// ___________________________________________________________________________
// Random utility functions used by LUB/GLB when computing LUB/GLB of
// fn types
// Extracts the region-variable ids from a bound-region substitution map;
// any non-variable region is a compiler bug at this point.
pub fn var_ids<T:Combine>(this: &T,
                          map: &HashMap<ty::BoundRegion, ty::Region>)
                          -> ~[RegionVid] {
    map.iter().map(|(_, r)| match *r {
            ty::ReInfer(ty::ReVar(r)) => { r }
            r => {
                this.infcx().tcx.sess.span_bug(
                    this.trace().origin.span(),
                    format!("Found non-region-vid: {:?}", r));
            }
        }).collect()
}
// True iff `r` is a region inference variable contained in `new_vars`.
pub fn is_var_in_set(new_vars: &[RegionVid], r: ty::Region) -> bool {
    match r {
        ty::ReInfer(ty::ReVar(ref v)) => new_vars.iter().any(|x| x == v),
        _ => false
    }
}
| 35.10536 | 78 | 0.492471 |
11110070349d601c149d10fa1901be103cf18967 | 1,136 | pub mod dag;
pub use dag::AnyHandler;
pub use dag::AsyncHandler;
pub use dag::EmptyPlaceHolder;
pub use dag::Flow;
pub use dag::HandlerInfo;
pub use dag::HandlerType;
pub use dag::OpResult;
pub use dag::OpResults;
/// Builds the handler registration table for the listed node types.
///
/// Expands to a closure that returns, for each type `$x`, a tuple of
/// (node name, async calc fn pointer, config-deserializer, has-config flag),
/// pulled from the type's `generate_config()`, `async_calc2` and
/// `config_generate` associated items.
///
/// NOTE(review): the name is a typo of `register_node`, but the macro is
/// `#[macro_export]`ed public API — renaming would break downstream callers.
#[macro_export]
macro_rules! resgiter_node{
    ( $($x:ident),* ) => {
        &|| {
            let mut data: Vec<(
                &'static str,
                fn(
                    Arc<_>,
                    Arc<_>,
                    Arc<_>,
                ) -> Pin<Box<dyn futures::Future<Output = OpResult> + std::marker::Send>>,
                Arc<Fn(Box<serde_json::value::RawValue>) -> Arc<(dyn Any + std::marker::Send + Sync)> + Send + Sync>,
                bool,
            )> = Vec::new();
            $(
                #[allow(unused_assignments)]
                {
                    data.push((
                        $x::generate_config().name,
                        $x::async_calc2,
                        Arc::new($x::config_generate),
                        $x::generate_config().has_config,
                    ));
                }
            )*
            data
        }
    };
}
bbdadede0ff52130902ef4f5be6c63cca5901ac7 | 4,465 | use serde::{Deserialize, Serialize};
use std::{
pin::Pin,
task::{Context, Poll},
};
use tezedge::OperationHash;
use super::types::ContractId;
use futures::stream::Stream;
use std::{
cmp::Reverse,
future::Future,
hash::Hash,
ops::{Add, Sub},
};
// Entry point for chain notifications; methods live in the impl below.
pub struct Notifications {}
// Stream of zkChannels contract events; see the Stream impl below.
pub struct ContractEventStream {}
/// A single on-chain event observed for a tracked contract.
#[allow(unused)]
pub struct ContractEvent {
    // which contract the event belongs to
    contract: ContractId,
    // the operation that produced the event
    operation: OperationHash,
    // decoded zkChannels-specific payload
    event: ZkChannelEvent,
}
pub enum ZkChannelEvent {
    // TODO: zkchannels domain specific event types
}
/// Errors surfaced while watching the chain.
pub enum Error {
    // the observed branch was replaced by a reorganization
    Reorg,
    Io(std::io::Error),
    // TODO: maybe other kinds of errors, add them here
}
#[allow(unused)]
impl Stream for ContractEventStream {
    type Item = Result<ContractEvent, std::io::Error>;
    // Unimplemented placeholder: will yield contract events as they are
    // observed on chain.
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        todo!()
    }
}
// Control handle paired with a ContractEventStream; lets another task
// change which contracts the stream watches. All methods are stubs.
pub struct ContractEventStreamHandle {}
#[allow(unused)]
impl ContractEventStreamHandle {
    /// Add the given [`ContractId`] to the set of streamed contracts.
    /// The [`Level`] parameter indicates the level at which the [`ContractId`] was originated
    /// on chain.
    ///
    /// This will stream all events for the [`ContractId`] that have occurred on chain between
    /// [`Level`] and the current chain height.
    pub async fn add_contract(&self, contract_hash: &ContractId, originated: Level) {
        todo!()
    }
    /// Remove the given [`ContractId`] from the set of streamed contracts.
    pub async fn remove_contract(&self, contract_hash: &ContractId) {
        todo!()
    }
    /// Replace the set of streamed contracts with the given `contract_hashes`.
    /// The [`Level`] parameters indicate the level at which the [`ContractId`]s are originated
    /// on chain.
    ///
    /// This will not cause duplicated events for [`ContractId`]s that were already
    /// in the set of streamed contracts.
    /// This will stream all events for each _new_ [`ContractId`] that have occurred on chain
    /// between [`Level`] and the current chain height.
    pub async fn set_contracts(
        &self,
        contract_hashes: impl IntoIterator<Item = &(ContractId, Level)>,
    ) {
        todo!()
    }
}
#[allow(unused)]
impl Notifications {
    /// Wait for confirmation that the specified operation is confirmed at the given [`Depth`].
    ///
    /// This can be used for confirmation that an operation will not be lost in a reorg
    /// or for checking that a specified timeout has elapsed.
    pub async fn confirm_operation(
        &self,
        operation_hash: &OperationHash,
        confirmations: Depth,
    ) -> Result<(), Error> {
        todo!()
    }
    /// Get a stream of events and a linked handle that allows the contents of the stream to be
    /// updated by another task.
    pub async fn contract_events(&self) -> (ContractEventStreamHandle, ContractEventStream) {
        todo!()
    }
}
/// A block height on the chain. Stored transparently as a `u32` for sqlx.
#[derive(
    Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, sqlx::Type,
)]
#[sqlx(transparent)]
pub struct Level(u32);
impl From<u32> for Level {
    fn from(n: u32) -> Self {
        Self(n)
    }
}
impl From<Level> for u32 {
    fn from(h: Level) -> Self {
        h.0
    }
}
impl Add<u32> for Level {
    type Output = Level;
    // NOTE(review): plain `+` — overflows past u32::MAX panic in debug and
    // wrap in release; confirm heights can never approach that bound.
    fn add(self, rhs: u32) -> Self::Output {
        Level(self.0 + rhs)
    }
}
impl Sub<u32> for Level {
    type Output = Level;
    // NOTE(review): plain `-` — subtracting below level 0 panics in debug
    // and wraps in release; callers must guarantee rhs <= self.
    fn sub(self, rhs: u32) -> Self::Output {
        Level(self.0 - rhs)
    }
}
/// A confirmation count whose ordering is inverted via [`Reverse`]:
/// a *smaller* count compares *greater* (i.e. "shallower" sorts higher).
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Depth(Reverse<usize>);

impl From<usize> for Depth {
    /// Wraps a raw confirmation count.
    fn from(count: usize) -> Self {
        Depth(Reverse(count))
    }
}

impl From<Depth> for usize {
    /// Unwraps back to the raw confirmation count.
    fn from(depth: Depth) -> Self {
        let Depth(Reverse(count)) = depth;
        count
    }
}
/// Outcome of waiting on an operation: it either reached the requested
/// depth, or it fell out of the chain (e.g. during a reorg).
pub enum Confirmation {
    Confirmed,
    Dropped,
}
/// A cache of recently seen blocks plus the fetcher used to fill in gaps.
#[allow(unused)]
pub struct Cache<B: Block, F: Fetch> {
    blocks: Vec<B>,
    fetcher: F,
}
// NOTE(review): empty impl — presumably a placeholder for the cache logic.
impl<B: Block, F: Fetch> Cache<B, F> where B::Id: Hash + Eq + Clone {}
/// Minimal view of a chain block needed for caching and reorg detection.
pub trait Block {
    type Id;
    /// This block's own identifier.
    fn id(&self) -> &Self::Id;
    /// Identifier of the parent block.
    fn predecessor(&self) -> &Self::Id;
    /// Height of the block in the chain.
    fn height(&self) -> Level;
}
/// Asynchronous source of blocks, addressable by id, height, or chain head.
pub trait Fetch {
    type Block: Block;
    type Error;
    type Future: Future<Output = Result<Self::Block, Self::Error>>;
    fn fetch_id(&mut self, id: <Self::Block as Block>::Id) -> Self::Future;
    fn fetch_height(&mut self, height: usize) -> Self::Future;
    fn fetch_head(&mut self) -> Self::Future;
}
| 23.877005 | 95 | 0.627772 |
91082f4fa498343a13a91b4c80516c072041509f | 10,201 | // Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.
use std::collections::HashMap;
use std::fmt::{Debug, Formatter};
use parking_lot::RwLock;
use crate::frames::{Frames, UnresolvedFrames};
use crate::profiler::Profiler;
use crate::{Error, Result};
/// The final presentation of a report, which is actually a `HashMap` from `Frames` to isize (count).
pub struct Report {
    /// key is a backtrace captured by profiler and value is count of it.
    pub data: HashMap<Frames, isize>,
}
/// The presentation of an unsymbolicated report, which is actually a `HashMap` from `UnresolvedFrames` to isize (count).
pub struct UnresolvedReport {
    /// key is a backtrace captured by profiler and value is count of it.
    pub data: HashMap<UnresolvedFrames, isize>,
}
/// A builder of `Report` and `UnresolvedReport`. It builds a report from a running `Profiler`.
pub struct ReportBuilder<'a> {
    /// Optional hook applied to every resolved `Frames` before aggregation (see `build`).
    frames_post_processor: Option<Box<dyn Fn(&mut Frames)>>,
    /// The profiler whose collected samples are read; `Err` if profiler creation failed.
    profiler: &'a RwLock<Result<Profiler>>,
}
impl<'a> ReportBuilder<'a> {
    /// Create a builder bound to a (possibly failed-to-create) profiler.
    pub(crate) fn new(profiler: &'a RwLock<Result<Profiler>>) -> Self {
        Self {
            frames_post_processor: None,
            profiler,
        }
    }

    /// Set `frames_post_processor` of a `ReportBuilder`. Before finally building a report,
    /// `frames_post_processor` will be applied to every Frames.
    pub fn frames_post_processor<T>(&mut self, frames_post_processor: T) -> &mut Self
    where
        T: Fn(&mut Frames) + 'static,
    {
        self.frames_post_processor
            .replace(Box::new(frames_post_processor));
        self
    }

    /// Build an `UnresolvedReport` by summing sample counts per unresolved backtrace.
    ///
    /// # Errors
    /// Returns `Error::CreatingError` if the profiler failed to initialize; propagates
    /// errors from iterating the sample collector.
    pub fn build_unresolved(&self) -> Result<UnresolvedReport> {
        let mut hash_map = HashMap::new();
        match self.profiler.read().as_ref() {
            Err(err) => {
                log::error!("Error in creating profiler: {}", err);
                Err(Error::CreatingError)
            }
            Ok(profiler) => {
                profiler.data.try_iter()?.for_each(|entry| {
                    let count = entry.count;
                    if count > 0 {
                        let key = &entry.item;
                        // Aggregate in place; clone the key only the first time it is seen.
                        match hash_map.get_mut(key) {
                            Some(value) => *value += count,
                            None => {
                                hash_map.insert(key.clone(), count);
                            }
                        }
                    }
                });
                Ok(UnresolvedReport { data: hash_map })
            }
        }
    }

    /// Build a `Report`, symbolicating each backtrace and applying the optional
    /// `frames_post_processor` before aggregation.
    ///
    /// # Errors
    /// Returns `Error::CreatingError` if the profiler failed to initialize; propagates
    /// errors from iterating the sample collector.
    pub fn build(&self) -> Result<Report> {
        let mut hash_map = HashMap::new();
        match self.profiler.write().as_mut() {
            Err(err) => {
                log::error!("Error in creating profiler: {}", err);
                Err(Error::CreatingError)
            }
            Ok(profiler) => {
                profiler.data.try_iter()?.for_each(|entry| {
                    let count = entry.count;
                    if count > 0 {
                        let mut key = Frames::from(entry.item.clone());
                        if let Some(processor) = &self.frames_post_processor {
                            processor(&mut key);
                        }
                        // `key` is already owned here, so the entry API aggregates with a
                        // single hash lookup — replaces the old get_mut/insert/unreachable!()
                        // pattern that looked the key up twice.
                        *hash_map.entry(key).or_insert(0) += count;
                    }
                });
                Ok(Report { data: hash_map })
            }
        }
    }
}
/// This will generate Report in a human-readable format:
///
/// ```shell
/// FRAME: pprof::profiler::perf_signal_handler::h7b995c4ab2e66493 -> FRAME: Unknown -> FRAME: {func1} ->
/// FRAME: {func2} -> FRAME: {func3} -> THREAD: {thread_name} {count}
/// ```
impl Debug for Report {
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
        // One `frames -> count` pair per line.
        for (frames, count) in &self.data {
            writeln!(f, "{:?} {}", frames, count)?;
        }
        Ok(())
    }
}
#[cfg(feature = "flamegraph")]
mod flamegraph {
    use super::*;
    use std::io::Write;
    impl Report {
        /// `flamegraph` will write an svg flamegraph into `writer` **only available with `flamegraph` feature**
        ///
        /// Each report entry becomes one collapsed-stack line:
        /// `thread;outermost_frame;...;innermost_frame count`, where symbols inside an
        /// inlined frame are joined with `/`.
        pub fn flamegraph<W>(&self, writer: W) -> Result<()>
        where
            W: Write,
        {
            use inferno::flamegraph;
            let lines: Vec<String> = self
                .data
                .iter()
                .map(|(key, value)| {
                    let mut line = String::new();
                    // Prefer the thread name; fall back to the debug form of the id.
                    if !key.thread_name.is_empty() {
                        line.push_str(&key.thread_name);
                    } else {
                        line.push_str(&format!("{:?}", key.thread_id));
                    }
                    line.push(';');
                    // Frames are stored innermost-first; reverse for root-first output.
                    for frame in key.frames.iter().rev() {
                        for symbol in frame.iter().rev() {
                            line.push_str(&format!("{}/", symbol));
                        }
                        // Drop the trailing '/' left by the symbol loop.
                        line.pop().unwrap_or_default();
                        line.push(';');
                    }
                    // Drop the trailing ';' and append the sample count.
                    line.pop().unwrap_or_default();
                    line.push_str(&format!(" {}", value));
                    line
                })
                .collect();
            if !lines.is_empty() {
                flamegraph::from_lines(
                    &mut flamegraph::Options::default(),
                    lines.iter().map(|s| &**s),
                    writer,
                )
                .unwrap(); // TODO: handle this error
            }
            Ok(())
        }
    }
}
#[cfg(feature = "protobuf")]
mod protobuf {
    use super::*;
    use crate::protos;
    use std::collections::HashSet;
    impl Report {
        /// `pprof` will generate a report in google's pprof protobuf format,
        /// building the string table, function table, location table and samples
        /// that the format requires.
        pub fn pprof(&self) -> crate::Result<protos::Profile> {
            // Collect every distinct symbol name / system name / filename once.
            let mut dudup_str = HashSet::new();
            for key in self.data.iter().map(|(key, _)| key) {
                for frame in key.frames.iter() {
                    for symbol in frame {
                        dudup_str.insert(symbol.name());
                        dudup_str.insert(symbol.sys_name().into_owned());
                        dudup_str.insert(symbol.filename().into_owned());
                    }
                }
            }
            // string table's first element must be an empty string
            let mut str_tbl = vec!["".to_owned()];
            str_tbl.extend(dudup_str.into_iter());
            // Reverse index: string -> its position in the string table.
            let mut strings = HashMap::new();
            for (index, name) in str_tbl.iter().enumerate() {
                strings.insert(name.as_str(), index);
            }
            let mut samples = vec![];
            let mut loc_tbl = vec![];
            let mut fn_tbl = vec![];
            // Reverse index: function name -> its id, to reuse locations across samples.
            let mut functions = HashMap::new();
            for (key, count) in self.data.iter() {
                let mut locs = vec![];
                for frame in key.frames.iter() {
                    for symbol in frame {
                        let name = symbol.name();
                        // Already registered: just reference the existing location id.
                        if let Some(loc_idx) = functions.get(&name) {
                            locs.push(*loc_idx);
                            continue;
                        }
                        let sys_name = symbol.sys_name();
                        let filename = symbol.filename();
                        let lineno = symbol.lineno();
                        let mut function = protos::Function::default();
                        // pprof ids are 1-based.
                        let id = fn_tbl.len() as u64 + 1;
                        function.id = id;
                        function.name = *strings.get(name.as_str()).unwrap() as i64;
                        function.system_name = *strings.get(sys_name.as_ref()).unwrap() as i64;
                        function.filename = *strings.get(filename.as_ref()).unwrap() as i64;
                        functions.insert(name, id);
                        let mut line = protos::Line::default();
                        line.function_id = id;
                        line.line = lineno as i64;
                        let mut loc = protos::Location::default();
                        loc.id = id;
                        loc.line = vec![line];
                        // the fn_tbl has the same length with loc_tbl
                        fn_tbl.push(function);
                        loc_tbl.push(loc);
                        // current frame locations
                        locs.push(id);
                    }
                }
                let mut sample = protos::Sample::default();
                sample.location_id = locs;
                sample.value = vec![*count as i64];
                samples.push(sample);
            }
            // Sample type is "cpu"/"count"; register both strings at the end of the table.
            let (type_idx, unit_idx) = (str_tbl.len(), str_tbl.len() + 1);
            str_tbl.push("cpu".to_owned());
            str_tbl.push("count".to_owned());
            let mut sample_type = protos::ValueType::default();
            sample_type.r#type = type_idx as i64;
            sample_type.unit = unit_idx as i64;
            let mut profile = protos::Profile::default();
            profile.sample_type = vec![sample_type];
            profile.sample = samples;
            profile.string_table = str_tbl;
            profile.function = fn_tbl;
            profile.location = loc_tbl;
            Ok(profile)
        }
    }
}
| 36.302491 | 121 | 0.44476 |
f42f7d3e1936f886d92eaa7cce998b3cd1b8fd2d | 9,233 | // Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use crate::features::Feature;
use std::path::{Path, PathBuf};
use structopt::StructOpt;
// Top-level CLI options for `cmc`. NOTE: the `///` doc comments on fields below
// double as structopt help text, so they are part of the user-visible interface.
#[derive(StructOpt, Debug)]
pub struct Opt {
    #[structopt(short = "s", long = "stamp", parse(from_os_str))]
    /// Stamp this file on success
    pub stamp: Option<PathBuf>,
    #[structopt(subcommand)]
    // Which subcommand to run; see `Commands`.
    pub cmd: Commands,
}
// Subcommands supported by `cmc`. NOTE: the `///` doc comments on variants and
// fields double as structopt help text, so they are part of the user-visible
// interface — do not edit them for style alone.
#[derive(StructOpt, Debug)]
pub enum Commands {
    #[structopt(name = "validate")]
    /// validate that one or more cmx files are valid
    Validate {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// files to process
        files: Vec<PathBuf>,
        #[structopt(long = "extra_schema", parse(from_str = "parse_extra_schema_arg"))]
        /// extra JSON schema files to additionally validate against. A custom error message - to
        /// be displayed if the schema fails to validate - can be specified by adding a ':'
        /// separator and the message after the path.
        extra_schemas: Vec<(PathBuf, Option<String>)>,
    },
    #[structopt(name = "validate-references")]
    /// validate component manifest {.cmx|.cml} against package manifest.
    ValidateReferences {
        #[structopt(
            name = "Component Manifest",
            short = "c",
            long = "component-manifest",
            parse(from_os_str)
        )]
        component_manifest: PathBuf,
        #[structopt(
            name = "Package Manifest",
            short = "p",
            long = "package-manifest",
            parse(from_os_str)
        )]
        package_manifest: PathBuf,
        #[structopt(name = "GN Label", short = "l", long = "gn-label")]
        gn_label: Option<String>,
    },
    #[structopt(name = "merge")]
    /// merge the listed cmx files
    Merge {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// files to process
        ///
        /// If any file contains an array at its root, every object in the array
        /// will be merged into the final object.
        files: Vec<PathBuf>,
        #[structopt(short = "o", long = "output", parse(from_os_str))]
        /// file to write the merged results to, will print to stdout if not provided
        output: Option<PathBuf>,
        #[structopt(short = "f", long = "fromfile", parse(from_os_str))]
        /// response file for files to process
        ///
        /// If specified, additional files to merge will be read from the path provided.
        /// The input format is delimited by newlines.
        fromfile: Option<PathBuf>,
        #[structopt(short = "d", long = "depfile", parse(from_os_str))]
        /// depfile for includes
        ///
        /// If specified, include paths will be listed here, delimited by newlines.
        depfile: Option<PathBuf>,
    },
    #[structopt(name = "include")]
    /// add contents from includes if any
    Include {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// file to process
        file: PathBuf,
        #[structopt(short = "o", long = "output", parse(from_os_str))]
        /// file to write the merged results to, will print to stdout if not provided
        output: Option<PathBuf>,
        #[structopt(short = "d", long = "depfile", parse(from_os_str))]
        /// depfile for includes
        ///
        /// If specified, include paths will be listed here, delimited by newlines.
        depfile: Option<PathBuf>,
        #[structopt(short = "p", long = "includepath", parse(from_os_str))]
        /// base path for resolving includes
        includepath: PathBuf,
        #[structopt(short = "r", long = "includeroot", parse(from_os_str))]
        /// base path for resolving include paths that start with "//"
        /// Temporarily optional for a soft transition.
        /// If not specified, will take the value of `includepath`.
        includeroot: Option<PathBuf>,
    },
    #[structopt(name = "check-includes")]
    /// check if given includes are present in a given component manifest
    CheckIncludes {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// file to process
        file: PathBuf,
        #[structopt(name = "expect")]
        expected_includes: Vec<String>,
        #[structopt(short = "f", long = "fromfile", parse(from_os_str))]
        /// response file for includes to expect
        ///
        /// If specified, additional includes to expect will be read from the path provided.
        /// The input format is delimited by newlines.
        fromfile: Option<PathBuf>,
        #[structopt(short = "d", long = "depfile", parse(from_os_str))]
        /// depfile for includes
        ///
        /// If specified, include paths will be listed here, delimited by newlines.
        depfile: Option<PathBuf>,
        #[structopt(short = "p", long = "includepath", parse(from_os_str))]
        /// base path for resolving includes
        includepath: PathBuf,
        #[structopt(short = "r", long = "includeroot", parse(from_os_str))]
        /// base path for resolving include paths that start with "//"
        /// Temporarily optional for a soft transition.
        /// If not specified, will take the value of `includepath`.
        includeroot: Option<PathBuf>,
    },
    #[structopt(name = "format")]
    /// format a json file
    Format {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// file to format
        file: PathBuf,
        #[structopt(short = "p", long = "pretty")]
        /// whether to pretty-print the results (otherwise minify JSON documents; ignored for JSON5)
        pretty: bool,
        #[structopt(long = "cml")]
        /// interpret input file as JSON5 CML, and output in the preferred style, preserving all
        /// comments (this is the default for `.cml` files; implies `--pretty`)
        cml: bool,
        #[structopt(short = "i", long = "in-place")]
        /// replace the input file with the formatted output (implies `--output <inputfile>`)
        inplace: bool,
        #[structopt(short = "o", long = "output", parse(from_os_str))]
        /// file to write the formatted results to, will print to stdout if not provided
        output: Option<PathBuf>,
    },
    #[structopt(name = "compile")]
    /// compile a CML file
    Compile {
        #[structopt(name = "FILE", parse(from_os_str))]
        /// file to format
        file: PathBuf,
        #[structopt(short = "o", long = "output", parse(from_os_str))]
        /// file to write the formatted results to, will print to stdout if not provided
        output: Option<PathBuf>,
        #[structopt(short = "d", long = "depfile", parse(from_os_str))]
        /// depfile for includes
        ///
        /// If specified, include paths will be listed here, delimited by newlines.
        depfile: Option<PathBuf>,
        #[structopt(short = "p", long = "includepath", parse(from_os_str))]
        /// base path for resolving includes
        includepath: PathBuf,
        #[structopt(short = "r", long = "includeroot", parse(from_os_str))]
        /// base path for resolving include paths that start with "//"
        /// Temporarily optional for a soft transition.
        /// If not specified, will take the value of `includepath`.
        includeroot: Option<PathBuf>,
        #[structopt(short = "f", long = "features")]
        /// The set of non-standard features to compile with.
        /// Only applies to CML files.
        features: Vec<Feature>,
        #[structopt(long = "experimental-force-runner")]
        /// override runner to this value in resulting CML
        ///
        /// If specified, the program.runner field will be set to this value. This option is
        /// EXPERIMENTAL and subject to removal without warning.
        experimental_force_runner: Option<String>,
    },
}
/// Split an `--extra_schema` argument of the form `path[:message]` into the schema
/// path and an optional custom error message (everything after the first ':').
fn parse_extra_schema_arg(src: &str) -> (PathBuf, Option<String>) {
    let mut parts = src.splitn(2, ':');
    let schema_path = Path::new(parts.next().unwrap_or_default()).to_path_buf();
    let error_message = parts.next().map(str::to_string);
    (schema_path, error_message)
}
#[cfg(test)]
mod tests {
    use super::*;
    // Generates one `#[test]` per `name => { input, result }` pair, each asserting
    // that `parse_extra_schema_arg(input) == result`.
    macro_rules! test_parse_extra_schema_arg {
        (
            $(
                $test_name:ident => {
                    input = $input:expr,
                    result = $result:expr,
                },
            )+
        ) => {
            $(
                #[test]
                fn $test_name() {
                    assert_eq!(parse_extra_schema_arg($input), $result)
                }
            )+
        }
    }
    test_parse_extra_schema_arg! {
        test_parse_extra_schema_arg_schema_only => {
            input = "/some/path",
            result = (Path::new("/some/path").to_path_buf(), None),
        },
        test_parse_extra_schema_arg_schema_and_msg => {
            input = "/some/path:my error message",
            result = (Path::new("/some/path").to_path_buf(), Some("my error message".to_string())),
        },
        test_parse_extra_schema_arg_msg_with_sep => {
            input = "/some/path:my:error:message",
            result = (Path::new("/some/path").to_path_buf(), Some("my:error:message".to_string())),
        },
    }
}
| 36.066406 | 100 | 0.585184 |
61afd3cb6077e1d9a0782f39ccc4b0a0e929c059 | 1,861 | #![allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
/* automatically generated by rust-bindgen 0.59.2 */
// NOTE(review): the FFI declarations below are bindgen-generated; the comments
// added here are orientation only — authoritative semantics live in the Dobby C
// headers, not in this file.
extern "C" {
    // Presumably returns a static, NUL-terminated version string — TODO confirm ownership.
    pub fn DobbyBuildVersion() -> *const ::std::os::raw::c_char;
}
extern "C" {
    // Presumably installs a hook at `address`, writing the original entry into
    // `origin_call`; the int return looks like a status code — confirm against headers.
    pub fn DobbyHook(
        address: *mut ::std::os::raw::c_void,
        replace_call: *mut ::std::os::raw::c_void,
        origin_call: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
extern "C" {
    // Presumably removes a previously installed hook at `address`.
    pub fn DobbyDestroy(address: *mut ::std::os::raw::c_void) -> ::std::os::raw::c_int;
}
extern "C" {
    // Presumably resolves `symbol_name` within `image_name` to an address.
    pub fn DobbySymbolResolver(
        image_name: *const ::std::os::raw::c_char,
        symbol_name: *const ::std::os::raw::c_char,
    ) -> *mut ::std::os::raw::c_void;
}
extern "C" {
    // Presumably a GOT-entry replacement (PLT hooking) variant of DobbyHook.
    pub fn DobbyGlobalOffsetTableReplace(
        image_name: *mut ::std::os::raw::c_char,
        symbol_name: *mut ::std::os::raw::c_char,
        fake_func: *mut ::std::os::raw::c_void,
        orig_func: *mut *mut ::std::os::raw::c_void,
    ) -> ::std::os::raw::c_int;
}
// Status values for memory operations (C enum flattened to constants by bindgen).
pub const MemoryOperationError_kMemoryOperationSuccess: MemoryOperationError = 0;
pub const MemoryOperationError_kMemoryOperationError: MemoryOperationError = 1;
pub const MemoryOperationError_kNotSupportAllocateExecutableMemory: MemoryOperationError = 2;
pub const MemoryOperationError_kNotEnough: MemoryOperationError = 3;
pub const MemoryOperationError_kNone: MemoryOperationError = 4;
pub type MemoryOperationError = ::std::os::raw::c_uint;
extern "C" {
    // Presumably writes `buffer_size` bytes from `buffer` over code at `address`.
    pub fn CodePatch(
        address: *mut ::std::os::raw::c_void,
        buffer: *mut ::std::os::raw::c_uchar,
        buffer_size: ::std::os::raw::c_uint,
    ) -> MemoryOperationError;
}
extern "C" {
    // Logging controls exposed by the Dobby library.
    pub fn log_set_level(level: ::std::os::raw::c_int);
}
extern "C" {
    pub fn log_switch_to_syslog();
}
extern "C" {
    pub fn log_switch_to_file(path: *const ::std::os::raw::c_char);
}
| 35.113208 | 93 | 0.658248 |
2919f7042ee0c47c89fa0ec62f45eeb2dca04506 | 67,831 | // Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
commands::{is_address, is_authentication_key},
libra_client::LibraClient,
AccountData, AccountStatus,
};
use anyhow::{bail, ensure, format_err, Error, Result};
use compiled_stdlib::{transaction_scripts::StdlibScript, StdLibOptions};
use libra_crypto::{
ed25519::{Ed25519PrivateKey, Ed25519PublicKey, Ed25519Signature},
test_utils::KeyPair,
traits::ValidCryptoMaterial,
x25519, ValidCryptoMaterialStringExt,
};
use libra_json_rpc_client::views::{AccountView, BlockMetadata, EventView, TransactionView};
use libra_logger::prelude::*;
use libra_network_address::{
encrypted::{
RawEncNetworkAddress, TEST_SHARED_VAL_NETADDR_KEY, TEST_SHARED_VAL_NETADDR_KEY_VERSION,
},
NetworkAddress, RawNetworkAddress,
};
use libra_temppath::TempPath;
use libra_types::{
access_path::AccessPath,
account_address::AccountAddress,
account_config::{
from_currency_code_string, libra_root_address, testnet_dd_account_address,
type_tag_for_currency_code, ACCOUNT_RECEIVED_EVENT_PATH, ACCOUNT_SENT_EVENT_PATH, LBR_NAME,
},
account_state::AccountState,
ledger_info::LedgerInfoWithSignatures,
on_chain_config::VMPublishingOption,
transaction::{
authenticator::AuthenticationKey,
helpers::{create_unsigned_txn, create_user_txn, TransactionSigner},
parse_transaction_argument, Module, RawTransaction, Script, SignedTransaction,
TransactionArgument, TransactionPayload, Version,
},
vm_status::StatusCode,
waypoint::Waypoint,
};
use libra_wallet::{io_utils, WalletLibrary};
use num_traits::{
cast::{FromPrimitive, ToPrimitive},
identities::Zero,
};
use reqwest::Url;
use resource_viewer::{AnnotatedAccountStateBlob, MoveValueAnnotator, NullStateView};
use rust_decimal::Decimal;
use std::{
collections::HashMap,
convert::TryFrom,
fmt, fs,
io::{stdout, Write},
path::{Path, PathBuf},
process::Command,
str::{self, FromStr},
thread, time,
};
use transaction_builder::encode_set_validator_config_script;
// Default file name used to persist the client wallet mnemonic.
const CLIENT_WALLET_MNEMONIC_FILE: &str = "client.mnemonic";
// Default gas unit price attached to submitted transactions.
const GAS_UNIT_PRICE: u64 = 0;
// Default maximum gas budget per transaction.
const MAX_GAS_AMOUNT: u64 = 1_000_000;
// Transaction expiration offset — presumably seconds from now; TODO confirm unit.
const TX_EXPIRATION: i64 = 100;
/// Enum used for error formatting: names the expected type of a CLI argument
/// when reporting a parse failure.
#[derive(Debug)]
enum InputType {
    Bool,
    UnsignedInt,
    Usize,
}
/// Account data is stored in a map and referenced by an index.
/// Pairs an account address with its position in the client's account list.
#[derive(Debug)]
pub struct AddressAndIndex {
    /// Address of the account.
    pub address: AccountAddress,
    /// The account_ref_id of this account in client.
    pub index: usize,
}
/// Account is represented either as an entry into accounts vector or as an address.
pub enum AccountEntry {
    /// Index into client.accounts
    Index(usize),
    /// Address of the account
    Address(AccountAddress),
}
/// Used to return the sequence and sender account index submitted for a transfer.
pub struct IndexAndSequence {
    /// Index/key of the account in TestClient::accounts vector.
    pub account_index: AccountEntry,
    /// Sequence number of the account.
    pub sequence_number: u64,
}
/// Proxy handling CLI commands/inputs: wraps the JSON-RPC client, the local
/// wallet, and any privileged (root / designated-dealer) accounts.
pub struct ClientProxy {
    /// client for admission control interface.
    pub client: LibraClient,
    /// Created accounts.
    pub accounts: Vec<AccountData>,
    /// Address to account_ref_id map.
    address_to_ref_id: HashMap<AccountAddress, usize>,
    /// Host that operates a faucet service
    faucet_server: String,
    /// Account used for Libra Root operations (e.g., adding a new transaction script)
    pub libra_root_account: Option<AccountData>,
    /// Account used for "minting" operations
    pub testnet_designated_dealer_account: Option<AccountData>,
    /// Wallet library managing user accounts.
    wallet: WalletLibrary,
    /// Whether to sync with validator on wallet recovery.
    sync_on_wallet_recovery: bool,
    /// temp files (alive for duration of program)
    temp_files: Vec<PathBuf>,
    // invariant self.address_to_ref_id.values().iter().all(|i| i < self.accounts.len())
}
impl ClientProxy {
    /// Construct a new TestClient.
    ///
    /// `libra_root_account_file` / `testnet_designated_dealer_account_file` are key
    /// files; an empty string means the corresponding privileged account is not loaded.
    /// If `faucet_server` is `None`, it is derived from the URL host by replacing
    /// "client" with "faucet".
    pub fn new(
        url: &str,
        libra_root_account_file: &str,
        testnet_designated_dealer_account_file: &str,
        sync_on_wallet_recovery: bool,
        faucet_server: Option<String>,
        mnemonic_file: Option<String>,
        waypoint: Waypoint,
    ) -> Result<Self> {
        // fail fast if url is not valid
        let url = Url::parse(url)?;
        let mut client = LibraClient::new(url.clone(), waypoint)?;
        let accounts = vec![];
        let libra_root_account = if libra_root_account_file.is_empty() {
            None
        } else {
            let libra_root_account_key = generate_key::load_key(libra_root_account_file);
            let libra_root_account_data = Self::get_account_data_from_address(
                &mut client,
                libra_root_address(),
                true,
                Some(KeyPair::from(libra_root_account_key)),
                None,
            )?;
            Some(libra_root_account_data)
        };
        let dd_account = if testnet_designated_dealer_account_file.is_empty() {
            None
        } else {
            let dd_account_key = generate_key::load_key(testnet_designated_dealer_account_file);
            let dd_account_data = Self::get_account_data_from_address(
                &mut client,
                testnet_dd_account_address(),
                true,
                Some(KeyPair::from(dd_account_key)),
                None,
            )?;
            Some(dd_account_data)
        };
        let faucet_server = match faucet_server {
            Some(server) => server,
            None => url
                .host_str()
                .ok_or_else(|| format_err!("Missing host in URL"))?
                .replace("client", "faucet"),
        };
        // `accounts` is empty here, so this map starts empty; it is kept in sync by
        // `insert_account_data` as accounts are added.
        let address_to_ref_id = accounts
            .iter()
            .enumerate()
            .map(|(ref_id, acc_data): (usize, &AccountData)| (acc_data.address, ref_id))
            .collect::<HashMap<AccountAddress, usize>>();
        Ok(ClientProxy {
            client,
            accounts,
            address_to_ref_id,
            faucet_server,
            libra_root_account,
            testnet_designated_dealer_account: dd_account,
            wallet: Self::get_libra_wallet(mnemonic_file)?,
            sync_on_wallet_recovery,
            temp_files: vec![],
        })
    }
fn get_account_ref_id(&self, sender_account_address: &AccountAddress) -> Result<usize> {
Ok(*self
.address_to_ref_id
.get(&sender_account_address)
.ok_or_else(|| {
format_err!(
"Unable to find existing managing account by address: {}, to see all existing \
accounts, run: 'account list'",
sender_account_address
)
})?)
}
    /// Returns the account index that should be used by user to reference this account
    ///
    /// Derives the next address from the wallet, builds its `AccountData` (optionally
    /// syncing state with the validator), and registers it with this client.
    pub fn create_next_account(&mut self, sync_with_validator: bool) -> Result<AddressAndIndex> {
        let (auth_key, _) = self.wallet.new_address()?;
        let account_data = Self::get_account_data_from_address(
            &mut self.client,
            auth_key.derived_address(),
            sync_with_validator,
            None,
            Some(auth_key.to_vec()),
        )?;
        Ok(self.insert_account_data(account_data))
    }
    /// Returns the ledger info corresponding to the latest epoch change
    /// (could further be used for e.g., generating a waypoint)
    pub fn latest_epoch_change_li(&self) -> Option<&LedgerInfoWithSignatures> {
        self.client.latest_epoch_change_li()
    }
    /// Print index and address of all accounts, followed by the privileged
    /// (root / testnet DD) accounts if they are loaded.
    pub fn print_all_accounts(&self) {
        if self.accounts.is_empty() {
            println!("No user accounts");
        } else {
            for (ref index, ref account) in self.accounts.iter().enumerate() {
                println!(
                    "User account index: {}, address: {}, sequence number: {}, status: {:?}",
                    index,
                    hex::encode(&account.address),
                    account.sequence_number,
                    account.status,
                );
            }
        }
        if let Some(libra_root_account) = &self.libra_root_account {
            println!(
                "AssocRoot account address: {}, sequence_number: {}, status: {:?}",
                hex::encode(&libra_root_account.address),
                libra_root_account.sequence_number,
                libra_root_account.status,
            );
        }
        if let Some(testnet_dd_account) = &self.testnet_designated_dealer_account {
            println!(
                "Testnet DD account address: {}, sequence_number: {}, status: {:?}",
                hex::encode(&testnet_dd_account.address),
                testnet_dd_account.sequence_number,
                testnet_dd_account.status,
            );
        }
    }
/// Clone all accounts held in the client.
#[cfg(any(test, feature = "fuzzing"))]
pub fn copy_all_accounts(&self) -> Vec<AccountData> {
self.accounts.clone()
}
/// Set the account of this client instance.
pub fn set_accounts(&mut self, accounts: Vec<AccountData>) -> Vec<AddressAndIndex> {
self.accounts.clear();
self.address_to_ref_id.clear();
let mut ret = vec![];
for data in accounts {
ret.push(self.insert_account_data(data));
}
ret
}
/// Get balance from validator for the account specified.
pub fn get_balances(&mut self, space_delim_strings: &[&str]) -> Result<Vec<String>> {
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments for getting balances"
);
let (address, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
let currency_info: HashMap<_, _> = self
.client
.get_currency_info()?
.into_iter()
.map(|view| (view.code.clone(), view))
.collect();
self.get_account_resource_and_update(address)
.and_then(|res| {
res.balances
.iter()
.map(|amt_view| {
let info = currency_info.get(&amt_view.currency).ok_or_else(|| {
format_err!(
"Unable to get currencyy info for balance {}",
amt_view.currency
)
})?;
let whole_num = amt_view.amount / info.scaling_factor;
let remainder = amt_view.amount % info.scaling_factor;
Ok(format!(
"{}.{:0>6}{}",
whole_num.to_string(),
remainder.to_string(),
amt_view.currency
))
})
.collect()
})
}
    /// Get the latest sequence number from validator for the account specified.
    ///
    /// `space_delim_strings` is `[command, account, reset_sequence_number?]`; when the
    /// optional third token parses to `true`, the locally cached sequence number of the
    /// matching account (root, testnet DD, or user account) is reset to the on-chain value.
    pub fn get_sequence_number(&mut self, space_delim_strings: &[&str]) -> Result<u64> {
        ensure!(
            space_delim_strings.len() == 2 || space_delim_strings.len() == 3,
            "Invalid number of arguments for getting sequence number"
        );
        let (address, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
        let sequence_number = self
            .get_account_resource_and_update(address)?
            .sequence_number;
        let reset_sequence_number = if space_delim_strings.len() == 3 {
            parse_bool(space_delim_strings[2]).map_err(|error| {
                format_parse_data_error(
                    "reset_sequence_number",
                    InputType::Bool,
                    space_delim_strings[2],
                    error,
                )
            })?
        } else {
            false
        };
        if reset_sequence_number {
            // Privileged accounts are stored separately from `self.accounts`,
            // so check them first before falling back to the user-account lookup.
            if let Some(libra_root_account) = &mut self.libra_root_account {
                if libra_root_account.address == address {
                    libra_root_account.sequence_number = sequence_number;
                    return Ok(sequence_number);
                }
            }
            if let Some(testnet_dd_account) = &mut self.testnet_designated_dealer_account {
                if testnet_dd_account.address == address {
                    testnet_dd_account.sequence_number = sequence_number;
                    return Ok(sequence_number);
                }
            }
            let mut account = self.mut_account_from_parameter(space_delim_strings[1])?;
            // Set sequence_number to latest one.
            account.sequence_number = sequence_number;
        }
        Ok(sequence_number)
    }
    /// Adds a currency to the sending account. Fails if that currency already exists.
    ///
    /// `space_delim_strings` is
    /// `[command, sender, currency_code, gas_unit_price?, max_gas_amount?, gas_currency?]`.
    /// When `is_blocking` is true, waits until the transaction reaches the next
    /// sequence number before returning.
    pub fn add_currency(&mut self, space_delim_strings: &[&str], is_blocking: bool) -> Result<()> {
        ensure!(
            space_delim_strings.len() >= 3 && space_delim_strings.len() <= 6,
            "Invalid number of arguments for adding currency"
        );
        let (sender_address, _) =
            self.get_account_address_from_parameter(space_delim_strings[1])?;
        let sender_ref_id = self.get_account_ref_id(&sender_address)?;
        let sender = self.accounts.get(sender_ref_id).unwrap();
        let sequence_number = sender.sequence_number;
        let currency_to_add = space_delim_strings[2];
        let currency_code = from_currency_code_string(currency_to_add).map_err(|_| {
            format_err!(
                "Invalid currency code {} provided to add currency",
                currency_to_add
            )
        })?;
        // Optional gas parameters: `None` defers to the defaults chosen downstream.
        let gas_unit_price = if space_delim_strings.len() > 3 {
            Some(space_delim_strings[3].parse::<u64>().map_err(|error| {
                format_parse_data_error(
                    "gas_unit_price",
                    InputType::UnsignedInt,
                    space_delim_strings[3],
                    error,
                )
            })?)
        } else {
            None
        };
        let max_gas_amount = if space_delim_strings.len() > 4 {
            Some(space_delim_strings[4].parse::<u64>().map_err(|error| {
                format_parse_data_error(
                    "max_gas_amount",
                    InputType::UnsignedInt,
                    space_delim_strings[4],
                    error,
                )
            })?)
        } else {
            None
        };
        let gas_currency_code = if space_delim_strings.len() > 5 {
            Some(space_delim_strings[5].to_owned())
        } else {
            None
        };
        let program = transaction_builder::encode_add_currency_to_account_script(
            type_tag_for_currency_code(currency_code),
        );
        let txn = self.create_txn_to_submit(
            TransactionPayload::Script(program),
            &sender,
            max_gas_amount, /* max_gas_amount */
            gas_unit_price, /* gas_unit_price */
            gas_currency_code, /* gas_currency_code */
        )?;
        self.client
            .submit_transaction(self.accounts.get_mut(sender_ref_id), txn)?;
        if is_blocking {
            self.wait_for_transaction(sender_address, sequence_number + 1)?;
        }
        Ok(())
    }
    /// Mints coins for the receiver specified.
    ///
    /// `space_delim_strings` is `[command, receiver, amount, currency, "use_base_units"?]`.
    /// If a local root account is loaded, a testing account is created for the receiver
    /// first; minting then goes through the local designated-dealer account if present,
    /// otherwise through the remote faucet service.
    pub fn mint_coins(&mut self, space_delim_strings: &[&str], is_blocking: bool) -> Result<()> {
        ensure!(
            space_delim_strings.len() >= 4 && space_delim_strings.len() <= 5,
            "Invalid number of arguments for mint"
        );
        let (receiver, receiver_auth_key_opt) =
            self.get_account_address_from_parameter(space_delim_strings[1])?;
        let receiver_auth_key = receiver_auth_key_opt.ok_or_else(|| {
            format_err!("Need authentication key to create new account via minting")
        })?;
        let mint_currency = space_delim_strings[3];
        let use_base_units = space_delim_strings
            .get(4)
            .map(|s| s == &"use_base_units")
            .unwrap_or(false);
        // NOTE(review): "represenation" misspelling matches the helper's actual
        // name defined elsewhere in this file.
        let num_coins = if !use_base_units {
            self.convert_to_on_chain_represenation(space_delim_strings[2], mint_currency)?
        } else {
            Self::convert_to_scaled_representation(space_delim_strings[2], 1, 1)?
        };
        let currency_code = from_currency_code_string(mint_currency)
            .map_err(|_| format_err!("Invalid currency code {} provided to mint", mint_currency))?;
        ensure!(num_coins > 0, "Invalid number of coins to mint.");
        if self.libra_root_account.is_some() {
            let script = transaction_builder::encode_create_testing_account_script(
                type_tag_for_currency_code(currency_code.clone()),
                receiver,
                receiver_auth_key.prefix().to_vec(),
                false, /* add all currencies */
            );
            // If the receiver is local, create it now.
            if let Some(pos) = self
                .accounts
                .iter()
                .position(|account_data| account_data.address == receiver)
            {
                let status = &self.accounts.get(pos).unwrap().status;
                if &AccountStatus::Local == status {
                    // This needs to be blocking since the mint can't happen until it completes
                    self.association_transaction_with_local_libra_root_account(
                        TransactionPayload::Script(script),
                        true,
                    )?;
                    self.accounts.get_mut(pos).unwrap().status = AccountStatus::Persisted;
                }
            } else {
                // We can't determine the account state. So try and create the account, but
                // if it already exists don't error.
                let _ = self.association_transaction_with_local_libra_root_account(
                    TransactionPayload::Script(script),
                    true,
                );
            } // else, the account has already been created -- do nothing
        }
        match self.testnet_designated_dealer_account {
            Some(_) => {
                let script = transaction_builder::encode_testnet_mint_script(
                    type_tag_for_currency_code(currency_code),
                    receiver,
                    num_coins,
                );
                self.association_transaction_with_local_testnet_dd_account(
                    TransactionPayload::Script(script),
                    is_blocking,
                )
            }
            None => self.mint_coins_with_faucet_service(
                receiver_auth_key,
                num_coins,
                mint_currency.to_owned(),
                is_blocking,
            ),
        }
    }
    /// Allow executing arbitrary script in the network.
    ///
    /// Requires the local libra root account; panics via `unimplemented!()` if it
    /// is not loaded.
    pub fn enable_custom_script(
        &mut self,
        space_delim_strings: &[&str],
        is_blocking: bool,
    ) -> Result<()> {
        ensure!(
            space_delim_strings[0] == "enable_custom_script",
            "inconsistent command '{}' for enable_custom_script",
            space_delim_strings[0]
        );
        ensure!(
            space_delim_strings.len() == 1,
            "Invalid number of arguments for setting publishing option"
        );
        match self.libra_root_account {
            Some(_) => self.association_transaction_with_local_libra_root_account(
                TransactionPayload::Script(
                    transaction_builder::encode_modify_publishing_option_script(
                        VMPublishingOption::CustomScripts,
                    ),
                ),
                is_blocking,
            ),
            None => unimplemented!(),
        }
    }
    /// Only allow executing predefined script in the Move standard library in the network.
    ///
    /// Locks publishing to the stdlib script whitelist. Requires the local libra root
    /// account; panics via `unimplemented!()` if it is not loaded.
    pub fn disable_custom_script(
        &mut self,
        space_delim_strings: &[&str],
        is_blocking: bool,
    ) -> Result<()> {
        ensure!(
            space_delim_strings[0] == "disable_custom_script",
            "inconsistent command '{}' for disable_custom_script",
            space_delim_strings[0]
        );
        ensure!(
            space_delim_strings.len() == 1,
            "Invalid number of arguments for setting publishing option"
        );
        match self.libra_root_account {
            Some(_) => self.association_transaction_with_local_libra_root_account(
                TransactionPayload::Script(
                    transaction_builder::encode_modify_publishing_option_script(
                        VMPublishingOption::Locked(StdlibScript::whitelist()),
                    ),
                ),
                is_blocking,
            ),
            None => unimplemented!(),
        }
    }
/// Only allow executing predefined script in the Move standard library in the network.
pub fn upgrade_stdlib(
&mut self,
space_delim_strings: &[&str],
is_blocking: bool,
) -> Result<()> {
ensure!(
space_delim_strings[0] == "upgrade_stdlib",
"inconsistent command '{}' for upgrade_stdlib",
space_delim_strings[0]
);
ensure!(
space_delim_strings.len() == 1,
"Invalid number of arguments for upgrading_stdlib_transaction"
);
match self.libra_root_account {
Some(_) => self.association_transaction_with_local_libra_root_account(
TransactionPayload::WriteSet(
transaction_builder::encode_stdlib_upgrade_transaction(StdLibOptions::Fresh),
),
is_blocking,
),
None => unimplemented!(),
}
}
/// Remove an existing validator from Validator Set.
pub fn remove_validator(
&mut self,
space_delim_strings: &[&str],
is_blocking: bool,
) -> Result<()> {
ensure!(
space_delim_strings[0] == "remove_validator",
"inconsistent command '{}' for remove_validator",
space_delim_strings[0]
);
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments for removing validator"
);
let (account_address, _) =
self.get_account_address_from_parameter(space_delim_strings[1])?;
match self.libra_root_account {
Some(_) => self.association_transaction_with_local_libra_root_account(
TransactionPayload::Script(transaction_builder::encode_remove_validator_script(
account_address,
)),
is_blocking,
),
None => unimplemented!(),
}
}
/// Add a new validator to the Validator Set.
pub fn add_validator(&mut self, space_delim_strings: &[&str], is_blocking: bool) -> Result<()> {
ensure!(
space_delim_strings[0] == "add_validator",
"inconsistent command '{}' for add_validator",
space_delim_strings[0]
);
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments for adding validator"
);
let (account_address, _) =
self.get_account_address_from_parameter(space_delim_strings[1])?;
match self.libra_root_account {
Some(_) => self.association_transaction_with_local_libra_root_account(
TransactionPayload::Script(transaction_builder::encode_add_validator_script(
account_address,
)),
is_blocking,
),
None => unimplemented!(),
}
}
    /// Register an account as validator candidate with ValidatorConfig
    ///
    /// Expected arguments (9 tokens):
    /// [command, address, account_private_key, consensus_pubkey,
    ///  validator_network_identity_pubkey, validator_network_address,
    ///  fullnode_network_identity_pubkey, fullnode_network_address, _]
    /// The transaction is signed with the provided private key and submitted
    /// from the validator account itself.
    pub fn register_validator(
        &mut self,
        space_delim_strings: &[&str],
        is_blocking: bool,
    ) -> Result<()> {
        ensure!(
            space_delim_strings[0] == "register_validator",
            "inconsistent command '{}' for register_validator",
            space_delim_strings[0]
        );
        ensure!(
            space_delim_strings.len() == 9,
            "Invalid number of arguments for registering validator"
        );
        // parse args
        let (address, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
        let private_key = Ed25519PrivateKey::from_encoded_string(space_delim_strings[2])?;
        let consensus_public_key = Ed25519PublicKey::from_encoded_string(space_delim_strings[3])?;
        let network_identity_key = x25519::PublicKey::from_encoded_string(space_delim_strings[4])?;
        let network_address = NetworkAddress::from_str(space_delim_strings[5])?;
        let raw_network_address = RawNetworkAddress::try_from(&network_address)?;
        let fullnode_identity_key = x25519::PublicKey::from_encoded_string(space_delim_strings[6])?;
        let fullnode_network_address = NetworkAddress::from_str(space_delim_strings[7])?;
        let raw_fullnode_network_address = RawNetworkAddress::try_from(&fullnode_network_address)?;
        // Sync the account's current sequence number from the validator
        // (third argument `true`) so the address encryption below uses it.
        let mut sender = Self::get_account_data_from_address(
            &mut self.client,
            address,
            true,
            Some(KeyPair::from(private_key)),
            None,
        )?;
        let seq_num = sender.sequence_number;
        let addr_idx = 0;
        // Encrypt the validator network address. NOTE(review): the key names
        // suggest a shared testnet-only encryption key — confirm before
        // relying on this for anything other than test networks.
        let enc_network_address = raw_network_address.encrypt(
            &TEST_SHARED_VAL_NETADDR_KEY,
            TEST_SHARED_VAL_NETADDR_KEY_VERSION,
            &address,
            seq_num,
            addr_idx,
        );
        let raw_enc_network_address = RawEncNetworkAddress::try_from(&enc_network_address)?;
        let program = encode_set_validator_config_script(
            address,
            consensus_public_key.to_bytes().to_vec(),
            network_identity_key.to_bytes(),
            raw_enc_network_address.into(),
            fullnode_identity_key.to_bytes(),
            raw_fullnode_network_address.into(),
        );
        let txn = self.create_txn_to_submit(
            TransactionPayload::Script(program),
            &sender,
            None,
            None,
            None,
        )?;
        self.client.submit_transaction(Some(&mut sender), txn)?;
        // `sender.sequence_number` here is the post-submission value;
        // `wait_for_transaction` internally waits on `sequence_number - 1`.
        if is_blocking {
            self.wait_for_transaction(sender.address, sender.sequence_number)?;
        }
        Ok(())
    }
    /// Waits for the next transaction for a specific address and prints it
    ///
    /// Polls the validator for the transaction at `sequence_number - 1`
    /// (callers pass the account's sequence number *after* submission, so the
    /// just-submitted transaction sits one below it). Returns `Ok(())` once
    /// the transaction status is `EXECUTED`, `Err` if it executed with any
    /// other VM status, and panics after ~5000 polls (10ms apart) time out.
    ///
    /// NOTE(review): `sequence_number - 1` underflows (panicking in debug
    /// builds) if a caller ever passes 0 — confirm all call sites pass a
    /// post-submission sequence number.
    pub fn wait_for_transaction(
        &mut self,
        account: AccountAddress,
        sequence_number: u64,
    ) -> Result<()> {
        // ~5000 iterations x 10ms sleep ≈ 50s upper bound before panicking.
        let mut max_iterations = 5000;
        println!(
            "waiting for {} with sequence number {}",
            account, sequence_number
        );
        loop {
            // Flush so the progress dots below appear immediately.
            stdout().flush().unwrap();
            match self
                .client
                .get_txn_by_acc_seq(account, sequence_number - 1, true)
            {
                Ok(Some(txn_view)) => {
                    if txn_view.vm_status == StatusCode::EXECUTED {
                        println!("transaction executed!");
                        if txn_view.events.is_empty() {
                            println!("no events emitted");
                        }
                        break Ok(());
                    } else {
                        break Err(format_err!(
                            "transaction failed to execute; status: {:?}!",
                            txn_view.vm_status
                        ));
                    }
                }
                Err(e) => {
                    // Query errors are transient here: log and keep polling.
                    println!("Response with error: {:?}", e);
                }
                _ => {
                    // Transaction not yet committed — print a progress dot.
                    print!(".");
                }
            }
            max_iterations -= 1;
            if max_iterations == 0 {
                panic!("wait_for_transaction timeout");
            }
            thread::sleep(time::Duration::from_millis(10));
        }
    }
    /// Transfer num_coins from sender account to receiver. If is_blocking = true,
    /// it will keep querying validator till the sequence number is bumped up in validator.
    ///
    /// `num_coins` is already in on-chain (scaled) units. Returns the sender's
    /// account index together with the sequence number the submitted
    /// transaction was assigned.
    pub fn transfer_coins_int(
        &mut self,
        sender_account_ref_id: usize,
        receiver_address: &AccountAddress,
        num_coins: u64,
        coin_currency: String,
        gas_unit_price: Option<u64>,
        gas_currency_code: Option<String>,
        max_gas_amount: Option<u64>,
        is_blocking: bool,
    ) -> Result<IndexAndSequence> {
        let sender_address;
        let sender_sequence;
        let currency_code = from_currency_code_string(&coin_currency)
            .map_err(|_| format_err!("Invalid currency code {} specified", coin_currency))?;
        // Gas is paid in the transferred currency unless explicitly overridden.
        let gas_currency_code = gas_currency_code.or(Some(coin_currency));
        {
            let sender = self.accounts.get(sender_account_ref_id).ok_or_else(|| {
                format_err!("Unable to find sender account: {}", sender_account_ref_id)
            })?;
            let program = transaction_builder::encode_peer_to_peer_with_metadata_script(
                type_tag_for_currency_code(currency_code),
                *receiver_address,
                num_coins,
                vec![],
                vec![],
            );
            let txn = self.create_txn_to_submit(
                TransactionPayload::Script(program),
                sender,
                max_gas_amount, /* max_gas_amount */
                gas_unit_price, /* gas_unit_price */
                gas_currency_code, /* gas_currency_code */
            )?;
            // Look the account up a second time, mutably: `create_txn_to_submit`
            // above needed `&self`, so the shared borrow of `sender` must end
            // before we can hand `submit_transaction` a mutable reference.
            let sender_mut = self
                .accounts
                .get_mut(sender_account_ref_id)
                .ok_or_else(|| {
                    format_err!("Unable to find sender account: {}", sender_account_ref_id)
                })?;
            self.client.submit_transaction(Some(sender_mut), txn)?;
            sender_address = sender_mut.address;
            sender_sequence = sender_mut.sequence_number;
        }
        if is_blocking {
            self.wait_for_transaction(sender_address, sender_sequence)?;
        }
        // `sender_sequence` is the post-submission value, so the submitted
        // transaction's own sequence number is one less.
        Ok(IndexAndSequence {
            account_index: AccountEntry::Index(sender_account_ref_id),
            sequence_number: sender_sequence - 1,
        })
    }
/// Prepare a transfer transaction: return the unsigned raw transaction
pub fn prepare_transfer_coins(
&mut self,
sender_address: AccountAddress,
sender_sequence_number: u64,
receiver_address: AccountAddress,
num_coins: u64,
coin_currency: String,
gas_unit_price: Option<u64>,
max_gas_amount: Option<u64>,
gas_currency_code: Option<String>,
) -> Result<RawTransaction> {
let currency_code = from_currency_code_string(&coin_currency)
.map_err(|_| format_err!("Invalid currency code {} specified", coin_currency))?;
let program = transaction_builder::encode_peer_to_peer_with_metadata_script(
type_tag_for_currency_code(currency_code),
receiver_address,
num_coins,
vec![],
vec![],
);
Ok(create_unsigned_txn(
TransactionPayload::Script(program),
sender_address,
sender_sequence_number,
max_gas_amount.unwrap_or(MAX_GAS_AMOUNT),
gas_unit_price.unwrap_or(GAS_UNIT_PRICE),
gas_currency_code.unwrap_or_else(|| LBR_NAME.to_owned()),
TX_EXPIRATION,
))
}
/// Transfers coins from sender to receiver.
pub fn transfer_coins(
&mut self,
space_delim_strings: &[&str],
is_blocking: bool,
) -> Result<IndexAndSequence> {
ensure!(
space_delim_strings.len() >= 5 && space_delim_strings.len() <= 7,
"Invalid number of arguments for transfer"
);
let (sender_account_address, _) =
self.get_account_address_from_parameter(space_delim_strings[1])?;
let (receiver_address, _) =
self.get_account_address_from_parameter(space_delim_strings[2])?;
let transfer_currency = space_delim_strings[4];
let num_coins =
self.convert_to_on_chain_represenation(space_delim_strings[3], transfer_currency)?;
let gas_unit_price = if space_delim_strings.len() > 5 {
Some(space_delim_strings[5].parse::<u64>().map_err(|error| {
format_parse_data_error(
"gas_unit_price",
InputType::UnsignedInt,
space_delim_strings[5],
error,
)
})?)
} else {
None
};
let max_gas_amount = if space_delim_strings.len() > 6 {
Some(space_delim_strings[6].parse::<u64>().map_err(|error| {
format_parse_data_error(
"max_gas_amount",
InputType::UnsignedInt,
space_delim_strings[6],
error,
)
})?)
} else {
None
};
let gas_currency = if space_delim_strings.len() > 7 {
space_delim_strings[7].to_owned()
} else {
transfer_currency.to_owned()
};
let sender_account_ref_id = self.get_account_ref_id(&sender_account_address)?;
self.transfer_coins_int(
sender_account_ref_id,
&receiver_address,
num_coins,
transfer_currency.to_owned(),
gas_unit_price,
Some(gas_currency),
max_gas_amount,
is_blocking,
)
}
    /// Compile Move program
    ///
    /// Expected arguments: [command, sender_address, source_file, deps...].
    /// Shells out to `cargo run -p move-lang --bin move-build`, writing the
    /// compiled `.mv` files into a temporary directory, and returns their
    /// paths.
    pub fn compile_program(&mut self, space_delim_strings: &[&str]) -> Result<Vec<String>> {
        ensure!(
            space_delim_strings[0] == "compile",
            "inconsistent command '{}' for compile_program",
            space_delim_strings[0]
        );
        let (address, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
        let file_path = space_delim_strings[2];
        let mut tmp_output_dir = TempPath::new();
        // Persist so the output directory survives this call; the path is
        // recorded in `self.temp_files` (presumably cleaned up elsewhere —
        // TODO confirm).
        tmp_output_dir.persist();
        tmp_output_dir
            .create_as_dir()
            .expect("error creating temporary output directory");
        let tmp_output_path = tmp_output_dir.as_ref();
        self.temp_files.push(tmp_output_path.to_path_buf());
        let mut args = format!(
            "run -p move-lang --bin move-build -- {} -s {} -o {}",
            file_path,
            address,
            tmp_output_path.display(),
        );
        for dep in &space_delim_strings[3..] {
            args.push_str(&format!(" -d {}", dep));
        }
        // NOTE(review): the argument string is split on single spaces, so any
        // path containing a space will be mangled.
        let status = Command::new("cargo")
            .args(args.split(' '))
            .spawn()?
            .wait()?;
        if !status.success() {
            return Err(format_err!("compilation failed"));
        }
        // Collect every `.mv` file produced anywhere under the output dir.
        let output_files = walkdir::WalkDir::new(tmp_output_path)
            .into_iter()
            .filter_map(|e| e.ok())
            .filter(|e| {
                let path = e.path();
                e.file_type().is_file()
                    && path
                        .extension()
                        .and_then(|s| s.to_str())
                        .map(|ext| ext == "mv")
                        .unwrap_or(false)
            })
            .filter_map(|e| e.path().to_str().map(|s| s.to_string()))
            .collect::<Vec<_>>();
        if output_files.is_empty() {
            bail!("compiler failed to produce an output file")
        }
        Ok(output_files)
    }
/// Submit a transaction to the network given the unsigned raw transaction, sender public key
/// and signature
pub fn submit_signed_transaction(
&mut self,
raw_txn: RawTransaction,
public_key: Ed25519PublicKey,
signature: Ed25519Signature,
) -> Result<()> {
let transaction = SignedTransaction::new(raw_txn, public_key, signature);
let sender_address = transaction.sender();
let sender_sequence = transaction.sequence_number();
self.client.submit_transaction(None, transaction)?;
// blocking by default (until transaction completion)
self.wait_for_transaction(sender_address, sender_sequence + 1)
}
    /// Submit an arbitrary transaction payload from the account named in
    /// `space_delim_strings[1]`, then block until it is committed.
    fn submit_program(
        &mut self,
        space_delim_strings: &[&str],
        program: TransactionPayload,
    ) -> Result<()> {
        let (sender_address, _) =
            self.get_account_address_from_parameter(space_delim_strings[1])?;
        let sender_ref_id = self.get_account_ref_id(&sender_address)?;
        // `unwrap` is safe: `get_account_ref_id` just resolved this index.
        let sender = self.accounts.get(sender_ref_id).unwrap();
        let sequence_number = sender.sequence_number;
        let txn = self.create_txn_to_submit(program, &sender, None, None, None)?;
        self.client
            .submit_transaction(self.accounts.get_mut(sender_ref_id), txn)?;
        // Wait on the pre-submission sequence number + 1 (the wait helper
        // internally looks up `sequence_number - 1`).
        self.wait_for_transaction(sender_address, sequence_number + 1)
    }
/// Publish Move module
pub fn publish_module(&mut self, space_delim_strings: &[&str]) -> Result<()> {
ensure!(
space_delim_strings[0] == "publish",
"inconsistent command '{}' for publish_module",
space_delim_strings[0]
);
let module_bytes = fs::read(space_delim_strings[2])?;
self.submit_program(
space_delim_strings,
TransactionPayload::Module(Module::new(module_bytes)),
)
}
/// Execute custom script
pub fn execute_script(&mut self, space_delim_strings: &[&str]) -> Result<()> {
ensure!(
space_delim_strings[0] == "execute",
"inconsistent command '{}' for execute_script",
space_delim_strings[0]
);
let script_bytes = fs::read(space_delim_strings[2])?;
let arguments: Vec<_> = space_delim_strings[3..]
.iter()
.filter_map(|arg| parse_transaction_argument_for_client(arg).ok())
.collect();
// TODO: support type arguments in the client.
self.submit_program(
space_delim_strings,
TransactionPayload::Script(Script::new(script_bytes, vec![], arguments)),
)
}
/// Get the latest account state from validator.
pub fn get_latest_account_state(
&mut self,
space_delim_strings: &[&str],
) -> Result<(Option<AccountView>, Version)> {
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments to get latest account state"
);
let (account, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
self.get_account_state_and_update(account)
}
/// Get the latest annotated account resources from validator.
pub fn get_latest_account_resources(
&mut self,
space_delim_strings: &[&str],
) -> Result<(Option<AnnotatedAccountStateBlob>, Version)> {
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments to get latest account state"
);
let (account, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
self.get_annotate_account_blob(account)
}
/// Get committed txn by account and sequence number.
pub fn get_committed_txn_by_acc_seq(
&mut self,
space_delim_strings: &[&str],
) -> Result<Option<TransactionView>> {
ensure!(
space_delim_strings.len() == 4,
"Invalid number of arguments to get transaction by account and sequence number"
);
let (account, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
let sequence_number = space_delim_strings[2].parse::<u64>().map_err(|error| {
format_parse_data_error(
"account_sequence_number",
InputType::UnsignedInt,
space_delim_strings[2],
error,
)
})?;
let fetch_events = parse_bool(space_delim_strings[3]).map_err(|error| {
format_parse_data_error(
"fetch_events",
InputType::Bool,
space_delim_strings[3],
error,
)
})?;
self.client
.get_txn_by_acc_seq(account, sequence_number, fetch_events)
}
/// Get committed txn by account and sequence number
pub fn get_committed_txn_by_range(
&mut self,
space_delim_strings: &[&str],
) -> Result<Vec<TransactionView>> {
ensure!(
space_delim_strings.len() == 4,
"Invalid number of arguments to get transaction by range"
);
let start_version = space_delim_strings[1].parse::<u64>().map_err(|error| {
format_parse_data_error(
"start_version",
InputType::UnsignedInt,
space_delim_strings[1],
error,
)
})?;
let limit = space_delim_strings[2].parse::<u64>().map_err(|error| {
format_parse_data_error(
"limit",
InputType::UnsignedInt,
space_delim_strings[2],
error,
)
})?;
let fetch_events = parse_bool(space_delim_strings[3]).map_err(|error| {
format_parse_data_error(
"fetch_events",
InputType::Bool,
space_delim_strings[3],
error,
)
})?;
self.client
.get_txn_by_range(start_version, limit, fetch_events)
}
    /// Get account address and (if applicable) authentication key from parameter. If the parameter
    /// is string of address, try to convert it to address, otherwise, try to convert to u64 and
    /// looking at TestClient::accounts.
    ///
    /// Resolution order: authentication key string → derived address + key;
    /// address string → address only; otherwise the string is parsed as a
    /// local account index into `self.accounts`.
    pub fn get_account_address_from_parameter(
        &self,
        para: &str,
    ) -> Result<(AccountAddress, Option<AuthenticationKey>)> {
        if is_authentication_key(para) {
            let auth_key = ClientProxy::authentication_key_from_string(para)?;
            Ok((auth_key.derived_address(), Some(auth_key)))
        } else if is_address(para) {
            Ok((ClientProxy::address_from_strings(para)?, None))
        } else {
            // Fall back to interpreting the parameter as a local account index.
            let account_ref_id = para.parse::<usize>().map_err(|error| {
                format_parse_data_error(
                    "account_reference_id/account_address",
                    InputType::Usize,
                    para,
                    error,
                )
            })?;
            let account_data = self.accounts.get(account_ref_id).ok_or_else(|| {
                format_err!(
                    "Unable to find account by account reference id: {}, to see all existing \
                     accounts, run: 'account list'",
                    account_ref_id
                )
            })?;
            Ok((
                account_data.address,
                account_data
                    .authentication_key
                    .clone()
                    .and_then(|bytes| AuthenticationKey::try_from(bytes).ok()),
            ))
        }
    }
/// Get events by account and event type with start sequence number and limit.
pub fn get_events_by_account_and_type(
&mut self,
space_delim_strings: &[&str],
) -> Result<(Vec<EventView>, AccountView)> {
ensure!(
space_delim_strings.len() == 5,
"Invalid number of arguments to get events by access path"
);
let (account, _) = self.get_account_address_from_parameter(space_delim_strings[1])?;
let path = match space_delim_strings[2] {
"sent" => ACCOUNT_SENT_EVENT_PATH.to_vec(),
"received" => ACCOUNT_RECEIVED_EVENT_PATH.to_vec(),
_ => bail!(
"Unknown event type: {:?}, only sent and received are supported",
space_delim_strings[2]
),
};
let access_path = AccessPath::new(account, path);
let start_seq_number = space_delim_strings[3].parse::<u64>().map_err(|error| {
format_parse_data_error(
"start_seq_number",
InputType::UnsignedInt,
space_delim_strings[3],
error,
)
})?;
let limit = space_delim_strings[4].parse::<u64>().map_err(|error| {
format_parse_data_error(
"start_seq_number",
InputType::UnsignedInt,
space_delim_strings[4],
error,
)
})?;
self.client
.get_events_by_access_path(access_path, start_seq_number, limit)
}
/// Write mnemonic recover to the file specified.
pub fn write_recovery(&self, space_delim_strings: &[&str]) -> Result<()> {
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments for writing recovery"
);
self.wallet
.write_recovery(&Path::new(space_delim_strings[1]))?;
Ok(())
}
/// Recover wallet accounts from command 'recover <file>' and return vec<(account_address, index)>.
pub fn recover_wallet_accounts(
&mut self,
space_delim_strings: &[&str],
) -> Result<Vec<AddressAndIndex>> {
ensure!(
space_delim_strings.len() == 2,
"Invalid number of arguments for recovering wallets"
);
let wallet = WalletLibrary::recover(&Path::new(space_delim_strings[1]))?;
self.set_wallet(wallet);
self.recover_accounts_in_wallet()
}
/// Recover accounts in wallets and sync state if sync_on_wallet_recovery is true.
pub fn recover_accounts_in_wallet(&mut self) -> Result<Vec<AddressAndIndex>> {
let wallet_addresses = self.wallet.get_addresses()?;
let mut account_data = Vec::new();
for address in wallet_addresses {
account_data.push(Self::get_account_data_from_address(
&mut self.client,
address,
self.sync_on_wallet_recovery,
None,
None,
)?);
}
// Clear current cached AccountData as we always swap the entire wallet completely.
Ok(self.set_accounts(account_data))
}
/// Insert the account data to Client::accounts and return its address and index.s
pub fn insert_account_data(&mut self, account_data: AccountData) -> AddressAndIndex {
let address = account_data.address;
self.accounts.push(account_data);
self.address_to_ref_id
.insert(address, self.accounts.len() - 1);
AddressAndIndex {
address,
index: self.accounts.len() - 1,
}
}
    /// Test JSON RPC client connection with validator.
    ///
    /// A successful metadata fetch doubles as the connectivity check.
    pub fn test_validator_connection(&mut self) -> Result<BlockMetadata> {
        self.client.get_metadata()
    }
    /// Test client's connection to validator with proof.
    ///
    /// Requests a state proof, so success verifies the connection
    /// cryptographically rather than just transport-level reachability.
    pub fn test_trusted_connection(&mut self) -> Result<()> {
        self.client.get_state_proof()
    }
fn get_annotate_account_blob(
&mut self,
address: AccountAddress,
) -> Result<(Option<AnnotatedAccountStateBlob>, Version)> {
let (blob, ver) = self.client.get_account_state_blob(address)?;
if let Some(account_blob) = blob {
let state_view = NullStateView::default();
let annotator = MoveValueAnnotator::new(&state_view);
let annotate_blob =
annotator.view_account_state(&AccountState::try_from(&account_blob)?)?;
Ok((Some(annotate_blob), ver))
} else {
Ok((None, ver))
}
}
/// Get account state from validator and update status of account if it is cached locally.
fn get_account_state_and_update(
&mut self,
address: AccountAddress,
) -> Result<(Option<AccountView>, Version)> {
let account_state = self.client.get_account_state(address, true)?;
if self.address_to_ref_id.contains_key(&address) {
let account_ref_id = self
.address_to_ref_id
.get(&address)
.expect("Should have the key");
// assumption follows from invariant
let mut account_data: &mut AccountData =
self.accounts.get_mut(*account_ref_id).unwrap_or_else(|| unreachable!("Local cache not consistent, reference id {} not available in local accounts", account_ref_id));
if account_state.0.is_some() {
account_data.status = AccountStatus::Persisted;
}
};
Ok(account_state)
}
/// Get account resource from validator and update status of account if it is cached locally.
fn get_account_resource_and_update(&mut self, address: AccountAddress) -> Result<AccountView> {
let account_state = self.get_account_state_and_update(address)?;
if let Some(view) = account_state.0 {
Ok(view)
} else {
bail!("No account exists at {:?}", address)
}
}
    /// Get account using specific address.
    /// Sync with validator for account sequence number in case it is already created on chain.
    /// This assumes we have a very low probability of mnemonic word conflict.
    ///
    /// When `sync_with_validator` is false, or the query fails, the account
    /// starts at sequence number 0 with the caller-supplied auth key; the
    /// status records whether on-chain state was confirmed (`Persisted`),
    /// assumed (`Local`), or unknowable (`Unknown` after a query error).
    fn get_account_data_from_address(
        client: &mut LibraClient,
        address: AccountAddress,
        sync_with_validator: bool,
        key_pair: Option<KeyPair<Ed25519PrivateKey, Ed25519PublicKey>>,
        authentication_key_opt: Option<Vec<u8>>,
    ) -> Result<AccountData> {
        let (sequence_number, authentication_key, status) = if sync_with_validator {
            match client.get_account_state(address, true) {
                Ok(resp) => match resp.0 {
                    // On-chain account found: trust its sequence number and key.
                    Some(account_view) => (
                        account_view.sequence_number,
                        Some(account_view.authentication_key.into_bytes()?),
                        AccountStatus::Persisted,
                    ),
                    None => (0, authentication_key_opt, AccountStatus::Local),
                },
                Err(e) => {
                    // Query failure is non-fatal: fall back to local defaults
                    // but mark the status as Unknown.
                    error!("Failed to get account state from validator, error: {:?}", e);
                    (0, authentication_key_opt, AccountStatus::Unknown)
                }
            }
        } else {
            (0, authentication_key_opt, AccountStatus::Local)
        };
        Ok(AccountData {
            address,
            authentication_key,
            key_pair,
            sequence_number,
            status,
        })
    }
fn get_libra_wallet(mnemonic_file: Option<String>) -> Result<WalletLibrary> {
let wallet_recovery_file_path = if let Some(input_mnemonic_word) = mnemonic_file {
Path::new(&input_mnemonic_word).to_path_buf()
} else {
let mut file_path = std::env::current_dir()?;
file_path.push(CLIENT_WALLET_MNEMONIC_FILE);
file_path
};
let wallet = if let Ok(recovered_wallet) = io_utils::recover(&wallet_recovery_file_path) {
recovered_wallet
} else {
let new_wallet = WalletLibrary::new();
new_wallet.write_recovery(&wallet_recovery_file_path)?;
new_wallet
};
Ok(wallet)
}
    /// Set wallet instance used by this client.
    ///
    /// Replaces the wallet wholesale; cached accounts are not touched here
    /// (callers such as `recover_wallet_accounts` refresh them afterwards).
    fn set_wallet(&mut self, wallet: WalletLibrary) {
        self.wallet = wallet;
    }
fn address_from_strings(data: &str) -> Result<AccountAddress> {
let account_vec: Vec<u8> = hex::decode(data.parse::<String>()?)?;
ensure!(
account_vec.len() == AccountAddress::LENGTH,
"The address {:?} is of invalid length. Addresses must be 16-bytes long"
);
let account = AccountAddress::try_from(&account_vec[..]).map_err(|error| {
format_err!(
"The address {:?} is invalid, error: {:?}",
&account_vec,
error,
)
})?;
Ok(account)
}
fn authentication_key_from_string(data: &str) -> Result<AuthenticationKey> {
let bytes_vec: Vec<u8> = hex::decode(data.parse::<String>()?)?;
ensure!(
bytes_vec.len() == AuthenticationKey::LENGTH,
"The authentication key string {:?} is of invalid length. Authentication keys must be 32-bytes long"
);
let auth_key = AuthenticationKey::try_from(&bytes_vec[..]).map_err(|error| {
format_err!(
"The authentication key {:?} is invalid, error: {:?}",
&bytes_vec,
error,
)
})?;
Ok(auth_key)
}
    /// Sign `payload` with the locally loaded libra root account and submit
    /// it, optionally blocking until it is committed.
    ///
    /// # Errors
    /// Fails if no root account is loaded, or on submission/wait failure.
    fn association_transaction_with_local_libra_root_account(
        &mut self,
        payload: TransactionPayload,
        is_blocking: bool,
    ) -> Result<()> {
        ensure!(
            self.libra_root_account.is_some(),
            "No assoc root account loaded"
        );
        // The unwraps below are guarded by the ensure! above.
        let sender = self.libra_root_account.as_ref().unwrap();
        let sender_address = sender.address;
        let txn = self.create_txn_to_submit(payload, sender, None, None, None)?;
        // Re-borrow mutably: `create_txn_to_submit` needed `&self` above.
        let mut sender_mut = self.libra_root_account.as_mut().unwrap();
        let resp = self.client.submit_transaction(Some(&mut sender_mut), txn);
        // NOTE(review): when blocking, we wait even if `resp` is an Err — a
        // failed submission then waits on a sequence number that may never
        // advance. Confirm whether this should early-return on error.
        if is_blocking {
            self.wait_for_transaction(
                sender_address,
                self.libra_root_account.as_ref().unwrap().sequence_number,
            )?;
        }
        resp
    }
    /// Sign `payload` with the locally loaded testnet designated-dealer
    /// account and submit it, optionally blocking until it is committed.
    ///
    /// # Errors
    /// Fails if no DD account is loaded, or on submission/wait failure.
    fn association_transaction_with_local_testnet_dd_account(
        &mut self,
        payload: TransactionPayload,
        is_blocking: bool,
    ) -> Result<()> {
        ensure!(
            self.testnet_designated_dealer_account.is_some(),
            "No testnet Designated Dealer account loaded"
        );
        // The unwraps below are guarded by the ensure! above.
        let sender = self.testnet_designated_dealer_account.as_ref().unwrap();
        let sender_address = sender.address;
        let txn = self.create_txn_to_submit(payload, sender, None, None, None)?;
        // Re-borrow mutably: `create_txn_to_submit` needed `&self` above.
        let mut sender_mut = self.testnet_designated_dealer_account.as_mut().unwrap();
        let resp = self.client.submit_transaction(Some(&mut sender_mut), txn);
        // NOTE(review): when blocking, we wait even if `resp` is an Err —
        // mirror of the root-account variant; confirm intended.
        if is_blocking {
            self.wait_for_transaction(
                sender_address,
                self.testnet_designated_dealer_account
                    .as_ref()
                    .unwrap()
                    .sequence_number,
            )?;
        }
        resp
    }
    /// Ask the remote faucet HTTP service to mint `num_coins` of
    /// `coin_currency` to the account derived from `receiver`.
    ///
    /// The faucet responds with the sequence number of the mint transaction
    /// on the testnet DD account; when `is_blocking`, wait for it to commit.
    fn mint_coins_with_faucet_service(
        &mut self,
        receiver: AuthenticationKey,
        num_coins: u64,
        coin_currency: String,
        is_blocking: bool,
    ) -> Result<()> {
        let client = reqwest::blocking::ClientBuilder::new().build()?;
        // The auth key is sent hex-encoded as a query parameter.
        let url = Url::parse_with_params(
            format!("http://{}", self.faucet_server).as_str(),
            &[
                ("amount", num_coins.to_string().as_str()),
                ("auth_key", &hex::encode(receiver)),
                ("currency_code", coin_currency.as_str()),
            ],
        )?;
        let response = client.post(url).send()?;
        let status_code = response.status();
        let body = response.text()?;
        if !status_code.is_success() {
            return Err(format_err!(
                "Failed to query remote faucet server[status={}]: {:?}",
                status_code.as_str(),
                body,
            ));
        }
        // On success the body is the DD account's sequence number (decimal).
        let sequence_number = body.parse::<u64>()?;
        if is_blocking {
            self.wait_for_transaction(testnet_dd_account_address(), sequence_number)?;
        }
        Ok(())
    }
    /// Scale the number in `input` based on `scaling_factor` and ensure the fractional part is no
    /// less than `fractional_part` amount.
    ///
    /// Parses a decimal string (e.g. "10.25"), multiplies by
    /// `scaling_factor`, and returns the result as `u64`. Fails when the
    /// input is empty, too precise for the currency, too large to fit in
    /// `u64` after scaling, or does not scale to a whole number.
    pub fn convert_to_scaled_representation(
        input: &str,
        scaling_factor: i64,
        fractional_part: i64,
    ) -> Result<u64> {
        ensure!(!input.is_empty(), "Empty input not allowed for libra unit");
        // Largest representable value in main units: u64::MAX / scaling_factor.
        let max_value = Decimal::from_u64(std::u64::MAX).unwrap() / Decimal::new(scaling_factor, 0);
        // Position of the decimal point, i.e. the number of integer digits;
        // when there is no '.', this falls back to len - 1.
        let scale = input.find('.').unwrap_or(input.len() - 1);
        let digits_after_decimal = input
            .find('.')
            .map(|num_digits| input.len() - num_digits - 1)
            .unwrap_or(0) as u32;
        // NOTE(review): the message says "too small" but the check bounds the
        // number of fractional digits — presumably "too fine-grained".
        ensure!(
            digits_after_decimal <= 14,
            "Input value is too small: {}",
            input
        );
        // Reject inputs more precise than the currency supports:
        // 10^digits must not exceed the currency's fractional_part.
        let input_fractional_part = 10u64.pow(digits_after_decimal);
        ensure!(
            input_fractional_part <= fractional_part as u64,
            "Input value has too small of a fractional part 1/{}, but smallest allowed is 1/{}",
            input_fractional_part,
            fractional_part
        );
        // Cheap pre-check on digit count before the precise Decimal compare.
        ensure!(
            scale <= 14,
            "Input value is too big: {:?}, max: {:?}",
            input,
            max_value
        );
        let original = Decimal::from_str(input)?;
        ensure!(
            original <= max_value,
            "Input value is too big: {:?}, max: {:?}",
            input,
            max_value
        );
        let value = original * Decimal::new(scaling_factor, 0);
        // After scaling, the result must be a whole number of on-chain units.
        ensure!(value.fract().is_zero(), "invalid value");
        value.to_u64().ok_or_else(|| format_err!("invalid value"))
    }
/// convert number of coins (main unit) given as string to its on-chain represention
pub fn convert_to_on_chain_represenation(
&mut self,
input: &str,
currency: &str,
) -> Result<u64> {
ensure!(!input.is_empty(), "Empty input not allowed for libra unit");
// This is not supposed to panic as it is used as constant here.
let currencies_info = self.client.get_currency_info()?;
let currency_info = currencies_info
.iter()
.find(|info| info.code == currency)
.ok_or_else(|| {
format_err!(
"Unable to get currency info for {} when converting to on-chain units",
currency
)
})?;
Self::convert_to_scaled_representation(
input,
currency_info.scaling_factor as i64,
currency_info.fractional_part as i64,
)
}
/// Craft a transaction to be submitted.
fn create_txn_to_submit(
&self,
program: TransactionPayload,
sender_account: &AccountData,
max_gas_amount: Option<u64>,
gas_unit_price: Option<u64>,
gas_currency_code: Option<String>,
) -> Result<SignedTransaction> {
let signer: Box<&dyn TransactionSigner> = match &sender_account.key_pair {
Some(key_pair) => Box::new(key_pair),
None => Box::new(&self.wallet),
};
create_user_txn(
*signer,
program,
sender_account.address,
sender_account.sequence_number,
max_gas_amount.unwrap_or(MAX_GAS_AMOUNT),
gas_unit_price.unwrap_or(GAS_UNIT_PRICE),
gas_currency_code.unwrap_or_else(|| LBR_NAME.to_owned()),
TX_EXPIRATION,
)
}
fn mut_account_from_parameter(&mut self, para: &str) -> Result<&mut AccountData> {
let account_ref_id = if is_address(para) {
let account_address = ClientProxy::address_from_strings(para)?;
*self
.address_to_ref_id
.get(&account_address)
.ok_or_else(|| {
format_err!(
"Unable to find local account by address: {:?}",
account_address
)
})?
} else {
para.parse::<usize>()?
};
let account_data = self
.accounts
.get_mut(account_ref_id)
.ok_or_else(|| format_err!("Unable to find account by ref id: {}", account_ref_id))?;
Ok(account_data)
}
}
/// Parse one script argument from the CLI. Account addresses get special
/// handling; everything else falls through to the generic
/// transaction-argument parser.
fn parse_transaction_argument_for_client(s: &str) -> Result<TransactionArgument> {
    if !is_address(s) {
        return parse_transaction_argument(s);
    }
    let account_address = ClientProxy::address_from_strings(s)?;
    Ok(TransactionArgument::Address(account_address))
}
fn format_parse_data_error<T: std::fmt::Debug>(
field: &str,
input_type: InputType,
value: &str,
error: T,
) -> Error {
format_err!(
"Unable to parse input for {} - \
please enter an {:?}. Input was: {}, error: {:?}",
field,
input_type,
value,
error
)
}
/// Parse a user-supplied boolean, accepting any capitalization of
/// "true"/"false"; anything else (including "0"/"1") is an error.
fn parse_bool(para: &str) -> Result<bool> {
    let lowered = para.to_lowercase();
    let value = lowered.parse::<bool>()?;
    Ok(value)
}
impl fmt::Display for AccountEntry {
    /// Renders either the numeric local index or the account address,
    /// depending on the variant.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            AccountEntry::Index(i) => write!(f, "{}", i),
            AccountEntry::Address(addr) => write!(f, "{}", addr),
        }
    }
}
#[cfg(test)]
mod tests {
use crate::client_proxy::{parse_bool, AddressAndIndex, ClientProxy};
use libra_temppath::TempPath;
use libra_types::{ledger_info::LedgerInfo, on_chain_config::ValidatorSet, waypoint::Waypoint};
use libra_wallet::io_utils;
use proptest::prelude::*;
    // Test helper: build a ClientProxy backed by a fresh temp mnemonic file
    // and derive `count` accounts from it. The proxy never connects to the
    // URL — it is used only for offline account generation.
    fn generate_accounts_from_wallet(count: usize) -> (ClientProxy, Vec<AddressAndIndex>) {
        let mut accounts = Vec::new();
        accounts.reserve(count);
        let file = TempPath::new();
        let mnemonic_path = file.path().to_str().unwrap().to_string();
        // A mock-genesis waypoint satisfies the constructor's trust root.
        let waypoint =
            Waypoint::new_epoch_boundary(&LedgerInfo::mock_genesis(Some(ValidatorSet::empty())))
                .unwrap();
        // Note: `client_proxy` won't actually connect to URL - it will be used only to
        // generate random accounts
        let mut client_proxy = ClientProxy::new(
            "http://localhost:8080",
            &"",
            &"",
            false,
            None,
            Some(mnemonic_path),
            waypoint,
        )
        .unwrap();
        for _ in 0..count {
            accounts.push(client_proxy.create_next_account(false).unwrap());
        }
        (client_proxy, accounts)
    }
#[test]
fn test_parse_bool() {
assert!(parse_bool("true").unwrap());
assert!(parse_bool("True").unwrap());
assert!(parse_bool("TRue").unwrap());
assert!(parse_bool("TRUE").unwrap());
assert!(!parse_bool("false").unwrap());
assert!(!parse_bool("False").unwrap());
assert!(!parse_bool("FaLSe").unwrap());
assert!(!parse_bool("FALSE").unwrap());
assert!(parse_bool("1").is_err());
assert!(parse_bool("0").is_err());
assert!(parse_bool("2").is_err());
assert!(parse_bool("1adf").is_err());
assert!(parse_bool("ad13").is_err());
assert!(parse_bool("ad1f").is_err());
}
#[test]
fn test_micro_libra_conversion() {
assert!(ClientProxy::convert_to_scaled_representation("", 1_000_000, 1_000_000).is_err());
assert!(
ClientProxy::convert_to_scaled_representation("-11", 1_000_000, 1_000_000).is_err()
);
assert!(
ClientProxy::convert_to_scaled_representation("abc", 1_000_000, 1_000_000).is_err()
);
assert!(ClientProxy::convert_to_scaled_representation(
"11111112312321312321321321",
1_000_000,
1_000_000
)
.is_err());
assert!(ClientProxy::convert_to_scaled_representation("100000.0", 1, 1).is_err());
assert!(ClientProxy::convert_to_scaled_representation("0", 1_000_000, 1_000_000).is_ok());
assert!(ClientProxy::convert_to_scaled_representation("0", 1_000_000, 1_000_000).is_ok());
assert!(ClientProxy::convert_to_scaled_representation("1", 1_000_000, 1_000_000).is_ok());
assert!(ClientProxy::convert_to_scaled_representation("0.1", 1_000_000, 1_000_000).is_ok());
assert!(ClientProxy::convert_to_scaled_representation("1.1", 1_000_000, 1_000_000).is_ok());
// Max of micro libra is u64::MAX (18446744073709551615).
assert!(ClientProxy::convert_to_scaled_representation(
"18446744073709.551615",
1_000_000,
1_000_000
)
.is_ok());
assert!(ClientProxy::convert_to_scaled_representation(
"184467440737095.51615",
1_000_000,
1_000_000
)
.is_err());
assert!(ClientProxy::convert_to_scaled_representation(
"18446744073709.551616",
1_000_000,
1_000_000
)
.is_err());
}
#[test]
fn test_scaled_represenation() {
assert_eq!(
ClientProxy::convert_to_scaled_representation("10", 1_000_000, 100).unwrap(),
10 * 1_000_000
);
assert_eq!(
ClientProxy::convert_to_scaled_representation("10.", 1_000_000, 100).unwrap(),
10 * 1_000_000
);
assert_eq!(
ClientProxy::convert_to_scaled_representation("10.20", 1_000_000, 100).unwrap(),
(10.20 * 1_000_000f64) as u64
);
assert!(ClientProxy::convert_to_scaled_representation("10.201", 1_000_000, 100).is_err());
assert_eq!(
ClientProxy::convert_to_scaled_representation("10.991", 1_000_000, 1000).unwrap(),
(10.991 * 1_000_000f64) as u64
);
assert_eq!(
ClientProxy::convert_to_scaled_representation("100.99", 1000, 100).unwrap(),
(100.99 * 1000f64) as u64
);
assert_eq!(
ClientProxy::convert_to_scaled_representation("100000", 1, 1).unwrap(),
100_000
);
}
#[test]
fn test_generate() {
let num = 1;
let (_, accounts) = generate_accounts_from_wallet(num);
assert_eq!(accounts.len(), num);
}
#[test]
fn test_write_recover() {
let num = 100;
let (client, accounts) = generate_accounts_from_wallet(num);
assert_eq!(accounts.len(), num);
let file = TempPath::new();
let path = file.path();
io_utils::write_recovery(&client.wallet, &path).expect("failed to write to file");
let wallet = io_utils::recover(&path).expect("failed to load from file");
assert_eq!(client.wallet.mnemonic(), wallet.mnemonic());
}
    proptest! {
        // Proptest is used to verify that the conversion will not panic with random input.
        #[test]
        fn test_micro_libra_conversion_random_string(req in any::<String>()) {
            // Arbitrary unicode strings: the conversion may Err, but must not panic.
            let _res = ClientProxy::convert_to_scaled_representation(&req, 1_000_000, 1_000_000);
        }
        #[test]
        fn test_micro_libra_conversion_random_f64(req in any::<f64>()) {
            // Exercises the Display form of floats (incl. NaN/inf/exponent notation).
            let req_str = req.to_string();
            let _res = ClientProxy::convert_to_scaled_representation(&req_str, 1_000_000, 1_000_000);
        }
        #[test]
        fn test_micro_libra_conversion_random_u64(req in any::<u64>()) {
            // Whole-number inputs across the full u64 range; values that would
            // overflow after scaling should surface as Err, not as a panic.
            let req_str = req.to_string();
            let _res = ClientProxy::convert_to_scaled_representation(&req_str, 1_000_000, 1_000_000);
        }
    }
}
| 37.086386 | 182 | 0.574295 |
e43aa910c78c0eeb2d3de30591c347d7ef9e5642 | 2,242 | use std::net::{SocketAddr, UdpSocket, ToSocketAddrs};
use std::{io, str}; // For input
use std::env;
use regex::Regex;
pub fn main() -> std::io::Result<()> {
let ip_regex = Regex::new(r"^(?P<ip>[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+):(?P<port>[0-9]+)$").unwrap();
let args: Vec<String> = env::args().collect();
let ip_arg = &args[1];
let mut ip_input = String::new();
let mut client_addresses: SocketAddr;
'ip: loop {
if ip_regex.is_match(ip_arg) {
client_addresses = SocketAddr::from(ip_arg.to_socket_addrs().unwrap().next().unwrap());
break 'ip;
} else {
println!("Please submit a valid IP (#.#.#.#:#): ");
io::stdin().read_line(&mut ip_input)?;
ip_input.pop(); // pop off the "\n" that's appended
if ip_regex.is_match(&ip_input) {
client_addresses = SocketAddr::from(ip_input.to_socket_addrs().unwrap().next().unwrap());
break 'ip;
}
}
} // loop
// SERVER ADDRESS
let server_addresses: [SocketAddr; 1] = [
SocketAddr::from(([127, 0, 0, 1], 1666)),
// can add backup IPs
];
// BINDING & CONNECTING
let mut socket: UdpSocket;
socket = UdpSocket::bind(&client_addresses).expect("couldn't bind to address");
socket.connect(&server_addresses[..]).expect("connect function failed");
// SENDING
// socket.send(&[1,6,6,6]).expect("couldn't send message"); // Test1: Send array message
'sending: loop { // sending client side
// Test 2: Send input
println!("input: ");
let mut input = String::new();
io::stdin().read_line(&mut input)?;
socket.send(input.as_bytes())?; // must send a &[u8]
// Test 3: Recieving input from server
let mut buffer = [0u8; 4096]; // a buffer than accepts 4096
match socket.recv(&mut buffer) {
Ok(received) => {
// Test 3: print bytes recieved and array from server
println!("received {} bytes {:?}", received, &buffer[..received]); // test to print bytes and buffer
},
Err(e) => println!("recv function failed: {:?}", e),
} // deal with Result that's recieved from the buffer
// note: use ctrl+c to exit
} // close sending loop
Ok(())
} // close main fn | 34.492308 | 116 | 0.584746 |
11b5be4e80f887aa8fa60ef6e8c5d6e21c3ec5fe | 6,346 | // Copyright (c) 2021 The Lutino Projects
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file or at
// https://opensource.org/licenses/MIT.
use std::fmt::{self, Debug};
use std::iter;
use crate::iter::*;
use crate::traits::{Folder, Producer};
/// `Map` is an iterator that transforms the elements of an underlying iterator.
///
/// This struct is created by the [`map()`] method on [`ParallelIterator`]
///
/// [`map()`]: trait.ParallelIterator.html#method.map
/// [`ParallelIterator`]: trait.ParallelIterator.html
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Clone)]
pub struct ParallelMap<'env, I: ParallelIterator<'env>, F> {
    // The iterator being adapted.
    base: I,
    // The per-element mapping closure, applied lazily during consumption.
    map_op: F,
    // Job system the parallel execution is scheduled on.
    env: &'env JobSystem,
}
impl<'env, I, F> Debug for ParallelMap<'env, I, F>
where
    I: ParallelIterator<'env> + Debug,
{
    // `F` is deliberately omitted from the output: closures are rarely Debug.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Map").field("base", &self.base).finish()
    }
}
impl<'env, I, F> ParallelMap<'env, I, F>
where
    I: ParallelIterator<'env>,
{
    /// Creates a new `Map` iterator.
    pub(super) fn new(base: I, env: &'env JobSystem, map_op: F) -> Self {
        ParallelMap { base, map_op, env }
    }
}
impl<'env, I, F, R> ParallelIterator<'env> for ParallelMap<'env, I, F>
where
    I: ParallelIterator<'env>,
    F: Fn(I::Item) -> R + Sync + Send,
    R: Send,
{
    type Item = F::Output;

    /// Wraps the downstream consumer so each element is mapped before it
    /// reaches it, then delegates the drive to the inner iterator.
    fn drive_unindexed<C>(self, consumer: C) -> C::Result
    where
        C: UnindexedConsumer<Self::Item>,
    {
        let mapping_consumer = MapConsumer::new(consumer, &self.map_op);
        self.base.drive_unindexed(mapping_consumer)
    }

    /// Mapping is 1:1, so the base iterator's length hint carries over.
    fn opt_len(&self) -> Option<usize> {
        self.base.opt_len()
    }

    #[inline(always)]
    fn get_env(&self) -> &'env JobSystem {
        self.env
    }
}
impl<'env, I, F, R> IndexedParallelIterator<'env> for ParallelMap<'env, I, F>
where
    I: IndexedParallelIterator<'env>,
    F: Fn(I::Item) -> R + Sync + Send,
    R: Send,
{
    fn drive<C>(self, consumer: C) -> C::Result
    where
        C: Consumer<Self::Item>,
    {
        // Wrap the consumer so items are mapped before being consumed.
        let consumer1 = MapConsumer::new(consumer, &self.map_op);
        self.base.drive(consumer1)
    }
    fn len(&self) -> usize { self.base.len() }
    fn with_producer<CB>(self, callback: CB) -> CB::Output
    where
        CB: ProducerCallback<Self::Item>,
    {
        // Inversion of control: the base iterator's producer type cannot be
        // named here, so we hand the base a callback that wraps whatever
        // producer it eventually supplies.
        return self.base.with_producer(Callback {
            callback,
            map_op: self.map_op,
        });
        // Local helper types placed after `return` on purpose: items are
        // scoped to this function and only used by the call above.
        struct Callback<CB, F> {
            callback: CB,
            map_op: F,
        }
        impl<T, F, R, CB> ProducerCallback<T> for Callback<CB, F>
        where
            CB: ProducerCallback<R>,
            F: Fn(T) -> R + Sync,
            R: Send,
        {
            type Output = CB::Output;
            fn callback<P>(self, base: P) -> CB::Output
            where
                P: Producer<Item = T>,
            {
                // Wrap the concrete producer with the mapping closure and
                // forward it to the original callback.
                let producer = MapProducer {
                    base,
                    map_op: &self.map_op,
                };
                self.callback.callback(producer)
            }
        }
    }
}
/// Sequential-side producer: yields the base producer's items with `map_op`
/// applied, and splits into two mapping producers sharing the same closure.
struct MapProducer<'f, P, F> {
    base: P,
    map_op: &'f F,
}
impl<'f, P, F, R> Producer for MapProducer<'f, P, F>
where
    P: Producer,
    F: Fn(P::Item) -> R + Sync,
    R: Send,
{
    type IntoIter = iter::Map<P::IntoIter, &'f F>;
    type Item = F::Output;

    fn into_iter(self) -> Self::IntoIter {
        // The sequential fallback is plain `std::iter::Map`.
        self.base.into_iter().map(self.map_op)
    }

    fn min_len(&self) -> usize {
        self.base.min_len()
    }

    fn max_len(&self) -> usize {
        self.base.max_len()
    }

    fn split_at(self, index: usize) -> (Self, Self) {
        // Split the underlying producer and hand each half the shared closure.
        let op = self.map_op;
        let (lhs, rhs) = self.base.split_at(index);
        (
            MapProducer { base: lhs, map_op: op },
            MapProducer { base: rhs, map_op: op },
        )
    }

    fn fold_with<G>(self, folder: G) -> G
    where
        G: Folder<Self::Item>,
    {
        // Fold through a MapFolder so elements are mapped on the way in, then
        // unwrap `.base` to hand back the caller's folder type.
        let mapping_folder = MapFolder {
            base: folder,
            map_op: self.map_op,
        };
        self.base.fold_with(mapping_folder).base
    }
}
/// Parallel-side consumer: adapts a consumer of mapped items into a consumer
/// of source items by applying `map_op` before forwarding each element.
struct MapConsumer<'f, C, F> {
    base: C,
    map_op: &'f F,
}
impl<'f, C, F> MapConsumer<'f, C, F> {
    fn new(base: C, map_op: &'f F) -> Self {
        MapConsumer { base, map_op }
    }
}
impl<'f, T, R, C, F> Consumer<T> for MapConsumer<'f, C, F>
where
    C: Consumer<F::Output>,
    F: Fn(T) -> R + Sync,
    R: Send,
{
    type Folder = MapFolder<'f, C::Folder, F>;
    type Reducer = C::Reducer;
    type Result = C::Result;

    fn split_at(self, index: usize) -> (Self, Self, Self::Reducer) {
        // Both halves keep mapping; the base reducer merges their results.
        let (lhs, rhs, reducer) = self.base.split_at(index);
        (
            MapConsumer::new(lhs, self.map_op),
            MapConsumer::new(rhs, self.map_op),
            reducer,
        )
    }

    fn into_folder(self) -> Self::Folder {
        MapFolder {
            base: self.base.into_folder(),
            map_op: self.map_op,
        }
    }

    fn full(&self) -> bool {
        self.base.full()
    }
}
impl<'f, T, R, C, F> UnindexedConsumer<T> for MapConsumer<'f, C, F>
where
    C: UnindexedConsumer<F::Output>,
    F: Fn(T) -> R + Sync,
    R: Send,
{
    fn split_off_left(&self) -> Self {
        MapConsumer::new(self.base.split_off_left(), self.map_op)
    }

    fn to_reducer(&self) -> Self::Reducer {
        self.base.to_reducer()
    }
}
/// Folder that maps each element (or element stream) with `map_op` before
/// handing it to the wrapped folder.
struct MapFolder<'f, C, F> {
    base: C,
    map_op: &'f F,
}
impl<'f, T, R, C, F> Folder<T> for MapFolder<'f, C, F>
where
    C: Folder<F::Output>,
    F: Fn(T) -> R,
{
    type Result = C::Result;

    fn consume(self, item: T) -> Self {
        let mapped = (self.map_op)(item);
        MapFolder {
            base: self.base.consume(mapped),
            map_op: self.map_op,
        }
    }

    fn consume_iter<I>(self, iter: I) -> Self
    where
        I: IntoIterator<Item = T>,
    {
        // Bulk path: lazily map the whole stream into the base folder.
        MapFolder {
            base: self.base.consume_iter(iter.into_iter().map(self.map_op)),
            map_op: self.map_op,
        }
    }

    fn complete(self) -> C::Result {
        self.base.complete()
    }

    fn full(&self) -> bool {
        self.base.full()
    }
}
| 25.18254 | 99 | 0.533722 |
56164b0b0c9931d5bc44ff881ca9a99d9044c794 | 96,167 | #[doc = r" Value read from the register"]
pub struct R {
    // Raw snapshot of the register's 32 bits, taken at read time.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32 bits staged to be committed to the register on write.
    bits: u32,
}
impl super::OUT {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed the writer with the current bits so the
        // closure only has to change the fields it cares about.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, the writer starts from the reset value (defined
        // elsewhere in this module), so any field the closure does not set
        // ends up at its reset state.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
// The OUT register exposes one identical 1-bit field per GPIO pin. Rather
// than spelling out 24 structurally identical enums by hand, generate each
// field's read proxy (`LOW`/`HIGH` plus its accessors) from a single macro.
// The expanded items — names, docs, derives and method signatures — are
// byte-for-byte what the hand-written versions declared.
macro_rules! pin_field_read_proxy {
    ($(#[$enum_doc:meta])* $name:ident) => {
        $(#[$enum_doc])*
        #[derive(Clone, Copy, Debug, PartialEq)]
        pub enum $name {
            #[doc = "Pin driver is low."]
            LOW,
            #[doc = "Pin driver is high."]
            HIGH,
        }
        impl $name {
            #[doc = r" Returns `true` if the bit is clear (0)"]
            #[inline]
            pub fn bit_is_clear(&self) -> bool {
                !self.bit()
            }
            #[doc = r" Returns `true` if the bit is set (1)"]
            #[inline]
            pub fn bit_is_set(&self) -> bool {
                self.bit()
            }
            #[doc = r" Value of the field as raw bits"]
            #[inline]
            pub fn bit(&self) -> bool {
                match *self {
                    $name::LOW => false,
                    $name::HIGH => true,
                }
            }
            #[allow(missing_docs)]
            #[doc(hidden)]
            #[inline]
            pub fn _from(value: bool) -> $name {
                match value {
                    false => $name::LOW,
                    true => $name::HIGH,
                }
            }
            #[doc = "Checks if the value of the field is `LOW`"]
            #[inline]
            pub fn is_low(&self) -> bool {
                *self == $name::LOW
            }
            #[doc = "Checks if the value of the field is `HIGH`"]
            #[inline]
            pub fn is_high(&self) -> bool {
                *self == $name::HIGH
            }
        }
    };
}
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN0`"] PIN0R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN1`"] PIN1R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN2`"] PIN2R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN3`"] PIN3R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN4`"] PIN4R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN5`"] PIN5R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN6`"] PIN6R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN7`"] PIN7R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN8`"] PIN8R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN9`"] PIN9R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN10`"] PIN10R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN11`"] PIN11R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN12`"] PIN12R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN13`"] PIN13R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN14`"] PIN14R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN15`"] PIN15R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN16`"] PIN16R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN17`"] PIN17R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN18`"] PIN18R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN19`"] PIN19R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN20`"] PIN20R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN21`"] PIN21R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN22`"] PIN22R }
pin_field_read_proxy! { #[doc = "Possible values of the field `PIN23`"] PIN23R }
#[doc = "Possible values of the field `PIN24`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN24R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN24R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN24R::LOW => false,
PIN24R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN24R {
match value {
false => PIN24R::LOW,
true => PIN24R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN24R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN24R::HIGH
}
}
#[doc = "Possible values of the field `PIN25`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN25R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN25R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN25R::LOW => false,
PIN25R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN25R {
match value {
false => PIN25R::LOW,
true => PIN25R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN25R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN25R::HIGH
}
}
#[doc = "Possible values of the field `PIN26`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN26R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN26R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN26R::LOW => false,
PIN26R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN26R {
match value {
false => PIN26R::LOW,
true => PIN26R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN26R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN26R::HIGH
}
}
#[doc = "Possible values of the field `PIN27`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN27R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN27R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN27R::LOW => false,
PIN27R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN27R {
match value {
false => PIN27R::LOW,
true => PIN27R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN27R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN27R::HIGH
}
}
#[doc = "Possible values of the field `PIN28`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN28R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN28R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN28R::LOW => false,
PIN28R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN28R {
match value {
false => PIN28R::LOW,
true => PIN28R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN28R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN28R::HIGH
}
}
#[doc = "Possible values of the field `PIN29`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN29R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN29R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN29R::LOW => false,
PIN29R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN29R {
match value {
false => PIN29R::LOW,
true => PIN29R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN29R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN29R::HIGH
}
}
#[doc = "Possible values of the field `PIN30`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN30R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN30R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN30R::LOW => false,
PIN30R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN30R {
match value {
false => PIN30R::LOW,
true => PIN30R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN30R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN30R::HIGH
}
}
#[doc = "Possible values of the field `PIN31`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PIN31R {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN31R {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PIN31R::LOW => false,
PIN31R::HIGH => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PIN31R {
match value {
false => PIN31R::LOW,
true => PIN31R::HIGH,
}
}
#[doc = "Checks if the value of the field is `LOW`"]
#[inline]
pub fn is_low(&self) -> bool {
*self == PIN31R::LOW
}
#[doc = "Checks if the value of the field is `HIGH`"]
#[inline]
pub fn is_high(&self) -> bool {
*self == PIN31R::HIGH
}
}
#[doc = "Values that can be written to the field `PIN0`"]
pub enum PIN0W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN0W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN0W::LOW => false,
PIN0W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN0W<'a> {
w: &'a mut W,
}
impl<'a> _PIN0W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN0W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN0W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN0W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN1`"]
pub enum PIN1W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN1W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN1W::LOW => false,
PIN1W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN1W<'a> {
w: &'a mut W,
}
impl<'a> _PIN1W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN1W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN1W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN1W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN2`"]
pub enum PIN2W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN2W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN2W::LOW => false,
PIN2W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN2W<'a> {
w: &'a mut W,
}
impl<'a> _PIN2W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN2W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN2W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN2W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN3`"]
pub enum PIN3W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN3W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN3W::LOW => false,
PIN3W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN3W<'a> {
w: &'a mut W,
}
impl<'a> _PIN3W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN3W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN3W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN3W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN4`"]
pub enum PIN4W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN4W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN4W::LOW => false,
PIN4W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN4W<'a> {
w: &'a mut W,
}
impl<'a> _PIN4W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN4W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN4W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN4W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN5`"]
pub enum PIN5W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN5W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN5W::LOW => false,
PIN5W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN5W<'a> {
w: &'a mut W,
}
impl<'a> _PIN5W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN5W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN5W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN5W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN6`"]
pub enum PIN6W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN6W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN6W::LOW => false,
PIN6W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN6W<'a> {
w: &'a mut W,
}
impl<'a> _PIN6W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN6W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN6W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN6W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN7`"]
pub enum PIN7W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN7W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN7W::LOW => false,
PIN7W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN7W<'a> {
w: &'a mut W,
}
impl<'a> _PIN7W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN7W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN7W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN7W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN8`"]
pub enum PIN8W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN8W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN8W::LOW => false,
PIN8W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN8W<'a> {
w: &'a mut W,
}
impl<'a> _PIN8W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN8W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN8W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN8W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN9`"]
pub enum PIN9W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN9W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN9W::LOW => false,
PIN9W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN9W<'a> {
w: &'a mut W,
}
impl<'a> _PIN9W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN9W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN9W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN9W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN10`"]
pub enum PIN10W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN10W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN10W::LOW => false,
PIN10W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN10W<'a> {
w: &'a mut W,
}
impl<'a> _PIN10W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN10W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN10W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN10W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN11`"]
pub enum PIN11W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN11W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN11W::LOW => false,
PIN11W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN11W<'a> {
w: &'a mut W,
}
impl<'a> _PIN11W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN11W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN11W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN11W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN12`"]
pub enum PIN12W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN12W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN12W::LOW => false,
PIN12W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN12W<'a> {
w: &'a mut W,
}
impl<'a> _PIN12W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN12W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN12W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN12W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN13`"]
pub enum PIN13W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN13W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN13W::LOW => false,
PIN13W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN13W<'a> {
w: &'a mut W,
}
impl<'a> _PIN13W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN13W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN13W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN13W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN14`"]
pub enum PIN14W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN14W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN14W::LOW => false,
PIN14W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN14W<'a> {
w: &'a mut W,
}
impl<'a> _PIN14W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN14W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN14W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN14W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN15`"]
pub enum PIN15W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN15W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN15W::LOW => false,
PIN15W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN15W<'a> {
w: &'a mut W,
}
impl<'a> _PIN15W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN15W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN15W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN15W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN16`"]
pub enum PIN16W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN16W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN16W::LOW => false,
PIN16W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN16W<'a> {
w: &'a mut W,
}
impl<'a> _PIN16W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN16W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN16W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN16W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN17`"]
pub enum PIN17W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN17W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN17W::LOW => false,
PIN17W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN17W<'a> {
w: &'a mut W,
}
impl<'a> _PIN17W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN17W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN17W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN17W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN18`"]
pub enum PIN18W {
#[doc = "Pin driver is low."]
LOW,
#[doc = "Pin driver is high."]
HIGH,
}
impl PIN18W {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PIN18W::LOW => false,
PIN18W::HIGH => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PIN18W<'a> {
w: &'a mut W,
}
impl<'a> _PIN18W<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PIN18W) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pin driver is low."]
#[inline]
pub fn low(self) -> &'a mut W {
self.variant(PIN18W::LOW)
}
#[doc = "Pin driver is high."]
#[inline]
pub fn high(self) -> &'a mut W {
self.variant(PIN18W::HIGH)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PIN19`"]
pub enum PIN19W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN19W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN19W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN19W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN19W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN19W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN19W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN19W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 19 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 19)) | ((value as u32) << 19);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN20`"]
pub enum PIN20W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN20W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN20W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN20W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN20W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN20W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN20W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN20W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 20 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 20)) | ((value as u32) << 20);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN21`"]
pub enum PIN21W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN21W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN21W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN21W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN21W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN21W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN21W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN21W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 21 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 21)) | ((value as u32) << 21);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN22`"]
pub enum PIN22W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN22W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN22W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN22W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN22W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN22W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN22W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN22W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 22 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 22)) | ((value as u32) << 22);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN23`"]
pub enum PIN23W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN23W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN23W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN23W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN23W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN23W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN23W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN23W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 23 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 23)) | ((value as u32) << 23);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN24`"]
pub enum PIN24W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN24W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN24W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN24W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN24W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN24W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN24W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN24W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 24 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 24)) | ((value as u32) << 24);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN25`"]
pub enum PIN25W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN25W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN25W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN25W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN25W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN25W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN25W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN25W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 25 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 25)) | ((value as u32) << 25);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN26`"]
pub enum PIN26W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN26W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN26W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN26W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN26W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN26W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN26W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN26W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 26 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 26)) | ((value as u32) << 26);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN27`"]
pub enum PIN27W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN27W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN27W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN27W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN27W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN27W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN27W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN27W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 27 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 27)) | ((value as u32) << 27);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN28`"]
pub enum PIN28W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN28W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN28W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN28W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN28W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN28W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN28W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN28W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 28 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 28)) | ((value as u32) << 28);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN29`"]
pub enum PIN29W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN29W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN29W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN29W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN29W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN29W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN29W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN29W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 29 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 29)) | ((value as u32) << 29);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN30`"]
pub enum PIN30W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN30W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN30W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN30W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN30W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN30W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN30W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN30W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 30 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 30)) | ((value as u32) << 30);
        self.w
    }
}
#[doc = "Values that can be written to the field `PIN31`"]
pub enum PIN31W {
    #[doc = "Pin driver is low."]
    LOW,
    #[doc = "Pin driver is high."]
    HIGH,
}
impl PIN31W {
    #[allow(missing_docs)]
    #[doc(hidden)]
    #[inline]
    pub fn _bits(&self) -> bool {
        // HIGH encodes as bit value 1, LOW as 0.
        if let PIN31W::HIGH = *self { true } else { false }
    }
}
#[doc = r" Proxy"]
pub struct _PIN31W<'a> {
    w: &'a mut W,
}
impl<'a> _PIN31W<'a> {
    #[doc = r" Writes `variant` to the field"]
    #[inline]
    pub fn variant(self, variant: PIN31W) -> &'a mut W { self.bit(variant._bits()) }
    #[doc = "Pin driver is low."]
    #[inline]
    pub fn low(self) -> &'a mut W { self.variant(PIN31W::LOW) }
    #[doc = "Pin driver is high."]
    #[inline]
    pub fn high(self) -> &'a mut W { self.variant(PIN31W::HIGH) }
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W { self.bit(true) }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W { self.bit(false) }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 31 of the register, then set it from `value`.
        self.w.bits = (self.w.bits & !(1u32 << 31)) | ((value as u32) << 31);
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    // True when bit `offset` of the cached register value is set.
    // Shared helper for all the per-pin readers below.
    #[inline]
    fn bit_is_set(&self, offset: u8) -> bool {
        (self.bits >> offset) & 1 != 0
    }
    #[doc = "Bit 0 - Pin 0."]
    #[inline]
    pub fn pin0(&self) -> PIN0R { PIN0R::_from(self.bit_is_set(0)) }
    #[doc = "Bit 1 - Pin 1."]
    #[inline]
    pub fn pin1(&self) -> PIN1R { PIN1R::_from(self.bit_is_set(1)) }
    #[doc = "Bit 2 - Pin 2."]
    #[inline]
    pub fn pin2(&self) -> PIN2R { PIN2R::_from(self.bit_is_set(2)) }
    #[doc = "Bit 3 - Pin 3."]
    #[inline]
    pub fn pin3(&self) -> PIN3R { PIN3R::_from(self.bit_is_set(3)) }
    #[doc = "Bit 4 - Pin 4."]
    #[inline]
    pub fn pin4(&self) -> PIN4R { PIN4R::_from(self.bit_is_set(4)) }
    #[doc = "Bit 5 - Pin 5."]
    #[inline]
    pub fn pin5(&self) -> PIN5R { PIN5R::_from(self.bit_is_set(5)) }
    #[doc = "Bit 6 - Pin 6."]
    #[inline]
    pub fn pin6(&self) -> PIN6R { PIN6R::_from(self.bit_is_set(6)) }
    #[doc = "Bit 7 - Pin 7."]
    #[inline]
    pub fn pin7(&self) -> PIN7R { PIN7R::_from(self.bit_is_set(7)) }
    #[doc = "Bit 8 - Pin 8."]
    #[inline]
    pub fn pin8(&self) -> PIN8R { PIN8R::_from(self.bit_is_set(8)) }
    #[doc = "Bit 9 - Pin 9."]
    #[inline]
    pub fn pin9(&self) -> PIN9R { PIN9R::_from(self.bit_is_set(9)) }
    #[doc = "Bit 10 - Pin 10."]
    #[inline]
    pub fn pin10(&self) -> PIN10R { PIN10R::_from(self.bit_is_set(10)) }
    #[doc = "Bit 11 - Pin 11."]
    #[inline]
    pub fn pin11(&self) -> PIN11R { PIN11R::_from(self.bit_is_set(11)) }
    #[doc = "Bit 12 - Pin 12."]
    #[inline]
    pub fn pin12(&self) -> PIN12R { PIN12R::_from(self.bit_is_set(12)) }
    #[doc = "Bit 13 - Pin 13."]
    #[inline]
    pub fn pin13(&self) -> PIN13R { PIN13R::_from(self.bit_is_set(13)) }
    #[doc = "Bit 14 - Pin 14."]
    #[inline]
    pub fn pin14(&self) -> PIN14R { PIN14R::_from(self.bit_is_set(14)) }
    #[doc = "Bit 15 - Pin 15."]
    #[inline]
    pub fn pin15(&self) -> PIN15R { PIN15R::_from(self.bit_is_set(15)) }
    #[doc = "Bit 16 - Pin 16."]
    #[inline]
    pub fn pin16(&self) -> PIN16R { PIN16R::_from(self.bit_is_set(16)) }
    #[doc = "Bit 17 - Pin 17."]
    #[inline]
    pub fn pin17(&self) -> PIN17R { PIN17R::_from(self.bit_is_set(17)) }
    #[doc = "Bit 18 - Pin 18."]
    #[inline]
    pub fn pin18(&self) -> PIN18R { PIN18R::_from(self.bit_is_set(18)) }
    #[doc = "Bit 19 - Pin 19."]
    #[inline]
    pub fn pin19(&self) -> PIN19R { PIN19R::_from(self.bit_is_set(19)) }
    #[doc = "Bit 20 - Pin 20."]
    #[inline]
    pub fn pin20(&self) -> PIN20R { PIN20R::_from(self.bit_is_set(20)) }
    #[doc = "Bit 21 - Pin 21."]
    #[inline]
    pub fn pin21(&self) -> PIN21R { PIN21R::_from(self.bit_is_set(21)) }
    #[doc = "Bit 22 - Pin 22."]
    #[inline]
    pub fn pin22(&self) -> PIN22R { PIN22R::_from(self.bit_is_set(22)) }
    #[doc = "Bit 23 - Pin 23."]
    #[inline]
    pub fn pin23(&self) -> PIN23R { PIN23R::_from(self.bit_is_set(23)) }
    #[doc = "Bit 24 - Pin 24."]
    #[inline]
    pub fn pin24(&self) -> PIN24R { PIN24R::_from(self.bit_is_set(24)) }
    #[doc = "Bit 25 - Pin 25."]
    #[inline]
    pub fn pin25(&self) -> PIN25R { PIN25R::_from(self.bit_is_set(25)) }
    #[doc = "Bit 26 - Pin 26."]
    #[inline]
    pub fn pin26(&self) -> PIN26R { PIN26R::_from(self.bit_is_set(26)) }
    #[doc = "Bit 27 - Pin 27."]
    #[inline]
    pub fn pin27(&self) -> PIN27R { PIN27R::_from(self.bit_is_set(27)) }
    #[doc = "Bit 28 - Pin 28."]
    #[inline]
    pub fn pin28(&self) -> PIN28R { PIN28R::_from(self.bit_is_set(28)) }
    #[doc = "Bit 29 - Pin 29."]
    #[inline]
    pub fn pin29(&self) -> PIN29R { PIN29R::_from(self.bit_is_set(29)) }
    #[doc = "Bit 30 - Pin 30."]
    #[inline]
    pub fn pin30(&self) -> PIN30R { PIN30R::_from(self.bit_is_set(30)) }
    #[doc = "Bit 31 - Pin 31."]
    #[inline]
    pub fn pin31(&self) -> PIN31R { PIN31R::_from(self.bit_is_set(31)) }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        // All pins default to 0 (driver low).
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Pin 0."]
    #[inline]
    pub fn pin0(&mut self) -> _PIN0W { _PIN0W { w: self } }
    #[doc = "Bit 1 - Pin 1."]
    #[inline]
    pub fn pin1(&mut self) -> _PIN1W { _PIN1W { w: self } }
    #[doc = "Bit 2 - Pin 2."]
    #[inline]
    pub fn pin2(&mut self) -> _PIN2W { _PIN2W { w: self } }
    #[doc = "Bit 3 - Pin 3."]
    #[inline]
    pub fn pin3(&mut self) -> _PIN3W { _PIN3W { w: self } }
    #[doc = "Bit 4 - Pin 4."]
    #[inline]
    pub fn pin4(&mut self) -> _PIN4W { _PIN4W { w: self } }
    #[doc = "Bit 5 - Pin 5."]
    #[inline]
    pub fn pin5(&mut self) -> _PIN5W { _PIN5W { w: self } }
    #[doc = "Bit 6 - Pin 6."]
    #[inline]
    pub fn pin6(&mut self) -> _PIN6W { _PIN6W { w: self } }
    #[doc = "Bit 7 - Pin 7."]
    #[inline]
    pub fn pin7(&mut self) -> _PIN7W { _PIN7W { w: self } }
    #[doc = "Bit 8 - Pin 8."]
    #[inline]
    pub fn pin8(&mut self) -> _PIN8W { _PIN8W { w: self } }
    #[doc = "Bit 9 - Pin 9."]
    #[inline]
    pub fn pin9(&mut self) -> _PIN9W { _PIN9W { w: self } }
    #[doc = "Bit 10 - Pin 10."]
    #[inline]
    pub fn pin10(&mut self) -> _PIN10W { _PIN10W { w: self } }
    #[doc = "Bit 11 - Pin 11."]
    #[inline]
    pub fn pin11(&mut self) -> _PIN11W { _PIN11W { w: self } }
    #[doc = "Bit 12 - Pin 12."]
    #[inline]
    pub fn pin12(&mut self) -> _PIN12W { _PIN12W { w: self } }
    #[doc = "Bit 13 - Pin 13."]
    #[inline]
    pub fn pin13(&mut self) -> _PIN13W { _PIN13W { w: self } }
    #[doc = "Bit 14 - Pin 14."]
    #[inline]
    pub fn pin14(&mut self) -> _PIN14W { _PIN14W { w: self } }
    #[doc = "Bit 15 - Pin 15."]
    #[inline]
    pub fn pin15(&mut self) -> _PIN15W { _PIN15W { w: self } }
    #[doc = "Bit 16 - Pin 16."]
    #[inline]
    pub fn pin16(&mut self) -> _PIN16W { _PIN16W { w: self } }
    #[doc = "Bit 17 - Pin 17."]
    #[inline]
    pub fn pin17(&mut self) -> _PIN17W { _PIN17W { w: self } }
    #[doc = "Bit 18 - Pin 18."]
    #[inline]
    pub fn pin18(&mut self) -> _PIN18W { _PIN18W { w: self } }
    #[doc = "Bit 19 - Pin 19."]
    #[inline]
    pub fn pin19(&mut self) -> _PIN19W { _PIN19W { w: self } }
    #[doc = "Bit 20 - Pin 20."]
    #[inline]
    pub fn pin20(&mut self) -> _PIN20W { _PIN20W { w: self } }
    #[doc = "Bit 21 - Pin 21."]
    #[inline]
    pub fn pin21(&mut self) -> _PIN21W { _PIN21W { w: self } }
    #[doc = "Bit 22 - Pin 22."]
    #[inline]
    pub fn pin22(&mut self) -> _PIN22W { _PIN22W { w: self } }
    #[doc = "Bit 23 - Pin 23."]
    #[inline]
    pub fn pin23(&mut self) -> _PIN23W { _PIN23W { w: self } }
    #[doc = "Bit 24 - Pin 24."]
    #[inline]
    pub fn pin24(&mut self) -> _PIN24W { _PIN24W { w: self } }
    #[doc = "Bit 25 - Pin 25."]
    #[inline]
    pub fn pin25(&mut self) -> _PIN25W { _PIN25W { w: self } }
    #[doc = "Bit 26 - Pin 26."]
    #[inline]
    pub fn pin26(&mut self) -> _PIN26W { _PIN26W { w: self } }
    #[doc = "Bit 27 - Pin 27."]
    #[inline]
    pub fn pin27(&mut self) -> _PIN27W { _PIN27W { w: self } }
    #[doc = "Bit 28 - Pin 28."]
    #[inline]
    pub fn pin28(&mut self) -> _PIN28W { _PIN28W { w: self } }
    #[doc = "Bit 29 - Pin 29."]
    #[inline]
    pub fn pin29(&mut self) -> _PIN29W { _PIN29W { w: self } }
    #[doc = "Bit 30 - Pin 30."]
    #[inline]
    pub fn pin30(&mut self) -> _PIN30W { _PIN30W { w: self } }
    #[doc = "Bit 31 - Pin 31."]
    #[inline]
    pub fn pin31(&mut self) -> _PIN31W { _PIN31W { w: self } }
}
use makepad_render::*;
use makepad_widget::*;
#[derive(Clone)]
// Per-row drawing resources and cached style values for a single file-tree item.
// The cached fields are filled from the theme; see the impl elsewhere in this file.
pub struct FileTreeItemDraw {
    // Quad used for the tree connector lines / folder icon.
    pub filler: Quad,
    // Text drawer for the node label.
    pub tree_text: Text,
    // Background quad behind each row.
    pub node_bg: Quad,
    // Scroll shadow drawn over the list.
    pub shadow: ScrollShadow,
    // Cached layout for a node row (from theme).
    pub node_layout: Layout,
    // Fixed row height in points (from node_layout).
    pub row_height: f32,
    // Walk (size/margin) for the filler quad.
    pub filler_walk: Walk,
    // Walk for the folder icon.
    pub folder_walk: Walk,
    // Label color for folders.
    pub color_tree_folder: Color,
    // Label color for files.
    pub color_tree_file: Color
}
#[derive(Clone)]
// File-tree widget: a scrollable tree of files/folders with drag-and-drop support.
// Fields prefixed with `_` are runtime state, not configuration.
pub struct FileTree {
    // Scrollable view hosting the tree rows.
    pub view: ScrollView,
    // Overlay view used while dragging items.
    pub drag_view: View,
    // Current drag gesture, if one is in progress.
    pub _drag_move: Option<FingerMoveEvent>,
    // Root of the file/folder tree.
    pub root_node: FileNode,
    // Shared draw resources for the rows.
    pub item_draw: FileTreeItemDraw,
    // Background quad for the drag overlay.
    pub drag_bg: Quad,
    // Area of the scroll shadow (runtime state).
    pub _shadow_area: Area,
}
#[derive(Clone, PartialEq)]
// Events emitted by the file tree to its owner.
pub enum FileTreeEvent {
    // Nothing happened this frame.
    None,
    // Items are being dragged; `paths` are the dragged file paths.
    DragMove {fe: FingerMoveEvent, paths: Vec<String>},
    // A drag was aborted.
    DragCancel,
    // A drag finished; the drop target can be derived from `fe`.
    DragEnd {fe: FingerUpEvent, paths: Vec<String>},
    // The drag left the tree's area.
    DragOut,
    // A file row was selected.
    SelectFile {path: String},
    // A folder row was selected (toggled open/closed).
    SelectFolder {path: String}
}
#[derive(Clone)]
// Open/close animation state of a folder node. The f64 payloads appear to be
// animation progress values — TODO confirm against the animation code.
pub enum NodeState {
    Open,
    Opening(f64),
    Closing(f64),
    Closed
}
#[derive(Clone)]
// Per-node draw state, created lazily on first draw.
pub struct NodeDraw {
    // Animator driving the row's background color transitions.
    animator: Animator,
    // Marker generation counter; semantics set by the event-handling code.
    marked: u64
}
#[derive(Clone)]
// A node in the file tree: either a leaf file or a folder with children.
// `draw` is None until the node is first drawn.
pub enum FileNode {
    File {name: String, draw: Option<NodeDraw>},
    Folder {name: String, draw: Option<NodeDraw>, state: NodeState, folder: Vec<FileNode>}
}
impl FileNode {
    /// Mutable access to the node's lazily-created draw state; works for
    /// both variants.
    fn get_draw<'a>(&'a mut self) -> &'a mut Option<NodeDraw> {
        match self {
            FileNode::File {draw, ..} | FileNode::Folder {draw, ..} => draw
        }
    }
    /// A node counts as open while fully open or still animating open.
    /// Files are never open.
    fn is_open(&self) -> bool {
        if let FileNode::Folder {state, ..} = self {
            match state {
                NodeState::Open | NodeState::Opening(..) => true,
                _ => false
            }
        }
        else {
            false
        }
    }
    /// Owned copy of the node's display name.
    fn name(&self) -> String {
        let name = match self {
            FileNode::File {name, ..} | FileNode::Folder {name, ..} => name
        };
        name.clone()
    }
}
// One frame of the explicit traversal stack used by FileWalker.
struct StackEntry<'a> {
    // 0 = node not yet yielded; 1 = node yielded; >1 = iterating children.
    counter: usize,
    // Index of this node within its parent's child list.
    index: usize,
    // Number of siblings (parent's child count).
    len: usize,
    // True if any ancestor folder is in the Closing state.
    closing: bool,
    node: &'a mut FileNode
}
// Depth-first walker over a FileNode tree, driven by repeated calls to `walk`.
pub struct FileWalker<'a>
{
    // Explicit stack replacing recursion; top is the node currently visited.
    stack: Vec<StackEntry<'a>>,
}
// Flattens the recursive tree traversal into an iterator-style walker.
// Internally relies on `unsafe` lifetime transmutes to hand out mutable
// references from the traversal stack.
impl<'a> FileWalker<'a> {
    // Creates a walker rooted at `root_node`. counter starts at 1 so the
    // root itself is never yielded; iteration starts at its children.
    pub fn new(root_node: &'a mut FileNode) -> FileWalker<'a> {
        return FileWalker {stack: vec![StackEntry {counter: 1, closing: false, index: 0, len: 0, node: root_node}]};
    }
    // Path of the node currently on top of the stack, joined with '/'.
    // Note: the separator is skipped for i <= 1, so the (nameless) root and
    // the first path component are concatenated without a leading slash.
    pub fn current_path(&self) -> String {
        // the current stack top returned as path
        let mut path = String::new();
        for i in 0..self.stack.len() {
            if i > 1 {
                path.push_str("/");
            }
            path.push_str(
                &self.stack[i].node.name()
            );
        };
        path
    }
    // True when the current node is inside a folder that is animating closed.
    pub fn current_closing(&self) -> bool {
        if let Some(stack_top) = self.stack.last() {
            stack_top.closing
        }
        else {
            false
        }
    }
    // Advances the walk and returns (depth, sibling index, sibling count, node)
    // for the next node, or None when the traversal is exhausted.
    // Children of closed folders are skipped entirely.
    pub fn walk(&mut self) -> Option<(usize, usize, usize, &mut FileNode)> {
        // lets get the current item on the stack
        let stack_len = self.stack.len();
        let push_or_pop = if let Some(stack_top) = self.stack.last_mut() {
            // return item 'count'
            match stack_top.node {
                FileNode::File {..} => {
                    stack_top.counter += 1;
                    if stack_top.counter == 1 {
                        // SAFETY(review): transmute extends the borrow of the stack-held
                        // node to the walker's lifetime; sound only because each node is
                        // on the stack at most once — TODO confirm invariant holds.
                        return Some((stack_len - 1, stack_top.index, stack_top.len, unsafe {std::mem::transmute(&mut *stack_top.node)}));
                    }
                    None // pop stack
                },
                FileNode::Folder {folder, state, ..} => {
                    stack_top.counter += 1;
                    if stack_top.counter == 1 { // return self
                        // SAFETY(review): same lifetime-extending transmute as above.
                        return Some((stack_len - 1, stack_top.index, stack_top.len, unsafe {std::mem::transmute(&mut *stack_top.node)}));
                    }
                    else {
                        // counter 2..n maps to child indices 0..n-2.
                        let child_index = stack_top.counter - 2;
                        let opened = if let NodeState::Closed = state {false} else {true};
                        let closing = if let NodeState::Closing(_) = state {true} else {stack_top.closing};
                        if opened && child_index < folder.len() { // child on stack
                            // SAFETY(review): child borrow is transmuted to outlive this
                            // frame so it can be stored in the stack Vec.
                            Some(StackEntry {counter: 0, closing: closing, index: child_index, len: folder.len(), node: unsafe {std::mem::transmute(&mut folder[child_index])}})
                        }
                        else {
                            None // pop stack
                        }
                    }
                }
            }
        }
        else {
            None
        };
        // Push the next child and recurse, or pop the finished frame and retry.
        if let Some(item) = push_or_pop {
            self.stack.push(item);
            return self.walk();
        }
        else if self.stack.len() > 0 {
            self.stack.pop();
            return self.walk();
        }
        return None;
    }
}
impl FileTreeItemDraw {
    // Builds the draw resources with placeholder layout/color values; the
    // real values are read from the theme later by `apply_style`.
    fn proto(cx: &mut Cx) -> Self {
        Self {
            tree_text: Text {z: 0.001, ..Text::proto(cx)},
            node_bg: Quad::proto(cx),
            //node_layout: LayoutFileTreeNode::id(),
            filler: Quad {
                z: 0.001,
                ..Quad::proto(cx)
            },
            shadow: ScrollShadow {
                z: 0.01,
                ..ScrollShadow::proto(cx)
            },
            node_layout: Layout::default(),
            row_height: 0.,
            filler_walk: Walk::default(),
            folder_walk: Walk::default(),
            color_tree_folder: Color::default(),
            color_tree_file: Color::default()
        }
    }
    // Theme keys: each uid!() produces a stable unique id used to register and
    // look up style values on the Cx.
    pub fn shadow_size() -> FloatId {uid!()}
    pub fn layout_drag_bg() -> LayoutId {uid!()}
    pub fn layout_node() -> LayoutId {uid!()}
    pub fn text_style_label() -> TextStyleId {uid!()}
    pub fn color_tree_folder() -> ColorId {uid!()}
    pub fn color_tree_file() -> ColorId {uid!()}
    pub fn color_filler() -> ColorId {uid!()}
    pub fn walk_filler() -> WalkId {uid!()}
    pub fn walk_folder() -> WalkId {uid!()}
    pub fn instance_line_vec() -> InstanceVec2 {uid!()}
    pub fn instance_anim_pos() -> InstanceFloat {uid!()}
    pub fn shader_filler() -> ShaderId {uid!()}
    pub fn anim_default() -> AnimId {uid!()}
    pub fn anim_over() -> AnimId {uid!()}
    // Registers all theme values (layouts, walks, colors, the filler shader)
    // on the Cx, scaled by `opt.scale`.
    pub fn style(cx: &mut Cx, opt: &StyleOptions) {
        Self::shadow_size().set(cx, 6.0);
        Self::color_tree_folder().set(cx, Theme::color_text_selected_focus().get(cx));
        Self::color_tree_file().set(cx, Theme::color_text_deselected_focus().get(cx));
        Self::color_filler().set(cx, Theme::color_icon().get(cx));
        Self::layout_drag_bg().set(cx, Layout {
            padding: Padding {l: 5., t: 5., r: 5., b: 5.},
            walk: Walk::wh(Width::Compute, Height::Compute),
            ..Default::default()
        });
        Self::layout_node().set(cx, Layout {
            walk: Walk::wh(Width::Fill, Height::Fix(20. * opt.scale)),
            align: Align::left_center(),
            padding: Padding {l: 5., t: 0., r: 0., b: 1.},
            ..Default::default()
        });
        Self::text_style_label().set(cx, TextStyle {
            top_drop: 1.3,
            ..Theme::text_style_normal().get(cx)
        });
        Self::walk_filler().set(cx, Walk {
            width: Width::Fix(10. * opt.scale),
            height: Height::Fill,
            margin: Margin {l: 1., t: 0., r: 4., b: 0.}
        });
        Self::walk_folder().set(cx, Walk {
            width: Width::Fix(14. * opt.scale),
            height: Height::Fill,
            margin: Margin {l: 0., t: 0., r: 2., b: 0.}
        });
        // Distance-field shader: draws either a vertical connector line
        // (anim_pos < -0.5) or the folder icon silhouette.
        Self::shader_filler().set(cx, Quad::def_quad_shader().compose(shader_ast!({
            let line_vec: Self::instance_line_vec();
            let anim_pos: Self::instance_anim_pos();
            fn pixel() -> vec4 {
                df_viewport(pos * vec2(w, h));
                if anim_pos<-0.5 {
                    df_move_to(0.5 * w, line_vec.x * h);
                    df_line_to(0.5 * w, line_vec.y * h);
                    return df_stroke(color, 1.);
                }
                else { // its a folder
                    df_box(0. * w, 0.35 * h, 0.87 * w, 0.39 * h, 0.75);
                    df_box(0. * w, 0.28 * h, 0.5 * w, 0.3 * h, 1.);
                    df_union();
                    // the union of the two boxes forms the folder silhouette
                    return df_fill(color);
                }
            }
        })));
    }
    // Copies the registered theme values into this instance's cached fields.
    pub fn apply_style(&mut self, cx: &mut Cx) {
        self.filler.color = Self::color_filler().get(cx);
        self.node_layout = Self::layout_node().get(cx);
        self.row_height = self.node_layout.walk.height.fixed();
        self.filler_walk = Self::walk_filler().get(cx);
        self.folder_walk = Self::walk_folder().get(cx);
        self.color_tree_folder = Self::color_tree_folder().get(cx);
        self.color_tree_file = Self::color_tree_file().get(cx);
        self.tree_text.text_style = Self::text_style_label().get(cx);
        self.filler.shader = Self::shader_filler().get(cx);
    }
    // Resting background animation: color depends on marked state and on the
    // row parity (`counter & 1`) to produce alternating row colors.
    pub fn get_default_anim(cx: &Cx, counter: usize, marked: bool) -> Anim {
        Anim::new(Play::Chain {duration: 0.01}, vec![
            Track::color(Quad::instance_color(), Ease::Lin, vec![
                (1.0, if marked {
                    Theme::color_bg_marked().get(cx)
                } else if counter & 1 == 0 {
                    Theme::color_bg_selected().get(cx)
                } else {
                    Theme::color_bg_odd().get(cx)
                })
            ])
        ])
    }
    // Hover background animation; same color selection logic as the default
    // anim but with the "_over" theme variants.
    pub fn get_over_anim(cx: &Cx, counter: usize, marked: bool) -> Anim {
        let over_color = if marked {
            Theme::color_bg_marked_over().get(cx)
        } else if counter & 1 == 0 {
            Theme::color_bg_selected_over().get(cx)
        } else {
            Theme::color_bg_odd_over().get(cx)
        };
        Anim::new(Play::Cut {duration: 0.02}, vec![
            Track::color(Quad::instance_color(), Ease::Lin, vec![
                (0., over_color),
                (1., over_color)
            ])
        ])
    }
}
impl FileTree {
pub fn proto(cx: &mut Cx) -> Self {
Self {
//row_height: 20.,
//font_size: 8.0,
item_draw: FileTreeItemDraw::proto(cx),
root_node: FileNode::Folder {name: "".to_string(), state: NodeState::Open, draw: None, folder: vec![
FileNode::File {name: "loading...".to_string(), draw: None},
]},
drag_bg: Quad {
shader: cx.add_shader(Self::def_drag_bg_shader(), "FileTree.drag_bg"),
..Quad::proto(cx)
},
view: ScrollView {
scroll_v: Some(ScrollBar {
smoothing: Some(0.25),
..ScrollBar::proto(cx)
}),
..ScrollView::proto(cx)
},
drag_view: View {
is_overlay: true,
..View::proto(cx)
},
_drag_move: None,
_shadow_area: Area::Empty
}
}
pub fn color_drag_bg() -> ColorId {uid!()}
pub fn style(cx: &mut Cx, opt: &StyleOptions) {
Self::color_drag_bg().set(cx, Theme::color_bg_marked().get(cx));
FileTreeItemDraw::style(cx, opt)
}
pub fn def_drag_bg_shader() -> ShaderGen {
Quad::def_quad_shader().compose(shader_ast!({
fn pixel() -> vec4 {
df_viewport(pos * vec2(w, h));
df_box(0., 0., w, h, 2.);
return df_fill(color);
}
}))
}
/*
pub fn load_from_ron(&mut self, cx: &mut Cx, ron_data: &str) {
#[derive(Deserialize, Debug)]
struct RonFolder {
name: String,
open: bool,
files: Vec<RonFile>,
folders: Vec<RonFolder>
}
#[derive(Deserialize, Debug)]
struct RonFile {
name: String
}
fn recur_walk(node: RonFolder) -> FileNode {
let mut out = Vec::new();
for folder in node.folders {
out.push(recur_walk(folder));
}
for file in node.files {
out.push(FileNode::File {
name: file.name,
draw: None
})
};
FileNode::Folder {
name: node.name,
state: if node.open {NodeState::Open} else {NodeState::Closed},
draw: None,
folder: out
}
}
if let Ok(value) = ron::de::from_str(ron_data) {
self.root_node = recur_walk(value);
}
self.view.redraw_view_area(cx);
}*/
pub fn clear_roots(&mut self, cx: &mut Cx, names: &Vec<String>) {
self.root_node = FileNode::Folder {
name: "".to_string(),
draw: None,
state: NodeState::Open,
folder: names.iter().map( | v | FileNode::Folder {
name: v.clone(),
draw: None,
state: NodeState::Open,
folder: Vec::new()
}).collect()
};
self.view.redraw_view_area(cx);
}
pub fn save_open_folders(&mut self) -> Vec<String> {
let mut paths = Vec::new();
fn recur_walk(node: &mut FileNode, base: &str, paths: &mut Vec<String>) {
if node.is_open() {
if let FileNode::Folder {folder, name, ..} = node {
let new_base = if name.len()>0 {
if base.len()>0 {format!("{}/{}", base, name)}else {name.to_string()}
}else {base.to_string()};
paths.push(new_base.clone());
for node in folder {
recur_walk(node, &new_base, paths)
}
}
}
}
recur_walk(&mut self.root_node, "", &mut paths);
paths
}
pub fn load_open_folders(&mut self, cx: &mut Cx, paths: &Vec<String>) {
fn recur_walk(node: &mut FileNode, base: &str, depth: usize, paths: &Vec<String>) {
match node {
FileNode::File {..} => (),
FileNode::Folder {name, folder, state, ..} => {
let new_base = if name.len()>0 {
if base.len()>0 {format!("{}/{}", base, name)}else {name.to_string()}
}else {base.to_string()};
if depth == 0 || paths.iter().position( | v | *v == new_base).is_some() {
*state = NodeState::Open;
for node in folder {
recur_walk(node, &new_base, depth + 1, paths);
}
}
else {
*state = NodeState::Closed;
}
}
}
}
recur_walk(&mut self.root_node, "", 0, paths);
self.view.redraw_view_area(cx);
}
pub fn get_marked_paths(root: &mut FileNode) -> Vec<String> {
let mut paths = Vec::new();
let mut file_walker = FileWalker::new(root);
// make a path set of all marked items
while let Some((_depth, _index, _len, node)) = file_walker.walk() {
let node_draw = if let Some(node_draw) = node.get_draw() {node_draw}else {continue};
if node_draw.marked != 0 {
paths.push(file_walker.current_path());
}
}
paths
}
    /// Route an input event through every tree row, driving hover and
    /// folder open/close animations, and translate the interaction into a
    /// `FileTreeEvent` (file/folder selection, drag move/end/cancel, or
    /// `None` when nothing actionable happened).
    pub fn handle_file_tree(&mut self, cx: &mut Cx, event: &mut Event) -> FileTreeEvent {
        // alright. someone clicking on the tree items.
        let mut file_walker = FileWalker::new(&mut self.root_node);
        let mut counter = 0;
        self.view.handle_scroll_bars(cx, event);
        // todo, optimize this so events are not passed through 'all' of our tree elements
        // but filtered out somewhat based on a bounding rect
        let mut unmark_nodes = false;
        let mut drag_nodes = false;
        let mut drag_end: Option<FingerUpEvent> = None;
        // 0 = nothing selected this event, 1 = a file was pressed, 2 = a folder was pressed
        let mut select_node = 0;
        while let Some((_depth, _index, _len, node)) = file_walker.walk() {
            // alright we haz a node. so now what.
            let is_filenode = if let FileNode::File {..} = node {true} else {false};
            // nodes that were never drawn have no hit area yet; skip them
            let node_draw = if let Some(node_draw) = node.get_draw() {node_draw}else {continue};
            match event.hits(cx, node_draw.animator.area, HitOpt::default()) {
                Event::Animate(ae) => {
                    node_draw.animator.calc_area(cx, node_draw.animator.area, ae.time);
                },
                Event::AnimEnded(_) => {
                    node_draw.animator.end();
                },
                Event::FingerDown(_fe) => {
                    // mark ourselves, unmark others
                    if is_filenode {
                        select_node = 1;
                    }
                    else {
                        select_node = 2;
                    }
                    // tag the mark with the current event id so the unmark
                    // pass below can tell this node apart from stale marks
                    node_draw.marked = cx.event_id;
                    unmark_nodes = true;
                    node_draw.animator.play_anim(cx, FileTreeItemDraw::get_over_anim(cx, counter, node_draw.marked != 0));
                    if let FileNode::Folder {state, ..} = node {
                        // toggle the folder; an in-flight open/close animation
                        // is reversed from its current progress factor
                        *state = match state {
                            NodeState::Opening(fac) => {
                                NodeState::Closing(1.0 - *fac)
                            },
                            NodeState::Closing(fac) => {
                                NodeState::Opening(1.0 - *fac)
                            },
                            NodeState::Open => {
                                NodeState::Closing(1.0)
                            },
                            NodeState::Closed => {
                                NodeState::Opening(1.0)
                            }
                        };
                        // start the redraw loop
                        self.view.redraw_view_area(cx);
                    }
                },
                Event::FingerUp(fe) => {
                    if !self._drag_move.is_none() {
                        drag_end = Some(fe);
                        // we now have to do the drop....
                        self.drag_view.redraw_view_area(cx);
                        //self._drag_move = None;
                    }
                },
                Event::FingerMove(fe) => {
                    cx.set_down_mouse_cursor(MouseCursor::Hand);
                    // a drag only starts after the pointer moved far enough
                    if self._drag_move.is_none() {
                        if fe.move_distance() > 10. {
                            self._drag_move = Some(fe);
                            self.view.redraw_view_area(cx);
                            self.drag_view.redraw_view_area(cx);
                        }
                    }
                    else {
                        self._drag_move = Some(fe);
                        self.view.redraw_view_area(cx);
                        self.drag_view.redraw_view_area(cx);
                    }
                    drag_nodes = true;
                },
                Event::FingerHover(fe) => {
                    cx.set_hover_mouse_cursor(MouseCursor::Hand);
                    match fe.hover_state {
                        HoverState::In => {
                            node_draw.animator.play_anim(cx, FileTreeItemDraw::get_over_anim(cx, counter, node_draw.marked != 0));
                        },
                        HoverState::Out => {
                            node_draw.animator.play_anim(cx, FileTreeItemDraw::get_default_anim(cx, counter, node_draw.marked != 0));
                        },
                        _ => ()
                    }
                },
                _ => ()
            }
            counter += 1;
        }
        //unmark non selected nodes and also set even/odd animations to make sure its rendered properly
        if unmark_nodes {
            let mut file_walker = FileWalker::new(&mut self.root_node);
            let mut counter = 0;
            while let Some((_depth, _index, _len, node)) = file_walker.walk() {
                if let Some(node_draw) = node.get_draw() {
                    if node_draw.marked != cx.event_id || node_draw.marked == 0 {
                        node_draw.marked = 0;
                        node_draw.animator.play_anim(cx, FileTreeItemDraw::get_default_anim(cx, counter, false));
                    }
                }
                if !file_walker.current_closing() {
                    counter += 1;
                }
            }
        }
        if let Some(fe) = drag_end {
            self._drag_move = None;
            let paths = Self::get_marked_paths(&mut self.root_node);
            // a drop outside our own view rectangle is reported to the caller
            if !self.view.get_view_area(cx).get_rect(cx).contains(fe.abs.x, fe.abs.y) {
                return FileTreeEvent::DragEnd {
                    fe: fe.clone(),
                    paths: paths
                };
            }
        }
        if drag_nodes {
            if let Some(fe) = &self._drag_move {
                // lets check if we are over our own filetree
                // ifso, we need to support moving files with directories
                let paths = Self::get_marked_paths(&mut self.root_node);
                if !self.view.get_view_area(cx).get_rect(cx).contains(fe.abs.x, fe.abs.y) {
                    return FileTreeEvent::DragMove {
                        fe: fe.clone(),
                        paths: paths
                    };
                }
                else {
                    return FileTreeEvent::DragCancel;
                }
            }
        };
        if select_node != 0 {
            // find the marked node again and report its path as a selection
            let mut file_walker = FileWalker::new(&mut self.root_node);
            while let Some((_depth, _index, _len, node)) = file_walker.walk() {
                let node_draw = if let Some(node_draw) = node.get_draw() {node_draw}else {continue};
                if node_draw.marked != 0 {
                    if select_node == 1 {
                        return FileTreeEvent::SelectFile {
                            path: file_walker.current_path()
                        };
                    }
                    else {
                        return FileTreeEvent::SelectFolder {
                            path: file_walker.current_path()
                        };
                    }
                }
            }
        }
        FileTreeEvent::None
    }
    /// Draw the whole tree into the scroll view: alternating row
    /// backgrounds, tree connector lines per depth, folder rows that scale
    /// while opening/closing, filler rows below the content, and — while a
    /// drag is active — an overlay listing the marked node names that
    /// follows the pointer.
    pub fn draw_file_tree(&mut self, cx: &mut Cx) {
        if self.view.begin_view(cx, Layout::default()).is_err() {return}
        let mut file_walker = FileWalker::new(&mut self.root_node);
        // lets draw the filetree
        let mut counter = 0;
        let mut scale_stack = Vec::new();
        let mut last_stack = Vec::new();
        scale_stack.push(1.0f64);
        self.item_draw.apply_style(cx);
        while let Some((depth, index, len, node)) = file_walker.walk() {
            let is_first = index == 0;
            let is_last = index == len - 1;
            // pop per-depth state when the walker steps back out of folders
            while depth < scale_stack.len() {
                scale_stack.pop();
                last_stack.pop();
            }
            // rows inside an opening/closing ancestor shrink by its scale
            let scale = scale_stack[depth - 1];
            // lets store the bg area in the tree
            let node_draw = node.get_draw();
            if node_draw.is_none() {
                *node_draw = Some(NodeDraw {
                    animator: Animator::default(),
                    marked: 0
                })
            }
            let node_draw = node_draw.as_mut().unwrap();
            node_draw.animator.init(cx, | cx | FileTreeItemDraw::get_default_anim(cx, counter, false));
            // if we are NOT animating, we need to get change a default color.
            self.item_draw.node_bg.color = node_draw.animator.last_color(cx, Quad::instance_color());
            let mut node_layout = self.item_draw.node_layout.clone();
            node_layout.walk.height = Height::Fix(self.item_draw.row_height * scale as f32);
            let inst = self.item_draw.node_bg.begin_quad(cx, node_layout);
            node_draw.animator.set_area(cx, inst.clone().into());
            let is_marked = node_draw.marked != 0;
            // draw one connector/filler cell per ancestor depth
            for i in 0..(depth - 1) {
                if i == depth - 2 { // our own thread.
                    let area = self.item_draw.filler.draw_quad(cx, self.item_draw.filler_walk);
                    if is_last {
                        if is_first {
                            //line_vec
                            area.push_vec2(cx, Vec2 {x: 0.3, y: 0.7})
                        }
                        else {
                            //line_vec
                            area.push_vec2(cx, Vec2 {x: -0.2, y: 0.7})
                        }
                    }
                    else if is_first {
                        //line_vec
                        area.push_vec2(cx, Vec2 {x: -0.3, y: 1.2})
                    }
                    else {
                        //line_vec
                        area.push_vec2(cx, Vec2 {x: -0.2, y: 1.2});
                    }
                    //anim_pos
                    area.push_float(cx, -1.);
                }
                else {
                    let here_last = if last_stack.len()>1 {last_stack[i + 1]} else {false};
                    if here_last {
                        cx.walk_turtle(self.item_draw.filler_walk);
                    }
                    else {
                        let area = self.item_draw.filler.draw_quad(cx, self.item_draw.filler_walk);
                        //line_vec
                        area.push_vec2(cx, Vec2 {x: -0.2, y: 1.2});
                        //anim_pos
                        area.push_float(cx, -1.);
                    }
                }
            };
            self.item_draw.filler.z = 0.;
            self.item_draw.tree_text.z = 0.;
            //self.item_draw.tree_text.font_size = self.font_size;
            self.item_draw.tree_text.font_scale = scale as f32;
            match node {
                FileNode::Folder {name, state, ..} => {
                    // draw the folder icon
                    let inst = self.item_draw.filler.draw_quad(cx, self.item_draw.folder_walk);
                    inst.push_vec2(cx, Vec2::default());
                    inst.push_float(cx, 1.);
                    // move the turtle down a bit
                    //cx.move_turtle(0., 3.5);
                    cx.turtle_align_y();
                    //cx.realign_turtle(Align::left_center(), false);
                    self.item_draw.tree_text.color = self.item_draw.color_tree_folder;
                    let wleft = cx.get_width_left() - 10.;
                    self.item_draw.tree_text.wrapping = Wrapping::Ellipsis(wleft);
                    self.item_draw.tree_text.draw_text(cx, name);
                    // advance the open/close animation: the factor decays by
                    // 0.6 per frame and settles once it drops below 0.001;
                    // while animating, keep requesting redraws
                    let (new_scale, new_state) = match state {
                        NodeState::Opening(fac) => {
                            self.view.redraw_view_area(cx);
                            if *fac < 0.001 {
                                (1.0, NodeState::Open)
                            }
                            else {
                                (1.0 - *fac, NodeState::Opening(*fac * 0.6))
                            }
                        },
                        NodeState::Closing(fac) => {
                            self.view.redraw_view_area(cx);
                            if *fac < 0.001 {
                                (0.0, NodeState::Closed)
                            }
                            else {
                                (*fac, NodeState::Closing(*fac * 0.6))
                            }
                        },
                        NodeState::Open => {
                            (1.0, NodeState::Open)
                        },
                        NodeState::Closed => {
                            (1.0, NodeState::Closed)
                        }
                    };
                    *state = new_state;
                    last_stack.push(is_last);
                    scale_stack.push(scale * new_scale);
                },
                FileNode::File {name, ..} => {
                    //cx.move_turtle(0., 3.5);
                    cx.turtle_align_y();
                    let wleft = cx.get_width_left() - 10.;
                    self.item_draw.tree_text.wrapping = Wrapping::Ellipsis(wleft);
                    //cx.realign_turtle(Align::left_center(), false);
                    self.item_draw.tree_text.color = if is_marked {
                        self.item_draw.color_tree_folder
                    }
                    else {
                        self.item_draw.color_tree_file
                    };
                    self.item_draw.tree_text.draw_text(cx, name);
                }
            }
            self.item_draw.node_bg.end_quad(cx, &inst);
            cx.turtle_new_line();
            // if any of the parents is closing, don't count alternating lines
            if !file_walker.current_closing() {
                counter += 1;
            }
        }
        // draw filler nodes
        if self.item_draw.row_height > 0. {
            let view_total = cx.get_turtle_bounds();
            let rect_now = cx.get_turtle_rect();
            let mut y = view_total.y;
            // continue the even/odd striping down to the bottom of the view
            while y < rect_now.h {
                self.item_draw.node_bg.color = if counter & 1 == 0 {Theme::color_bg_selected().get(cx)}else {Theme::color_bg_odd().get(cx)};
                self.item_draw.node_bg.draw_quad(
                    cx,
                    Walk::wh(Width::Fill, Height::Fix((rect_now.h - y).min(self.item_draw.row_height))),
                );
                cx.turtle_new_line();
                y += self.item_draw.row_height;
                counter += 1;
            }
        }
        // draw the drag item overlay layer if need be
        if let Some(mv) = &self._drag_move {
            if let Ok(()) = self.drag_view.begin_view(cx, Layout {
                abs_origin: Some(Vec2 {x: mv.abs.x + 5., y: mv.abs.y + 5.}),
                ..Default::default()
            }) {
                let mut file_walker = FileWalker::new(&mut self.root_node);
                while let Some((_depth, _index, _len, node)) = file_walker.walk() {
                    let node_draw = if let Some(node_draw) = node.get_draw() {node_draw}else {continue};
                    if node_draw.marked != 0 {
                        self.drag_bg.z = 10.0;
                        self.item_draw.tree_text.z = 10.0;
                        self.drag_bg.color = Self::color_drag_bg().get(cx);
                        let inst = self.drag_bg.begin_quad(cx, FileTreeItemDraw::layout_drag_bg().get(cx));
                        self.item_draw.tree_text.color = FileTreeItemDraw::color_tree_folder().get(cx);
                        self.item_draw.tree_text.draw_text(cx, match node {
                            FileNode::Folder {name, ..} => {name},
                            FileNode::File {name, ..} => {name}
                        });
                        self.drag_bg.end_quad(cx, &inst);
                    }
                }
                self.drag_view.end_view(cx);
            }
        }
        self.item_draw.shadow.draw_shadow_top(cx);
        self.view.end_view(cx);
    }
}
| 37.918632 | 176 | 0.455046 |
22a4e96b9e4bf2fb8bb8ea3fc35d167bab791376 | 2,397 | use super::BackendTypes;
use crate::mir::debuginfo::{FunctionDebugContext, VariableKind};
use rustc::mir;
use rustc::ty::layout::Size;
use rustc::ty::{Instance, Ty};
use rustc_hir::def_id::CrateNum;
use rustc_span::{SourceFile, Span};
use rustc_target::abi::call::FnAbi;
use syntax::ast::Name;
pub trait DebugInfoMethods<'tcx>: BackendTypes {
    /// Emit debuginfo metadata associating `vtable` with the type `ty`.
    fn create_vtable_metadata(&self, ty: Ty<'tcx>, vtable: Self::Value);
    /// Creates the function-specific debug context.
    ///
    /// Returns the FunctionDebugContext for the function which holds state needed
    /// for debug info creation, if it is enabled.
    fn create_function_debug_context(
        &self,
        instance: Instance<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        llfn: Self::Function,
        mir: &mir::Body<'_>,
    ) -> Option<FunctionDebugContext<Self::DIScope>>;
    /// Produce a debug scope covering `file` (from `defining_crate`),
    /// nested inside `scope_metadata`.
    fn extend_scope_to_file(
        &self,
        scope_metadata: Self::DIScope,
        file: &SourceFile,
        defining_crate: CrateNum,
    ) -> Self::DIScope;
    /// Finish debuginfo emission for this backend's codegen unit.
    fn debuginfo_finalize(&self);
    // FIXME(eddyb) find a common convention for all of the debuginfo-related
    // names (choose between `dbg`, `debug`, `debuginfo`, `debug_info` etc.).
    /// Create the backend description of a single source-level variable
    /// (`variable_name` of type `variable_type`) within `scope_metadata`.
    fn create_dbg_var(
        &self,
        dbg_context: &FunctionDebugContext<Self::DIScope>,
        variable_name: Name,
        variable_type: Ty<'tcx>,
        scope_metadata: Self::DIScope,
        variable_kind: VariableKind,
        span: Span,
    ) -> Self::DIVariable;
}
pub trait DebugInfoBuilderMethods: BackendTypes {
    // FIXME(eddyb) find a common convention for all of the debuginfo-related
    // names (choose between `dbg`, `debug`, `debuginfo`, `debug_info` etc.).
    /// Associate the debug variable `dbg_var` with the storage rooted at
    /// `variable_alloca`: `direct_offset` bytes in, then following the
    /// chain of `indirect_offsets`.
    fn dbg_var_addr(
        &mut self,
        dbg_context: &FunctionDebugContext<Self::DIScope>,
        dbg_var: Self::DIVariable,
        scope_metadata: Self::DIScope,
        variable_alloca: Self::Value,
        direct_offset: Size,
        // NB: each offset implies a deref (i.e. they're steps in a pointer chain).
        indirect_offsets: &[Size],
        span: Span,
    );
    /// Set the source location (`scope` + `span`) attached to
    /// subsequently emitted instructions.
    fn set_source_location(
        &mut self,
        debug_context: &mut FunctionDebugContext<Self::DIScope>,
        scope: Self::DIScope,
        span: Span,
    );
    /// Emit a reference to the GDB debug-scripts section global.
    /// NOTE(review): presumably this keeps the global from being stripped —
    /// confirm against the backend implementation.
    fn insert_reference_to_gdb_debug_scripts_section_global(&mut self);
    /// Attach the human-readable `name` to `value` in the backend IR.
    fn set_var_name(&mut self, value: Self::Value, name: &str);
}
| 34.242857 | 83 | 0.650814 |
69aba203a51bbcc67dd995007c25b1815024280d | 899 | use std::os;
use std::collections::HashMap;
use cargo::core::MultiShell;
use cargo::util::{CliResult, CliError, config};
/// Decoded command-line flags for `cargo config-list`.
#[deriving(Decodable)]
struct ConfigListFlags {
    flag_human: bool, // set when `--human` was passed
}
/// Encodable result for the non-human output mode: the merged
/// configuration key/value map.
#[deriving(Encodable)]
struct ConfigOut {
    values: HashMap<String, config::ConfigValue>
}
/// Docopt-style usage string for the `config-list` subcommand.
pub const USAGE: &'static str = "
Usage:
    cargo config-list --human
    cargo config-list -h | --help
Options:
    -h, --help              Print this message
";
/// Entry point for `cargo config-list`: load all configuration visible
/// from the current working directory and either print it (`--human`) or
/// return it for structured encoding.
pub fn execute(args: ConfigListFlags,
               _: &mut MultiShell) -> CliResult<Option<ConfigOut>> {
    let configs = try!(config::all_configs(os::getcwd()).map_err(|_|
        CliError::new("Couldn't load configuration", 1)));
    if args.flag_human {
        // Human-readable mode: one `key = value` line per entry.
        for (key, value) in configs.iter() {
            println!("{} = {}", key, value);
        }
        Ok(None)
    } else {
        Ok(Some(ConfigOut { values: configs }))
    }
}
| 22.475 | 68 | 0.609566 |
0e6186ff7a149151ac8b58e93106ac5b02af8e07 | 9,591 | //! System call interface for userspace processes.
//!
//! Drivers implement these interfaces to expose operations to processes.
//!
//! # System-call Overview
//!
//! Tock supports six system calls. The `yield` and `memop` system calls are
//! handled by the core kernel, while four others are implemented by drivers:
//!
//! * `subscribe` passes a upcall to the driver which it can
//! invoke on the process later, when an event has occurred or data
//! of interest is available.
//!
//! * `command` tells the driver to do something immediately.
//!
//! * `allow_readwrite` provides the driver read-write access to an
//! application buffer.
//!
//! * `allow_readonly` provides the driver read-only access to an
//! application buffer.
//!
//! ## Mapping system-calls to drivers
//!
//! Each of these three system calls takes at least two
//! parameters. The first is a _driver identifier_ and tells the
//! scheduler which driver to forward the system call to. The second
//! parameters is a __syscall identifer_ and is used by the driver to
//! differentiate instances of the call with different driver-specific
//! meanings (e.g. `subscribe` for "data received" vs `subscribe` for
//! "send completed"). The mapping between _driver identifiers_ and
//! drivers is determined by a particular platform, while the _syscall
//! identifier_ is driver-specific.
//!
//! One convention in Tock is that _driver minor number_ 0 for the `command`
//! syscall can always be used to determine if the driver is supported by
//! the running kernel by checking the return code. If the return value is
//! greater than or equal to zero then the driver is present. Typically this is
//! implemented by a null command that only returns 0, but in some cases the
//! command can also return more information, like the number of supported
//! devices (useful for things like the number of LEDs).
//!
//! # The `yield` system call class
//!
//! While drivers do not handle `yield` system calls, it is important
//! to understand them and how they interact with `subscribe`, which
//! registers upcall functions with the kernel. When a process calls
//! a `yield` system call, the kernel checks if there are any pending
//! upcalls for the process. If there are pending upcalls, it
//! pushes one upcall onto the process stack. If there are no
//! pending upcalls, `yield-wait` will cause the process to sleep
//! until a upcall is trigered, while `yield-no-wait` returns
//! immediately.
//!
//! # Method result types
//!
//! Each driver method has a limited set of valid return types. Every
//! method has a single return type corresponding to success and a
//! single return type corresponding to failure. For the `subscribe`
//! and `allow` system calls, these return types are the same for
//! every instance of those calls. Each instance of the `command`
//! system call, however, has its own specified return types. A
//! command that requests a timestamp, for example, might return a
//! 32-bit number on success and an error code on failure, while a
//! command that requests time of day in microsecond granularity might
//! return a 64-bit number and a 32-bit timezone encoding on success,
//! and an error code on failure.
//!
//! These result types are represented as safe Rust types. The core
//! kernel (the scheduler and syscall dispatcher) is responsible for
//! encoding these types into the Tock system call ABI specification.
use crate::errorcode::ErrorCode;
use crate::mem::{ReadOnlyAppSlice, ReadWriteAppSlice};
use crate::process;
use crate::returncode::ReturnCode;
use crate::syscall::SyscallReturn;
use crate::upcall::{AppId, Upcall};
use core::convert::TryFrom;
/// Possible return values of a `command` driver method, as specified
/// in TRD104.
///
/// This is just a wrapper around
/// [`SyscallReturn`](SyscallReturn) since a
/// `command` driver method may only return primitive integer types as
/// payload.
///
/// It is important for this wrapper to only be constructable over
/// variants of
/// [`SyscallReturn`](SyscallReturn) that are
/// deemed safe for a capsule to construct and return to an
/// application (e.g. not
/// [`SubscribeSuccess`](crate::syscall::SyscallReturn::SubscribeSuccess)).
/// This means that the inner value **must** remain private.
pub struct CommandReturn(SyscallReturn);
impl CommandReturn {
pub(crate) fn into_inner(self) -> SyscallReturn {
self.0
}
/// Command error
pub fn failure(rc: ErrorCode) -> Self {
CommandReturn(SyscallReturn::Failure(rc))
}
/// Command error with an additional 32-bit data field
pub fn failure_u32(rc: ErrorCode, data0: u32) -> Self {
CommandReturn(SyscallReturn::FailureU32(rc, data0))
}
/// Command error with two additional 32-bit data fields
pub fn failure_u32_u32(rc: ErrorCode, data0: u32, data1: u32) -> Self {
CommandReturn(SyscallReturn::FailureU32U32(rc, data0, data1))
}
/// Command error with an additional 64-bit data field
pub fn failure_u64(rc: ErrorCode, data0: u64) -> Self {
CommandReturn(SyscallReturn::FailureU64(rc, data0))
}
/// Successful command
pub fn success() -> Self {
CommandReturn(SyscallReturn::Success)
}
/// Successful command with an additional 32-bit data field
pub fn success_u32(data0: u32) -> Self {
CommandReturn(SyscallReturn::SuccessU32(data0))
}
/// Successful command with two additional 32-bit data fields
pub fn success_u32_u32(data0: u32, data1: u32) -> Self {
CommandReturn(SyscallReturn::SuccessU32U32(data0, data1))
}
/// Successful command with three additional 32-bit data fields
pub fn success_u32_u32_u32(data0: u32, data1: u32, data2: u32) -> Self {
CommandReturn(SyscallReturn::SuccessU32U32U32(data0, data1, data2))
}
/// Successful command with an additional 64-bit data field
pub fn success_u64(data0: u64) -> Self {
CommandReturn(SyscallReturn::SuccessU64(data0))
}
/// Successful command with an additional 64-bit and 32-bit data field
pub fn success_u64_u32(data0: u64, data1: u32) -> Self {
CommandReturn(SyscallReturn::SuccessU64U32(data0, data1))
}
}
impl From<ReturnCode> for CommandReturn {
    fn from(rc: ReturnCode) -> Self {
        match rc {
            ReturnCode::SUCCESS => CommandReturn::success(),
            // Non-success codes are converted into an ErrorCode failure.
            // NOTE(review): `try_from(rc).unwrap()` panics if a non-SUCCESS
            // ReturnCode variant has no ErrorCode equivalent — confirm all
            // remaining variants convert.
            _ => CommandReturn::failure(ErrorCode::try_from(rc).unwrap()),
        }
    }
}
impl From<process::Error> for CommandReturn {
fn from(perr: process::Error) -> Self {
CommandReturn::failure(perr.into())
}
}
/// Trait for capsules implementing peripheral driver system calls
/// specified in TRD104. The kernel translates the values passed from
/// userspace into Rust types and includes which process is making the
/// call. All of these system calls perform very little synchronous work;
/// long running computations or I/O should be split-phase, with an upcall
/// indicating their completion.
///
/// The exact instances of each of these methods (which identifiers are valid
/// and what they represent) are specific to the peripheral system call
/// driver.
#[allow(unused_variables)]
pub trait Driver {
    /// System call for a process to provide an upcall function pointer to
    /// the kernel. Peripheral system call driver capsules invoke
    /// upcalls to indicate events have occurred. These events are typically triggered
    /// in response to `command` calls. For example, a command that sets a timer to
    /// fire in the future will cause an upcall to invoke after the command returns, when
    /// the timer expires, while a command to sample a sensor will cause an upcall to
    /// invoke when the sensor value is ready.
    fn subscribe(
        &self,
        subscribe_identifier: usize,
        upcall: Upcall,
        app_id: AppId,
    ) -> Result<Upcall, (Upcall, ErrorCode)> {
        // Default: no subscriptions supported; hand the upcall back unused.
        Err((upcall, ErrorCode::NOSUPPORT))
    }
    /// System call for a process to perform a short synchronous operation
    /// or start a long-running split-phase operation (whose completion
    /// is signaled with an upcall). Command 0 is a reserved command to
    /// detect if a peripheral system call driver is installed and must
    /// always return a CommandReturn::Success.
    fn command(&self, which: usize, r2: usize, r3: usize, caller_id: AppId) -> CommandReturn {
        // NOTE(review): this default returns NOSUPPORT for every command,
        // including command 0 — implementors must override `command` to
        // satisfy the command-0 convention described above.
        CommandReturn::failure(ErrorCode::NOSUPPORT)
    }
    /// System call for a process to pass a buffer (a ReadWriteAppSlice) to
    /// the kernel that the kernel can either read or write. The kernel calls
    /// this method only after it checks that the entire buffer is
    /// within memory the process can both read and write.
    fn allow_readwrite(
        &self,
        app: AppId,
        which: usize,
        slice: ReadWriteAppSlice,
    ) -> Result<ReadWriteAppSlice, (ReadWriteAppSlice, ErrorCode)> {
        // Default: no read-write buffers supported; hand the slice back unused.
        Err((slice, ErrorCode::NOSUPPORT))
    }
    /// System call for a process to pass a read-only buffer (a
    /// ReadOnlyAppSlice) to the kernel that the kernel can read.
    /// The kernel calls this method only after it
    /// checks that the entire buffer is within memory the
    /// process can read. This system call allows a process to pass
    /// read-only data (e.g., in flash) to the kernel.
    fn allow_readonly(
        &self,
        app: AppId,
        which: usize,
        slice: ReadOnlyAppSlice,
    ) -> Result<ReadOnlyAppSlice, (ReadOnlyAppSlice, ErrorCode)> {
        // Default: no read-only buffers supported; hand the slice back unused.
        Err((slice, ErrorCode::NOSUPPORT))
    }
}
| 41.340517 | 94 | 0.700553 |
69692ba164ee9c22ea4821de96560af0639bdeba | 11,673 | // Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! The storage admin protocol is a FIDL protocol that is hosted by the framework for clients to
//! perform privileged operations on isolated storage. Clients can perform tasks such as opening a
//! component's storage or outright deleting it.
//!
//! This API allows clients to perform a limited set of mutable operations on storage, without
//! direct access to the backing directory, with the goal of making it easier for clients to work
//! with isolated storage without needing to understand component_manager's storage layout.
use {
crate::{
capability::{CapabilityProvider, CapabilitySource, ComponentCapability, OptionalTask},
channel,
model::{
component::{BindReason, WeakComponentInstance},
error::ModelError,
hooks::{Event, EventPayload, EventType, Hook, HooksRegistration},
routing::{RouteRequest, RouteSource},
storage,
},
},
::routing::route_capability,
anyhow::{format_err, Error},
async_trait::async_trait,
cm_rust::{CapabilityName, ExposeDecl, OfferDecl, StorageDecl, UseDecl},
fidl::endpoints::{ProtocolMarker, ServerEnd},
fidl_fuchsia_component as fcomponent,
fidl_fuchsia_io::{MODE_TYPE_SERVICE, OPEN_RIGHT_READABLE, OPEN_RIGHT_WRITABLE},
fidl_fuchsia_sys2 as fsys, fuchsia_async as fasync, fuchsia_zircon as zx,
futures::TryStreamExt,
lazy_static::lazy_static,
log::*,
moniker::ExtendedMoniker,
moniker::{AbsoluteMoniker, AbsoluteMonikerBase},
routing::component_instance::ComponentInstanceInterface,
std::{
convert::TryInto,
path::PathBuf,
sync::{Arc, Weak},
},
};
lazy_static! {
pub static ref STORAGE_ADMIN_PROTOCOL_NAME: CapabilityName =
fsys::StorageAdminMarker::NAME.into();
}
/// Per-connection state for serving `fuchsia.sys2.StorageAdmin`: the
/// storage declaration being administered, the component it was found in,
/// and the shared protocol implementation.
struct StorageAdminProtocolProvider {
    // The storage capability declaration this provider administers.
    storage_decl: StorageDecl,
    // The component instance the storage declaration belongs to.
    component: WeakComponentInstance,
    // Shared protocol implementation that actually serves requests.
    storage_admin: Arc<StorageAdmin>,
}
impl StorageAdminProtocolProvider {
    /// Bundle the storage declaration, its source component, and the
    /// shared `StorageAdmin` into a capability provider.
    pub fn new(
        storage_decl: StorageDecl,
        component: WeakComponentInstance,
        storage_admin: Arc<StorageAdmin>,
    ) -> Self {
        Self { storage_decl, component, storage_admin }
    }
}
#[async_trait]
impl CapabilityProvider for StorageAdminProtocolProvider {
    /// Validate the open request (rights, mode, empty relative path) and,
    /// if acceptable, spawn a task that serves the StorageAdmin protocol
    /// on the provided channel.
    async fn open(
        self: Box<Self>,
        flags: u32,
        open_mode: u32,
        in_relative_path: PathBuf,
        server_end: &mut zx::Channel,
    ) -> Result<OptionalTask, ModelError> {
        let server_end = channel::take_channel(server_end);
        // The protocol requires both read and write rights.
        let rw = OPEN_RIGHT_READABLE | OPEN_RIGHT_WRITABLE;
        if flags & rw != rw {
            warn!("open request for the storage admin protocol rejected: access denied");
            return Ok(None.into());
        }
        if open_mode & MODE_TYPE_SERVICE == 0 {
            warn!("open request for the storage admin protocol rejected: incorrect mode");
            return Ok(None.into());
        }
        if in_relative_path != PathBuf::from("") {
            warn!("open request for the storage admin protocol rejected: invalid path");
            return Ok(None.into());
        }
        let storage_decl = self.storage_decl.clone();
        let component = self.component.clone();
        let storage_admin = self.storage_admin.clone();
        let task = fasync::Task::spawn(async move {
            if let Err(e) = storage_admin.serve(storage_decl, component, server_end).await {
                warn!("failed to serve storage admin protocol: {:?}", e);
            }
        });
        Ok(task.into())
    }
}
/// Stateless hook that installs a `StorageAdminProtocolProvider` whenever
/// the `fuchsia.sys2.StorageAdmin` protocol is routed from a storage
/// capability.
pub struct StorageAdmin {}
// `StorageAdmin` is a `Hook` that serves the `StorageAdmin` FIDL protocol.
impl StorageAdmin {
pub fn new() -> Self {
Self {}
}
    /// Hook registrations for this protocol: only `CapabilityRouted`
    /// events are of interest. The registration holds a `Weak` so the
    /// hook does not keep `StorageAdmin` alive.
    pub fn hooks(self: &Arc<Self>) -> Vec<HooksRegistration> {
        vec![HooksRegistration::new(
            "StorageAdmin",
            vec![EventType::CapabilityRouted],
            Arc::downgrade(self) as Weak<dyn Hook>,
        )]
    }
async fn extract_storage_decl(
source_capability: &ComponentCapability,
component: WeakComponentInstance,
) -> Result<Option<StorageDecl>, ModelError> {
match source_capability {
ComponentCapability::Offer(OfferDecl::Protocol(_))
| ComponentCapability::Expose(ExposeDecl::Protocol(_))
| ComponentCapability::Use(UseDecl::Protocol(_)) => (),
_ => return Ok(None),
}
if source_capability.source_name() != Some(&fsys::StorageAdminMarker::NAME.into()) {
return Ok(None);
}
let source_capability_name = source_capability.source_capability_name();
if source_capability_name.is_none() {
return Ok(None);
}
let source_component = component.upgrade()?;
let source_component_state = source_component.lock_resolved_state().await?;
let decl = source_component_state.decl();
Ok(decl.find_storage_source(source_capability_name.unwrap()).cloned())
}
async fn on_scoped_framework_capability_routed_async<'a>(
self: Arc<Self>,
source_capability: &'a ComponentCapability,
component: WeakComponentInstance,
capability_provider: Option<Box<dyn CapabilityProvider>>,
) -> Result<Option<Box<dyn CapabilityProvider>>, ModelError> {
// If some other capability has already been installed, then there's nothing to
// do here.
if capability_provider.is_some() {
return Ok(capability_provider);
}
// Find the storage decl, if it exists we're good to go
let storage_decl = Self::extract_storage_decl(source_capability, component.clone()).await?;
if let Some(storage_decl) = storage_decl {
return Ok(Some(Box::new(StorageAdminProtocolProvider::new(
storage_decl,
component,
self.clone(),
)) as Box<dyn CapabilityProvider>));
}
// The declaration referenced either a nonexistent capability, or a capability that isn't a
// storage capability. We can't be the provider for this.
Ok(None)
}
    /// Serve the `fuchsia.sys2.StorageAdmin` protocol on `server_end` for
    /// one client: route the storage capability's backing directory once
    /// up front, then handle OpenComponentStorage / DeleteComponentStorage
    /// requests until the channel closes or an error occurs.
    async fn serve(
        self: Arc<Self>,
        storage_decl: StorageDecl,
        component: WeakComponentInstance,
        server_end: zx::Channel,
    ) -> Result<(), Error> {
        let component = component.upgrade().map_err(|e| {
            format_err!(
                "unable to serve storage admin protocol, model reference is no longer valid: {:?}",
                e,
            )
        })?;
        let storage_moniker = component.abs_moniker.clone();
        // Route the backing directory once; every request on this
        // connection reuses the same source.
        let storage_capability_source_info = {
            match route_capability(RouteRequest::StorageBackingDirectory(storage_decl), &component)
                .await?
            {
                RouteSource::StorageBackingDirectory(storage_source) => storage_source,
                _ => unreachable!("expected RouteSource::StorageBackingDirectory"),
            }
        };
        let mut stream = ServerEnd::<fsys::StorageAdminMarker>::new(server_end)
            .into_stream()
            .expect("could not convert channel into stream");
        while let Some(request) = stream.try_next().await? {
            match request {
                fsys::StorageAdminRequest::OpenComponentStorage {
                    relative_moniker,
                    flags,
                    mode,
                    object,
                    control_handle: _,
                } => {
                    let relative_moniker = relative_moniker.as_str().try_into()?;
                    let abs_moniker =
                        AbsoluteMoniker::from_relative(&component.abs_moniker, &relative_moniker)?;
                    let instance_id = component
                        .try_get_component_id_index()?
                        .look_up_moniker(&abs_moniker)
                        .cloned();
                    let dir_proxy = storage::open_isolated_storage(
                        storage_capability_source_info.clone(),
                        relative_moniker,
                        instance_id.as_ref(),
                        mode,
                        &BindReason::AccessCapability {
                            target: ExtendedMoniker::ComponentInstance(storage_moniker.clone()),
                            path: storage_capability_source_info.backing_directory_path.clone(),
                        },
                    )
                    .await?;
                    // Hand the opened storage directory back over the
                    // caller-supplied channel.
                    dir_proxy.clone(flags, object)?;
                }
                fsys::StorageAdminRequest::DeleteComponentStorage {
                    relative_moniker,
                    responder,
                } => {
                    let err_code = match relative_moniker.as_str().try_into() {
                        Err(e) => {
                            warn!("couldn't parse string as relative moniker for storage admin protocol: {:?}", e);
                            Err(fcomponent::Error::InvalidArguments)
                        }
                        Ok(relative_moniker) => {
                            let abs_moniker = AbsoluteMoniker::from_relative(
                                &component.abs_moniker,
                                &relative_moniker,
                            )?;
                            let instance_id = component
                                .try_get_component_id_index()?
                                .look_up_moniker(&abs_moniker)
                                .cloned();
                            let res = storage::delete_isolated_storage(
                                storage_capability_source_info.clone(),
                                relative_moniker,
                                instance_id.as_ref(),
                            )
                            .await;
                            match res {
                                Err(e) => {
                                    warn!(
                                        "couldn't delete storage for storage admin protocol: {:?}",
                                        e
                                    );
                                    Err(fcomponent::Error::Internal)
                                }
                                Ok(()) => Ok(()),
                            }
                        }
                    };
                    match err_code {
                        Err(e) => responder.send(&mut Err(e))?,
                        Ok(()) => responder.send(&mut Ok(()))?,
                    }
                }
            }
        }
        Ok(())
    }
#[async_trait]
impl Hook for StorageAdmin {
    /// Intercepts `CapabilityRouted` events whose source is another
    /// capability and gives this `StorageAdmin` the chance to install its
    /// own scoped capability provider in place of the current one.
    async fn on(self: Arc<Self>, event: &Event) -> Result<(), ModelError> {
        match &event.result {
            Ok(EventPayload::CapabilityRouted {
                source: CapabilitySource::Capability { source_capability, component },
                capability_provider,
            }) => {
                // Take the currently installed provider (if any) and let the
                // handler decide whether to substitute its own; whatever it
                // returns becomes the provider for this routing.
                let mut capability_provider = capability_provider.lock().await;
                *capability_provider = self
                    .on_scoped_framework_capability_routed_async(
                        source_capability,
                        component.clone(),
                        capability_provider.take(),
                    )
                    .await?;
                Ok(())
            }
            // Other events (and failed routings) are not our concern.
            _ => Ok(()),
        }
    }
}
| 39.83959 | 115 | 0.548102 |
ef6b106000778480fea19bbc1a99a06b5ba6a4ad | 926 | use {
super::CSVStorage,
crate::{Result, Row, RowIter, Schema, Store, Value, WIPError},
async_trait::async_trait,
csv::Reader,
};
#[async_trait(?Send)]
impl Store for CSVStorage {
async fn fetch_schema(&self, _table_name: &str) -> Result<Option<Schema>> {
Ok(self.schema.clone())
}
async fn scan_data(&self, _table_name: &str) -> Result<RowIter> {
let mut reader = Reader::from_path(self.path.as_str())
.map_err(|error| WIPError::Debug(format!("{:?}", error)))?;
let keyed_rows: Vec<Result<(Value, Row)>> = reader
.records()
.enumerate()
.map(|(index, record)| {
record
.map_err(|error| WIPError::Debug(format!("{:?}", error)).into())
.map(|record| {
(
Value::I64(index as i64),
Row(record
.into_iter()
.map(|cell| Value::Str(cell.to_string()))
.collect()),
)
})
})
.collect();
Ok(Box::new(keyed_rows.into_iter()))
}
}
| 23.74359 | 76 | 0.597192 |
67aa46b4650e69b8571789d4fd956edccdc79f0a | 1,704 | use super::errors::{BlockOperationError, IndexedAttestationInvalid as Invalid};
use super::signature_sets::{get_pubkey_from_state, indexed_attestation_signature_set};
use crate::VerifySignatures;
use types::*;
type Result<T> = std::result::Result<T, BlockOperationError<Invalid>>;
/// Wraps an `Invalid` reason in a `BlockOperationError`.
fn error(reason: Invalid) -> BlockOperationError<Invalid> {
    BlockOperationError::invalid(reason)
}
/// Verify an `IndexedAttestation`.
///
/// Checks, in order: the number of attesting indices is within the
/// per-committee bound, the indices are strictly increasing (i.e. sorted and
/// unique), and — when `verify_signatures` requests it — the aggregate
/// signature is valid.
///
/// Spec v0.11.1
pub fn is_valid_indexed_attestation<T: EthSpec>(
    state: &BeaconState<T>,
    indexed_attestation: &IndexedAttestation<T>,
    verify_signatures: VerifySignatures,
    spec: &ChainSpec,
) -> Result<()> {
    let indices = &indexed_attestation.attesting_indices;

    // Verify max number of indices
    verify!(
        indices.len() <= T::MaxValidatorsPerCommittee::to_usize(),
        Invalid::MaxIndicesExceed(T::MaxValidatorsPerCommittee::to_usize(), indices.len())
    );

    // Check that indices are sorted and unique: every adjacent pair must be
    // strictly increasing. `try_for_each` already yields `Result<()>`, so the
    // closure returns it directly (the former `?; Ok(())` was redundant).
    let check_sorted = |list: &[u64]| -> Result<()> {
        list.windows(2).enumerate().try_for_each(|(i, pair)| {
            if pair[0] < pair[1] {
                Ok(())
            } else {
                Err(error(Invalid::BadValidatorIndicesOrdering(i)))
            }
        })
    };
    check_sorted(indices)?;

    if verify_signatures.is_true() {
        verify!(
            indexed_attestation_signature_set(
                state,
                |i| get_pubkey_from_state(state, i),
                &indexed_attestation.signature,
                &indexed_attestation,
                spec
            )?
            .is_valid(),
            Invalid::BadSignature
        );
    }

    Ok(())
}
| 29.37931 | 90 | 0.607981 |
61b513d313f153b8c4face3b2417da7676ff6a6c | 900 | use serde_json::{Value};
use crate::auto_config;
// #[bean]
// pub fn autocall_config_loading(){
// single_get_ref_try!("autocall_loading_config_json",String,|config_json_str:&String|{
// let config_json = serde_json::from_str::<Value>(config_json_str).unwrap();
// let load_component_list = &config_json["load_component_list"];
// //加载预备的组件内容
// match load_component_list {
// Value::Array(r)=>{
// if r.contains(&Value::String(String::from("log4rs"))){
// auto_config::log4rs_config::loading();
// }
// if r.contains(&Value::String(String::from("rbatis"))){
// auto_config::rbatis_config::loading();
// }
// }
// _=>()
// }
// },{
// println!("autocall_loading_config_json not find");
// });
// } | 36 | 91 | 0.53 |
5b34fd3e7190ee92aa703cf245cbe97aa21aa602 | 2,204 | use crate::document_selection::DocumentSelection;
use crate::parse::HashMapSelectParams;
use crate::parse::SelectParams;
use serde::Deserialize;
use serde_json::{Map, Value};
/// Each: Only one of all, fields and one can exist at the same time.
/// `all` collects every non-null match into an array, `fields` builds one
/// JSON object per node, and `one` returns the first non-null match.
#[derive(Default, Deserialize, Clone, Debug)]
pub struct Each {
    pub all: Box<Option<SelectParams>>,
    pub one: Box<Option<SelectParams>>,
    pub fields: Option<HashMapSelectParams>,
}
impl Each {
    /// Dispatches to whichever selector is set: `all` collects every match,
    /// `fields` builds one object per node, and `one` is the fallback that
    /// returns the first non-null match.
    pub fn each(&self, ds: DocumentSelection) -> Value {
        return if self.all.is_some() {
            self.all(ds)
        } else if self.fields.is_some() {
            self.fields(ds)
        } else {
            self.one(ds)
        };
    }

    /// Applies the `all` selector to every node, collecting non-null results
    /// into a JSON array.
    fn all(&self, mut ds: DocumentSelection) -> Value {
        // `each()` only dispatches here after checking `all.is_some()`.
        let params = (*self.all).as_ref().expect("`all` checked in `each()`");
        let nodes = ds.nodes();
        let mut array = Vec::new();
        for node in nodes.iter() {
            ds = DocumentSelection::ParseNode(node.to_owned());
            let v = ds.parse(params);
            // Null results are dropped rather than collected.
            if !v.is_null() {
                array.push(v);
            }
        }
        Value::Array(array)
    }

    /// Applies the `one` selector node-by-node and returns the first
    /// non-null result, or the selector's default value when nothing matches.
    fn one(&self, mut ds: DocumentSelection) -> Value {
        // NOTE(review): `one` is the fallback branch of `each()`, so this
        // panics when none of the three selectors is set — same behavior as
        // the original `unwrap()`.
        let params = (*self.one)
            .as_ref()
            .expect("`one` must be set when used as the fallback selector");
        let nodes = ds.nodes();
        for node in nodes.iter() {
            ds = DocumentSelection::ParseNode(node.to_owned());
            let v = ds.parse(params);
            if !v.is_null() {
                return v;
            }
        }
        params.get_default_val()
    }

    /// Builds one JSON object per node by evaluating every named selector in
    /// `fields` against that node, returning the objects as a JSON array.
    fn fields(&self, ds: DocumentSelection) -> Value {
        let each_keys = self.fields.as_ref().expect("`fields` checked in `each()`");
        let nodes = ds.nodes();
        let mut array = Vec::new();
        for node in nodes.iter() {
            let mut cur_map = Map::new();
            for (k, v) in each_keys.iter() {
                let ds = DocumentSelection::ParseNode(node.to_owned());
                // The parsed result is already a `serde_json::Value`; the
                // former `Value::from(val)` round-trip was a no-op.
                cur_map.insert(k.to_string(), ds.parse(v));
            }
            array.push(Value::Object(cur_map));
        }
        Value::Array(array)
    }
}
| 30.611111 | 71 | 0.53539 |
6944a04e0ab1c32e4f881d0c5bac53ced895364f | 7,245 | // Copyright (c) 2017 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::{
fs::File,
io::{BufRead, BufReader},
net::{SocketAddr, ToSocketAddrs},
path::{Path, PathBuf},
sync::{
atomic::{AtomicBool, Ordering},
Arc,
},
thread::Builder as ThreadBuilder,
};
use crate::{
butterfly::member::Member,
common::cli_defaults::GOSSIP_DEFAULT_PORT,
error::{Error, Result},
manager::file_watcher::{default_file_watcher, Callbacks},
};
static LOGKEY: &'static str = "PW";
/// Callback state shared with the file watcher: a single flag recording that
/// *some* filesystem event touched the watched peer file.
pub struct PeerCallbacks {
    have_events: Arc<AtomicBool>,
}

impl Callbacks for PeerCallbacks {
    // All three events are handled identically: any change to the watched
    // file simply raises the flag for later polling by `PeerWatcher`.
    fn file_appeared(&mut self, _: &Path) {
        self.have_events.store(true, Ordering::Relaxed);
    }

    fn file_modified(&mut self, _: &Path) {
        self.have_events.store(true, Ordering::Relaxed)
    }

    fn file_disappeared(&mut self, _: &Path) {
        self.have_events.store(true, Ordering::Relaxed)
    }
}

/// Watches a peer file on disk and parses it into butterfly `Member`s.
pub struct PeerWatcher {
    path: PathBuf,
    have_events: Arc<AtomicBool>,
}
impl PeerWatcher {
pub fn run<P>(path: P) -> Result<Self>
where
P: Into<PathBuf>,
{
let path = path.into();
let have_events = Self::setup_watcher(path.clone())?;
Ok(PeerWatcher { path, have_events })
}
fn setup_watcher(path: PathBuf) -> Result<Arc<AtomicBool>> {
let have_events = Arc::new(AtomicBool::new(false));
let have_events_for_thread = Arc::clone(&have_events);
ThreadBuilder::new()
.name(format!("peer-watcher-[{}]", path.display()))
.spawn(move || {
// debug!("PeerWatcher({}) thread starting", abs_path.display());
loop {
let have_events_for_loop = Arc::clone(&have_events_for_thread);
if Self::file_watcher_loop_body(&path, have_events_for_loop) {
break;
}
}
})?;
Ok(have_events)
}
fn file_watcher_loop_body(path: &PathBuf, have_events: Arc<AtomicBool>) -> bool {
let callbacks = PeerCallbacks { have_events };
let mut file_watcher = match default_file_watcher(&path, callbacks) {
Ok(w) => w,
Err(sup_err) => match sup_err.err {
Error::NotifyError(err) => {
outputln!(
"PeerWatcher({}) failed to start watching the directories ({}), {}",
path.display(),
err,
"will try again",
);
return false;
}
_ => {
outputln!(
"PeerWatcher({}) could not create file watcher, ending thread ({})",
path.display(),
sup_err
);
return true;
}
},
};
if let Err(err) = file_watcher.run() {
outputln!(
"PeerWatcher({}) error during watching ({}), restarting",
path.display(),
err
);
}
false
}
pub fn has_fs_events(&self) -> bool {
self.have_events.load(Ordering::Relaxed)
}
pub fn get_members(&self) -> Result<Vec<Member>> {
if !self.path.is_file() {
self.have_events.store(false, Ordering::Relaxed);
return Ok(Vec::new());
}
let file = File::open(&self.path).map_err(|err| {
return sup_error!(Error::Io(err));
})?;
let reader = BufReader::new(file);
let mut members: Vec<Member> = Vec::new();
for line in reader.lines() {
if let Ok(peer) = line {
let peer_addr = if peer.find(':').is_some() {
peer
} else {
format!("{}:{}", peer, GOSSIP_DEFAULT_PORT)
};
let addrs: Vec<SocketAddr> = match peer_addr.to_socket_addrs() {
Ok(addrs) => addrs.collect(),
Err(e) => {
outputln!("Failed to resolve peer: {}", peer_addr);
return Err(sup_error!(Error::NameLookup(e)));
}
};
let addr: SocketAddr = addrs[0];
let mut member = Member::default();
member.address = format!("{}", addr.ip());
member.swim_port = addr.port();
member.gossip_port = addr.port();
members.push(member);
}
}
self.have_events.store(false, Ordering::Relaxed);
Ok(members)
}
}
#[cfg(test)]
mod tests {
    use super::PeerWatcher;
    use crate::{butterfly::member::Member, common::cli_defaults::GOSSIP_DEFAULT_PORT};
    use std::{
        fs::{File, OpenOptions},
        io::Write,
    };
    use tempfile::TempDir;

    /// A missing peer file yields no events and an empty member list.
    #[test]
    fn no_file() {
        let tmpdir = TempDir::new().unwrap();
        let path = tmpdir.path().join("no_such_file");
        let watcher = PeerWatcher::run(path).unwrap();

        assert_eq!(false, watcher.has_fs_events());
        assert_eq!(watcher.get_members().unwrap(), vec![]);
    }

    /// An existing but empty peer file yields an empty member list.
    #[test]
    fn empty_file() {
        let tmpdir = TempDir::new().unwrap();
        let path = tmpdir.path().join("empty_file");
        File::create(&path).unwrap();
        let watcher = PeerWatcher::run(path).unwrap();

        assert_eq!(watcher.get_members().unwrap(), vec![]);
    }

    /// Peers with and without an explicit port parse into members; the
    /// default gossip port is filled in when none is given.
    #[test]
    fn with_file() {
        let tmpdir = TempDir::new().unwrap();
        let path = tmpdir.path().join("some_file");
        // Borrow the path for the file handle so no clone is needed before
        // handing ownership to `PeerWatcher::run`.
        let mut file = OpenOptions::new()
            .append(true)
            .create_new(true)
            .open(&path)
            .unwrap();
        let watcher = PeerWatcher::run(path).unwrap();
        writeln!(file, "1.2.3.4:5").unwrap();
        writeln!(file, "4.3.2.1").unwrap();
        let mut member1 = Member::default();
        member1.id = String::new();
        member1.address = String::from("1.2.3.4");
        member1.swim_port = 5;
        member1.gossip_port = 5;
        let mut member2 = Member::default();
        member2.id = String::new();
        member2.address = String::from("4.3.2.1");
        member2.swim_port = GOSSIP_DEFAULT_PORT;
        member2.gossip_port = GOSSIP_DEFAULT_PORT;
        let expected_members = vec![member1, member2];
        let mut members = watcher.get_members().unwrap();
        // Blank the generated ids so the comparison only covers address and
        // ports. (`&mut members` already yields mutable references, so the
        // former `for mut member` binding was redundant.)
        for member in &mut members {
            member.id = String::new();
        }
        assert_eq!(expected_members, members);
    }
}
| 32.488789 | 92 | 0.532505 |
26acb34eee95e2f03ea19ae3d9989d92eca193c3 | 1,775 | use privsep::{imsg, net::Fd};
use serde_derive::{Deserialize, Serialize};
use std::{io, net::TcpListener, os::unix::io::IntoRawFd, time::Duration};
use tokio::time::interval;
/// Test payload exchanged over the imsg channel.
#[derive(Debug, Serialize, Deserialize)]
struct Message {
    id: usize,
    name: String,
}

#[tokio::test(flavor = "multi_thread")]
async fn test_imsg() -> Result<(), io::Error> {
    unix_channel().await
}

/// Sends three `Message`s over a privsep imsg handler pair — each carrying
/// the fd of a TCP listener when the bind succeeded — and asserts that every
/// one of them is received before the channel closes.
async fn unix_channel() -> Result<(), std::io::Error> {
    let (sender, receiver) = imsg::Handler::pair()?;
    let mut count = 3;
    // Sender task: emits one message per 100ms tick. The listener fd is
    // `None` when binding 127.0.0.1:1234 fails (e.g. port already in use),
    // in which case messages are sent without an fd attached.
    tokio::spawn(async move {
        let mut interval = interval(Duration::from_millis(100));
        let fd = TcpListener::bind("127.0.0.1:1234")
            .ok()
            .map(|stream| stream.into_raw_fd())
            .map(Fd::from);
        for id in 1..=count {
            interval.tick().await;
            let message = Message {
                id,
                name: "test".to_string(),
            };
            // Send failures are logged but do not abort the sender loop.
            if let Err(err) = sender
                .send_message(imsg::Message::min(), fd.as_ref(), &message)
                .await
            {
                eprintln!("Failed to send message: {}", err);
            }
        }
    });
    // Receive until the channel closes (`Ok(None)`), counting down once per
    // delivered message; a receive error ends the loop with that error.
    let res = loop {
        match { receiver.recv_message::<Message>().await } {
            Ok(None) => break Ok(()),
            Ok(Some((imsg, fd, message))) => {
                count -= 1;
                println!(
                    "Received message: {:?}, fd: {:?}, data: {:?}",
                    imsg, fd, message
                );
            }
            Err(err) => {
                eprintln!("Failed to receive message: {}", err);
                break Err(err);
            }
        }
    };
    // `count` only reaches 0 when exactly three messages arrived.
    assert!(count == 0, "did not receive expected messages");
    res
}
| 26.492537 | 74 | 0.477183 |
9c9454a1ee2ae7eaefa1d4196c5bc289cb07fdc6 | 2,957 | /// Ticket have two purposes in JIRA: capturing information about a task and tracking the
/// completion of the task itself.
///
/// Let's add a new field to our `Ticket` struct, `status`.
/// For the time being, we'll work under the simplified assumption that the set of statuses
/// for a ticket is fixed and can't be customised by the user.
/// A ticket is either in the to-do column, in progress, blocked or done.
/// What is the best way to represent this information in Rust?
struct Ticket {
    // Short summary of the task.
    title: String,
    // Free-form details about the task.
    description: String,
    // Workflow state; constrained to the variants of the `Status` enum.
    status: Status,
}
/// Rust's enums are perfect for this usecase.
/// Enum stands for enumeration: a type encoding the constraint that only a finite set of
/// values is possible.
/// Enums are great to encode semantic information in your code: making domain constraints
/// explicit.
///
/// Each possible value of an enum is called a variant. By convention, they are Pascal-cased.
/// Check out the Rust book for more details on enums:
/// https://doc.rust-lang.org/book/ch06-01-defining-an-enum.html
///
/// Let's create a variant for each of the allowed statuses of our tickets.
pub enum Status {
ToDo,
Blocked,
InProgress,
Done,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn a_blocked_ticket() {
// Let's create a blocked ticket.
let ticket = Ticket {
title: "A ticket title".into(),
description: "A heart-breaking description".into(),
status: Status::Blocked,
};
// Let's check that the status corresponds to what we expect.
// We can use pattern matching to take a different course of action based on the enum
// variant we are looking at.
// The Rust compiler will make sure that the match statement is exhaustive: it has to
// handle all variants in our enums.
// If not, the compiler will complain and reject our program.
//
// This is extremely useful when working on evolving codebases: if tomorrow we decide
// that tickets can also have `Backlog` as their status, the Rust compiler will
// highlight all code locations where we need to account for the new variant.
// No way to forget!
//
// Checkout the Rust Book for more details:
// https://doc.rust-lang.org/book/ch06-02-match.html
match ticket.status {
// Variant => Expression
Status::Blocked => println!("Great, as expected!"),
// If we want to take the same action for multiple variants, we can use a | to list them.
// Variant | Variant | ... | Variant => Expression
//
// We are panicking in this case, thus making the test fail if this branch of our
// match statement gets executed.
Status::ToDo | Status::InProgress | Status::Done => {
panic!("The ticket is not blocked!")
}
}
}
}
| 39.959459 | 101 | 0.638485 |
c1705d687abb00bf6fbdad445cc6151b77b0c23c | 562 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![crate_name="crateresolve_calories#0.1"]
#![crate_type = "lib"]
/// Returns the marker value distinguishing this crate version (v0.1).
pub fn f() -> isize {
    100
}
| 37.466667 | 68 | 0.725979 |
721510976be927c68e48fb55baf8ab1499aeba2d | 2,638 | use regex::Regex;
use crate::utils;
/// Normalizes loosely formatted date strings into `year-month-day` order.
pub(crate) struct DateFormatter {
    // Year-first pattern: `YYYY sep (MM | MonthName) sep DD`.
    _date_decomposition_regex_one: Regex,
    // Day-first pattern: `DD sep (MM | MonthName) sep YYYY`.
    _date_decomposition_regex_two: Regex
}
impl DateFormatter {
    /// Compiles the two date-decomposition patterns (year-first and
    /// day-first); fails if either pattern is invalid.
    pub(crate) fn new() -> Result<Self, utils::Error> {
        Ok(
            Self {
                _date_decomposition_regex_one: Regex::new(
                    r"^((?P<year>\d{4})[-/\.\s]((?P<month_n>\d{1,2})|(?P<month_w>\w{2,12}))[-/\.\s](?P<day>\d{1,2}))")?,
                _date_decomposition_regex_two: Regex::new(
                    r"(^(?P<day>\d{1,2})[-/\.\s]((?P<month_n>\d{1,2})|(?P<month_w>\w{2,12}))[-/\.\s](?P<year>\d{4}))$")?
            }
        )
    }

    /// Normalizes `original` into `"year-month-day"` order.
    ///
    /// Tries the year-first pattern, then the day-first pattern; returns an
    /// empty string when neither matches. A month given by name is mapped to
    /// its two-digit number (an unrecognized name leaves the month component
    /// empty, as before).
    pub(crate) fn standardize(&self, original: &str) -> String {
        let cap = self
            ._date_decomposition_regex_one
            .captures(original)
            .or_else(|| self._date_decomposition_regex_two.captures(original));

        match cap {
            Some(groups) => {
                // `Match::as_str` replaces the manual `original[start..end]`
                // slicing of the previous version.
                let year = groups.name("year").map_or("", |m| m.as_str());
                let day = groups.name("day").map_or("", |m| m.as_str());
                let mut month = String::new();
                if let Some(mw) = groups.name("month_w") {
                    self.to_month_num(mw.as_str(), &mut month);
                }
                if let Some(mn) = groups.name("month_n") {
                    month = mn.as_str().to_string();
                }
                format!("{}-{}-{}", year, month, day)
            }
            None => String::new(),
        }
    }

    /// Appends the two-digit month number for an English month name to `to`.
    ///
    /// Matching is now case-insensitive and also accepts the common
    /// three-letter abbreviations, which the capture pattern `\w{2,12}`
    /// already admits but the previous exact-match table rejected.
    /// Unrecognized names append nothing.
    fn to_month_num(&self, from: &str, to: &mut String) {
        let num = match from.to_lowercase().as_str() {
            "january" | "jan" => "01",
            "february" | "feb" => "02",
            "march" | "mar" => "03",
            "april" | "apr" => "04",
            "may" => "05",
            "june" | "jun" => "06",
            "july" | "jul" => "07",
            "august" | "aug" => "08",
            "september" | "sep" => "09",
            "october" | "oct" => "10",
            "november" | "nov" => "11",
            "december" | "dec" => "12",
            _ => return,
        };
        to.push_str(num);
    }
} | 31.035294 | 139 | 0.432525 |
092bff4f7e2424bb0dce49b56a4fe54d49cb6dec | 8,193 | #[doc = "Reader of register SW_MUX_CTL_PAD_GPIO_SD_B0_03"]
pub type R = crate::R<u32, super::SW_MUX_CTL_PAD_GPIO_SD_B0_03>;
#[doc = "Writer for register SW_MUX_CTL_PAD_GPIO_SD_B0_03"]
pub type W = crate::W<u32, super::SW_MUX_CTL_PAD_GPIO_SD_B0_03>;
#[doc = "Register SW_MUX_CTL_PAD_GPIO_SD_B0_03 `reset()`'s with value 0x05"]
impl crate::ResetValue for super::SW_MUX_CTL_PAD_GPIO_SD_B0_03 {
    type Type = u32;
    // 0x05 = MUX_MODE ALT5 (GPIO3_IO15) in bits 0:3, SION (bit 4) cleared.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0x05
    }
}
#[doc = "MUX Mode Select Field.\n\nValue on reset: 5"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum MUX_MODE_A {
#[doc = "0: Select mux mode: ALT0 mux port: USDHC1_DATA1 of instance: usdhc1"]
ALT0 = 0,
#[doc = "1: Select mux mode: ALT1 mux port: FLEXPWM1_PWMB01 of instance: flexpwm1"]
ALT1 = 1,
#[doc = "2: Select mux mode: ALT2 mux port: LPUART8_RTS_B of instance: lpuart8"]
ALT2 = 2,
#[doc = "3: Select mux mode: ALT3 mux port: XBAR1_INOUT07 of instance: xbar1"]
ALT3 = 3,
#[doc = "4: Select mux mode: ALT4 mux port: LPSPI1_SDI of instance: lpspi1"]
ALT4 = 4,
#[doc = "5: Select mux mode: ALT5 mux port: GPIO3_IO15 of instance: gpio3"]
ALT5 = 5,
#[doc = "8: Select mux mode: ALT8 mux port: ENET2_RDATA00 of instance: enet2"]
ALT8 = 8,
#[doc = "9: Select mux mode: ALT9 mux port: SEMC_CLK6 of instance: semc"]
ALT9 = 9,
}
impl From<MUX_MODE_A> for u8 {
#[inline(always)]
fn from(variant: MUX_MODE_A) -> Self {
variant as _
}
}
#[doc = "Reader of field `MUX_MODE`"]
pub type MUX_MODE_R = crate::R<u8, MUX_MODE_A>;
impl MUX_MODE_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> crate::Variant<u8, MUX_MODE_A> {
use crate::Variant::*;
match self.bits {
0 => Val(MUX_MODE_A::ALT0),
1 => Val(MUX_MODE_A::ALT1),
2 => Val(MUX_MODE_A::ALT2),
3 => Val(MUX_MODE_A::ALT3),
4 => Val(MUX_MODE_A::ALT4),
5 => Val(MUX_MODE_A::ALT5),
8 => Val(MUX_MODE_A::ALT8),
9 => Val(MUX_MODE_A::ALT9),
i => Res(i),
}
}
#[doc = "Checks if the value of the field is `ALT0`"]
#[inline(always)]
pub fn is_alt0(&self) -> bool {
*self == MUX_MODE_A::ALT0
}
#[doc = "Checks if the value of the field is `ALT1`"]
#[inline(always)]
pub fn is_alt1(&self) -> bool {
*self == MUX_MODE_A::ALT1
}
#[doc = "Checks if the value of the field is `ALT2`"]
#[inline(always)]
pub fn is_alt2(&self) -> bool {
*self == MUX_MODE_A::ALT2
}
#[doc = "Checks if the value of the field is `ALT3`"]
#[inline(always)]
pub fn is_alt3(&self) -> bool {
*self == MUX_MODE_A::ALT3
}
#[doc = "Checks if the value of the field is `ALT4`"]
#[inline(always)]
pub fn is_alt4(&self) -> bool {
*self == MUX_MODE_A::ALT4
}
#[doc = "Checks if the value of the field is `ALT5`"]
#[inline(always)]
pub fn is_alt5(&self) -> bool {
*self == MUX_MODE_A::ALT5
}
#[doc = "Checks if the value of the field is `ALT8`"]
#[inline(always)]
pub fn is_alt8(&self) -> bool {
*self == MUX_MODE_A::ALT8
}
#[doc = "Checks if the value of the field is `ALT9`"]
#[inline(always)]
pub fn is_alt9(&self) -> bool {
*self == MUX_MODE_A::ALT9
}
}
#[doc = "Write proxy for field `MUX_MODE`"]
pub struct MUX_MODE_W<'a> {
w: &'a mut W,
}
impl<'a> MUX_MODE_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: MUX_MODE_A) -> &'a mut W {
unsafe { self.bits(variant.into()) }
}
#[doc = "Select mux mode: ALT0 mux port: USDHC1_DATA1 of instance: usdhc1"]
#[inline(always)]
pub fn alt0(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT0)
}
#[doc = "Select mux mode: ALT1 mux port: FLEXPWM1_PWMB01 of instance: flexpwm1"]
#[inline(always)]
pub fn alt1(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT1)
}
#[doc = "Select mux mode: ALT2 mux port: LPUART8_RTS_B of instance: lpuart8"]
#[inline(always)]
pub fn alt2(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT2)
}
#[doc = "Select mux mode: ALT3 mux port: XBAR1_INOUT07 of instance: xbar1"]
#[inline(always)]
pub fn alt3(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT3)
}
#[doc = "Select mux mode: ALT4 mux port: LPSPI1_SDI of instance: lpspi1"]
#[inline(always)]
pub fn alt4(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT4)
}
#[doc = "Select mux mode: ALT5 mux port: GPIO3_IO15 of instance: gpio3"]
#[inline(always)]
pub fn alt5(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT5)
}
#[doc = "Select mux mode: ALT8 mux port: ENET2_RDATA00 of instance: enet2"]
#[inline(always)]
pub fn alt8(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT8)
}
#[doc = "Select mux mode: ALT9 mux port: SEMC_CLK6 of instance: semc"]
#[inline(always)]
pub fn alt9(self) -> &'a mut W {
self.variant(MUX_MODE_A::ALT9)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
self.w
}
}
#[doc = "Software Input On Field.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SION_A {
#[doc = "0: Input Path is determined by functionality"]
DISABLED = 0,
#[doc = "1: Force input path of pad GPIO_SD_B0_03"]
ENABLED = 1,
}
impl From<SION_A> for bool {
#[inline(always)]
fn from(variant: SION_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Reader of field `SION`"]
pub type SION_R = crate::R<bool, SION_A>;
impl SION_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SION_A {
match self.bits {
false => SION_A::DISABLED,
true => SION_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
*self == SION_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
*self == SION_A::ENABLED
}
}
#[doc = "Write proxy for field `SION`"]
pub struct SION_W<'a> {
w: &'a mut W,
}
impl<'a> SION_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SION_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Input Path is determined by functionality"]
#[inline(always)]
pub fn disabled(self) -> &'a mut W {
self.variant(SION_A::DISABLED)
}
#[doc = "Force input path of pad GPIO_SD_B0_03"]
#[inline(always)]
pub fn enabled(self) -> &'a mut W {
self.variant(SION_A::ENABLED)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - MUX Mode Select Field."]
#[inline(always)]
pub fn mux_mode(&self) -> MUX_MODE_R {
MUX_MODE_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bit 4 - Software Input On Field."]
#[inline(always)]
pub fn sion(&self) -> SION_R {
SION_R::new(((self.bits >> 4) & 0x01) != 0)
}
}
impl W {
#[doc = "Bits 0:3 - MUX Mode Select Field."]
#[inline(always)]
pub fn mux_mode(&mut self) -> MUX_MODE_W {
MUX_MODE_W { w: self }
}
#[doc = "Bit 4 - Software Input On Field."]
#[inline(always)]
pub fn sion(&mut self) -> SION_W {
SION_W { w: self }
}
}
| 32.003906 | 87 | 0.572562 |
724272d0298df1d3a5c1a71853621539fdc62ce9 | 2,487 | use std::ptr;
use log::debug;
use inventory::Collect;
use inventory::submit;
use inventory::iter;
use inventory::Registry;
use crate::Property;
use crate::val::JsExports;
use crate::sys::napi_value;
use crate::sys::napi_env;
use crate::NjError;
/// A registration callback invoked with the module's exports during init.
type ClassCallback = fn(&mut JsExports) -> Result<(), NjError>;

/// An item collected at link time and replayed in `init_modules`.
enum NapiRegister {
    /// A single JS property to expose on the module's exports object.
    Property(Property),
    /// A callback performing arbitrary registration against the exports.
    Callback(ClassCallback),
}

impl Collect for NapiRegister {
    fn registry() -> &'static Registry<Self> {
        // Process-wide registry backing the `inventory` collection.
        static REGISTRY: Registry<NapiRegister> = Registry::new();
        // NOTE(review): the source previously read `®ISTRY` — an HTML-entity
        // mangling of `&REGISTRY` — restored to a plain reference here.
        &REGISTRY
    }
}

/// submit property for including in global registry
pub fn submit_property(value: Property) {
    submit::<NapiRegister>(NapiRegister::Property(value))
}

/// Submits a registration callback for invocation in `init_modules`.
pub fn submit_register_callback(callback: ClassCallback) {
    submit::<NapiRegister>(NapiRegister::Callback(callback));
}
/// N-API module entry point: drains the global `NapiRegister` inventory and
/// installs every collected property/callback on the module's `exports`
/// object, returning the populated exports to Node.
#[no_mangle]
pub extern "C" fn init_modules(env: napi_env, exports: napi_value) -> napi_value {
    debug!("initializing modules");
    let mut js_exports = JsExports::new(env, exports);
    let mut prop_builder = js_exports.prop_builder();
    for register in iter::<NapiRegister> {
        match register {
            NapiRegister::Property(property) => {
                debug!("registering property: {:#?}", property);
                prop_builder.mut_append(property.to_owned());
            }
            NapiRegister::Callback(callback) => {
                debug!("invoking register callback");
                // A failing registration callback is fatal: the module must
                // not be left partially initialized.
                if let Err(err) = callback(&mut js_exports) {
                    panic!("error invoking JS callback: {}", err);
                }
            }
        }
    }
    js_exports
        .define_property(prop_builder)
        .expect("property should not fail");
    exports
}
/// Registers the N-API module record with Node at library load time (via
/// `ctor`), pointing Node at `init_modules` as the registration function.
#[crate::ctor]
fn init_module() {
    use crate::c_str;
    use crate::sys::NAPI_VERSION;
    use crate::sys::napi_module;
    use crate::sys::napi_module_register;
    // NOTE(review): `static mut` is required because `napi_module_register`
    // takes a mutable pointer to the module record; this runs once from the
    // loader — confirm no concurrent registration path exists.
    static mut _MODULE: napi_module = napi_module {
        nm_version: NAPI_VERSION as i32,
        nm_flags: 0,
        nm_filename: c_str!("lib.rs").as_ptr() as *const ::std::os::raw::c_char,
        nm_register_func: Some(init_modules),
        nm_modname: c_str!("rust_module").as_ptr() as *const ::std::os::raw::c_char,
        nm_priv: ptr::null_mut(),
        reserved: [
            ptr::null_mut(),
            ptr::null_mut(),
            ptr::null_mut(),
            ptr::null_mut(),
        ],
    };
    unsafe {
        napi_module_register(&mut _MODULE);
    }
}
| 26.741935 | 84 | 0.615199 |
f712a2eedb7e9cdb095ad83c20fb752ee11ba689 | 141 | pub fn main() {
const z: &'static isize = {
static p: isize = 3;
&p //~ ERROR constants cannot refer to statics
};
}
| 20.142857 | 54 | 0.524823 |
c1292a282fffded79ef70ee6358be448ae5ba987 | 11,304 | #[doc = "Register `INTENSET` reader"]
pub struct R(crate::R<INTENSET_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<INTENSET_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<INTENSET_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<INTENSET_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `INTENSET` writer"]
pub struct W(crate::W<INTENSET_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<INTENSET_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<INTENSET_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<INTENSET_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Write '1' to enable interrupt for event USBDETECTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBDETECTED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<USBDETECTED_A> for bool {
#[inline(always)]
fn from(variant: USBDETECTED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBDETECTED` reader - Write '1' to enable interrupt for event USBDETECTED"]
pub struct USBDETECTED_R(crate::FieldReader<bool, USBDETECTED_A>);
impl USBDETECTED_R {
pub(crate) fn new(bits: bool) -> Self {
USBDETECTED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USBDETECTED_A {
match self.bits {
false => USBDETECTED_A::DISABLED,
true => USBDETECTED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == USBDETECTED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == USBDETECTED_A::ENABLED
}
}
impl core::ops::Deref for USBDETECTED_R {
type Target = crate::FieldReader<bool, USBDETECTED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to enable interrupt for event USBDETECTED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBDETECTED_AW {
#[doc = "1: Enable"]
SET = 1,
}
impl From<USBDETECTED_AW> for bool {
#[inline(always)]
fn from(variant: USBDETECTED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBDETECTED` writer - Write '1' to enable interrupt for event USBDETECTED"]
pub struct USBDETECTED_W<'a> {
w: &'a mut W,
}
impl<'a> USBDETECTED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: USBDETECTED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Enable"]
#[inline(always)]
pub fn set(self) -> &'a mut W {
self.variant(USBDETECTED_AW::SET)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
self.w
}
}
#[doc = "Write '1' to enable interrupt for event USBREMOVED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBREMOVED_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<USBREMOVED_A> for bool {
#[inline(always)]
fn from(variant: USBREMOVED_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBREMOVED` reader - Write '1' to enable interrupt for event USBREMOVED"]
pub struct USBREMOVED_R(crate::FieldReader<bool, USBREMOVED_A>);
impl USBREMOVED_R {
pub(crate) fn new(bits: bool) -> Self {
USBREMOVED_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USBREMOVED_A {
match self.bits {
false => USBREMOVED_A::DISABLED,
true => USBREMOVED_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == USBREMOVED_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == USBREMOVED_A::ENABLED
}
}
impl core::ops::Deref for USBREMOVED_R {
type Target = crate::FieldReader<bool, USBREMOVED_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to enable interrupt for event USBREMOVED\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBREMOVED_AW {
#[doc = "1: Enable"]
SET = 1,
}
impl From<USBREMOVED_AW> for bool {
#[inline(always)]
fn from(variant: USBREMOVED_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBREMOVED` writer - Write '1' to enable interrupt for event USBREMOVED"]
pub struct USBREMOVED_W<'a> {
w: &'a mut W,
}
impl<'a> USBREMOVED_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: USBREMOVED_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Enable"]
#[inline(always)]
pub fn set(self) -> &'a mut W {
self.variant(USBREMOVED_AW::SET)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | ((value as u32 & 0x01) << 1);
self.w
}
}
#[doc = "Write '1' to enable interrupt for event USBPWRRDY\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBPWRRDY_A {
#[doc = "0: Read: Disabled"]
DISABLED = 0,
#[doc = "1: Read: Enabled"]
ENABLED = 1,
}
impl From<USBPWRRDY_A> for bool {
#[inline(always)]
fn from(variant: USBPWRRDY_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBPWRRDY` reader - Write '1' to enable interrupt for event USBPWRRDY"]
pub struct USBPWRRDY_R(crate::FieldReader<bool, USBPWRRDY_A>);
impl USBPWRRDY_R {
pub(crate) fn new(bits: bool) -> Self {
USBPWRRDY_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USBPWRRDY_A {
match self.bits {
false => USBPWRRDY_A::DISABLED,
true => USBPWRRDY_A::ENABLED,
}
}
#[doc = "Checks if the value of the field is `DISABLED`"]
#[inline(always)]
pub fn is_disabled(&self) -> bool {
**self == USBPWRRDY_A::DISABLED
}
#[doc = "Checks if the value of the field is `ENABLED`"]
#[inline(always)]
pub fn is_enabled(&self) -> bool {
**self == USBPWRRDY_A::ENABLED
}
}
impl core::ops::Deref for USBPWRRDY_R {
type Target = crate::FieldReader<bool, USBPWRRDY_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Write '1' to enable interrupt for event USBPWRRDY\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBPWRRDY_AW {
#[doc = "1: Enable"]
SET = 1,
}
impl From<USBPWRRDY_AW> for bool {
#[inline(always)]
fn from(variant: USBPWRRDY_AW) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `USBPWRRDY` writer - Write '1' to enable interrupt for event USBPWRRDY"]
pub struct USBPWRRDY_W<'a> {
w: &'a mut W,
}
impl<'a> USBPWRRDY_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: USBPWRRDY_AW) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "Enable"]
#[inline(always)]
pub fn set(self) -> &'a mut W {
self.variant(USBPWRRDY_AW::SET)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | ((value as u32 & 0x01) << 2);
self.w
}
}
impl R {
#[doc = "Bit 0 - Write '1' to enable interrupt for event USBDETECTED"]
#[inline(always)]
pub fn usbdetected(&self) -> USBDETECTED_R {
USBDETECTED_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Write '1' to enable interrupt for event USBREMOVED"]
#[inline(always)]
pub fn usbremoved(&self) -> USBREMOVED_R {
USBREMOVED_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Write '1' to enable interrupt for event USBPWRRDY"]
#[inline(always)]
pub fn usbpwrrdy(&self) -> USBPWRRDY_R {
USBPWRRDY_R::new(((self.bits >> 2) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Write '1' to enable interrupt for event USBDETECTED"]
#[inline(always)]
pub fn usbdetected(&mut self) -> USBDETECTED_W {
USBDETECTED_W { w: self }
}
#[doc = "Bit 1 - Write '1' to enable interrupt for event USBREMOVED"]
#[inline(always)]
pub fn usbremoved(&mut self) -> USBREMOVED_W {
USBREMOVED_W { w: self }
}
#[doc = "Bit 2 - Write '1' to enable interrupt for event USBPWRRDY"]
#[inline(always)]
pub fn usbpwrrdy(&mut self) -> USBPWRRDY_W {
USBPWRRDY_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Enable interrupt\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [intenset](index.html) module"]
pub struct INTENSET_SPEC;
impl crate::RegisterSpec for INTENSET_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [intenset::R](R) reader structure"]
impl crate::Readable for INTENSET_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [intenset::W](W) writer structure"]
impl crate::Writable for INTENSET_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets INTENSET to value 0"]
impl crate::Resettable for INTENSET_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 30.80109 | 405 | 0.593064 |
bf116e0661a8b3d7d630af306d74f149293709ee | 17,463 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::DIEP2_CTL {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct MPSR {
bits: u16,
}
impl MPSR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u16 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct USBACTEPR {
bits: bool,
}
impl USBACTEPR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct DPIDEOFR {
bits: bool,
}
impl DPIDEOFR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct NAKSTSR {
bits: bool,
}
impl NAKSTSR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = "Possible values of the field `EPTYPE`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum EPTYPER {
#[doc = "Control Endpoint."]
CONTROL,
#[doc = "Isochronous Endpoint."]
ISO,
#[doc = "Bulk Endpoint."]
BULK,
#[doc = "Interrupt Endpoint."]
INT,
}
impl EPTYPER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
match *self {
EPTYPER::CONTROL => 0,
EPTYPER::ISO => 1,
EPTYPER::BULK => 2,
EPTYPER::INT => 3,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: u8) -> EPTYPER {
match value {
0 => EPTYPER::CONTROL,
1 => EPTYPER::ISO,
2 => EPTYPER::BULK,
3 => EPTYPER::INT,
_ => unreachable!(),
}
}
#[doc = "Checks if the value of the field is `CONTROL`"]
#[inline]
pub fn is_control(&self) -> bool {
*self == EPTYPER::CONTROL
}
#[doc = "Checks if the value of the field is `ISO`"]
#[inline]
pub fn is_iso(&self) -> bool {
*self == EPTYPER::ISO
}
#[doc = "Checks if the value of the field is `BULK`"]
#[inline]
pub fn is_bulk(&self) -> bool {
*self == EPTYPER::BULK
}
#[doc = "Checks if the value of the field is `INT`"]
#[inline]
pub fn is_int(&self) -> bool {
*self == EPTYPER::INT
}
}
#[doc = r" Value of the field"]
pub struct STALLR {
bits: bool,
}
impl STALLR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct TXFNUMR {
bits: u8,
}
impl TXFNUMR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct EPDISR {
bits: bool,
}
impl EPDISR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct EPENAR {
bits: bool,
}
impl EPENAR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Proxy"]
pub struct _MPSW<'a> {
w: &'a mut W,
}
impl<'a> _MPSW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
const MASK: u16 = 2047;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _USBACTEPW<'a> {
w: &'a mut W,
}
impl<'a> _USBACTEPW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `EPTYPE`"]
pub enum EPTYPEW {
#[doc = "Control Endpoint."]
CONTROL,
#[doc = "Isochronous Endpoint."]
ISO,
#[doc = "Bulk Endpoint."]
BULK,
#[doc = "Interrupt Endpoint."]
INT,
}
impl EPTYPEW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> u8 {
match *self {
EPTYPEW::CONTROL => 0,
EPTYPEW::ISO => 1,
EPTYPEW::BULK => 2,
EPTYPEW::INT => 3,
}
}
}
#[doc = r" Proxy"]
pub struct _EPTYPEW<'a> {
w: &'a mut W,
}
impl<'a> _EPTYPEW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: EPTYPEW) -> &'a mut W {
{
self.bits(variant._bits())
}
}
#[doc = "Control Endpoint."]
#[inline]
pub fn control(self) -> &'a mut W {
self.variant(EPTYPEW::CONTROL)
}
#[doc = "Isochronous Endpoint."]
#[inline]
pub fn iso(self) -> &'a mut W {
self.variant(EPTYPEW::ISO)
}
#[doc = "Bulk Endpoint."]
#[inline]
pub fn bulk(self) -> &'a mut W {
self.variant(EPTYPEW::BULK)
}
#[doc = "Interrupt Endpoint."]
#[inline]
pub fn int(self) -> &'a mut W {
self.variant(EPTYPEW::INT)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 3;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _STALLW<'a> {
w: &'a mut W,
}
impl<'a> _STALLW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _TXFNUMW<'a> {
w: &'a mut W,
}
impl<'a> _TXFNUMW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _CNAKW<'a> {
w: &'a mut W,
}
impl<'a> _CNAKW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 26;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SNAKW<'a> {
w: &'a mut W,
}
impl<'a> _SNAKW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 27;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SETD0PIDEFW<'a> {
w: &'a mut W,
}
impl<'a> _SETD0PIDEFW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 28;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _SETD1PIDOFW<'a> {
w: &'a mut W,
}
impl<'a> _SETD1PIDOFW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 29;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _EPDISW<'a> {
w: &'a mut W,
}
impl<'a> _EPDISW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 30;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _EPENAW<'a> {
w: &'a mut W,
}
impl<'a> _EPENAW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 31;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:10 - Maximum Packet Size"]
#[inline]
pub fn mps(&self) -> MPSR {
let bits = {
const MASK: u16 = 2047;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u16
};
MPSR { bits }
}
#[doc = "Bit 15 - USB Active Endpoint"]
#[inline]
pub fn usbactep(&self) -> USBACTEPR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
USBACTEPR { bits }
}
#[doc = "Bit 16 - Endpoint Data PID / Even or Odd Frame"]
#[inline]
pub fn dpideof(&self) -> DPIDEOFR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) != 0
};
DPIDEOFR { bits }
}
#[doc = "Bit 17 - NAK Status"]
#[inline]
pub fn naksts(&self) -> NAKSTSR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 17;
((self.bits >> OFFSET) & MASK as u32) != 0
};
NAKSTSR { bits }
}
#[doc = "Bits 18:19 - Endpoint Type"]
#[inline]
pub fn eptype(&self) -> EPTYPER {
EPTYPER::_from({
const MASK: u8 = 3;
const OFFSET: u8 = 18;
((self.bits >> OFFSET) & MASK as u32) as u8
})
}
#[doc = "Bit 21 - Handshake"]
#[inline]
pub fn stall(&self) -> STALLR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
STALLR { bits }
}
#[doc = "Bits 22:25 - TxFIFO Number"]
#[inline]
pub fn txfnum(&self) -> TXFNUMR {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) as u8
};
TXFNUMR { bits }
}
#[doc = "Bit 30 - Endpoint Disable"]
#[inline]
pub fn epdis(&self) -> EPDISR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 30;
((self.bits >> OFFSET) & MASK as u32) != 0
};
EPDISR { bits }
}
#[doc = "Bit 31 - Endpoint Enable"]
#[inline]
pub fn epena(&self) -> EPENAR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 31;
((self.bits >> OFFSET) & MASK as u32) != 0
};
EPENAR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:10 - Maximum Packet Size"]
#[inline]
pub fn mps(&mut self) -> _MPSW {
_MPSW { w: self }
}
#[doc = "Bit 15 - USB Active Endpoint"]
#[inline]
pub fn usbactep(&mut self) -> _USBACTEPW {
_USBACTEPW { w: self }
}
#[doc = "Bits 18:19 - Endpoint Type"]
#[inline]
pub fn eptype(&mut self) -> _EPTYPEW {
_EPTYPEW { w: self }
}
#[doc = "Bit 21 - Handshake"]
#[inline]
pub fn stall(&mut self) -> _STALLW {
_STALLW { w: self }
}
#[doc = "Bits 22:25 - TxFIFO Number"]
#[inline]
pub fn txfnum(&mut self) -> _TXFNUMW {
_TXFNUMW { w: self }
}
#[doc = "Bit 26 - Clear NAK"]
#[inline]
pub fn cnak(&mut self) -> _CNAKW {
_CNAKW { w: self }
}
#[doc = "Bit 27 - Set NAK"]
#[inline]
pub fn snak(&mut self) -> _SNAKW {
_SNAKW { w: self }
}
#[doc = "Bit 28 - Set DATA0 PID / Even Frame"]
#[inline]
pub fn setd0pidef(&mut self) -> _SETD0PIDEFW {
_SETD0PIDEFW { w: self }
}
#[doc = "Bit 29 - Set DATA1 PID / Odd Frame"]
#[inline]
pub fn setd1pidof(&mut self) -> _SETD1PIDOFW {
_SETD1PIDOFW { w: self }
}
#[doc = "Bit 30 - Endpoint Disable"]
#[inline]
pub fn epdis(&mut self) -> _EPDISW {
_EPDISW { w: self }
}
#[doc = "Bit 31 - Endpoint Enable"]
#[inline]
pub fn epena(&mut self) -> _EPENAW {
_EPENAW { w: self }
}
}
| 25.199134 | 61 | 0.494073 |
8f5f9e61f7aaf784849a900402ccb9b14411caae | 2,128 | #[doc = "Register `TIMER0_CNT` reader"]
pub struct R(crate::R<TIMER0_CNT_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<TIMER0_CNT_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<TIMER0_CNT_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<TIMER0_CNT_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Field `CNT` reader - Timer Data Register\nRead this register to get CNT value. For example:\nIf EXTCNTEN (TIMERx_CTL\\[24\\]) is 0, user can read CNT value for getting current 24-bit counter value.\nIf EXTCNTEN (TIMERx_CTL\\[24\\]) is 1, user can read CNT value for getting current 24-bit event input counter value."]
pub struct CNT_R(crate::FieldReader<u32, u32>);
impl CNT_R {
pub(crate) fn new(bits: u32) -> Self {
CNT_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for CNT_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl R {
#[doc = "Bits 0:23 - Timer Data Register Read this register to get CNT value. For example: If EXTCNTEN (TIMERx_CTL\\[24\\]) is 0, user can read CNT value for getting current 24-bit counter value. If EXTCNTEN (TIMERx_CTL\\[24\\]) is 1, user can read CNT value for getting current 24-bit event input counter value."]
#[inline(always)]
pub fn cnt(&self) -> CNT_R {
CNT_R::new((self.bits & 0x00ff_ffff) as u32)
}
}
#[doc = "Timer0 Data Register\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [timer0_cnt](index.html) module"]
pub struct TIMER0_CNT_SPEC;
impl crate::RegisterSpec for TIMER0_CNT_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [timer0_cnt::R](R) reader structure"]
impl crate::Readable for TIMER0_CNT_SPEC {
type Reader = R;
}
#[doc = "`reset()` method sets TIMER0_CNT to value 0"]
impl crate::Resettable for TIMER0_CNT_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 40.150943 | 326 | 0.661654 |
3ac4e70a8980dd58ee25a7fcbe1f129faf4f0c6a | 527 | use syn::{Error, Ident, Type};
use crate::overrides::Overrides;
pub struct Field {
pub name: String,
pub ident: Ident,
pub type_: Type,
}
impl Field {
pub fn parse(raw: &syn::Field) -> Result<Field, Error> {
let overrides = Overrides::extract(&raw.attrs)?;
let ident = raw.ident.as_ref().unwrap().clone();
Ok(Field {
name: overrides.name.unwrap_or_else(|| ident.to_string()),
ident,
type_: raw.ty.clone(),
})
}
}
| 22.913043 | 71 | 0.540797 |
bf3ff4f721e9b5af4420c43e05558f81a946fa97 | 3,248 | use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vfmsub213sd_1() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM1)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 241, 171, 232], OperandSize::Dword)
}
fn vfmsub213sd_2() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 193, 171, 33], OperandSize::Dword)
}
fn vfmsub213sd_3() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 193, 171, 251], OperandSize::Qword)
}
fn vfmsub213sd_4() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM1)), operand3: Some(IndirectDisplaced(RAX, 740266477, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 241, 171, 144, 237, 145, 31, 44], OperandSize::Qword)
}
fn vfmsub213sd_5() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM3)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Down), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 242, 229, 185, 171, 240], OperandSize::Dword)
}
fn vfmsub213sd_6() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM0)), operand3: Some(IndirectDisplaced(ESI, 1362542598, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 242, 253, 139, 171, 150, 6, 192, 54, 81], OperandSize::Dword)
}
fn vfmsub213sd_7() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM28)), operand2: Some(Direct(XMM29)), operand3: Some(Direct(XMM24)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Down), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 2, 149, 177, 171, 224], OperandSize::Qword)
}
fn vfmsub213sd_8() {
run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SD, operand1: Some(Direct(XMM27)), operand2: Some(Direct(XMM7)), operand3: Some(Indirect(RBX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 98, 197, 143, 171, 27], OperandSize::Qword)
}
| 81.2 | 413 | 0.716133 |
bb281cde9bb1faf398ca81029ce0b51fc597cd44 | 26,746 | use std::marker::PhantomData;
#[cfg(feature = "serialize")]
use {
rustc_hash::FxHashSet,
serde::ser::{self, Serialize, SerializeMap, SerializeSeq, Serializer},
std::{cell::RefCell, os::raw::c_void, result::Result as StdResult},
};
use crate::error::{Error, Result};
use crate::ffi;
use crate::function::Function;
use crate::types::{Integer, LuaRef};
use crate::util::{assert_stack, check_stack, StackGuard};
use crate::value::{FromLua, FromLuaMulti, Nil, ToLua, ToLuaMulti, Value};
#[cfg(feature = "async")]
use {futures_core::future::LocalBoxFuture, futures_util::future};
/// Handle to an internal Lua table.
#[derive(Clone, Debug)]
pub struct Table<'lua>(pub(crate) LuaRef<'lua>);
#[allow(clippy::len_without_is_empty)]
impl<'lua> Table<'lua> {
/// Sets a key-value pair in the table.
///
/// If the value is `nil`, this will effectively remove the pair.
///
/// This might invoke the `__newindex` metamethod. Use the [`raw_set`] method if that is not
/// desired.
///
/// # Examples
///
/// Export a value as a global to make it usable from Lua:
///
/// ```
/// # use mlua::{Lua, Result};
/// # fn main() -> Result<()> {
/// # let lua = Lua::new();
/// let globals = lua.globals();
///
/// globals.set("assertions", cfg!(debug_assertions))?;
///
/// lua.load(r#"
/// if assertions == true then
/// -- ...
/// elseif assertions == false then
/// -- ...
/// else
/// error("assertions neither on nor off?")
/// end
/// "#).exec()?;
/// # Ok(())
/// # }
/// ```
///
/// [`raw_set`]: #method.raw_set
pub fn set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> Result<()> {
let lua = self.0.lua;
let key = key.to_lua(lua)?;
let value = value.to_lua(lua)?;
unsafe {
let _sg = StackGuard::new(lua.state);
check_stack(lua.state, 5)?;
lua.push_ref(&self.0);
lua.push_value(key)?;
lua.push_value(value)?;
protect_lua!(lua.state, 3, 0, fn(state) ffi::lua_settable(state, -3))
}
}
/// Gets the value associated to `key` from the table.
///
/// If no value is associated to `key`, returns the `nil` value.
///
/// This might invoke the `__index` metamethod. Use the [`raw_get`] method if that is not
/// desired.
///
/// # Examples
///
/// Query the version of the Lua interpreter:
///
/// ```
/// # use mlua::{Lua, Result};
/// # fn main() -> Result<()> {
/// # let lua = Lua::new();
/// let globals = lua.globals();
///
/// let version: String = globals.get("_VERSION")?;
/// println!("Lua version: {}", version);
/// # Ok(())
/// # }
/// ```
///
/// [`raw_get`]: #method.raw_get
pub fn get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> Result<V> {
let lua = self.0.lua;
let key = key.to_lua(lua)?;
let value = unsafe {
let _sg = StackGuard::new(lua.state);
check_stack(lua.state, 4)?;
lua.push_ref(&self.0);
lua.push_value(key)?;
protect_lua!(lua.state, 2, 1, fn(state) ffi::lua_gettable(state, -2))?;
lua.pop_value()
};
V::from_lua(value, lua)
}
/// Checks whether the table contains a non-nil value for `key`.
pub fn contains_key<K: ToLua<'lua>>(&self, key: K) -> Result<bool> {
let lua = self.0.lua;
let key = key.to_lua(lua)?;
unsafe {
let _sg = StackGuard::new(lua.state);
check_stack(lua.state, 4)?;
lua.push_ref(&self.0);
lua.push_value(key)?;
protect_lua!(lua.state, 2, 1, fn(state) ffi::lua_gettable(state, -2))?;
Ok(ffi::lua_isnil(lua.state, -1) == 0)
}
}
/// Compares two tables for equality.
///
/// Tables are compared by reference first.
/// If they are not primitively equals, then mlua will try to invoke the `__eq` metamethod.
/// mlua will check `self` first for the metamethod, then `other` if not found.
///
/// # Examples
///
/// Compare two tables using `__eq` metamethod:
///
/// ```
/// # use mlua::{Lua, Result, Table};
/// # fn main() -> Result<()> {
/// # let lua = Lua::new();
/// let table1 = lua.create_table()?;
/// table1.set(1, "value")?;
///
/// let table2 = lua.create_table()?;
/// table2.set(2, "value")?;
///
/// let always_equals_mt = lua.create_table()?;
/// always_equals_mt.set("__eq", lua.create_function(|_, (_t1, _t2): (Table, Table)| Ok(true))?)?;
/// table2.set_metatable(Some(always_equals_mt));
///
/// assert!(table1.equals(&table1.clone())?);
/// assert!(table1.equals(&table2)?);
/// # Ok(())
/// # }
/// ```
pub fn equals<T: AsRef<Self>>(&self, other: T) -> Result<bool> {
let other = other.as_ref();
if self == other {
return Ok(true);
}
// Compare using __eq metamethod if exists
// First, check the self for the metamethod.
// If self does not define it, then check the other table.
if let Some(mt) = self.get_metatable() {
if mt.contains_key("__eq")? {
return mt
.get::<_, Function>("__eq")?
.call((self.clone(), other.clone()));
}
}
if let Some(mt) = other.get_metatable() {
if mt.contains_key("__eq")? {
return mt
.get::<_, Function>("__eq")?
.call((self.clone(), other.clone()));
}
}
Ok(false)
}
/// Sets a key-value pair without invoking metamethods.
pub fn raw_set<K: ToLua<'lua>, V: ToLua<'lua>>(&self, key: K, value: V) -> Result<()> {
let lua = self.0.lua;
let key = key.to_lua(lua)?;
let value = value.to_lua(lua)?;
unsafe {
let _sg = StackGuard::new(lua.state);
check_stack(lua.state, 5)?;
lua.push_ref(&self.0);
lua.push_value(key)?;
lua.push_value(value)?;
protect_lua!(lua.state, 3, 0, fn(state) ffi::lua_rawset(state, -3))
}
}
/// Gets the value associated to `key` without invoking metamethods.
pub fn raw_get<K: ToLua<'lua>, V: FromLua<'lua>>(&self, key: K) -> Result<V> {
let lua = self.0.lua;
let key = key.to_lua(lua)?;
let value = unsafe {
let _sg = StackGuard::new(lua.state);
check_stack(lua.state, 3)?;
lua.push_ref(&self.0);
lua.push_value(key)?;
ffi::lua_rawget(lua.state, -2);
lua.pop_value()
};
V::from_lua(value, lua)
}
    /// Inserts element value at position `idx` to the table, shifting up the elements from `table[idx]`.
    /// The worst case complexity is O(n), where n is the table length.
    pub fn raw_insert<V: ToLua<'lua>>(&self, idx: Integer, value: V) -> Result<()> {
        let lua = self.0.lua;
        let size = self.raw_len();
        // `idx == size + 1` is allowed: that appends to the end of the sequence.
        if idx < 1 || idx > size + 1 {
            return Err(Error::RuntimeError("index out of bounds".to_string()));
        }
        let value = value.to_lua(lua)?;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            check_stack(lua.state, 5)?;
            lua.push_ref(&self.0);
            lua.push_value(value)?;
            protect_lua!(lua.state, 2, 0, |state| {
                // Shift elements up one slot, walking from the end so that no
                // element is overwritten before it is moved.
                for i in (idx..=size).rev() {
                    // table[i+1] = table[i]
                    ffi::lua_rawgeti(state, -2, i);
                    ffi::lua_rawseti(state, -3, i + 1);
                }
                // Store the new value into the now-free slot at `idx`.
                ffi::lua_rawseti(state, -2, idx)
            })
        }
    }
    /// Removes a key from the table.
    ///
    /// If `key` is an integer, mlua shifts down the elements from `table[key+1]`,
    /// and erases element `table[key]`. The complexity is O(n) in the worst case,
    /// where n is the table length.
    ///
    /// For other key types this is equivalent to setting `table[key] = nil`.
    pub fn raw_remove<K: ToLua<'lua>>(&self, key: K) -> Result<()> {
        let lua = self.0.lua;
        let key = key.to_lua(lua)?;
        match key {
            Value::Integer(idx) => {
                let size = self.raw_len();
                if idx < 1 || idx > size {
                    return Err(Error::RuntimeError("index out of bounds".to_string()));
                }
                unsafe {
                    let _sg = StackGuard::new(lua.state);
                    check_stack(lua.state, 4)?;
                    lua.push_ref(&self.0);
                    protect_lua!(lua.state, 1, 0, |state| {
                        // Shift every element after `idx` down by one slot.
                        for i in idx..size {
                            ffi::lua_rawgeti(state, -1, i + 1);
                            ffi::lua_rawseti(state, -2, i);
                        }
                        // Clear the last (now duplicated) slot.
                        ffi::lua_pushnil(state);
                        ffi::lua_rawseti(state, -2, size);
                    })
                }
            }
            // Non-integer keys: a raw assignment of nil removes the entry.
            _ => self.raw_set(key, Nil),
        }
    }
    /// Returns the result of the Lua `#` operator.
    ///
    /// This might invoke the `__len` metamethod. Use the [`raw_len`] method if that is not desired.
    ///
    /// [`raw_len`]: #method.raw_len
    pub fn len(&self) -> Result<Integer> {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            check_stack(lua.state, 4)?;
            lua.push_ref(&self.0);
            // luaL_len may call the __len metamethod, which can raise a Lua
            // error — hence the protected call.
            protect_lua!(lua.state, 1, 0, |state| ffi::luaL_len(state, -1))
        }
    }
    /// Returns the result of the Lua `#` operator, without invoking the `__len` metamethod.
    pub fn raw_len(&self) -> Integer {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            // lua_rawlen never raises an error, so a plain (unprotected) call
            // with a stack assertion is sufficient.
            assert_stack(lua.state, 1);
            lua.push_ref(&self.0);
            ffi::lua_rawlen(lua.state, -1) as Integer
        }
    }
    /// Returns a reference to the metatable of this table, or `None` if no metatable is set.
    ///
    /// Unlike the `getmetatable` Lua function, this method ignores the `__metatable` field.
    pub fn get_metatable(&self) -> Option<Table<'lua>> {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 2);
            lua.push_ref(&self.0);
            // lua_getmetatable pushes the metatable and returns non-zero only
            // when the value actually has one.
            if ffi::lua_getmetatable(lua.state, -1) == 0 {
                None
            } else {
                Some(Table(lua.pop_ref()))
            }
        }
    }
    /// Sets or removes the metatable of this table.
    ///
    /// If `metatable` is `None`, the metatable is removed (if no metatable is set, this does
    /// nothing).
    pub fn set_metatable(&self, metatable: Option<Table<'lua>>) {
        let lua = self.0.lua;
        unsafe {
            let _sg = StackGuard::new(lua.state);
            assert_stack(lua.state, 2);
            lua.push_ref(&self.0);
            if let Some(metatable) = metatable {
                lua.push_ref(&metatable.0);
            } else {
                // Assigning a nil metatable removes the current one.
                ffi::lua_pushnil(lua.state);
            }
            ffi::lua_setmetatable(lua.state, -2);
        }
    }
/// Consume this table and return an iterator over the pairs of the table.
///
/// This works like the Lua `pairs` function, but does not invoke the `__pairs` metamethod.
///
/// The pairs are wrapped in a [`Result`], since they are lazily converted to `K` and `V` types.
///
/// # Note
///
/// While this method consumes the `Table` object, it can not prevent code from mutating the
/// table while the iteration is in progress. Refer to the [Lua manual] for information about
/// the consequences of such mutation.
///
/// # Examples
///
/// Iterate over all globals:
///
/// ```
/// # use mlua::{Lua, Result, Value};
/// # fn main() -> Result<()> {
/// # let lua = Lua::new();
/// let globals = lua.globals();
///
/// for pair in globals.pairs::<Value, Value>() {
/// let (key, value) = pair?;
/// # let _ = (key, value); // used
/// // ...
/// }
/// # Ok(())
/// # }
/// ```
///
/// [`Result`]: crate::Result
/// [Lua manual]: http://www.lua.org/manual/5.4/manual.html#pdf-next
pub fn pairs<K: FromLua<'lua>, V: FromLua<'lua>>(self) -> TablePairs<'lua, K, V> {
TablePairs {
table: self.0,
key: Some(Nil),
_phantom: PhantomData,
}
}
/// Consume this table and return an iterator over all values in the sequence part of the table.
///
/// The iterator will yield all values `t[1]`, `t[2]`, and so on, until a `nil` value is
/// encountered. This mirrors the behavior of Lua's `ipairs` function and will invoke the
/// `__index` metamethod according to the usual rules. However, the deprecated `__ipairs`
/// metatable will not be called.
///
/// Just like [`pairs`], the values are wrapped in a [`Result`].
///
/// # Note
///
/// While this method consumes the `Table` object, it can not prevent code from mutating the
/// table while the iteration is in progress. Refer to the [Lua manual] for information about
/// the consequences of such mutation.
///
/// # Examples
///
/// ```
/// # use mlua::{Lua, Result, Table};
/// # fn main() -> Result<()> {
/// # let lua = Lua::new();
/// let my_table: Table = lua.load(r#"
/// {
/// [1] = 4,
/// [2] = 5,
/// [4] = 7,
/// key = 2
/// }
/// "#).eval()?;
///
/// let expected = [4, 5];
/// for (&expected, got) in expected.iter().zip(my_table.sequence_values::<u32>()) {
/// assert_eq!(expected, got?);
/// }
/// # Ok(())
/// # }
/// ```
///
/// [`pairs`]: #method.pairs
/// [`Result`]: crate::Result
/// [Lua manual]: http://www.lua.org/manual/5.4/manual.html#pdf-next
pub fn sequence_values<V: FromLua<'lua>>(self) -> TableSequence<'lua, V> {
TableSequence {
table: self.0,
index: Some(1),
len: None,
raw: false,
_phantom: PhantomData,
}
}
/// Consume this table and return an iterator over all values in the sequence part of the table.
///
/// Unlike the `sequence_values`, does not invoke `__index` metamethod when iterating.
///
/// [`sequence_values`]: #method.sequence_values
pub fn raw_sequence_values<V: FromLua<'lua>>(self) -> TableSequence<'lua, V> {
TableSequence {
table: self.0,
index: Some(1),
len: None,
raw: true,
_phantom: PhantomData,
}
}
/// C++ side implementation of table size calculating function.
/// For more info on function see <https://lua-api.factorio.com/latest/Libraries.html>,
/// `table_size()` section.
/// Factorio uses this with `fuzzy` set to false
#[cfg(feature = "lua-factorio")]
pub fn table_size(&self, fuzzy: bool) -> Integer {
let lua = self.0.lua;
unsafe {
let _sg = StackGuard::new(lua.state);
assert_stack(lua.state, 1);
lua.push_ref(&self.0);
ffi::lua_tablesize(lua.state, -1, fuzzy as i32) as Integer
}
}
#[cfg(any(feature = "async", feature = "serialize"))]
pub(crate) fn raw_sequence_values_by_len<V: FromLua<'lua>>(
self,
len: Option<Integer>,
) -> TableSequence<'lua, V> {
let len = len.unwrap_or_else(|| self.raw_len());
TableSequence {
table: self.0,
index: Some(1),
len: Some(len),
raw: true,
_phantom: PhantomData,
}
}
#[cfg(feature = "serialize")]
pub(crate) fn is_array(&self) -> bool {
let lua = self.0.lua;
unsafe {
let _sg = StackGuard::new(lua.state);
assert_stack(lua.state, 3);
lua.push_ref(&self.0);
if ffi::lua_getmetatable(lua.state, -1) == 0 {
return false;
}
crate::serde::push_array_metatable(lua.state);
ffi::lua_rawequal(lua.state, -1, -2) != 0
}
}
}
impl<'lua> PartialEq for Table<'lua> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
// Identity conversion: a `Table` trivially borrows as itself, which lets APIs
// accept `impl AsRef<Table>` uniformly.
impl<'lua> AsRef<Table<'lua>> for Table<'lua> {
    #[inline]
    fn as_ref(&self) -> &Table<'lua> {
        self
    }
}
/// An extension trait for `Table`s that provides a variety of convenient functionality.
///
/// Implemented for [`Table`]; the methods mirror `Function::call` and friends
/// but operate through a table's metamethods.
pub trait TableExt<'lua> {
    /// Calls the table as function assuming it has `__call` metamethod.
    ///
    /// The metamethod is called with the table as its first argument, followed by the passed arguments.
    fn call<A, R>(&self, args: A) -> Result<R>
    where
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>;
    /// Asynchronously calls the table as function assuming it has `__call` metamethod.
    ///
    /// The metamethod is called with the table as its first argument, followed by the passed arguments.
    #[cfg(feature = "async")]
    #[cfg_attr(docsrs, doc(cfg(feature = "async")))]
    fn call_async<'fut, A, R>(&self, args: A) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut;
    /// Gets the function associated to `key` from the table and executes it,
    /// passing the table itself along with `args` as function arguments.
    ///
    /// This is a shortcut for
    /// `table.get::<_, Function>(key)?.call((table.clone(), arg1, ..., argN))`
    ///
    /// This might invoke the `__index` metamethod.
    fn call_method<K, A, R>(&self, key: K, args: A) -> Result<R>
    where
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>;
    /// Gets the function associated to `key` from the table and executes it,
    /// passing `args` as function arguments.
    ///
    /// This is a shortcut for
    /// `table.get::<_, Function>(key)?.call(args)`
    ///
    /// This might invoke the `__index` metamethod.
    fn call_function<K, A, R>(&self, key: K, args: A) -> Result<R>
    where
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>;
    /// Gets the function associated to `key` from the table and asynchronously executes it,
    /// passing the table itself along with `args` as function arguments and returning Future.
    ///
    /// Requires `feature = "async"`
    ///
    /// This might invoke the `__index` metamethod.
    #[cfg(feature = "async")]
    #[cfg_attr(docsrs, doc(cfg(feature = "async")))]
    fn call_async_method<'fut, K, A, R>(&self, key: K, args: A) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut;
    /// Gets the function associated to `key` from the table and asynchronously executes it,
    /// passing `args` as function arguments and returning Future.
    ///
    /// Requires `feature = "async"`
    ///
    /// This might invoke the `__index` metamethod.
    #[cfg(feature = "async")]
    #[cfg_attr(docsrs, doc(cfg(feature = "async")))]
    fn call_async_function<'fut, K, A, R>(
        &self,
        key: K,
        args: A,
    ) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut;
}
impl<'lua> TableExt<'lua> for Table<'lua> {
    fn call<A, R>(&self, args: A) -> Result<R>
    where
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>,
    {
        // Treat the table as a function: `Function::call` goes through pcall,
        // which honors the `__call` metamethod.
        let as_function = Function(self.0.clone());
        as_function.call(args)
    }
    #[cfg(feature = "async")]
    fn call_async<'fut, A, R>(&self, args: A) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut,
    {
        // Same trick as `call`, but through the async call path.
        let as_function = Function(self.0.clone());
        as_function.call_async(args)
    }
    fn call_method<K, A, R>(&self, key: K, args: A) -> Result<R>
    where
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>,
    {
        let lua = self.0.lua;
        // Prepend the table itself so the target behaves like a method call.
        let mut full_args = args.to_lua_multi(lua)?;
        full_args.push_front(Value::Table(self.clone()));
        let method = self.get::<_, Function>(key)?;
        method.call(full_args)
    }
    fn call_function<K, A, R>(&self, key: K, args: A) -> Result<R>
    where
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua>,
    {
        let func = self.get::<_, Function>(key)?;
        func.call(args)
    }
    #[cfg(feature = "async")]
    fn call_async_method<'fut, K, A, R>(&self, key: K, args: A) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut,
    {
        let lua = self.0.lua;
        // Argument conversion happens eagerly; failures become an
        // already-resolved error future.
        match args.to_lua_multi(lua) {
            Ok(mut full_args) => {
                full_args.push_front(Value::Table(self.clone()));
                self.call_async_function(key, full_args)
            }
            Err(e) => Box::pin(future::err(e)),
        }
    }
    #[cfg(feature = "async")]
    fn call_async_function<'fut, K, A, R>(&self, key: K, args: A) -> LocalBoxFuture<'fut, Result<R>>
    where
        'lua: 'fut,
        K: ToLua<'lua>,
        A: ToLuaMulti<'lua>,
        R: FromLuaMulti<'lua> + 'fut,
    {
        // The key lookup may hit `__index`; lookup errors become an
        // already-resolved error future.
        match self.get::<_, Function>(key) {
            Ok(func) => func.call_async(args),
            Err(e) => Box::pin(future::err(e)),
        }
    }
}
#[cfg(feature = "serialize")]
impl<'lua> Serialize for Table<'lua> {
    fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Tracks the raw pointers of tables currently being serialized on this
        // thread, so reference cycles are reported as an error instead of
        // recursing forever.
        thread_local! {
            static VISITED: RefCell<FxHashSet<*const c_void>> = RefCell::new(FxHashSet::default());
        }
        let lua = self.0.lua;
        let ptr = unsafe { lua.ref_thread_exec(|refthr| ffi::lua_topointer(refthr, self.0.index)) };
        let res = VISITED.with(|visited| {
            {
                let mut visited = visited.borrow_mut();
                if visited.contains(&ptr) {
                    return Err(ser::Error::custom("recursive table detected"));
                }
                visited.insert(ptr);
            }
            // A non-empty sequence part (or the explicit array tag) serializes
            // as a sequence; everything else serializes as a map.
            let len = self.raw_len() as usize;
            if len > 0 || self.is_array() {
                let mut seq = serializer.serialize_seq(Some(len))?;
                for v in self.clone().raw_sequence_values_by_len::<Value>(None) {
                    let v = v.map_err(serde::ser::Error::custom)?;
                    seq.serialize_element(&v)?;
                }
                return seq.end();
            }
            let mut map = serializer.serialize_map(None)?;
            for kv in self.clone().pairs::<Value, Value>() {
                let (k, v) = kv.map_err(serde::ser::Error::custom)?;
                map.serialize_entry(&k, &v)?;
            }
            map.end()
        });
        // Un-mark the table whether serialization succeeded or failed.
        VISITED.with(|visited| {
            visited.borrow_mut().remove(&ptr);
        });
        res
    }
}
/// An iterator over the pairs of a Lua table.
///
/// This struct is created by the [`Table::pairs`] method.
///
/// [`Table::pairs`]: crate::Table::pairs
pub struct TablePairs<'lua, K, V> {
    // The table being traversed.
    table: LuaRef<'lua>,
    // The key produced by the previous traversal step: `Some(Nil)` before the
    // first step, `None` once iteration has finished or failed.
    key: Option<Value<'lua>>,
    // Records the requested key/value conversion types.
    _phantom: PhantomData<(K, V)>,
}
impl<'lua, K, V> Iterator for TablePairs<'lua, K, V>
where
    K: FromLua<'lua>,
    V: FromLua<'lua>,
{
    type Item = Result<(K, V)>;
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(prev_key) = self.key.take() {
            let lua = self.table.lua;
            let res = (|| unsafe {
                let _sg = StackGuard::new(lua.state);
                check_stack(lua.state, 5)?;
                lua.push_ref(&self.table);
                lua.push_value(prev_key)?;
                // lua_next pops the previous key and pushes the next key/value
                // pair, or pushes nothing and returns 0 when the traversal is
                // complete. It may raise, hence the protected call.
                let next = protect_lua!(lua.state, 2, ffi::LUA_MULTRET, |state| {
                    ffi::lua_next(state, -2)
                })?;
                if next != 0 {
                    let value = lua.pop_value();
                    let key = lua.pop_value();
                    // Keep an unconverted clone of the key so the next call
                    // can resume the traversal from it.
                    Ok(Some((
                        key.clone(),
                        K::from_lua(key, lua)?,
                        V::from_lua(value, lua)?,
                    )))
                } else {
                    Ok(None)
                }
            })();
            match res {
                Ok(Some((key, ret_key, value))) => {
                    self.key = Some(key);
                    Some(Ok((ret_key, value)))
                }
                Ok(None) => None,
                Err(e) => Some(Err(e)),
            }
        } else {
            // A previous call already finished (or failed) the traversal.
            None
        }
    }
}
/// An iterator over the sequence part of a Lua table.
///
/// This struct is created by the [`Table::sequence_values`] method.
///
/// [`Table::sequence_values`]: crate::Table::sequence_values
pub struct TableSequence<'lua, V> {
    // The table being traversed.
    table: LuaRef<'lua>,
    // The next index to fetch; `None` once iteration has stopped.
    index: Option<Integer>,
    // When `Some(n)`, nils at indices <= n are still yielded (used by the
    // serde/async paths); when `None`, the first nil ends iteration.
    len: Option<Integer>,
    // When true, elements are read with lua_rawgeti, skipping `__index`.
    raw: bool,
    // Records the requested value conversion type.
    _phantom: PhantomData<V>,
}
impl<'lua, V> Iterator for TableSequence<'lua, V>
where
    V: FromLua<'lua>,
{
    type Item = Result<V>;
    fn next(&mut self) -> Option<Self::Item> {
        if let Some(index) = self.index.take() {
            let lua = self.table.lua;
            let res = (|| unsafe {
                let _sg = StackGuard::new(lua.state);
                // Raw access never raises, so it needs less reserved stack
                // than the protected lua_geti path.
                check_stack(lua.state, 1 + if self.raw { 0 } else { 3 })?;
                lua.push_ref(&self.table);
                let res = if self.raw {
                    ffi::lua_rawgeti(lua.state, -1, index)
                } else {
                    protect_lua!(lua.state, 1, 1, |state| ffi::lua_geti(state, -1, index))?
                };
                match res {
                    // A nil ends iteration only once past the known length
                    // (immediately, when no length bound was supplied).
                    ffi::LUA_TNIL if index > self.len.unwrap_or(0) => Ok(None),
                    _ => Ok(Some((index, lua.pop_value()))),
                }
            })();
            match res {
                Ok(Some((index, r))) => {
                    self.index = Some(index + 1);
                    Some(V::from_lua(r, lua))
                }
                Ok(None) => None,
                Err(err) => Some(Err(err)),
            }
        } else {
            None
        }
    }
}
| 32.224096 | 105 | 0.510805 |
28e490e2a892104c35cb41b9f11a1ece222a38f4 | 21,130 | // Copyright (c) Microsoft. All rights reserved.
/// A reference to an object stored in a slot.
///
/// `T` is a compile-time marker for the kind of key this handle refers to:
/// `()` for secret keys (HMAC / AES operations), and the openssl EC / RSA key
/// types for key-pair halves. It is never stored at runtime.
pub struct Object<T> {
    // Keeps the owning session alive for as long as the handle is usable.
    session: std::sync::Arc<crate::Session>,
    // The PKCS#11 handle identifying this object within `session`.
    handle: pkcs11_sys::CK_OBJECT_HANDLE,
    // Zero-sized marker tying the handle to its key type `T`.
    _key: std::marker::PhantomData<T>,
}
impl<T> Object<T> {
    /// Wraps a raw object handle together with the session it belongs to.
    pub(crate) fn new(
        session: std::sync::Arc<crate::Session>,
        handle: pkcs11_sys::CK_OBJECT_HANDLE,
    ) -> Self {
        // `_key` only records the key type `T`; it holds no data.
        Object {
            session,
            handle,
            _key: std::marker::PhantomData,
        }
    }
}
impl Object<()> {
    /// Use this key to sign the given digest (with `CKM_SHA256_HMAC`) and
    /// store the result into the given signature buffer.
    ///
    /// Returns the number of signature bytes written.
    pub fn sign(
        &self,
        digest: &[u8],
        signature: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, SignError> {
        unsafe {
            // Secret-key operations require an authenticated session.
            self.session.login().map_err(SignError::LoginFailed)?;
            sign_inner(
                &self.session,
                self.handle,
                pkcs11_sys::CKM_SHA256_HMAC,
                digest,
                signature,
            )
        }
    }
}
impl Object<()> {
    /// Use this key to verify (with `CKM_SHA256_HMAC`) that the given digest
    /// has the given signature.
    ///
    /// Returns `Ok(false)` when the token rejects the signature.
    pub fn verify(&self, digest: &[u8], signature: &[u8]) -> Result<bool, VerifyError> {
        unsafe {
            // Secret-key operations require an authenticated session.
            self.session.login().map_err(VerifyError::LoginFailed)?;
            verify_inner(
                &self.session,
                self.handle,
                pkcs11_sys::CKM_SHA256_HMAC,
                digest,
                signature,
            )
        }
    }
}
impl Object<()> {
    /// Encrypts `plaintext` with this key using AES-GCM, writing the result
    /// (including the 16-byte authentication tag) into `ciphertext`.
    ///
    /// Returns the number of ciphertext bytes written.
    pub fn encrypt(
        &self,
        iv: &[u8],
        aad: &[u8],
        plaintext: &[u8],
        ciphertext: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, EncryptError> {
        unsafe {
            // Secret-key operations require an authenticated session.
            self.session.login().map_err(EncryptError::LoginFailed)?;
            let iv_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(iv.len()).expect("usize -> CK_ULONG");
            let aad_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(aad.len()).expect("usize -> CK_ULONG");
            let gcm_params = pkcs11_sys::CK_GCM_PARAMS {
                pIv: iv.as_ptr(),
                ulIvLen: iv_len,
                ulIvBits: iv_len * 8,
                pAAD: aad.as_ptr(),
                ulAADLen: aad_len,
                // 16-byte (128-bit) authentication tag.
                ulTagBits: 16 * 8,
            };
            let params_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(std::mem::size_of_val(&gcm_params))
                    .expect("usize -> CK_ULONG");
            let mechanism = pkcs11_sys::CK_MECHANISM_IN {
                mechanism: pkcs11_sys::CKM_AES_GCM,
                pParameter: (&gcm_params as *const pkcs11_sys::CK_GCM_PARAMS).cast(),
                ulParameterLen: params_len,
            };
            encrypt_inner(
                &self.session,
                self.handle,
                &mechanism,
                plaintext,
                ciphertext,
            )
        }
    }
}
impl Object<()> {
    /// Decrypts `ciphertext` (which includes the 16-byte authentication tag)
    /// with this key using AES-GCM, writing the result into `plaintext`.
    ///
    /// Returns the number of plaintext bytes written.
    pub fn decrypt(
        &self,
        iv: &[u8],
        aad: &[u8],
        ciphertext: &[u8],
        plaintext: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, DecryptError> {
        unsafe {
            // Secret-key operations require an authenticated session.
            self.session.login().map_err(DecryptError::LoginFailed)?;
            let iv_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(iv.len()).expect("usize -> CK_ULONG");
            let aad_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(aad.len()).expect("usize -> CK_ULONG");
            let gcm_params = pkcs11_sys::CK_GCM_PARAMS {
                pIv: iv.as_ptr(),
                ulIvLen: iv_len,
                ulIvBits: iv_len * 8,
                pAAD: aad.as_ptr(),
                ulAADLen: aad_len,
                // 16-byte (128-bit) authentication tag, matching `encrypt`.
                ulTagBits: 16 * 8,
            };
            let params_len: pkcs11_sys::CK_ULONG =
                std::convert::TryInto::try_into(std::mem::size_of_val(&gcm_params))
                    .expect("usize -> CK_ULONG");
            let mechanism = pkcs11_sys::CK_MECHANISM_IN {
                mechanism: pkcs11_sys::CKM_AES_GCM,
                pParameter: (&gcm_params as *const pkcs11_sys::CK_GCM_PARAMS).cast(),
                ulParameterLen: params_len,
            };
            decrypt_inner(
                &self.session,
                self.handle,
                &mechanism,
                ciphertext,
                plaintext,
            )
        }
    }
}
impl Object<openssl::ec::EcKey<openssl::pkey::Public>> {
    /// Get the EC parameters of this EC public key object.
    ///
    /// Reads the `CKA_EC_PARAMS` and `CKA_EC_POINT` attributes and rebuilds an
    /// openssl public key from them.
    pub fn parameters(
        &self,
    ) -> Result<openssl::ec::EcKey<openssl::pkey::Public>, GetKeyParametersError> {
        unsafe {
            // CKA_EC_PARAMS holds the curve as a DER-encoded OID.
            let curve = get_attribute_value_byte_buf(
                &self.session,
                self,
                pkcs11_sys::CKA_EC_PARAMS,
                self.session.context.C_GetAttributeValue,
            )?;
            let curve = openssl2::EcCurve::from_oid_der(&curve)
                .ok_or(GetKeyParametersError::UnrecognizedEcCurve(curve))?;
            let curve = curve.as_nid();
            let mut group = openssl::ec::EcGroup::from_curve_name(curve)
                .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            group.set_asn1_flag(openssl::ec::Asn1Flag::NAMED_CURVE);
            // CKA_EC_POINT returns a DER encoded octet string representing the point.
            //
            // The octet string is in the RFC 5480 format which is exactly what EC_POINT_oct2point expected, so we just need to strip the DER type and length prefix.
            let point = get_attribute_value_byte_buf(
                &self.session,
                self,
                pkcs11_sys::CKA_EC_POINT,
                self.session.context.C_GetAttributeValue,
            )?;
            let point = openssl_sys2::d2i_ASN1_OCTET_STRING(
                std::ptr::null_mut(),
                &mut point.as_ptr().cast(),
                std::convert::TryInto::try_into(point.len()).expect("usize -> c_long"),
            );
            if point.is_null() {
                return Err(GetKeyParametersError::MalformedEcPoint(
                    openssl::error::ErrorStack::get(),
                ));
            }
            // Take ownership of the openssl allocation so it is freed on drop.
            let point: openssl::asn1::Asn1String =
                foreign_types_shared::ForeignType::from_ptr(point);
            let mut big_num_context = openssl::bn::BigNumContext::new()
                .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            let point =
                openssl::ec::EcPoint::from_bytes(&group, point.as_slice(), &mut big_num_context)
                    .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            let parameters =
                openssl::ec::EcKey::<openssl::pkey::Public>::from_public_key(&group, &point)
                    .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            Ok(parameters)
        }
    }
}
/// An error from getting the parameters of a key object.
#[derive(Debug)]
pub enum GetKeyParametersError {
    /// Converting the raw attribute bytes into openssl types failed.
    ConvertToOpenssl(openssl::error::ErrorStack),
    /// `C_GetAttributeValue` returned a non-OK `CK_RV`.
    GetAttributeValueFailed(pkcs11_sys::CK_RV),
    /// The DER octet string holding the EC point could not be parsed.
    MalformedEcPoint(openssl::error::ErrorStack),
    /// The key's `CKA_EC_PARAMS` OID did not match any known curve.
    UnrecognizedEcCurve(Vec<u8>),
}
impl std::fmt::Display for GetKeyParametersError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Message wording is kept byte-for-byte identical to the original.
        match self {
            GetKeyParametersError::GetAttributeValueFailed(rv) => {
                write!(f, "C_GetAttributeValue failed with {}", rv)
            }
            GetKeyParametersError::UnrecognizedEcCurve(oid) => {
                write!(f, "the EC point is using an unknown curve: {:?}", oid)
            }
            GetKeyParametersError::ConvertToOpenssl(_) => {
                write!(f, "could not convert components to openssl types")
            }
            GetKeyParametersError::MalformedEcPoint(_) => {
                write!(f, "could not parse the DER-encoded EC point")
            }
        }
    }
}
impl Object<openssl::rsa::Rsa<openssl::pkey::Public>> {
    /// Get the RSA parameters of this RSA public key object.
    ///
    /// Reads the `CKA_MODULUS` and `CKA_PUBLIC_EXPONENT` attributes and
    /// rebuilds an openssl public key from them.
    pub fn parameters(
        &self,
    ) -> Result<openssl::rsa::Rsa<openssl::pkey::Public>, GetKeyParametersError> {
        unsafe {
            // Both attributes are big-endian byte strings, which is what
            // BigNum::from_slice expects.
            let modulus = get_attribute_value_byte_buf(
                &self.session,
                self,
                pkcs11_sys::CKA_MODULUS,
                self.session.context.C_GetAttributeValue,
            )?;
            let modulus = openssl::bn::BigNum::from_slice(&modulus)
                .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            let public_exponent = get_attribute_value_byte_buf(
                &self.session,
                self,
                pkcs11_sys::CKA_PUBLIC_EXPONENT,
                self.session.context.C_GetAttributeValue,
            )?;
            let public_exponent = openssl::bn::BigNum::from_slice(&public_exponent)
                .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            let parameters = openssl::rsa::Rsa::<openssl::pkey::Public>::from_public_components(
                modulus,
                public_exponent,
            )
            .map_err(GetKeyParametersError::ConvertToOpenssl)?;
            Ok(parameters)
        }
    }
}
impl std::error::Error for GetKeyParametersError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Only the openssl-backed variants carry an underlying error.
        match self {
            GetKeyParametersError::ConvertToOpenssl(inner)
            | GetKeyParametersError::MalformedEcPoint(inner) => Some(inner),
            GetKeyParametersError::GetAttributeValueFailed(_)
            | GetKeyParametersError::UnrecognizedEcCurve(_) => None,
        }
    }
}
impl Object<openssl::ec::EcKey<openssl::pkey::Private>> {
    /// Use this EC private key to sign the given digest (with `CKM_ECDSA`)
    /// and store the result into the given signature buffer.
    ///
    /// Returns the number of signature bytes written.
    pub fn sign(
        &self,
        digest: &[u8],
        signature: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, SignError> {
        unsafe {
            // Private-key operations require an authenticated session.
            self.session.login().map_err(SignError::LoginFailed)?;
            sign_inner(
                &self.session,
                self.handle,
                pkcs11_sys::CKM_ECDSA,
                digest,
                signature,
            )
        }
    }
}
/// The padding scheme to use for an RSA sign operation.
pub enum RsaSignMechanism {
    /// PKCS#1 v1.5 padding (`CKM_RSA_PKCS`).
    Pkcs1,
    /// Raw RSA without padding (`CKM_RSA_X509`).
    X509,
}
impl Object<openssl::rsa::Rsa<openssl::pkey::Private>> {
    /// Use this RSA private key to sign the given digest with the given
    /// padding mechanism and store the result into the given signature buffer.
    ///
    /// Returns the number of signature bytes written.
    pub fn sign(
        &self,
        mechanism: &RsaSignMechanism,
        digest: &[u8],
        signature: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, SignError> {
        // Map the padding choice onto the corresponding PKCS#11 mechanism type.
        let mechanism_type = match mechanism {
            RsaSignMechanism::Pkcs1 => pkcs11_sys::CKM_RSA_PKCS,
            RsaSignMechanism::X509 => pkcs11_sys::CKM_RSA_X509,
        };
        unsafe {
            // Private-key operations require an authenticated session.
            self.session.login().map_err(SignError::LoginFailed)?;
            sign_inner(&self.session, self.handle, mechanism_type, digest, signature)
        }
    }
}
impl Object<openssl::rsa::Rsa<openssl::pkey::Public>> {
    /// Use this RSA public key to encrypt the given plaintext with the given
    /// PKCS#11 mechanism type and store the result into the given ciphertext
    /// buffer.
    ///
    /// Returns the number of ciphertext bytes written. No token login is
    /// performed: the operation uses only the public key.
    pub fn encrypt(
        &self,
        mechanism: pkcs11_sys::CK_MECHANISM_TYPE,
        plaintext: &[u8],
        ciphertext: &mut [u8],
    ) -> Result<pkcs11_sys::CK_ULONG, EncryptError> {
        // These mechanisms take no parameter block.
        let mechanism = pkcs11_sys::CK_MECHANISM_IN {
            mechanism,
            pParameter: std::ptr::null(),
            ulParameterLen: 0,
        };
        unsafe {
            encrypt_inner(
                &self.session,
                self.handle,
                &mechanism,
                plaintext,
                ciphertext,
            )
        }
    }
}
/// Shared implementation of a single-part `C_SignInit` + `C_Sign` operation
/// with the given mechanism type and no mechanism parameter.
///
/// Returns the number of signature bytes written into `signature`.
///
/// # Safety
///
/// `handle` must be a valid object handle within `session`'s PKCS#11 session
/// — TODO confirm against `Session`'s invariants.
unsafe fn sign_inner(
    session: &crate::Session,
    handle: pkcs11_sys::CK_OBJECT_HANDLE,
    mechanism: pkcs11_sys::CK_MECHANISM_TYPE,
    digest: &[u8],
    signature: &mut [u8],
) -> Result<pkcs11_sys::CK_ULONG, SignError> {
    let mechanism = pkcs11_sys::CK_MECHANISM_IN {
        mechanism,
        pParameter: std::ptr::null(),
        ulParameterLen: 0,
    };
    let result = (session.context.C_SignInit)(session.handle, &mechanism, handle);
    if result != pkcs11_sys::CKR_OK {
        return Err(SignError::SignInitFailed(result));
    }
    // C_Sign reads the buffer capacity from this in/out length parameter and
    // overwrites it with the number of bytes actually produced.
    let original_signature_len =
        std::convert::TryInto::try_into(signature.len()).expect("usize -> CK_ULONG");
    let mut signature_len = original_signature_len;
    let result = (session.context.C_Sign)(
        session.handle,
        digest.as_ptr(),
        std::convert::TryInto::try_into(digest.len()).expect("usize -> CK_ULONG"),
        signature.as_mut_ptr(),
        &mut signature_len,
    );
    if result != pkcs11_sys::CKR_OK {
        return Err(SignError::SignFailed(result));
    }
    // The token must never report more bytes than the caller's buffer holds.
    assert!(signature_len <= original_signature_len);
    Ok(signature_len)
}
/// An error from signing with a key object.
#[derive(Debug)]
pub enum SignError {
    /// Logging in to the token failed before the operation could start.
    LoginFailed(crate::LoginError),
    /// `C_SignInit` returned a non-OK `CK_RV`.
    SignInitFailed(pkcs11_sys::CK_RV),
    /// `C_Sign` returned a non-OK `CK_RV`.
    SignFailed(pkcs11_sys::CK_RV),
}
impl std::fmt::Display for SignError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Message wording is kept byte-for-byte identical to the original.
        match self {
            SignError::SignInitFailed(rv) => write!(f, "C_SignInit failed with {}", rv),
            SignError::SignFailed(rv) => write!(f, "C_Sign failed with {}", rv),
            SignError::LoginFailed(_) => f.write_str("could not log in to the token"),
        }
    }
}
impl std::error::Error for SignError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Only `LoginFailed` wraps an underlying error.
        match self {
            SignError::LoginFailed(inner) => Some(inner),
            SignError::SignInitFailed(_) | SignError::SignFailed(_) => None,
        }
    }
}
/// Shared implementation of a single-part `C_VerifyInit` + `C_Verify`
/// operation with the given mechanism type and no mechanism parameter.
///
/// Returns `Ok(true)` when the signature verifies and `Ok(false)` when the
/// token rejects it.
///
/// # Safety
///
/// `handle` must be a valid object handle within `session`'s PKCS#11 session
/// — TODO confirm against `Session`'s invariants.
unsafe fn verify_inner(
    session: &crate::Session,
    handle: pkcs11_sys::CK_OBJECT_HANDLE,
    mechanism: pkcs11_sys::CK_MECHANISM_TYPE,
    digest: &[u8],
    signature: &[u8],
) -> Result<bool, VerifyError> {
    let mechanism = pkcs11_sys::CK_MECHANISM_IN {
        mechanism,
        pParameter: std::ptr::null(),
        ulParameterLen: 0,
    };
    let result = (session.context.C_VerifyInit)(session.handle, &mechanism, handle);
    if result != pkcs11_sys::CKR_OK {
        return Err(VerifyError::VerifyInitFailed(result));
    }
    let result = (session.context.C_Verify)(
        session.handle,
        digest.as_ptr(),
        std::convert::TryInto::try_into(digest.len()).expect("usize -> CK_ULONG"),
        signature.as_ptr(),
        std::convert::TryInto::try_into(signature.len()).expect("usize -> CK_ULONG"),
    );
    match result {
        pkcs11_sys::CKR_OK => Ok(true),
        // Both "invalid signature" and "wrong signature length" mean that
        // verification failed, not that the call itself errored.
        pkcs11_sys::CKR_SIGNATURE_INVALID | pkcs11_sys::CKR_SIGNATURE_LEN_RANGE => Ok(false),
        result => Err(VerifyError::VerifyFailed(result)),
    }
}
/// An error from verifying a signature with a key object.
#[derive(Debug)]
pub enum VerifyError {
    /// Logging in to the token failed before the operation could start.
    LoginFailed(crate::LoginError),
    /// `C_VerifyInit` returned a non-OK `CK_RV`.
    VerifyInitFailed(pkcs11_sys::CK_RV),
    /// `C_Verify` returned an unexpected non-OK `CK_RV`.
    VerifyFailed(pkcs11_sys::CK_RV),
}
impl std::fmt::Display for VerifyError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Message wording is kept byte-for-byte identical to the original.
        match self {
            VerifyError::VerifyInitFailed(rv) => write!(f, "C_VerifyInit failed with {}", rv),
            VerifyError::VerifyFailed(rv) => write!(f, "C_Verify failed with {}", rv),
            VerifyError::LoginFailed(_) => f.write_str("could not log in to the token"),
        }
    }
}
impl std::error::Error for VerifyError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        // Only `LoginFailed` wraps an underlying error.
        match self {
            VerifyError::LoginFailed(inner) => Some(inner),
            VerifyError::VerifyInitFailed(_) | VerifyError::VerifyFailed(_) => None,
        }
    }
}
/// Shared implementation of a single-part `C_EncryptInit` + `C_Encrypt`
/// operation with the given (fully constructed) mechanism.
///
/// Returns the number of ciphertext bytes written into `ciphertext`.
///
/// # Safety
///
/// `handle` must be a valid object handle within `session`'s PKCS#11 session,
/// and any pointers inside `mechanism` must be live for the duration of the
/// call — TODO confirm against `Session`'s invariants.
unsafe fn encrypt_inner(
    session: &crate::Session,
    handle: pkcs11_sys::CK_OBJECT_HANDLE,
    mechanism: &pkcs11_sys::CK_MECHANISM_IN,
    plaintext: &[u8],
    ciphertext: &mut [u8],
) -> Result<pkcs11_sys::CK_ULONG, EncryptError> {
    let result = (session.context.C_EncryptInit)(session.handle, mechanism, handle);
    if result != pkcs11_sys::CKR_OK {
        return Err(EncryptError::EncryptInitFailed(result));
    }
    // C_Encrypt reads the buffer capacity from this in/out length parameter
    // and overwrites it with the number of bytes actually produced.
    let original_ciphertext_len =
        std::convert::TryInto::try_into(ciphertext.len()).expect("usize -> CK_ULONG");
    let mut ciphertext_len = original_ciphertext_len;
    let result = (session.context.C_Encrypt)(
        session.handle,
        plaintext.as_ptr(),
        std::convert::TryInto::try_into(plaintext.len()).expect("usize -> CK_ULONG"),
        ciphertext.as_mut_ptr(),
        &mut ciphertext_len,
    );
    if result != pkcs11_sys::CKR_OK {
        return Err(EncryptError::EncryptFailed(result));
    }
    // The token must never report more bytes than the caller's buffer holds.
    assert!(ciphertext_len <= original_ciphertext_len);
    Ok(ciphertext_len)
}
/// An error from encrypting with a key object.
#[derive(Debug)]
pub enum EncryptError {
    /// `C_EncryptInit` returned a non-OK `CK_RV`.
    EncryptInitFailed(pkcs11_sys::CK_RV),
    /// `C_Encrypt` returned a non-OK `CK_RV`.
    EncryptFailed(pkcs11_sys::CK_RV),
    /// Logging in to the token failed before the operation could start.
    LoginFailed(crate::LoginError),
}
impl std::fmt::Display for EncryptError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Message wording is kept byte-for-byte identical to the original.
        match self {
            EncryptError::LoginFailed(_) => f.write_str("could not log in to the token"),
            EncryptError::EncryptInitFailed(rv) => write!(f, "C_EncryptInit failed with {}", rv),
            EncryptError::EncryptFailed(rv) => write!(f, "C_Encrypt failed with {}", rv),
        }
    }
}
impl std::error::Error for EncryptError {}
/// Shared implementation of a single-part `C_DecryptInit` + `C_Decrypt`
/// operation with the given (fully constructed) mechanism.
///
/// Returns the number of plaintext bytes written into `plaintext`.
///
/// # Safety
///
/// `handle` must be a valid object handle within `session`'s PKCS#11 session,
/// and any pointers inside `mechanism` must be live for the duration of the
/// call — TODO confirm against `Session`'s invariants.
unsafe fn decrypt_inner(
    session: &crate::Session,
    handle: pkcs11_sys::CK_OBJECT_HANDLE,
    mechanism: &pkcs11_sys::CK_MECHANISM_IN,
    ciphertext: &[u8],
    plaintext: &mut [u8],
) -> Result<pkcs11_sys::CK_ULONG, DecryptError> {
    let result = (session.context.C_DecryptInit)(session.handle, mechanism, handle);
    if result != pkcs11_sys::CKR_OK {
        return Err(DecryptError::DecryptInitFailed(result));
    }
    // C_Decrypt reads the buffer capacity from this in/out length parameter
    // and overwrites it with the number of bytes actually produced.
    let original_plaintext_len =
        std::convert::TryInto::try_into(plaintext.len()).expect("usize -> CK_ULONG");
    let mut plaintext_len = original_plaintext_len;
    let result = (session.context.C_Decrypt)(
        session.handle,
        ciphertext.as_ptr(),
        std::convert::TryInto::try_into(ciphertext.len()).expect("usize -> CK_ULONG"),
        plaintext.as_mut_ptr(),
        &mut plaintext_len,
    );
    if result != pkcs11_sys::CKR_OK {
        return Err(DecryptError::DecryptFailed(result));
    }
    // The token must never report more bytes than the caller's buffer holds.
    assert!(plaintext_len <= original_plaintext_len);
    Ok(plaintext_len)
}
/// An error from decrypting with a key object.
#[derive(Debug)]
pub enum DecryptError {
    /// `C_DecryptInit` returned a non-OK `CK_RV`.
    DecryptInitFailed(pkcs11_sys::CK_RV),
    /// `C_Decrypt` returned a non-OK `CK_RV`.
    DecryptFailed(pkcs11_sys::CK_RV),
    /// Logging in to the token failed before the operation could start.
    LoginFailed(crate::LoginError),
}
impl std::fmt::Display for DecryptError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Message wording is kept byte-for-byte identical to the original.
        match self {
            DecryptError::LoginFailed(_) => f.write_str("could not log in to the token"),
            DecryptError::DecryptInitFailed(rv) => write!(f, "C_DecryptInit failed with {}", rv),
            DecryptError::DecryptFailed(rv) => write!(f, "C_Decrypt failed with {}", rv),
        }
    }
}
impl std::error::Error for DecryptError {}
/// Query an attribute value as a byte buffer of arbitrary length.
///
/// Performs the standard two-call PKCS#11 pattern: first asks the token how
/// large the value is, then fetches it into a `Vec` of exactly that size.
unsafe fn get_attribute_value_byte_buf<T>(
    session: &crate::Session,
    object: &Object<T>,
    r#type: pkcs11_sys::CK_ATTRIBUTE_TYPE,
    C_GetAttributeValue: pkcs11_sys::CK_C_GetAttributeValue,
) -> Result<Vec<u8>, GetKeyParametersError> {
    // Per the docs of C_GetAttributeValue, it is legal to call it with pValue == NULL and ulValueLen == 0.
    // In this case it will set ulValueLen to the size of buffer it needs and return CKR_OK.
    let mut attribute = pkcs11_sys::CK_ATTRIBUTE {
        r#type,
        pValue: std::ptr::null_mut(),
        ulValueLen: 0,
    };
    let result = C_GetAttributeValue(session.handle, object.handle, &mut attribute, 1);
    if result != pkcs11_sys::CKR_OK {
        return Err(GetKeyParametersError::GetAttributeValueFailed(result));
    }
    // Second call: point the attribute at a buffer of the reported size.
    let mut buf = vec![
        0_u8;
        std::convert::TryInto::try_into(attribute.ulValueLen)
            .expect("CK_ULONG -> usize")
    ];
    attribute.pValue = buf.as_mut_ptr().cast();
    let result = C_GetAttributeValue(session.handle, object.handle, &mut attribute, 1);
    if result != pkcs11_sys::CKR_OK {
        return Err(GetKeyParametersError::GetAttributeValueFailed(result));
    }
    Ok(buf)
}
| 34.190939 | 165 | 0.585045 |
6a4569847b89bf268582545dac11b7e557485a95 | 7,116 | use std::sync::Arc;
use criterion::criterion_group;
use criterion::criterion_main;
use criterion::BenchmarkId;
use criterion::Criterion;
use holo_hash::EntryHash;
use holo_hash::EntryHashes;
use holochain::sweettest::*;
use holochain_conductor_api::conductor::ConductorConfig;
use holochain_conductor_api::AdminInterfaceConfig;
use holochain_conductor_api::InterfaceDriver;
use holochain_test_wasm_common::AnchorInput;
use holochain_test_wasm_common::ManyAnchorInput;
use holochain_wasm_test_utils::TestWasm;
use kitsune_p2p::KitsuneP2pConfig;
use tokio::runtime::Builder;
use tokio::runtime::Runtime;
// TODO: Produce a high data version of this bench.
// TODO: Add profile function to queries that need optimizing.
// TODO: Research indexing.
// Register the `consistency` benchmark with criterion and generate the
// benchmark harness `main` entry point.
criterion_group!(benches, consistency);
criterion_main!(benches);
/// Benchmark how quickly a consumer conductor observes anchor entries that a
/// producer conductor commits, with data propagating only via gossip (publish
/// is disabled in `setup`).
///
/// Environment knobs:
/// - `BENCH_SAMPLE_SIZE`: criterion sample count (default 100).
/// - `BENCH_NUM_OPS`: if set, pre-fill the DHT with this many entries and wait
///   for consistency before sampling.
/// - `BENCH_NUM_WAITS`: number of consistency wait attempts during pre-fill
///   (default 100).
fn consistency(bench: &mut Criterion) {
    observability::test_run().ok();
    let mut group = bench.benchmark_group("consistency");
    group.sample_size(
        std::env::var_os("BENCH_SAMPLE_SIZE")
            .and_then(|s| s.to_string_lossy().parse::<usize>().ok())
            .unwrap_or(100),
    );
    let runtime = rt();

    let (mut producer, mut consumer, others) = runtime.block_on(setup());
    // Optional pre-fill: commit BENCH_NUM_OPS entries up front, then block
    // until every cell has seen them.
    if let Some(n) = std::env::var_os("BENCH_NUM_OPS") {
        let num_ops = n.to_string_lossy().parse::<usize>().unwrap();
        runtime.block_on(async {
            producer.fill(num_ops).await;
            let mut cells = vec![&consumer.cell, &producer.cell];
            cells.extend(others.cells.iter());
            let num_tries = std::env::var_os("BENCH_NUM_WAITS")
                .and_then(|s| s.to_string_lossy().parse::<usize>().ok())
                .unwrap_or(100);
            holochain::test_utils::consistency(
                &cells,
                num_tries,
                std::time::Duration::from_millis(500),
            )
            .await;
            // holochain_state::prelude::dump_tmp(consumer.cell.env());
        });
    }
    let mut cells = vec![consumer.cell.clone(), producer.cell.clone()];
    cells.extend(others.cells.clone());
    // The producer runs in the background for the whole benchmark, committing
    // one new entry each time the consumer reports progress.
    runtime.spawn(async move {
        producer.run().await;
        producer.conductor.shutdown_and_wait().await;
    });
    // `BenchmarkId::new` takes any `Display` for the parameter, so the string
    // literal is used directly instead of a needless `format!` allocation.
    group.bench_function(BenchmarkId::new("test", "test"), |b| {
        b.iter(|| {
            runtime.block_on(async { consumer.run(&cells[..]).await });
        });
    });
    // Tear everything down before the runtime itself is shut down.
    runtime.block_on(async move {
        consumer.conductor.shutdown_and_wait().await;
        drop(consumer);
        for c in others.conductors {
            c.shutdown_and_wait().await;
            drop(c);
        }
    });
    runtime.shutdown_background();
}
/// Writer half of the benchmark: commits one new anchor entry each time the
/// consumer signals progress on `rx`.
struct Producer {
    conductor: SweetConductor,
    cell: SweetCell,
    // Receives the consumer's latest observed anchor count; each message
    // triggers one more commit in `Producer::run`.
    rx: tokio::sync::mpsc::Receiver<usize>,
}
/// Reader half of the benchmark: polls its cell until it observes more
/// anchors than it did last time, then reports the new count over `tx`.
struct Consumer {
    conductor: SweetConductor,
    cell: SweetCell,
    // Anchor count observed at the end of the previous `run` call.
    last: usize,
    // Notifies the producer of the latest observed count.
    tx: tokio::sync::mpsc::Sender<usize>,
}
/// Remaining conductors and cells that only participate in gossip; they are
/// not driven directly but must reach consistency with producer/consumer.
struct Others {
    conductors: Vec<SweetConductor>,
    cells: Vec<SweetCell>,
}
impl Producer {
    /// Drive the producer loop: for every count received on `rx`, commit one
    /// anchor entry named after that count plus one. Exits when the channel
    /// closes (i.e. when the consumer is dropped).
    async fn run(&mut self) {
        while let Some(n) = self.rx.recv().await {
            let next = n + 1;
            let _: EntryHash = self
                .conductor
                .call(
                    &self.cell.zome("anchor"),
                    "anchor",
                    AnchorInput("alice".to_string(), next.to_string()),
                )
                .await;
        }
    }

    /// Pre-populate the DHT with `num_ops` anchor entries via a single
    /// batched zome call.
    #[tracing::instrument(skip(self))]
    async fn fill(&mut self, num_ops: usize) {
        let inputs = (0..num_ops)
            .map(|i| AnchorInput("alice_fill".to_string(), i.to_string()))
            .collect::<Vec<_>>();
        let _: Vec<EntryHash> = self
            .conductor
            .call(
                &self.cell.zome("anchor"),
                "anchor_many",
                ManyAnchorInput(inputs),
            )
            .await;
        // holochain_state::prelude::dump_tmp(self.cell.env());
    }
}
impl Consumer {
    /// Poll this cell's anchor list until it holds more entries than were
    /// seen on the previous run, then record the new count and notify the
    /// producer so it commits the next entry.
    async fn run(&mut self, cells: &[SweetCell]) {
        let start = std::time::Instant::now();
        let mut count = self.last;
        while count <= self.last {
            let hashes: EntryHashes = self
                .conductor
                .call(
                    &self.cell.zome("anchor"),
                    "list_anchor_addresses",
                    "alice".to_string(),
                )
                .await;
            count = hashes.0.len();
            // If no progress after a second, run a short consistency pass on
            // every cell to nudge gossip along.
            if start.elapsed().as_secs() > 1 {
                for cell in cells {
                    holochain::test_utils::consistency(
                        &[cell],
                        1,
                        std::time::Duration::from_millis(10),
                    )
                    .await;
                }
            }
            // dump_tmp(self.cell.env());
            // dump_tmp(prod.env());
        }
        self.last = count;
        dbg!(start.elapsed());
        self.tx.send(count).await.unwrap();
    }
}
/// Spin up five conductors running the Anchor test wasm over sharded gossip,
/// wire the first two into a `Producer`/`Consumer` pair, and return the rest
/// as passive gossip participants. Publishing is disabled so the benchmark
/// exercises gossip propagation only.
async fn setup() -> (Producer, Consumer, Others) {
    let (tx, rx) = tokio::sync::mpsc::channel(1);
    let (dna, _) = SweetDnaFile::unique_from_test_wasms(vec![TestWasm::Anchor])
        .await
        .unwrap();
    // Per-conductor config: sharded-gossip strategy, QUIC transport on an
    // ephemeral port, and a websocket admin interface on port 0.
    let config = || {
        let mut tuning =
            kitsune_p2p_types::config::tuning_params_struct::KitsuneP2pTuningParams::default();
        tuning.gossip_strategy = "sharded-gossip".to_string();
        // tuning.gossip_strategy = "simple-bloom".to_string();
        let mut network = KitsuneP2pConfig::default();
        network.transport_pool = vec![kitsune_p2p::TransportConfig::Quic {
            bind_to: None,
            override_host: None,
            override_port: None,
        }];
        network.tuning_params = Arc::new(tuning);
        ConductorConfig {
            network: Some(network),
            admin_interfaces: Some(vec![AdminInterfaceConfig {
                driver: InterfaceDriver::Websocket { port: 0 },
            }]),
            ..Default::default()
        }
    };
    let configs = vec![config(), config(), config(), config(), config()];
    // `configs` is not used again below, so move it instead of cloning it.
    let mut conductors = SweetConductorBatch::from_configs(configs).await;
    // Disable publishing so data only propagates via gossip.
    for c in conductors.iter() {
        c.set_skip_publish(true);
    }
    let apps = conductors.setup_app("app", &[dna]).await.unwrap();
    let mut cells = apps
        .into_inner()
        .into_iter()
        .map(|c| c.into_cells().into_iter().next().unwrap());
    let alice = cells.next().unwrap();
    let bobbo = cells.next().unwrap();
    conductors.exchange_peer_info().await;
    let mut conductors = conductors.into_inner().into_iter();
    // Seed the channel so the producer commits its first entry immediately.
    tx.send(0).await.unwrap();
    (
        Producer {
            conductor: conductors.next().unwrap(),
            cell: alice,
            rx,
        },
        Consumer {
            conductor: conductors.next().unwrap(),
            cell: bobbo,
            tx,
            last: 0,
        },
        Others {
            conductors: conductors.collect(),
            cells: cells.collect(),
        },
    )
}
/// Build a multi-threaded tokio runtime with all drivers (I/O, time) enabled,
/// as required by the conductors and networking stack used in this bench.
pub fn rt() -> Runtime {
    Builder::new_multi_thread()
        .enable_all()
        .build()
        .expect("failed to build tokio multi-thread runtime")
}
| 31.210526 | 95 | 0.554947 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.