hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e4ffab90cc3507c1223c3e8a7f77faa0bb197c6d
| 805 |
/// How many argument values a command-line option accepts.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ArgsNumType {
    /// Exactly zero arguments.
    Zero,
    /// Exactly one argument.
    One,
    /// Zero or one argument.
    ZeroOrOne,
    /// One or more arguments.
    OneOrMore,
    /// Any number of arguments, including none.
    Any,
}
impl ArgsNumType {
    /// Derive the argument cardinality from the parsed option flags.
    ///
    /// `required_arg` is `Some` when the option declares a mandatory
    /// argument; `multi_args` is `Some(true)` for repeatable arguments,
    /// `Some(false)` for an explicitly optional single argument, and
    /// `None` when no multiplicity was given.
    pub fn from_parse(required_arg: Option<()>, multi_args: Option<bool>) -> Self {
        match (required_arg.is_some(), multi_args) {
            (true, Some(true)) => Self::OneOrMore,
            // The parser never produces "required" together with
            // "explicitly optional single".
            (true, Some(false)) => unreachable!(),
            (true, None) => Self::One,
            (false, Some(true)) => Self::Any,
            (false, Some(false)) => Self::ZeroOrOne,
            (false, None) => Self::Zero,
        }
    }
}
/// A single option token as it appears on the command line.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum OptionToken {
    /// A GNU-style long option, e.g. `--help`.
    LongOption(String),
    /// A single-character short option, e.g. `-h`.
    ShortOption(char),
    /// An old-style single-dash long option, e.g. `-help`.
    OldOption(String),
}
| 23.676471 | 83 | 0.500621 |
4ac22b9d80d02920dedcab4814c7d3829d3c2754
| 1,242 |
use super::{isample, Complex};
use std::error::Error;
pub fn compute_fft(input: &[isample], output: &mut [Complex<f32>]) -> Result<(), Box<dyn Error>>{
let N: usize = input.len();
let mut working_with = Vec::<Complex<f32>>::with_capacity(N);
for x in input {
working_with.push(Complex::new(*x as f32, 0.0));
}
let omega = -2f32 * std::f32::consts::PI / N as f32;
let log = (N as f32).log2() as i32;
let mut block_size = N;
let mut num_blocks = 1;
for _ in 0..log as usize { // Stage
let half_block = block_size / 2;
for j in 0..num_blocks { // Block
for k in 0..half_block { // Operation
let ind_x = block_size * j + k;
let ind_y = block_size * j + k + half_block;
let src_x = working_with[ind_x];
let src_y = working_with[ind_y];
working_with[ind_x] = src_x + src_y;
working_with[ind_y] = (src_x - src_y) * Complex::from_polar(1.0, omega * (k * num_blocks) as f32);
}
}
num_blocks *= 2;
block_size /= 2;
}
let shift = std::mem::size_of::<usize>() * 8 - log as usize;
for i in 0..N {
output[i.reverse_bits() >> shift] = working_with[i];
}
Ok(())
}
| 31.05 | 114 | 0.555556 |
fcdfec0fc7481d6754370e43df37c910be1282db
| 21,679 |
// Copyright © 2018 Cormac O'Brien
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software
// and associated documentation files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or
// substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
// BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
use std::{
cell::{Cell, RefCell},
collections::HashMap,
rc::Rc,
str::FromStr,
string::ToString,
};
use crate::common::{
console::{CmdRegistry, Console},
parse,
};
use failure::Error;
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
use winit::{
dpi::LogicalPosition,
event::{
DeviceEvent, ElementState, Event, KeyboardInput, MouseButton, MouseScrollDelta,
VirtualKeyCode as Key, WindowEvent,
},
};
// Number of variants in `Action`; sizes the per-action state array.
const ACTION_COUNT: usize = 19;
// Display names of every bindable input, in sorted (ASCII) order.
// Must stay parallel with `INPUT_VALUES`: `INPUT_NAMES[i]` is the name of
// `INPUT_VALUES[i]` (the parse/format impls below do linear index lookups).
static INPUT_NAMES: [&str; 79] = [
    ",",
    ".",
    "/",
    "0",
    "1",
    "2",
    "3",
    "4",
    "5",
    "6",
    "7",
    "8",
    "9",
    "A",
    "ALT",
    "B",
    "BACKSPACE",
    "C",
    "CTRL",
    "D",
    "DEL",
    "DOWNARROW",
    "E",
    "END",
    "ENTER",
    "ESCAPE",
    "F",
    "F1",
    "F10",
    "F11",
    "F12",
    "F2",
    "F3",
    "F4",
    "F5",
    "F6",
    "F7",
    "F8",
    "F9",
    "G",
    "H",
    "HOME",
    "I",
    "INS",
    "J",
    "K",
    "L",
    "LEFTARROW",
    "M",
    "MOUSE1",
    "MOUSE2",
    "MOUSE3",
    "MWHEELDOWN",
    "MWHEELUP",
    "N",
    "O",
    "P",
    "PGDN",
    "PGUP",
    "Q",
    "R",
    "RIGHTARROW",
    "S",
    "SEMICOLON",
    "SHIFT",
    "SPACE",
    "T",
    "TAB",
    "U",
    "UPARROW",
    "V",
    "W",
    "X",
    "Y",
    "Z",
    "[",
    "\\",
    "]",
    "`",
];
// The `BindInput` for each entry of `INPUT_NAMES` (parallel arrays — keep
// the two tables in lock-step when adding inputs).
static INPUT_VALUES: [BindInput; 79] = [
    BindInput::Key(Key::Comma),
    BindInput::Key(Key::Period),
    BindInput::Key(Key::Slash),
    BindInput::Key(Key::Key0),
    BindInput::Key(Key::Key1),
    BindInput::Key(Key::Key2),
    BindInput::Key(Key::Key3),
    BindInput::Key(Key::Key4),
    BindInput::Key(Key::Key5),
    BindInput::Key(Key::Key6),
    BindInput::Key(Key::Key7),
    BindInput::Key(Key::Key8),
    BindInput::Key(Key::Key9),
    BindInput::Key(Key::A),
    BindInput::Key(Key::LAlt),
    BindInput::Key(Key::B),
    BindInput::Key(Key::Back),
    BindInput::Key(Key::C),
    BindInput::Key(Key::LControl),
    BindInput::Key(Key::D),
    BindInput::Key(Key::Delete),
    BindInput::Key(Key::Down),
    BindInput::Key(Key::E),
    BindInput::Key(Key::End),
    BindInput::Key(Key::Return),
    BindInput::Key(Key::Escape),
    BindInput::Key(Key::F),
    BindInput::Key(Key::F1),
    BindInput::Key(Key::F10),
    BindInput::Key(Key::F11),
    BindInput::Key(Key::F12),
    BindInput::Key(Key::F2),
    BindInput::Key(Key::F3),
    BindInput::Key(Key::F4),
    BindInput::Key(Key::F5),
    BindInput::Key(Key::F6),
    BindInput::Key(Key::F7),
    BindInput::Key(Key::F8),
    BindInput::Key(Key::F9),
    BindInput::Key(Key::G),
    BindInput::Key(Key::H),
    BindInput::Key(Key::Home),
    BindInput::Key(Key::I),
    BindInput::Key(Key::Insert),
    BindInput::Key(Key::J),
    BindInput::Key(Key::K),
    BindInput::Key(Key::L),
    BindInput::Key(Key::Left),
    BindInput::Key(Key::M),
    BindInput::MouseButton(MouseButton::Left),
    BindInput::MouseButton(MouseButton::Right),
    BindInput::MouseButton(MouseButton::Middle),
    BindInput::MouseWheel(MouseWheel::Down),
    BindInput::MouseWheel(MouseWheel::Up),
    BindInput::Key(Key::N),
    BindInput::Key(Key::O),
    BindInput::Key(Key::P),
    BindInput::Key(Key::PageDown),
    BindInput::Key(Key::PageUp),
    BindInput::Key(Key::Q),
    BindInput::Key(Key::R),
    BindInput::Key(Key::Right),
    BindInput::Key(Key::S),
    BindInput::Key(Key::Semicolon),
    BindInput::Key(Key::LShift),
    BindInput::Key(Key::Space),
    BindInput::Key(Key::T),
    BindInput::Key(Key::Tab),
    BindInput::Key(Key::U),
    BindInput::Key(Key::Up),
    BindInput::Key(Key::V),
    BindInput::Key(Key::W),
    BindInput::Key(Key::X),
    BindInput::Key(Key::Y),
    BindInput::Key(Key::Z),
    BindInput::Key(Key::LBracket),
    BindInput::Key(Key::Backslash),
    BindInput::Key(Key::RBracket),
    BindInput::Key(Key::Grave),
];
/// A unique identifier for an in-game action.
///
/// The explicit discriminants index the per-action state array (see
/// `ACTION_COUNT` and `GameInput::action_states`), so they must remain
/// contiguous from zero.
#[derive(Clone, Copy, Debug, Eq, PartialEq, EnumIter)]
pub enum Action {
    /// Move forward.
    Forward = 0,
    /// Move backward.
    Back = 1,
    /// Strafe left.
    MoveLeft = 2,
    /// Strafe right.
    MoveRight = 3,
    /// Move up (when swimming).
    MoveUp = 4,
    /// Move down (when swimming).
    MoveDown = 5,
    /// Look up.
    LookUp = 6,
    /// Look down.
    LookDown = 7,
    /// Look left.
    Left = 8,
    /// Look right.
    Right = 9,
    /// Change move speed (walk/run).
    Speed = 10,
    /// Jump.
    Jump = 11,
    /// Interpret `Left`/`Right` like `MoveLeft`/`MoveRight`.
    Strafe = 12,
    /// Attack with the current weapon.
    Attack = 13,
    /// Interact with an object (not used).
    Use = 14,
    /// Interpret `Forward`/`Back` like `LookUp`/`LookDown`.
    KLook = 15,
    /// Interpret upward/downward vertical mouse movements like `LookUp`/`LookDown`.
    MLook = 16,
    /// If in single-player, show the current level stats. If in multiplayer, show the scoreboard.
    ShowScores = 17,
    /// Show the team scoreboard.
    ShowTeamScores = 18,
}
impl FromStr for Action {
    type Err = Error;

    /// Parse a (case-insensitive) action name into an `Action`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let lower = s.to_lowercase();
        // `Action::to_string` yields exactly the canonical lowercase name
        // for each variant, so iterating the enum covers every valid input.
        for action in Action::iter() {
            if action.to_string() == lower {
                return Ok(action);
            }
        }
        bail!("Invalid action name: {}", s)
    }
}
impl ToString for Action {
    /// The canonical (lowercase) name of this action, as used in binds.
    fn to_string(&self) -> String {
        let name = match *self {
            Action::Forward => "forward",
            Action::Back => "back",
            Action::MoveLeft => "moveleft",
            Action::MoveRight => "moveright",
            Action::MoveUp => "moveup",
            Action::MoveDown => "movedown",
            Action::LookUp => "lookup",
            Action::LookDown => "lookdown",
            Action::Left => "left",
            Action::Right => "right",
            Action::Speed => "speed",
            Action::Jump => "jump",
            Action::Strafe => "strafe",
            Action::Attack => "attack",
            Action::Use => "use",
            Action::KLook => "klook",
            Action::MLook => "mlook",
            Action::ShowScores => "showscores",
            Action::ShowTeamScores => "showteamscores",
        };
        name.to_owned()
    }
}
// for game input, we only care about the direction the mouse wheel moved, not how far it went in
// one event
/// A movement of the mouse wheel up or down.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum MouseWheel {
    /// Scrolled up.
    Up,
    /// Scrolled down.
    Down,
}
// TODO: this currently doesn't handle NaN and treats 0.0 as negative which is probably not optimal
impl ::std::convert::From<MouseScrollDelta> for MouseWheel {
    /// Collapse a scroll delta to a direction: strictly positive vertical
    /// movement is `Up`; everything else (including 0.0 and NaN) is `Down`.
    fn from(src: MouseScrollDelta) -> MouseWheel {
        let vertical = match src {
            // Line scrolling reports an f32 delta; widening to f64
            // preserves its sign, so the single comparison below is
            // equivalent to comparing the f32 directly.
            MouseScrollDelta::LineDelta(_, y) => y as f64,
            MouseScrollDelta::PixelDelta(LogicalPosition { y, .. }) => y,
        };
        if vertical > 0.0 {
            MouseWheel::Up
        } else {
            MouseWheel::Down
        }
    }
}
/// A physical input that can be bound to a command.
///
/// Derives `Eq` + `Hash` so it can be used as the key of the bindings map.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum BindInput {
    /// A key pressed on the keyboard.
    Key(Key),
    /// A button pressed on the mouse.
    MouseButton(MouseButton),
    /// A direction scrolled on the mouse wheel.
    MouseWheel(MouseWheel),
}
impl From<Key> for BindInput {
    fn from(src: Key) -> BindInput {
        BindInput::Key(src)
    }
}

impl From<MouseButton> for BindInput {
    fn from(src: MouseButton) -> BindInput {
        BindInput::MouseButton(src)
    }
}

impl From<MouseWheel> for BindInput {
    fn from(src: MouseWheel) -> BindInput {
        BindInput::MouseWheel(src)
    }
}

impl From<MouseScrollDelta> for BindInput {
    /// Reduce a raw scroll delta to a wheel direction, then wrap it.
    fn from(src: MouseScrollDelta) -> BindInput {
        BindInput::MouseWheel(MouseWheel::from(src))
    }
}
impl FromStr for BindInput {
    type Err = Error;

    /// Parse a (case-insensitive) input name into a `BindInput` via the
    /// `INPUT_NAMES`/`INPUT_VALUES` parallel tables.
    fn from_str(src: &str) -> Result<BindInput, Error> {
        let upper = src.to_uppercase();
        match INPUT_NAMES.iter().position(|name| *name == upper) {
            Some(idx) => Ok(INPUT_VALUES[idx]),
            None => bail!("\"{}\" isn't a valid key", src),
        }
    }
}
impl ToString for BindInput {
    /// The canonical name of this input, or the empty string if it has no
    /// entry in the name table.
    fn to_string(&self) -> String {
        // this could be a binary search but it's unlikely to affect performance much
        INPUT_VALUES
            .iter()
            .position(|value| value == self)
            .map(|idx| INPUT_NAMES[idx].to_owned())
            .unwrap_or_default()
    }
}
/// An operation to perform when a `BindInput` is received.
#[derive(Clone, Debug)]
pub enum BindTarget {
    /// An action to set/unset.
    Action {
        // + is true, - is false
        // so "+forward" maps to trigger: true, action: Action::Forward
        /// The input state (`Pressed`/`Released`) that activates the action.
        trigger: ElementState,
        /// The action to activate.
        action: Action,
    },
    /// Text to push to the console execution buffer.
    ConsoleInput { text: String },
}
impl FromStr for BindTarget {
    type Err = Error;

    /// Never fails: anything that does not parse as a `+`/`-` action
    /// (including unknown action names) becomes console input.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if let Ok((_, (trigger, action_str))) = parse::action(s) {
            if let Ok(action) = Action::from_str(action_str) {
                return Ok(BindTarget::Action { trigger, action });
            }
        }
        // not an action — assume it's a cvar/cmd and keep the raw text
        Ok(BindTarget::ConsoleInput { text: s.to_owned() })
    }
}
impl ToString for BindTarget {
    /// Render the target in config syntax: `+name`/`-name` for actions,
    /// quoted text for console input.
    fn to_string(&self) -> String {
        match *self {
            BindTarget::Action { trigger, action } => {
                let prefix = match trigger {
                    ElementState::Pressed => "+",
                    ElementState::Released => "-",
                };
                format!("{}{}", prefix, action.to_string())
            }
            BindTarget::ConsoleInput { ref text } => format!("\"{}\"", text),
        }
    }
}
/// Tracks key bindings, per-action state, and per-frame mouse/impulse input.
///
/// Cloning is cheap: every field except `mouse_delta` is shared via `Rc`.
#[derive(Clone)]
pub struct GameInput {
    // handle to the console, used to execute bound command text
    console: Rc<RefCell<Console>>,
    // maps physical inputs to their bound targets
    bindings: Rc<RefCell<HashMap<BindInput, BindTarget>>>,
    // on/off state of each `Action`, indexed by its discriminant
    action_states: Rc<RefCell<[bool; ACTION_COUNT]>>,
    // mouse motion accumulated since the last `refresh`
    mouse_delta: (f64, f64),
    // impulse value issued this frame (cleared to 0 by `refresh`)
    impulse: Rc<Cell<u8>>,
}
impl GameInput {
pub fn new(console: Rc<RefCell<Console>>) -> GameInput {
GameInput {
console,
bindings: Rc::new(RefCell::new(HashMap::new())),
action_states: Rc::new(RefCell::new([false; ACTION_COUNT])),
mouse_delta: (0.0, 0.0),
impulse: Rc::new(Cell::new(0)),
}
}
pub fn mouse_delta(&self) -> (f64, f64) {
self.mouse_delta
}
pub fn impulse(&self) -> u8 {
self.impulse.get()
}
/// Bind the default controls.
pub fn bind_defaults(&mut self) {
self.bind(Key::W, BindTarget::from_str("+forward").unwrap());
self.bind(Key::A, BindTarget::from_str("+moveleft").unwrap());
self.bind(Key::S, BindTarget::from_str("+back").unwrap());
self.bind(Key::D, BindTarget::from_str("+moveright").unwrap());
self.bind(Key::Space, BindTarget::from_str("+jump").unwrap());
self.bind(Key::Up, BindTarget::from_str("+lookup").unwrap());
self.bind(Key::Left, BindTarget::from_str("+left").unwrap());
self.bind(Key::Down, BindTarget::from_str("+lookdown").unwrap());
self.bind(Key::Right, BindTarget::from_str("+right").unwrap());
self.bind(Key::LControl, BindTarget::from_str("+attack").unwrap());
self.bind(Key::E, BindTarget::from_str("+use").unwrap());
self.bind(Key::Grave, BindTarget::from_str("toggleconsole").unwrap());
self.bind(Key::Key1, BindTarget::from_str("impulse 1").unwrap());
self.bind(Key::Key2, BindTarget::from_str("impulse 2").unwrap());
self.bind(Key::Key3, BindTarget::from_str("impulse 3").unwrap());
self.bind(Key::Key4, BindTarget::from_str("impulse 4").unwrap());
self.bind(Key::Key5, BindTarget::from_str("impulse 5").unwrap());
self.bind(Key::Key6, BindTarget::from_str("impulse 6").unwrap());
self.bind(Key::Key7, BindTarget::from_str("impulse 7").unwrap());
self.bind(Key::Key8, BindTarget::from_str("impulse 8").unwrap());
self.bind(Key::Key9, BindTarget::from_str("impulse 9").unwrap());
}
/// Bind a `BindInput` to a `BindTarget`.
pub fn bind<I, T>(&mut self, input: I, target: T) -> Option<BindTarget>
where
I: Into<BindInput>,
T: Into<BindTarget>,
{
self.bindings
.borrow_mut()
.insert(input.into(), target.into())
}
/// Return the `BindTarget` that `input` is bound to, or `None` if `input` is not present.
pub fn binding<I>(&self, input: I) -> Option<BindTarget>
where
I: Into<BindInput>,
{
self.bindings.borrow().get(&input.into()).cloned()
}
pub fn handle_event<T>(&mut self, outer_event: Event<T>) {
let (input, state): (BindInput, _) = match outer_event {
Event::WindowEvent { event, .. } => match event {
WindowEvent::KeyboardInput {
input:
KeyboardInput {
state,
virtual_keycode: Some(key),
..
},
..
} => (key.into(), state),
WindowEvent::MouseInput { state, button, .. } => (button.into(), state),
WindowEvent::MouseWheel { delta, .. } => (delta.into(), ElementState::Pressed),
_ => return,
},
Event::DeviceEvent { event, .. } => match event {
DeviceEvent::MouseMotion { delta } => {
self.mouse_delta.0 += delta.0;
self.mouse_delta.1 += delta.1;
return;
}
_ => return,
},
_ => return,
};
self.handle_input(input, state);
}
pub fn handle_input<I>(&mut self, input: I, state: ElementState)
where
I: Into<BindInput>,
{
let bind_input = input.into();
// debug!("handle input {:?}: {:?}", &bind_input, state);
if let Some(target) = self.bindings.borrow().get(&bind_input) {
match *target {
BindTarget::Action { trigger, action } => {
self.action_states.borrow_mut()[action as usize] = state == trigger;
debug!(
"{}{}",
if state == trigger { '+' } else { '-' },
action.to_string()
);
}
BindTarget::ConsoleInput { ref text } => {
if state == ElementState::Pressed {
self.console.borrow_mut().stuff_text(text);
}
}
}
}
}
pub fn action_state(&self, action: Action) -> bool {
self.action_states.borrow()[action as usize]
}
// TODO: roll actions into a loop
pub fn register_cmds(&self, cmds: &mut CmdRegistry) {
let states = [("+", true), ("-", false)];
for action in Action::iter() {
for (state_str, state_bool) in states.iter().cloned() {
let action_states = self.action_states.clone();
let cmd_name = format!("{}{}", state_str, action.to_string());
cmds.insert_or_replace(
&cmd_name,
Box::new(move |_| {
action_states.borrow_mut()[action as usize] = state_bool;
String::new()
}),
)
.unwrap();
}
}
// "bind"
let bindings = self.bindings.clone();
cmds.insert_or_replace(
"bind",
Box::new(move |args| {
match args.len() {
// bind (key)
// queries what (key) is bound to, if anything
1 => match BindInput::from_str(args[0]) {
Ok(i) => match bindings.borrow().get(&i) {
Some(t) => format!("\"{}\" = \"{}\"", i.to_string(), t.to_string()),
None => format!("\"{}\" is not bound", i.to_string()),
},
Err(_) => format!("\"{}\" isn't a valid key", args[0]),
},
// bind (key) [command]
2 => match BindInput::from_str(args[0]) {
Ok(input) => match BindTarget::from_str(args[1]) {
Ok(target) => {
bindings.borrow_mut().insert(input, target);
debug!("Bound {:?} to {:?}", input, args[1]);
String::new()
}
Err(_) => {
format!("\"{}\" isn't a valid bind target", args[1])
}
},
Err(_) => format!("\"{}\" isn't a valid key", args[0]),
},
_ => "bind [key] (command): attach a command to a key".to_owned(),
}
}),
)
.unwrap();
// "unbindall"
let bindings = self.bindings.clone();
cmds.insert_or_replace(
"unbindall",
Box::new(move |args| match args.len() {
0 => {
let _ = bindings.replace(HashMap::new());
String::new()
}
_ => "unbindall: delete all keybindings".to_owned(),
}),
)
.unwrap();
// "impulse"
let impulse = self.impulse.clone();
cmds.insert_or_replace(
"impulse",
Box::new(move |args| {
println!("args: {}", args.len());
match args.len() {
1 => match u8::from_str(args[0]) {
Ok(i) => {
impulse.set(i);
String::new()
}
Err(_) => "Impulse must be a number between 0 and 255".to_owned(),
},
_ => "usage: impulse [number]".to_owned(),
}
}),
)
.unwrap();
}
// must be called every frame!
pub fn refresh(&mut self) {
self.clear_mouse();
self.clear_impulse();
}
fn clear_mouse(&mut self) {
self.handle_input(MouseWheel::Up, ElementState::Released);
self.handle_input(MouseWheel::Down, ElementState::Released);
self.mouse_delta = (0.0, 0.0);
}
fn clear_impulse(&mut self) {
self.impulse.set(0);
}
}
#[cfg(test)]
mod test {
    use super::*;

    // `Action::to_string` yields the canonical lowercase name.
    #[test]
    fn test_action_to_string() {
        let act = Action::Forward;
        assert_eq!(act.to_string(), "forward");
    }

    // Press-triggered action targets serialize with a "+" prefix.
    #[test]
    fn test_bind_target_action_to_string() {
        let target = BindTarget::Action {
            trigger: ElementState::Pressed,
            action: Action::Forward,
        };
        assert_eq!(target.to_string(), "+forward");
    }
}
| 29.138441 | 99 | 0.513216 |
bfe19768c90b1ca173c9b41ac8489069b0a2bb15
| 17,111 |
#[doc = "Reader of register INTEN"]
pub type R = crate::R<u8, super::INTEN>;
#[doc = "Writer for register INTEN"]
pub type W = crate::W<u8, super::INTEN>;
#[doc = "Register INTEN `reset()`'s with value 0"]
impl crate::ResetValue for super::INTEN {
type Type = u8;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "USBRST Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum USBRSTEN_A {
#[doc = "0: Disables the USBRST interrupt."]
USBRSTEN_0,
#[doc = "1: Enables the USBRST interrupt."]
USBRSTEN_1,
}
impl From<USBRSTEN_A> for bool {
#[inline(always)]
fn from(variant: USBRSTEN_A) -> Self {
match variant {
USBRSTEN_A::USBRSTEN_0 => false,
USBRSTEN_A::USBRSTEN_1 => true,
}
}
}
#[doc = "Reader of field `USBRSTEN`"]
pub type USBRSTEN_R = crate::R<bool, USBRSTEN_A>;
impl USBRSTEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> USBRSTEN_A {
match self.bits {
false => USBRSTEN_A::USBRSTEN_0,
true => USBRSTEN_A::USBRSTEN_1,
}
}
#[doc = "Checks if the value of the field is `USBRSTEN_0`"]
#[inline(always)]
pub fn is_usbrsten_0(&self) -> bool {
*self == USBRSTEN_A::USBRSTEN_0
}
#[doc = "Checks if the value of the field is `USBRSTEN_1`"]
#[inline(always)]
pub fn is_usbrsten_1(&self) -> bool {
*self == USBRSTEN_A::USBRSTEN_1
}
}
#[doc = "Write proxy for field `USBRSTEN`"]
pub struct USBRSTEN_W<'a> {
w: &'a mut W,
}
impl<'a> USBRSTEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: USBRSTEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables the USBRST interrupt."]
#[inline(always)]
pub fn usbrsten_0(self) -> &'a mut W {
self.variant(USBRSTEN_A::USBRSTEN_0)
}
#[doc = "Enables the USBRST interrupt."]
#[inline(always)]
pub fn usbrsten_1(self) -> &'a mut W {
self.variant(USBRSTEN_A::USBRSTEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u8) & 0x01);
self.w
}
}
#[doc = "ERROR Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum ERROREN_A {
#[doc = "0: Disables the ERROR interrupt."]
ERROREN_0,
#[doc = "1: Enables the ERROR interrupt."]
ERROREN_1,
}
impl From<ERROREN_A> for bool {
#[inline(always)]
fn from(variant: ERROREN_A) -> Self {
match variant {
ERROREN_A::ERROREN_0 => false,
ERROREN_A::ERROREN_1 => true,
}
}
}
#[doc = "Reader of field `ERROREN`"]
pub type ERROREN_R = crate::R<bool, ERROREN_A>;
impl ERROREN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> ERROREN_A {
match self.bits {
false => ERROREN_A::ERROREN_0,
true => ERROREN_A::ERROREN_1,
}
}
#[doc = "Checks if the value of the field is `ERROREN_0`"]
#[inline(always)]
pub fn is_erroren_0(&self) -> bool {
*self == ERROREN_A::ERROREN_0
}
#[doc = "Checks if the value of the field is `ERROREN_1`"]
#[inline(always)]
pub fn is_erroren_1(&self) -> bool {
*self == ERROREN_A::ERROREN_1
}
}
#[doc = "Write proxy for field `ERROREN`"]
pub struct ERROREN_W<'a> {
w: &'a mut W,
}
impl<'a> ERROREN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: ERROREN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables the ERROR interrupt."]
#[inline(always)]
pub fn erroren_0(self) -> &'a mut W {
self.variant(ERROREN_A::ERROREN_0)
}
#[doc = "Enables the ERROR interrupt."]
#[inline(always)]
pub fn erroren_1(self) -> &'a mut W {
self.variant(ERROREN_A::ERROREN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u8) & 0x01) << 1);
self.w
}
}
#[doc = "SOFTOK Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SOFTOKEN_A {
#[doc = "0: Disbles the SOFTOK interrupt."]
SOFTOKEN_0,
#[doc = "1: Enables the SOFTOK interrupt."]
SOFTOKEN_1,
}
impl From<SOFTOKEN_A> for bool {
#[inline(always)]
fn from(variant: SOFTOKEN_A) -> Self {
match variant {
SOFTOKEN_A::SOFTOKEN_0 => false,
SOFTOKEN_A::SOFTOKEN_1 => true,
}
}
}
#[doc = "Reader of field `SOFTOKEN`"]
pub type SOFTOKEN_R = crate::R<bool, SOFTOKEN_A>;
impl SOFTOKEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SOFTOKEN_A {
match self.bits {
false => SOFTOKEN_A::SOFTOKEN_0,
true => SOFTOKEN_A::SOFTOKEN_1,
}
}
#[doc = "Checks if the value of the field is `SOFTOKEN_0`"]
#[inline(always)]
pub fn is_softoken_0(&self) -> bool {
*self == SOFTOKEN_A::SOFTOKEN_0
}
#[doc = "Checks if the value of the field is `SOFTOKEN_1`"]
#[inline(always)]
pub fn is_softoken_1(&self) -> bool {
*self == SOFTOKEN_A::SOFTOKEN_1
}
}
#[doc = "Write proxy for field `SOFTOKEN`"]
pub struct SOFTOKEN_W<'a> {
w: &'a mut W,
}
impl<'a> SOFTOKEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: SOFTOKEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Disables the SOFTOK interrupt."]
    #[inline(always)]
    pub fn softoken_0(self) -> &'a mut W {
        self.variant(SOFTOKEN_A::SOFTOKEN_0)
    }
    #[doc = "Enables the SOFTOK interrupt."]
    #[inline(always)]
    pub fn softoken_1(self) -> &'a mut W {
        self.variant(SOFTOKEN_A::SOFTOKEN_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u8) & 0x01) << 2);
        self.w
    }
}
#[doc = "TOKDNE Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum TOKDNEEN_A {
#[doc = "0: Disables the TOKDNE interrupt."]
TOKDNEEN_0,
#[doc = "1: Enables the TOKDNE interrupt."]
TOKDNEEN_1,
}
impl From<TOKDNEEN_A> for bool {
#[inline(always)]
fn from(variant: TOKDNEEN_A) -> Self {
match variant {
TOKDNEEN_A::TOKDNEEN_0 => false,
TOKDNEEN_A::TOKDNEEN_1 => true,
}
}
}
#[doc = "Reader of field `TOKDNEEN`"]
pub type TOKDNEEN_R = crate::R<bool, TOKDNEEN_A>;
impl TOKDNEEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> TOKDNEEN_A {
match self.bits {
false => TOKDNEEN_A::TOKDNEEN_0,
true => TOKDNEEN_A::TOKDNEEN_1,
}
}
#[doc = "Checks if the value of the field is `TOKDNEEN_0`"]
#[inline(always)]
pub fn is_tokdneen_0(&self) -> bool {
*self == TOKDNEEN_A::TOKDNEEN_0
}
#[doc = "Checks if the value of the field is `TOKDNEEN_1`"]
#[inline(always)]
pub fn is_tokdneen_1(&self) -> bool {
*self == TOKDNEEN_A::TOKDNEEN_1
}
}
#[doc = "Write proxy for field `TOKDNEEN`"]
pub struct TOKDNEEN_W<'a> {
w: &'a mut W,
}
impl<'a> TOKDNEEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: TOKDNEEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables the TOKDNE interrupt."]
#[inline(always)]
pub fn tokdneen_0(self) -> &'a mut W {
self.variant(TOKDNEEN_A::TOKDNEEN_0)
}
#[doc = "Enables the TOKDNE interrupt."]
#[inline(always)]
pub fn tokdneen_1(self) -> &'a mut W {
self.variant(TOKDNEEN_A::TOKDNEEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u8) & 0x01) << 3);
self.w
}
}
#[doc = "SLEEP Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum SLEEPEN_A {
#[doc = "0: Disables the SLEEP interrupt."]
SLEEPEN_0,
#[doc = "1: Enables the SLEEP interrupt."]
SLEEPEN_1,
}
impl From<SLEEPEN_A> for bool {
#[inline(always)]
fn from(variant: SLEEPEN_A) -> Self {
match variant {
SLEEPEN_A::SLEEPEN_0 => false,
SLEEPEN_A::SLEEPEN_1 => true,
}
}
}
#[doc = "Reader of field `SLEEPEN`"]
pub type SLEEPEN_R = crate::R<bool, SLEEPEN_A>;
impl SLEEPEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> SLEEPEN_A {
match self.bits {
false => SLEEPEN_A::SLEEPEN_0,
true => SLEEPEN_A::SLEEPEN_1,
}
}
#[doc = "Checks if the value of the field is `SLEEPEN_0`"]
#[inline(always)]
pub fn is_sleepen_0(&self) -> bool {
*self == SLEEPEN_A::SLEEPEN_0
}
#[doc = "Checks if the value of the field is `SLEEPEN_1`"]
#[inline(always)]
pub fn is_sleepen_1(&self) -> bool {
*self == SLEEPEN_A::SLEEPEN_1
}
}
#[doc = "Write proxy for field `SLEEPEN`"]
pub struct SLEEPEN_W<'a> {
w: &'a mut W,
}
impl<'a> SLEEPEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: SLEEPEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables the SLEEP interrupt."]
#[inline(always)]
pub fn sleepen_0(self) -> &'a mut W {
self.variant(SLEEPEN_A::SLEEPEN_0)
}
#[doc = "Enables the SLEEP interrupt."]
#[inline(always)]
pub fn sleepen_1(self) -> &'a mut W {
self.variant(SLEEPEN_A::SLEEPEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u8) & 0x01) << 4);
self.w
}
}
#[doc = "RESUME Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RESUMEEN_A {
#[doc = "0: Disables the RESUME interrupt."]
RESUMEEN_0,
#[doc = "1: Enables the RESUME interrupt."]
RESUMEEN_1,
}
impl From<RESUMEEN_A> for bool {
#[inline(always)]
fn from(variant: RESUMEEN_A) -> Self {
match variant {
RESUMEEN_A::RESUMEEN_0 => false,
RESUMEEN_A::RESUMEEN_1 => true,
}
}
}
#[doc = "Reader of field `RESUMEEN`"]
pub type RESUMEEN_R = crate::R<bool, RESUMEEN_A>;
impl RESUMEEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> RESUMEEN_A {
match self.bits {
false => RESUMEEN_A::RESUMEEN_0,
true => RESUMEEN_A::RESUMEEN_1,
}
}
#[doc = "Checks if the value of the field is `RESUMEEN_0`"]
#[inline(always)]
pub fn is_resumeen_0(&self) -> bool {
*self == RESUMEEN_A::RESUMEEN_0
}
#[doc = "Checks if the value of the field is `RESUMEEN_1`"]
#[inline(always)]
pub fn is_resumeen_1(&self) -> bool {
*self == RESUMEEN_A::RESUMEEN_1
}
}
#[doc = "Write proxy for field `RESUMEEN`"]
pub struct RESUMEEN_W<'a> {
w: &'a mut W,
}
impl<'a> RESUMEEN_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: RESUMEEN_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "Disables the RESUME interrupt."]
#[inline(always)]
pub fn resumeen_0(self) -> &'a mut W {
self.variant(RESUMEEN_A::RESUMEEN_0)
}
#[doc = "Enables the RESUME interrupt."]
#[inline(always)]
pub fn resumeen_1(self) -> &'a mut W {
self.variant(RESUMEEN_A::RESUMEEN_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u8) & 0x01) << 5);
self.w
}
}
#[doc = "STALL Interrupt Enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum STALLEN_A {
#[doc = "0: Diasbles the STALL interrupt."]
STALLEN_0,
#[doc = "1: Enables the STALL interrupt."]
STALLEN_1,
}
impl From<STALLEN_A> for bool {
#[inline(always)]
fn from(variant: STALLEN_A) -> Self {
match variant {
STALLEN_A::STALLEN_0 => false,
STALLEN_A::STALLEN_1 => true,
}
}
}
#[doc = "Reader of field `STALLEN`"]
pub type STALLEN_R = crate::R<bool, STALLEN_A>;
impl STALLEN_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> STALLEN_A {
match self.bits {
false => STALLEN_A::STALLEN_0,
true => STALLEN_A::STALLEN_1,
}
}
#[doc = "Checks if the value of the field is `STALLEN_0`"]
#[inline(always)]
pub fn is_stallen_0(&self) -> bool {
*self == STALLEN_A::STALLEN_0
}
#[doc = "Checks if the value of the field is `STALLEN_1`"]
#[inline(always)]
pub fn is_stallen_1(&self) -> bool {
*self == STALLEN_A::STALLEN_1
}
}
#[doc = "Write proxy for field `STALLEN`"]
pub struct STALLEN_W<'a> {
w: &'a mut W,
}
impl<'a> STALLEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: STALLEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Disables the STALL interrupt."]
    #[inline(always)]
    pub fn stallen_0(self) -> &'a mut W {
        self.variant(STALLEN_A::STALLEN_0)
    }
    #[doc = "Enables the STALL interrupt."]
    #[inline(always)]
    pub fn stallen_1(self) -> &'a mut W {
        self.variant(STALLEN_A::STALLEN_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u8) & 0x01) << 7);
        self.w
    }
}
impl R {
    // Each accessor extracts one interrupt-enable bit by masking it in place
    // rather than shifting it down first; the result is identical.
    #[doc = "Bit 0 - USBRST Interrupt Enable"]
    #[inline(always)]
    pub fn usbrsten(&self) -> USBRSTEN_R {
        USBRSTEN_R::new((self.bits & (0x01 << 0)) != 0)
    }
    #[doc = "Bit 1 - ERROR Interrupt Enable"]
    #[inline(always)]
    pub fn erroren(&self) -> ERROREN_R {
        ERROREN_R::new((self.bits & (0x01 << 1)) != 0)
    }
    #[doc = "Bit 2 - SOFTOK Interrupt Enable"]
    #[inline(always)]
    pub fn softoken(&self) -> SOFTOKEN_R {
        SOFTOKEN_R::new((self.bits & (0x01 << 2)) != 0)
    }
    #[doc = "Bit 3 - TOKDNE Interrupt Enable"]
    #[inline(always)]
    pub fn tokdneen(&self) -> TOKDNEEN_R {
        TOKDNEEN_R::new((self.bits & (0x01 << 3)) != 0)
    }
    #[doc = "Bit 4 - SLEEP Interrupt Enable"]
    #[inline(always)]
    pub fn sleepen(&self) -> SLEEPEN_R {
        SLEEPEN_R::new((self.bits & (0x01 << 4)) != 0)
    }
    #[doc = "Bit 5 - RESUME Interrupt Enable"]
    #[inline(always)]
    pub fn resumeen(&self) -> RESUMEEN_R {
        RESUMEEN_R::new((self.bits & (0x01 << 5)) != 0)
    }
    #[doc = "Bit 7 - STALL Interrupt Enable"]
    #[inline(always)]
    pub fn stallen(&self) -> STALLEN_R {
        STALLEN_R::new((self.bits & (0x01 << 7)) != 0)
    }
}
impl W {
    // Each method hands out a single-bit write proxy borrowing this writer;
    // the proxy's `bit`/`set_bit`/`clear_bit` methods mutate `self.bits` in place.
    #[doc = "Bit 0 - USBRST Interrupt Enable"]
    #[inline(always)]
    pub fn usbrsten(&mut self) -> USBRSTEN_W {
        USBRSTEN_W { w: self }
    }
    #[doc = "Bit 1 - ERROR Interrupt Enable"]
    #[inline(always)]
    pub fn erroren(&mut self) -> ERROREN_W {
        ERROREN_W { w: self }
    }
    #[doc = "Bit 2 - SOFTOK Interrupt Enable"]
    #[inline(always)]
    pub fn softoken(&mut self) -> SOFTOKEN_W {
        SOFTOKEN_W { w: self }
    }
    #[doc = "Bit 3 - TOKDNE Interrupt Enable"]
    #[inline(always)]
    pub fn tokdneen(&mut self) -> TOKDNEEN_W {
        TOKDNEEN_W { w: self }
    }
    #[doc = "Bit 4 - SLEEP Interrupt Enable"]
    #[inline(always)]
    pub fn sleepen(&mut self) -> SLEEPEN_W {
        SLEEPEN_W { w: self }
    }
    #[doc = "Bit 5 - RESUME Interrupt Enable"]
    #[inline(always)]
    pub fn resumeen(&mut self) -> RESUMEEN_W {
        RESUMEEN_W { w: self }
    }
    #[doc = "Bit 7 - STALL Interrupt Enable"]
    #[inline(always)]
    pub fn stallen(&mut self) -> STALLEN_W {
        STALLEN_W { w: self }
    }
}
| 27.031596 | 79 | 0.60692 |
f5492a784ecb92b187235607bb4b1f537b64b0c4
| 16,315 |
#[doc = r"Register block"]
#[repr(C)]
pub struct RegisterBlock {
    #[doc = "0x00 - Control"]
    pub ctrl: CTRL,
    #[doc = "0x01 - Status A"]
    pub statusa: STATUSA,
    #[doc = "0x02 - Status B"]
    pub statusb: STATUSB,
    // Padding byte at 0x03 so ADDR lands on its 0x04 offset.
    _reserved3: [u8; 1usize],
    #[doc = "0x04 - Address"]
    pub addr: ADDR,
    #[doc = "0x08 - Length"]
    pub length: LENGTH,
    #[doc = "0x0c - Data"]
    pub data: DATA,
    #[doc = "0x10 - Debug Communication Channel n"]
    pub dcc: [DCC; 2],
    #[doc = "0x18 - Device Identification"]
    pub did: DID,
    #[doc = "0x1c - Configuration"]
    pub cfg: CFG,
    // Reserved gap 0x20..0xf0 (208 bytes) before DCFG.
    _reserved9: [u8; 208usize],
    #[doc = "0xf0 - Device Configuration"]
    pub dcfg: [DCFG; 2],
    // Reserved gap 0xf8..0x1000 (3848 bytes) before the CoreSight ROM table.
    _reserved10: [u8; 3848usize],
    #[doc = "0x1000 - CoreSight ROM Table Entry 0"]
    pub entry0: ENTRY0,
    #[doc = "0x1004 - CoreSight ROM Table Entry 1"]
    pub entry1: ENTRY1,
    #[doc = "0x1008 - CoreSight ROM Table End"]
    pub end: END,
    // Reserved gap 0x100c..0x1fcc (4032 bytes) before MEMTYPE.
    _reserved13: [u8; 4032usize],
    #[doc = "0x1fcc - CoreSight ROM Table Memory Type"]
    pub memtype: MEMTYPE,
    #[doc = "0x1fd0 - Peripheral Identification 4"]
    pub pid4: PID4,
    #[doc = "0x1fd4 - Peripheral Identification 5"]
    pub pid5: PID5,
    #[doc = "0x1fd8 - Peripheral Identification 6"]
    pub pid6: PID6,
    #[doc = "0x1fdc - Peripheral Identification 7"]
    pub pid7: PID7,
    #[doc = "0x1fe0 - Peripheral Identification 0"]
    pub pid0: PID0,
    #[doc = "0x1fe4 - Peripheral Identification 1"]
    pub pid1: PID1,
    #[doc = "0x1fe8 - Peripheral Identification 2"]
    pub pid2: PID2,
    #[doc = "0x1fec - Peripheral Identification 3"]
    pub pid3: PID3,
    #[doc = "0x1ff0 - Component Identification 0"]
    pub cid0: CID0,
    #[doc = "0x1ff4 - Component Identification 1"]
    pub cid1: CID1,
    #[doc = "0x1ff8 - Component Identification 2"]
    pub cid2: CID2,
    #[doc = "0x1ffc - Component Identification 3"]
    pub cid3: CID3,
}
// Each register below follows the standard svd2rust layout: a `Reg` type
// alias, a hidden zero-sized tag struct, `Readable`/`Writable` marker impls
// selecting the permitted access methods, and a module holding the
// field-level reader/writer proxies.
#[doc = "Control\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [ctrl](ctrl) module"]
pub type CTRL = crate::Reg<u8, _CTRL>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CTRL;
// CTRL is write-only: only the `Writable` marker is implemented.
#[doc = "`write(|w| ..)` method takes [ctrl::W](ctrl::W) writer structure"]
impl crate::Writable for CTRL {}
#[doc = "Control"]
pub mod ctrl;
#[doc = "Status A\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [statusa](statusa) module"]
pub type STATUSA = crate::Reg<u8, _STATUSA>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _STATUSA;
#[doc = "`read()` method returns [statusa::R](statusa::R) reader structure"]
impl crate::Readable for STATUSA {}
#[doc = "`write(|w| ..)` method takes [statusa::W](statusa::W) writer structure"]
impl crate::Writable for STATUSA {}
#[doc = "Status A"]
pub mod statusa;
// STATUSB is read-only: no `Writable` impl.
#[doc = "Status B\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [statusb](statusb) module"]
pub type STATUSB = crate::Reg<u8, _STATUSB>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _STATUSB;
#[doc = "`read()` method returns [statusb::R](statusb::R) reader structure"]
impl crate::Readable for STATUSB {}
#[doc = "Status B"]
pub mod statusb;
#[doc = "Address\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [addr](addr) module"]
pub type ADDR = crate::Reg<u32, _ADDR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ADDR;
#[doc = "`read()` method returns [addr::R](addr::R) reader structure"]
impl crate::Readable for ADDR {}
#[doc = "`write(|w| ..)` method takes [addr::W](addr::W) writer structure"]
impl crate::Writable for ADDR {}
#[doc = "Address"]
pub mod addr;
#[doc = "Length\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [length](length) module"]
pub type LENGTH = crate::Reg<u32, _LENGTH>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _LENGTH;
#[doc = "`read()` method returns [length::R](length::R) reader structure"]
impl crate::Readable for LENGTH {}
#[doc = "`write(|w| ..)` method takes [length::W](length::W) writer structure"]
impl crate::Writable for LENGTH {}
#[doc = "Length"]
pub mod length;
#[doc = "Data\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [data](data) module"]
pub type DATA = crate::Reg<u32, _DATA>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DATA;
#[doc = "`read()` method returns [data::R](data::R) reader structure"]
impl crate::Readable for DATA {}
#[doc = "`write(|w| ..)` method takes [data::W](data::W) writer structure"]
impl crate::Writable for DATA {}
#[doc = "Data"]
pub mod data;
#[doc = "Debug Communication Channel n\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dcc](dcc) module"]
pub type DCC = crate::Reg<u32, _DCC>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DCC;
#[doc = "`read()` method returns [dcc::R](dcc::R) reader structure"]
impl crate::Readable for DCC {}
#[doc = "`write(|w| ..)` method takes [dcc::W](dcc::W) writer structure"]
impl crate::Writable for DCC {}
#[doc = "Debug Communication Channel n"]
pub mod dcc;
// DID is read-only identification data.
#[doc = "Device Identification\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [did](did) module"]
pub type DID = crate::Reg<u32, _DID>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DID;
#[doc = "`read()` method returns [did::R](did::R) reader structure"]
impl crate::Readable for DID {}
#[doc = "Device Identification"]
pub mod did;
#[doc = "Configuration\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api)."]
pub type CFG = crate::Reg<u32, _CFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CFG;
#[doc = "`read()` method returns [cfg::R](cfg::R) reader structure"]
impl crate::Readable for CFG {}
#[doc = "`write(|w| ..)` method takes [cfg::W](cfg::W) writer structure"]
impl crate::Writable for CFG {}
#[doc = "Configuration"]
pub mod cfg;
#[doc = "Device Configuration\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dcfg](dcfg) module"]
pub type DCFG = crate::Reg<u32, _DCFG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _DCFG;
#[doc = "`read()` method returns [dcfg::R](dcfg::R) reader structure"]
impl crate::Readable for DCFG {}
#[doc = "`write(|w| ..)` method takes [dcfg::W](dcfg::W) writer structure"]
impl crate::Writable for DCFG {}
#[doc = "Device Configuration"]
pub mod dcfg;
// CoreSight ROM table and identification registers. All registers below are
// read-only: each gets only the `Readable` marker impl.
#[doc = "CoreSight ROM Table Entry 0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [entry0](entry0) module"]
pub type ENTRY0 = crate::Reg<u32, _ENTRY0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ENTRY0;
#[doc = "`read()` method returns [entry0::R](entry0::R) reader structure"]
impl crate::Readable for ENTRY0 {}
#[doc = "CoreSight ROM Table Entry 0"]
pub mod entry0;
#[doc = "CoreSight ROM Table Entry 1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [entry1](entry1) module"]
pub type ENTRY1 = crate::Reg<u32, _ENTRY1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _ENTRY1;
#[doc = "`read()` method returns [entry1::R](entry1::R) reader structure"]
impl crate::Readable for ENTRY1 {}
#[doc = "CoreSight ROM Table Entry 1"]
pub mod entry1;
#[doc = "CoreSight ROM Table End\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [end](end) module"]
pub type END = crate::Reg<u32, _END>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _END;
#[doc = "`read()` method returns [end::R](end::R) reader structure"]
impl crate::Readable for END {}
#[doc = "CoreSight ROM Table End"]
pub mod end;
#[doc = "CoreSight ROM Table Memory Type\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [memtype](memtype) module"]
pub type MEMTYPE = crate::Reg<u32, _MEMTYPE>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _MEMTYPE;
#[doc = "`read()` method returns [memtype::R](memtype::R) reader structure"]
impl crate::Readable for MEMTYPE {}
#[doc = "CoreSight ROM Table Memory Type"]
pub mod memtype;
#[doc = "Peripheral Identification 4\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid4](pid4) module"]
pub type PID4 = crate::Reg<u32, _PID4>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID4;
#[doc = "`read()` method returns [pid4::R](pid4::R) reader structure"]
impl crate::Readable for PID4 {}
#[doc = "Peripheral Identification 4"]
pub mod pid4;
#[doc = "Peripheral Identification 5\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid5](pid5) module"]
pub type PID5 = crate::Reg<u32, _PID5>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID5;
#[doc = "`read()` method returns [pid5::R](pid5::R) reader structure"]
impl crate::Readable for PID5 {}
#[doc = "Peripheral Identification 5"]
pub mod pid5;
#[doc = "Peripheral Identification 6\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid6](pid6) module"]
pub type PID6 = crate::Reg<u32, _PID6>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID6;
#[doc = "`read()` method returns [pid6::R](pid6::R) reader structure"]
impl crate::Readable for PID6 {}
#[doc = "Peripheral Identification 6"]
pub mod pid6;
#[doc = "Peripheral Identification 7\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid7](pid7) module"]
pub type PID7 = crate::Reg<u32, _PID7>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID7;
#[doc = "`read()` method returns [pid7::R](pid7::R) reader structure"]
impl crate::Readable for PID7 {}
#[doc = "Peripheral Identification 7"]
pub mod pid7;
#[doc = "Peripheral Identification 0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid0](pid0) module"]
pub type PID0 = crate::Reg<u32, _PID0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID0;
#[doc = "`read()` method returns [pid0::R](pid0::R) reader structure"]
impl crate::Readable for PID0 {}
#[doc = "Peripheral Identification 0"]
pub mod pid0;
#[doc = "Peripheral Identification 1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid1](pid1) module"]
pub type PID1 = crate::Reg<u32, _PID1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID1;
#[doc = "`read()` method returns [pid1::R](pid1::R) reader structure"]
impl crate::Readable for PID1 {}
#[doc = "Peripheral Identification 1"]
pub mod pid1;
#[doc = "Peripheral Identification 2\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid2](pid2) module"]
pub type PID2 = crate::Reg<u32, _PID2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID2;
#[doc = "`read()` method returns [pid2::R](pid2::R) reader structure"]
impl crate::Readable for PID2 {}
#[doc = "Peripheral Identification 2"]
pub mod pid2;
#[doc = "Peripheral Identification 3\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pid3](pid3) module"]
pub type PID3 = crate::Reg<u32, _PID3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _PID3;
#[doc = "`read()` method returns [pid3::R](pid3::R) reader structure"]
impl crate::Readable for PID3 {}
#[doc = "Peripheral Identification 3"]
pub mod pid3;
#[doc = "Component Identification 0\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cid0](cid0) module"]
pub type CID0 = crate::Reg<u32, _CID0>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CID0;
#[doc = "`read()` method returns [cid0::R](cid0::R) reader structure"]
impl crate::Readable for CID0 {}
#[doc = "Component Identification 0"]
pub mod cid0;
#[doc = "Component Identification 1\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cid1](cid1) module"]
pub type CID1 = crate::Reg<u32, _CID1>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CID1;
#[doc = "`read()` method returns [cid1::R](cid1::R) reader structure"]
impl crate::Readable for CID1 {}
#[doc = "Component Identification 1"]
pub mod cid1;
#[doc = "Component Identification 2\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cid2](cid2) module"]
pub type CID2 = crate::Reg<u32, _CID2>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CID2;
#[doc = "`read()` method returns [cid2::R](cid2::R) reader structure"]
impl crate::Readable for CID2 {}
#[doc = "Component Identification 2"]
pub mod cid2;
#[doc = "Component Identification 3\n\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [cid3](cid3) module"]
pub type CID3 = crate::Reg<u32, _CID3>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CID3;
#[doc = "`read()` method returns [cid3::R](cid3::R) reader structure"]
impl crate::Readable for CID3 {}
#[doc = "Component Identification 3"]
pub mod cid3;
pub mod cid3;
| 52.799353 | 406 | 0.686117 |
01a7cc0ae708e9e5e14f1635c62675d23660ebcc
| 1,205 |
//! IMPLEMENTATION DETAILS USED BY MACROS
use core::fmt::{self, Write};
use riscv::interrupt;
use crate::hio::{self, HostStream};
/// Host stdout handle, opened on first use and cached for the program lifetime.
static mut HSTDOUT: Option<HostStream> = None;
/// Writes a string slice to the host's stdout, opening the stream lazily.
/// Runs inside a critical section; any failure is deliberately discarded.
pub fn hstdout_str(s: &str) {
    let _result = interrupt::free(|_| unsafe {
        if HSTDOUT.is_none() {
            HSTDOUT = Some(hio::hstdout()?);
        }
        match HSTDOUT.as_mut() {
            Some(stream) => stream.write_str(s).map_err(drop),
            // Unreachable: the stream was initialised just above.
            None => Err(()),
        }
    });
}
/// Writes pre-formatted arguments to the host's stdout, opening the stream
/// lazily. Runs inside a critical section; any failure is discarded.
pub fn hstdout_fmt(args: fmt::Arguments) {
    let _result = interrupt::free(|_| unsafe {
        if HSTDOUT.is_none() {
            HSTDOUT = Some(hio::hstdout()?);
        }
        match HSTDOUT.as_mut() {
            Some(stream) => stream.write_fmt(args).map_err(drop),
            // Unreachable: the stream was initialised just above.
            None => Err(()),
        }
    });
}
/// Host stderr handle, opened on first use and cached for the program lifetime.
static mut HSTDERR: Option<HostStream> = None;
/// Writes a string slice to the host's stderr, opening the stream lazily.
/// Runs inside a critical section; any failure is deliberately discarded.
pub fn hstderr_str(s: &str) {
    let _result = interrupt::free(|_| unsafe {
        if HSTDERR.is_none() {
            HSTDERR = Some(hio::hstderr()?);
        }
        match HSTDERR.as_mut() {
            Some(stream) => stream.write_str(s).map_err(drop),
            // Unreachable: the stream was initialised just above.
            None => Err(()),
        }
    });
}
/// Writes pre-formatted arguments to the host's stderr, opening the stream
/// lazily. Runs inside a critical section; any failure is discarded.
pub fn hstderr_fmt(args: fmt::Arguments) {
    let _result = interrupt::free(|_| unsafe {
        if HSTDERR.is_none() {
            HSTDERR = Some(hio::hstderr()?);
        }
        match HSTDERR.as_mut() {
            Some(stream) => stream.write_fmt(args).map_err(drop),
            // Unreachable: the stream was initialised just above.
            None => Err(()),
        }
    });
}
| 23.173077 | 63 | 0.584232 |
bb0f88e385604748f7eebb810258bc0db0eb09ac
| 2,308 |
// This file is part of caniuse-serde. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT. No part of predicator, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2017 The developers of caniuse-serde. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/caniuse-serde/master/COPYRIGHT.
/// Details about a particular version, of which only the release_date is particularly useful.
/// The era is a relative value which can change with releases of the caniuse.com database, and the global_usage can differ to that available in `RegionalUsage::WorldWide`.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct VersionDetail
{
	/// Global usage percentage as recorded in the caniuse.com database.
	global_usage: UsagePercentage,
	/// Release timestamp; `None` ordinarily means the version is unreleased.
	release_date: Option<DateTime<Utc>>,
	/// Relative era index: negative for past versions, zero for the current one.
	era: i64,
	/// Vendor prefix override; per the accessor docs, only specified for Opera.
	prefix_override: Option<Prefix>,
}
impl VersionDetail
{
	/// A global usage of this version; one of three measurements included in the caniuse.com database.
	/// It is recommended to use that in the `RegionalUsage::WorldWide` database instead as RegionalUsage data has greater consistency.
	#[inline(always)]
	pub fn global_usage(&self) -> UsagePercentage
	{
		// Returned by value: the field is `Copy`, so no clone is needed.
		self.global_usage
	}
	
	/// A timestamp of when this particular version was released.
	/// It is likely that the hours, minutes and seconds represent false precision.
	/// If the release_date is None, then ordinarily this version has not yet been released and `self.era()` should be greater than zero (0).
	#[inline(always)]
	pub fn release_date(&self) -> Option<DateTime<Utc>>
	{
		self.release_date
	}
	
	/// Eras are the caniuse.com database's attempt to align different browsers by version.
	/// Negative values are for not current versions.
	/// Zero is for the current version.
	/// The era is a relative value which can change with releases of the caniuse.com database.
	#[inline(always)]
	pub fn era(&self) -> i64
	{
		self.era
	}
	
	/// Override of prefix; only specified for Opera
	#[inline(always)]
	pub fn prefix_override(&self) -> Option<&Prefix>
	{
		// Borrow the inner `Prefix` rather than handing out ownership.
		self.prefix_override.as_ref()
	}
}
| 44.384615 | 391 | 0.758232 |
641516a2769eff155c693baa456d51812dc300bb
| 4,612 |
use crate::mdc_sys::MDCDialog;
use wasm_bindgen::{prelude::*, JsCast};
use web_sys::Element;
use yew::prelude::*;
pub mod actions;
pub use actions::Actions;
pub mod content;
pub use content::Content;
pub struct Dialog {
    /// Handle to the rendered root `<div class="mdc-dialog">` element.
    node_ref: NodeRef,
    /// The attached MDC dialog instance; `None` until the first render.
    inner: Option<MDCDialog>,
    /// JS closure registered for the `MDCDialog:closed` event.
    close_callback: Closure<dyn FnMut(web_sys::Event)>,
    /// Current component properties.
    props: Props,
    /// Link back to this component for creating callbacks.
    link: ComponentLink<Self>,
}
#[derive(Properties, Clone, PartialEq)]
pub struct Props {
    /// Content rendered inside the dialog surface, under the title.
    pub children: Children,
    /// HTML `id` attribute placed on the dialog root element.
    #[prop_or_default]
    pub id: String,
    /// Emitted when the dialog closes; carries the closing action, if any.
    #[prop_or_else(Callback::noop)]
    pub onclosed: Callback<Option<String>>,
    /// MDC escape-key action; applied only when `Some`, and only on first render.
    #[prop_or_default]
    pub escape_key_action: Option<String>,
    /// MDC scrim-click action; applied only when `Some`, and only on first render.
    #[prop_or_default]
    pub scrim_click_action: Option<String>,
    /// Whether MDC should automatically stack the action buttons.
    #[prop_or_default]
    pub auto_stack_buttons: bool,
    /// Text shown in the dialog's title bar.
    #[prop_or_default]
    pub title: String,
    /// Controls whether the dialog is shown; toggling this opens/closes it.
    #[prop_or_default]
    pub open: bool,
    /// Emitted for key presses inside the dialog root.
    #[prop_or_default]
    pub onkeydown: Callback<KeyboardEvent>,
}
pub enum Msg {
    /// The MDC dialog reported it closed, optionally with an action string.
    Closed { action: Option<String> },
    /// A key was pressed inside the dialog root element.
    KeyDown(KeyboardEvent),
}
impl Component for Dialog {
    type Message = Msg;
    type Properties = Props;
    fn create(props: Self::Properties, link: ComponentLink<Self>) -> Self {
        // Bridge the JS "MDCDialog:closed" event into a Yew message: the
        // closure stops propagation, pulls `detail.action` (a string, if
        // present) out of the CustomEvent's JSON payload, and forwards it.
        let callback = link.callback(|action: Option<String>| Msg::Closed { action });
        let closure = Closure::wrap(Box::new(move |e: web_sys::Event| {
            use std::borrow::ToOwned;
            e.stop_propagation();
            let action = e.dyn_ref::<web_sys::CustomEvent>().and_then(|e| {
                e.detail()
                    .into_serde::<serde_json::Value>()
                    .ok()
                    .and_then(|v| {
                        v.get("action")
                            .and_then(|v| v.as_str())
                            .map(ToOwned::to_owned)
                    })
            });
            callback.emit(action);
        }) as Box<dyn FnMut(web_sys::Event)>);
        Self {
            node_ref: NodeRef::default(),
            inner: None,
            close_callback: closure,
            props,
            link,
        }
    }
    fn rendered(&mut self, first_render: bool) {
        // Attach the MDC dialog behaviour once the root element exists in the DOM.
        if first_render {
            // Defensive teardown; on the first render `inner` is still `None`,
            // so this branch is effectively dead code.
            if let Some(old_inner) = self.inner.take() {
                old_inner.unlisten("MDCDialog:closed", &self.close_callback);
                old_inner.destroy();
            }
            if let Some(elem) = self.node_ref.cast::<Element>() {
                let dialog = MDCDialog::new(elem);
                // NOTE(review): these options are applied only here; changes to
                // escape_key_action/scrim_click_action/auto_stack_buttons after
                // the first render are never re-applied — confirm intended.
                if let Some(action) = &self.props.escape_key_action {
                    dialog.set_escape_key_action(action);
                }
                if let Some(action) = &self.props.scrim_click_action {
                    dialog.set_scrim_click_action(action);
                }
                dialog.set_auto_stack_buttons(self.props.auto_stack_buttons);
                dialog.listen("MDCDialog:closed", &self.close_callback);
                if self.props.open {
                    dialog.open();
                }
                self.inner = Some(dialog);
            }
        }
    }
    fn change(&mut self, props: Self::Properties) -> ShouldRender {
        // Drive the MDC instance open/closed when the `open` prop flips.
        if props.open != self.props.open {
            if let Some(inner) = &self.inner {
                if props.open {
                    inner.open();
                } else {
                    inner.close(None);
                }
            }
        }
        // Re-render only when the props actually changed.
        if self.props != props {
            self.props = props;
            true
        } else {
            false
        }
    }
    fn update(&mut self, msg: Self::Message) -> ShouldRender {
        // Forward dialog events to the owner's callbacks; never re-render locally.
        match msg {
            Msg::Closed { action } => {
                self.props.onclosed.emit(action);
            }
            Msg::KeyDown(ev) => self.props.onkeydown.emit(ev),
        }
        false
    }
    fn view(&self) -> Html {
        html! {
            <div
                class="mdc-dialog"
                id=&self.props.id
                ref=self.node_ref.clone()
                onkeydown=self.link.callback(Msg::KeyDown)>
                <div class="mdc-dialog__container">
                    <div class="mdc-dialog__surface">
                        <h2 class="mdc-dialog__title">{ &self.props.title }</h2>
                        { self.props.children.clone() }
                    </div>
                </div>
                <div class="mdc-dialog__scrim"></div>
            </div>
        }
    }
    fn destroy(&mut self) {
        // Detach the JS listener and tear down the MDC instance on unmount.
        if let Some(inner) = &self.inner {
            inner.unlisten("MDCDialog:closed", &self.close_callback);
            inner.destroy();
        }
    }
}
| 30.342105 | 86 | 0.508239 |
f84f13c0a91835ad6cc4e7a9a914562d5b3b1d8a
| 12,414 |
use liturgy::{Psalm, PsalmSection, PsalmVerse, Reference, Source};
lazy_static! {
    // Psalm 105 as two liturgical sections ("Part I" and "Part II"), each
    // carrying its BCP 1979 page reference, Latin incipit, and verses split
    // into half-verse pairs (`a` ends at the asterisk, `b` completes it).
    pub static ref PSALM_105: Psalm = Psalm {
        number: 105,
        citation: None,
        sections: vec![
            // Part I: verses 1-22 (BCP p. 738).
            PsalmSection {
                reference: Reference {
                    source: Source::BCP1979,
                    page: 738
                },
                local_name: String::from("Psalm 105: Part I"),
                latin_name: String::from("Confitemini Domino"),
                verses: vec![
                    PsalmVerse {
                        number: 1,
                        a: String::from("Give thanks to the LORD and call upon his Name; *"),
                        b: String::from("make known his deeds among the peoples.")
                    },
                    PsalmVerse {
                        number: 2,
                        a: String::from("Sing to him, sing praises to him, *"),
                        b: String::from("and speak of all his marvelous works.")
                    },
                    PsalmVerse {
                        number: 3,
                        a: String::from("Glory in his holy Name; *"),
                        b: String::from("let the hearts of those who seek the LORD rejoice.")
                    },
                    PsalmVerse {
                        number: 4,
                        a: String::from("Search for the LORD and his strength; *"),
                        b: String::from("continually seek his face.")
                    },
                    PsalmVerse {
                        number: 5,
                        a: String::from("Remember the marvels he has done, *"),
                        b: String::from("his wonders and the judgments of his mouth,")
                    },
                    PsalmVerse {
                        number: 6,
                        a: String::from("O offspring of Abraham his servant, *"),
                        b: String::from("O children of Jacob his chosen.")
                    },
                    PsalmVerse {
                        number: 7,
                        a: String::from("He is the LORD our God; *"),
                        b: String::from("his judgments prevail in all the world.")
                    },
                    PsalmVerse {
                        number: 8,
                        a: String::from("He has always been mindful of his covenant, *"),
                        b: String::from("the promise he made for a thousand generations:")
                    },
                    PsalmVerse {
                        number: 9,
                        a: String::from("The covenant he made with Abraham, *"),
                        b: String::from("the oath that he swore to Isaac,")
                    },
                    PsalmVerse {
                        number: 10,
                        a: String::from("Which he established as a statute for Jacob, *"),
                        b: String::from("an everlasting covenant for Israel,")
                    },
                    PsalmVerse {
                        number: 11,
                        a: String::from("Saying, “To you will I give the land of Canaan *"),
                        b: String::from("to be your allotted inheritance.”")
                    },
                    PsalmVerse {
                        number: 12,
                        a: String::from("When they were few in number, *"),
                        b: String::from("of little account, and sojourners in the land,")
                    },
                    PsalmVerse {
                        number: 13,
                        a: String::from("Wandering from nation to nation *"),
                        b: String::from("and from one kingdom to another,")
                    },
                    PsalmVerse {
                        number: 14,
                        a: String::from("He let no one oppress them *"),
                        b: String::from("and rebuked kings for their sake,")
                    },
                    PsalmVerse {
                        number: 15,
                        a: String::from("Saying, “Do not touch my anointed *"),
                        b: String::from("and do my prophets no harm.”")
                    },
                    PsalmVerse {
                        number: 16,
                        a: String::from("Then he called for a famine in the land *"),
                        b: String::from("and destroyed the supply of bread.")
                    },
                    PsalmVerse {
                        number: 17,
                        a: String::from("He sent a man before them, *"),
                        b: String::from("Joseph, who was sold as a slave.")
                    },
                    PsalmVerse {
                        number: 18,
                        a: String::from("They bruised his feet in fetters; *"),
                        b: String::from("his neck they put in an iron collar.")
                    },
                    PsalmVerse {
                        number: 19,
                        a: String::from("Until his prediction came to pass, *"),
                        b: String::from("the word of the LORD tested him.")
                    },
                    PsalmVerse {
                        number: 20,
                        a: String::from("The king sent and released him; *"),
                        b: String::from("the ruler of the peoples set him free.")
                    },
                    PsalmVerse {
                        number: 21,
                        a: String::from("He set him as a master over his household, *"),
                        b: String::from("as a ruler over all his possessions,")
                    },
                    PsalmVerse {
                        number: 22,
                        a: String::from("To instruct his princes according to his will *"),
                        b: String::from("and to teach his elders wisdom.")
                    },
                ]
            },
            // Part II: verses 23-45 (BCP p. 739).
            PsalmSection {
                reference: Reference {
                    source: Source::BCP1979,
                    page: 739
                },
                local_name: String::from("Psalm 105: Part II"),
                latin_name: String::from("Et intravit Israel"),
                verses: vec![
                    PsalmVerse {
                        number: 23,
                        a: String::from("Israel came into Egypt, *"),
                        b: String::from("and Jacob became a sojourner in the land of Ham.")
                    },
                    PsalmVerse {
                        number: 24,
                        a: String::from("The LORD made his people exceedingly fruitful; *"),
                        b: String::from("he made them stronger than their enemies;")
                    },
                    PsalmVerse {
                        number: 25,
                        a: String::from("Whose heart he turned, so that they hated his people, *"),
                        b: String::from("and dealt unjustly with his servants.")
                    },
                    PsalmVerse {
                        number: 26,
                        a: String::from("He sent Moses his servant, *"),
                        b: String::from("and Aaron whom he had chosen.")
                    },
                    PsalmVerse {
                        number: 27,
                        a: String::from("They worked his signs among them, *"),
                        b: String::from("and portents in the land of Ham.")
                    },
                    PsalmVerse {
                        number: 28,
                        a: String::from("He sent darkness, and it grew dark; *"),
                        b: String::from("but the Egyptians rebelled against his words.")
                    },
                    PsalmVerse {
                        number: 29,
                        a: String::from("He turned their waters into blood *"),
                        b: String::from("and caused their fish to die.")
                    },
                    PsalmVerse {
                        number: 30,
                        a: String::from("Their land was overrun by frogs, *"),
                        b: String::from("in the very chambers of their kings.")
                    },
                    PsalmVerse {
                        number: 31,
                        a: String::from("He spoke, and there came swarms of insects *"),
                        b: String::from("and gnats within all their borders.")
                    },
                    PsalmVerse {
                        number: 32,
                        a: String::from("He gave them hailstones instead of rain, *"),
                        b: String::from("and flames of fire throughout their land.")
                    },
                    PsalmVerse {
                        number: 33,
                        a: String::from("He blasted their vines and their fig trees *"),
                        b: String::from("and shattered every tree in their country.")
                    },
                    PsalmVerse {
                        number: 34,
                        a: String::from("He spoke, and the locust came, *"),
                        b: String::from("and young locusts without number,")
                    },
                    PsalmVerse {
                        number: 35,
                        a: String::from("Which ate up all the green plants in their land *"),
                        b: String::from("and devoured the fruit of their soil.")
                    },
                    PsalmVerse {
                        number: 36,
                        a: String::from("He struck down the firstborn of their land, *"),
                        b: String::from("the firstfruits of all their strength.")
                    },
                    PsalmVerse {
                        number: 37,
                        a: String::from("He led out his people with silver and gold; *"),
                        b: String::from("in all their tribes there was not one that stumbled.")
                    },
                    PsalmVerse {
                        number: 38,
                        a: String::from("Egypt was glad of their going, *"),
                        b: String::from("because they were afraid of them.")
                    },
                    PsalmVerse {
                        number: 39,
                        a: String::from("He spread out a cloud for a covering *"),
                        b: String::from("and a fire to give light in the night season.")
                    },
                    PsalmVerse {
                        number: 40,
                        a: String::from("They asked, and quails appeared, *"),
                        b: String::from("and he satisfied them with bread from heaven.")
                    },
                    PsalmVerse {
                        number: 41,
                        a: String::from("He opened the rock, and water flowed, *"),
                        b: String::from("so the river ran in the dry places.")
                    },
                    PsalmVerse {
                        number: 42,
                        a: String::from("For God remembered his holy word *"),
                        b: String::from("and Abraham his servant.")
                    },
                    PsalmVerse {
                        number: 43,
                        a: String::from("So he led forth his people with gladness, *"),
                        b: String::from("his chosen with shouts of joy.")
                    },
                    PsalmVerse {
                        number: 44,
                        a: String::from("He gave his people the lands of the nations, *"),
                        b: String::from("and they took the fruit of others’ toil,")
                    },
                    PsalmVerse {
                        number: 45,
                        a: String::from("That they might keep his statutes *"),
                        b: String::from("and observe his laws.\n Hallelujah!")
                    },
                ]
            }
        ]
    };
}
| 48.682353 | 99 | 0.389238 |
f474841a9d31c8f12a57318149dea8cb2a0809e4
| 4,939 |
use rust_hdl::bsp::ok_core::prelude::*;
use rust_hdl::core::prelude::*;
use rust_hdl::widgets::prelude::*;
mod test_common;
use rust_hdl::bsp::ok_xem7010::pins::{xem_7010_leds, xem_7010_neg_clock, xem_7010_pos_clock};
use rust_hdl::bsp::ok_xem7010::sys_clock::OpalKellySystemClock7;
use rust_hdl::bsp::ok_xem7010::XEM7010;
#[cfg(feature = "frontpanel")]
use rust_hdl_ok_frontpanel_sys::{make_u16_buffer, OkError};
#[cfg(feature = "frontpanel")]
use test_common::pipe::*;
#[cfg(feature = "frontpanel")]
use test_common::tools::*;
// 1024-deep async FIFO of 16-bit words with an almost-empty/full margin of 256
// entries, used to cross counter data from the fast clock domain into the
// OpalKelly host-interface clock domain.
declare_async_fifo!(OKTestAFIFO2, Bits<16>, 1024, 256);
// Test fixture for the XEM7010 block-throttled pipe-out (BTPipeOut): a counter
// writes into an async FIFO which the FrontPanel pipe drains on `ti_clk`.
#[derive(LogicBlock)]
pub struct OpalKellyBTPipeOut7Test {
    pub hi: OpalKellyHostInterface,   // FrontPanel host interface pins
    pub ok_host: OpalKellyHost,       // OK host endpoint (provides ti_clk, ok1/ok2 busses)
    pub fifo_out: OKTestAFIFO2,       // clock-domain-crossing FIFO feeding the pipe
    pub o_pipe: BTPipeOut,            // block-throttled pipe-out at address 0xA0
    pub delay_read: DFF<Bit>,         // one-cycle delay on the pipe's read strobe
    pub clock_p: Signal<In, Clock>,   // differential system clock, positive leg
    pub clock_n: Signal<In, Clock>,   // differential system clock, negative leg
    pub fast_clock: Signal<Local, Clock>, // derived system clock for the write side
    pub clock_div: OpalKellySystemClock7, // diff-clock receiver / divider
    pub counter: DFF<Bits<16>>,       // test-pattern generator (incrementing words)
    pub strobe: Strobe<32>,           // paces counter increments / FIFO writes
    pub can_run: Signal<Local, Bit>,  // high while the FIFO has room
    pub led: Signal<Out, Bits<8>>,    // FIFO status flags for visual debugging
}
// Hardware description generated into HDL by `#[hdl_gen]`: a 16-bit counter in
// the fast clock domain writes into the async FIFO whenever there is room, and
// the FrontPanel BTPipeOut drains it in the `ti_clk` domain.
impl Logic for OpalKellyBTPipeOut7Test {
    #[hdl_gen]
    fn update(&mut self) {
        // Link the interfaces
        self.hi.link(&mut self.ok_host.hi);
        // Connect the clock up
        self.clock_div.clock_p.next = self.clock_p.val();
        self.clock_div.clock_n.next = self.clock_n.val();
        self.fast_clock.next = self.clock_div.sys_clock.val();
        // Connect the clocks
        // Read side objects
        self.fifo_out.read_clock.next = self.ok_host.ti_clk.val();
        self.delay_read.clk.next = self.ok_host.ti_clk.val();
        // Write side objects
        self.fifo_out.write_clock.next = self.fast_clock.val();
        self.counter.clk.next = self.fast_clock.val();
        self.strobe.clock.next = self.fast_clock.val();
        // Connect the ok1 and ok2 busses
        self.o_pipe.ok1.next = self.ok_host.ok1.val();
        self.ok_host.ok2.next = self.o_pipe.ok2.val();
        // Only advance the counter / write while the FIFO has room.
        self.can_run.next = !self.fifo_out.full.val();
        // Set up the counter
        self.counter.d.next =
            self.counter.q.val() + (self.strobe.strobe.val() & self.can_run.val());
        // Enable the strobe
        self.strobe.enable.next = self.can_run.val();
        // Connect the counter to the fifo
        self.fifo_out.data_in.next = self.counter.q.val();
        self.fifo_out.write.next = self.strobe.strobe.val() & self.can_run.val();
        // Connect the delay counter for the fifo
        // NOTE(review): the pipe's read strobe is registered for one ti_clk
        // cycle before reaching the FIFO — presumably to match BTPipeOut read
        // timing; confirm against the FrontPanel documentation.
        self.delay_read.d.next = self.o_pipe.read.val();
        self.fifo_out.read.next = self.delay_read.q.val();
        // Connect the pipe to the output of the fifo
        self.o_pipe.datain.next = self.fifo_out.data_out.val();
        // Connect the enable for the pipe to the not-almost-empty for the fifo
        self.o_pipe.ready.next = !self.fifo_out.almost_empty.val();
        // Signal the LEDs
        // Active-low LEDs showing FIFO status bits (empty/full/etc.).
        self.led.next = !(bit_cast::<8, 1>(self.fifo_out.empty.val().into())
            | (bit_cast::<8, 1>(self.fifo_out.full.val().into()) << 1_usize)
            | (bit_cast::<8, 1>(self.fifo_out.almost_empty.val().into()) << 2_usize)
            | (bit_cast::<8, 1>(self.fifo_out.almost_full.val().into()) << 3_usize)
            | (bit_cast::<8, 1>(self.fifo_out.overflow.val().into()) << 4_usize)
            | (bit_cast::<8, 1>(self.fifo_out.underflow.val().into()) << 5_usize));
    }
}
impl OpalKellyBTPipeOut7Test {
    /// Builds the test fixture: XEM7010 host interface and host endpoint, a
    /// BTPipeOut at address 0xA0, and a strobe configured for a 100 MHz
    /// system clock firing at 1 MHz.
    pub fn new() -> Self {
        // Construct the non-default pieces in named locals first; the struct
        // literal below then just moves them into place.
        let hi = OpalKellyHostInterface::xem_7010();
        let ok_host = OpalKellyHost::xem_7010();
        let o_pipe = BTPipeOut::new(0xA0);
        let clock_p = xem_7010_pos_clock();
        let clock_n = xem_7010_neg_clock();
        let strobe = Strobe::new(100_000_000, 1_000_000.0);
        let led = xem_7010_leds();
        Self {
            hi,
            ok_host,
            fifo_out: Default::default(),
            o_pipe,
            delay_read: Default::default(),
            clock_p,
            clock_n,
            fast_clock: Default::default(),
            clock_div: Default::default(),
            counter: Default::default(),
            strobe,
            can_run: Default::default(),
            led,
        }
    }
}
#[cfg(feature = "frontpanel")]
#[test]
fn test_opalkelly_xem_7010_synth_btpipe() {
    // Synthesize the design, then immediately run the hardware-in-the-loop
    // check against the attached XEM7010 board.
    let mut uut = OpalKellyBTPipeOut7Test::new();
    uut.hi.link_connect_dest();
    uut.connect_all();
    XEM7010::synth(uut, target_path!("xem_7010/btpipe"));
    test_opalkelly_xem_7010_btpipe_runtime().unwrap();
}
#[cfg(feature = "frontpanel")]
#[cfg(test)]
/// Hardware-in-the-loop check: loads the synthesized bitfile, reads 128 KiB
/// through BTPipeOut 0xA0 in 512-byte blocks, and verifies the data is the
/// expected incrementing 16-bit counter sequence.
fn test_opalkelly_xem_7010_btpipe_runtime() -> Result<(), OkError> {
    let hnd = ok_test_prelude(target_path!("xem_7010/btpipe/top.bit"))?;
    // Read the data in 256*2 = 512 byte blocks
    let mut data = vec![0_u8; 1024 * 128];
    // Propagate the failure as `OkError` instead of panicking: this function
    // already returns `Result`, matching `ok_test_prelude` above.
    hnd.read_from_block_pipe_out(0xA0, 256, &mut data)?;
    let data_shorts = make_u16_buffer(&data);
    for (ndx, val) in data_shorts.iter().enumerate() {
        // `as u16` truncates to the low 16 bits — exactly the `& 0xFFFF` wrap
        // the 16-bit hardware counter performs.
        assert_eq!(ndx as u16, *val);
    }
    Ok(())
}
| 36.858209 | 93 | 0.633934 |
7937d4aa8286b49eacd76181ba6b770e8c7882c5
| 1,058 |
use alephbft_fuzz::fuzz;
use std::{
io,
io::{BufReader, BufWriter},
};
use structopt::StructOpt;
// CLI options for the fuzz-helper binary (parsed by structopt).
#[derive(Debug, StructOpt)]
#[structopt(
    name = "fuzz-helper",
    about = "data generator for the purpose of fuzzing"
)]
struct Opt {
    /// Verify data provided on stdin by calling member::run on it.
    #[structopt(short, long)]
    check_fuzz: bool,
    /// Generate data for a given number of members.
    /// When used with the 'check_fuzz' flag it verifies data assuming this number of members.
    // Positional argument #1; defaults to 4 members.
    #[structopt(default_value = "4")]
    members: usize,
    /// Generate a given number of batches.
    /// When used with the 'check_fuzz' flag it will verify if we are able to create at least this number of batches.
    // Positional argument #2; defaults to 30 batches.
    #[structopt(default_value = "30")]
    batches: usize,
}
fn main() {
let opt = Opt::from_args();
if opt.check_fuzz {
fuzz::check_fuzz(BufReader::new(io::stdin()), opt.members, Some(opt.batches));
} else {
fuzz::generate_fuzz(BufWriter::new(io::stdout()), opt.members, opt.batches);
}
}
| 28.594595 | 117 | 0.651229 |
4a782cec70f98b76f860bea6921642f922c877c1
| 9,094 |
use self::JournalAmountTest::*;
use accounts::account::Type::*;
use accounts::chart_of_accounts::ChartOfAccounts;
use accounts::entry::Entry;
use accounts::journal_entry::*;
use accounts::money::Money;
use accounts::report::{ReportNode, Total};
use accounts::*;
use anyhow::Result;
use futures::stream::TryStreamExt;
use itertools::Itertools;
use std::collections::HashMap;
use std::convert::TryInto;
/// Test that a dir containing one entry per file parses without error
#[async_std::test]
async fn test_basic_entries() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries_flat"));
    let entries = ledger.entries().try_collect::<Vec<Entry>>().await?;
    dbg!(&entries);
    // The fixture dir is expected to yield exactly two distinct entry ids.
    let count = entries.iter().map(|entry| entry.id()).unique().count();
    assert_eq!(count, 2);
    Ok(())
}
/// Test that a dir containing nested dirs parses without error
#[async_std::test]
async fn test_nested_dirs() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries_nested_dirs"));
    let entries = ledger.entries().try_collect::<Vec<Entry>>().await?;
    dbg!(&entries);
    // Same two entries as the flat layout — nesting must not change parsing.
    let count = entries.iter().map(|entry| entry.id()).unique().count();
    assert_eq!(count, 2);
    Ok(())
}
/// Test that a dir with one file containing multiple entries parses without error
#[async_std::test]
async fn test_multiple_entries_in_one_file() -> Result<()> {
    let ledger = Ledger::new(Some(
        "./tests/fixtures/entries_multiple_entries_in_one_file",
    ));
    let entries = ledger.entries().try_collect::<Vec<Entry>>().await?;
    dbg!(&entries);
    // Both entries in the single file must be recognized as distinct ids.
    let count = entries.iter().map(|entry| entry.id()).unique().count();
    assert_eq!(count, 2);
    Ok(())
}
/// Test that journal entries from entries are correct
#[async_std::test]
async fn test_journal_from_entries() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries"));
    let journal_entries: Vec<JournalEntry> = ledger.journal().try_collect().await?;
    assert_eq!(dbg!(&journal_entries).iter().count(), 16);
    // Each date below shows a balanced debit/credit pair derived from one
    // source entry in the fixtures.
    Expect(&journal_entries)
        .contains("2020-01-01", "Operating Expenses", Debit(100.00))
        .contains("2020-01-01", "Accounts Payable", Credit(100.00))
        .contains("2020-01-02", "Accounts Payable", Debit(100.00))
        .contains("2020-01-02", "Credit Card", Credit(100.00))
        .contains("2020-01-03", "Operating Expenses", Debit(50.00))
        .contains("2020-01-03", "Business Checking", Credit(50.00))
        .contains("2020-01-04", "Operating Expenses", Debit(100.00))
        .contains("2020-01-04", "Accounts Payable", Credit(100.00))
        .contains("2020-01-05", "Accounts Receivable", Debit(10.00))
        .contains("2020-01-05", "Widget Sales", Credit(10.00))
        .contains("2020-01-06", "Business Checking", Debit(10.00))
        .contains("2020-01-06", "Accounts Receivable", Credit(10.00))
        .contains("2020-01-07", "Business Checking", Debit(5.00))
        .contains("2020-01-07", "Widget Sales", Credit(5.00))
        .contains("2020-01-08", "Accounts Receivable", Debit(10.00))
        .contains("2020-01-08", "Widget Sales", Credit(10.00));
    Ok(())
}
/// Test balances from entries
#[async_std::test]
async fn test_balance() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries"));
    let balances = ledger.balances().await?;
    // One balance per account touched by the fixtures.
    assert_eq!(balances.iter().count(), 6);
    // Data-driven form of the same six assertions: (account, expected amount).
    let expected = vec![
        ("Operating Expenses", Debit(250.00)),
        ("Accounts Payable", Credit(100.00)),
        ("Credit Card", Credit(100.00)),
        ("Business Checking", Credit(35.00)),
        ("Widget Sales", Credit(25.00)),
        ("Accounts Receivable", Debit(10.00)),
    ];
    let expect = Expect(&balances);
    for (account, amount) in expected {
        expect.contains(account, amount);
    }
    Ok(())
}
/// Test journal entries from recurring entries
#[async_std::test]
async fn test_recurring() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries_recurring"));
    let journal_entries: Vec<JournalEntry> = ledger.journal().try_collect().await?;
    assert_eq!(dbg!(&journal_entries).iter().count(), 12);
    // The recurring fixture expands into monthly debit/credit pairs; note the
    // amount changes to 150.00 from March onward.
    Expect(&journal_entries)
        .contains("2020-01-01", "Operating Expenses", Debit(100.00))
        .contains("2020-01-01", "Accounts Payable", Credit(100.00))
        .contains("2020-01-02", "Accounts Payable", Debit(100.00))
        .contains("2020-01-02", "Bank Account", Credit(100.00))
        .contains("2020-02-01", "Operating Expenses", Debit(100.00))
        .contains("2020-02-01", "Accounts Payable", Credit(100.00))
        .contains("2020-02-03", "Accounts Payable", Debit(100.00))
        .contains("2020-02-03", "Bank Account", Credit(100.00))
        .contains("2020-03-01", "Operating Expenses", Debit(150.00))
        .contains("2020-03-01", "Accounts Payable", Credit(150.00))
        .contains("2020-03-02", "Accounts Payable", Debit(150.00))
        .contains("2020-03-02", "Bank Account", Credit(150.00));
    Ok(())
}
// Each named account in the YAML chart should map to its declared account type.
#[async_std::test]
async fn test_chart_of_accounts() -> Result<()> {
    let chart_of_accounts =
        ChartOfAccounts::from_file("./tests/fixtures/ChartOfAccounts.yaml").await?;
    dbg!(&chart_of_accounts);
    assert_eq!(
        chart_of_accounts.get("Operating Expenses")?.acc_type,
        Expense
    );
    assert_eq!(chart_of_accounts.get("Credit Card")?.acc_type, Liability);
    assert_eq!(chart_of_accounts.get("Business Checking")?.acc_type, Asset);
    assert_eq!(chart_of_accounts.get("Widget Sales")?.acc_type, Revenue);
    Ok(())
}
// Loads the report definition alone (no ledger data) and checks that the
// flattened item paths reflect the YAML hierarchy.
#[async_std::test]
async fn test_report() -> Result<()> {
    let report = ReportNode::from_file("./tests/fixtures/IncomeStatement.yaml").await?;
    let items = report.items()?;
    dbg!(&report);
    dbg!(&items);
    // `.0` of each item is its full path from the report root.
    assert_eq!(
        items[3].0,
        vec!["Income Statement", "Expenses", "Indirect Expenses", "Rent"]
    );
    assert_eq!(
        items[4].0,
        vec!["Income Statement", "Expenses", "Direct Expenses"]
    );
    assert_eq!(
        items[6].0,
        vec!["Income Statement", "Revenue", "Direct Revenue"]
    );
    assert_eq!(
        items[7].0,
        vec!["Income Statement", "Revenue", "Indirect Revenue"]
    );
    Ok(())
}
// End-to-end: run the ledger through the chart of accounts into the report and
// check the aggregated totals. In each item, `.0` is the node path, `.2 .0`
// the contributing accounts, and `.2 .1` the rolled-up amount.
#[async_std::test]
async fn test_run_report() -> Result<()> {
    let ledger = Ledger::new(Some("./tests/fixtures/entries"));
    let chart_of_accounts =
        ChartOfAccounts::from_file("./tests/fixtures/ChartOfAccounts.yaml").await?;
    let mut report = ReportNode::from_file("./tests/fixtures/IncomeStatement.yaml").await?;
    ledger.run_report(&chart_of_accounts, &mut report).await?;
    let items = report.items()?;
    dbg!(&items);
    println!("{}", report);
    // Root: net of 250.00 debit expenses against 25.00 credit revenue.
    assert_eq!(items[0].0, vec!["Income Statement"],);
    assert_eq!(items[0].2 .0, vec!["Operating Expenses", "Widget Sales"]);
    assert_eq!(items[0].2 .1, JournalAmount::Debit(225.00.try_into()?));
    assert_eq!(
        items[4].0,
        vec!["Income Statement", "Expenses", "Indirect Expenses", "Other"],
    );
    assert_eq!(items[4].2 .0, vec!["Operating Expenses"]);
    assert_eq!(items[4].2 .1, JournalAmount::Debit(250.00.try_into()?));
    assert_eq!(items[6].0, vec!["Income Statement", "Revenue"]);
    assert_eq!(items[6].2 .0, vec!["Widget Sales"]);
    assert_eq!(items[6].2 .1, JournalAmount::Credit(25.00.try_into()?));
    assert_eq!(
        items[7].0,
        vec!["Income Statement", "Revenue", "Direct Revenue"]
    );
    assert_eq!(items[7].2 .0, vec!["Widget Sales"]);
    assert_eq!(items[7].2 .1, JournalAmount::Credit(25.00.try_into()?));
    // A node no account maps to stays empty with a default (zero) amount.
    assert_eq!(
        items[8].0,
        vec!["Income Statement", "Revenue", "Indirect Revenue"]
    );
    assert!(items[8].2 .0.is_empty());
    assert_eq!(items[8].2 .1, JournalAmount::default());
    Ok(())
}
// Test-side debit/credit amounts as plain `f64`s; converted via `try_into`
// into real `JournalAmount`s inside the `Expect::contains` helpers below.
#[derive(Debug)]
enum JournalAmountTest {
    Debit(f64),
    Credit(f64),
}
/// struct for special assertions
// Newtype wrapper over a borrow of the value under test; the `impl Expect`
// blocks below add chainable, fixture-specific `contains` assertions.
struct Expect<'a, T>(&'a T);
impl Expect<'_, Vec<JournalEntry>> {
    /// Asserts that a journal entry with the given date, account, and amount
    /// is present; panics with both the expected entry and the full list on
    /// failure. Returns `&Self` so calls can be chained.
    fn contains(&self, date: &str, account: &str, amount: JournalAmountTest) -> &Self {
        let expected = JournalEntry(
            date.parse().unwrap(),
            account.into(),
            match amount {
                Debit(m) => JournalAmount::Debit(m.try_into().unwrap()),
                Credit(m) => JournalAmount::Credit(m.try_into().unwrap()),
            },
        );
        // `any` replaces the previous `find(..).is_some()` (clippy:
        // `search_is_some`) and drops the `&&` double indirection.
        assert!(
            self.0.iter().any(|actual| *actual == expected),
            "{:?} not found in {:?}",
            expected,
            self.0
        );
        self
    }
}
impl Expect<'_, HashMap<JournalAccount, JournalAmount>> {
    /// Asserts that the balance map contains `account` with exactly `amount`;
    /// panics with the expected pair and the whole map on failure. Returns
    /// `&Self` so calls can be chained.
    fn contains(&self, account: &str, amount: JournalAmountTest) -> &Self {
        let amount = match amount {
            Debit(m) => JournalAmount::Debit(m.try_into().unwrap()),
            Credit(m) => JournalAmount::Credit(m.try_into().unwrap()),
        };
        // `any` replaces the previous `find(..).is_some()` (clippy:
        // `search_is_some`); destructuring the (key, value) pair keeps the
        // comparisons identical to before.
        assert!(
            self.0
                .iter()
                .any(|(name, value)| name == account && value == &amount),
            "({}: {:?}) not found in {:?}",
            account,
            amount,
            self.0
        );
        self
    }
}
| 36.522088 | 91 | 0.612602 |
64c3d77ce0826e0228ecb732e511e9f2923bc9d1
| 276 |
// --extern-location with a raw reference
// check-pass
// aux-crate:bar=bar.rs
// compile-flags:--extern-location bar=raw:in-the-test-file --error-format json -Z unstable-options
#![warn(unused_crate_dependencies)]
//~^ WARNING external crate `bar` unused in
fn main() {}
| 25.090909 | 99 | 0.717391 |
fe95fb78d66d57ba23d639055128830db3f9e14c
| 7,992 |
#![recursion_limit = "192"]
use std::iter;
use std::mem;
use std::str::FromStr;
use proc_macro2::{Ident, Span, TokenStream};
use proc_macro_error::{abort, proc_macro_error};
use quote::{quote, ToTokens};
use syn::{
parse_macro_input, AttrStyle, Attribute, Data, DeriveInput, Expr, Fields, Index, Meta,
NestedMeta, Type, TypeGenerics, TypePath,
};
// Entry point of the `#[derive(H5Type)]` proc macro: dispatches on the input
// shape (see `impl_trait`) and emits an `unsafe impl H5Type` for the type.
#[proc_macro_derive(H5Type)]
#[proc_macro_error]
pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input = parse_macro_input!(input as DeriveInput);
    let name = input.ident;
    let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
    // `body` is the generated `type_descriptor()` function body.
    let body = impl_trait(&name, &input.data, &input.attrs, &ty_generics);
    let dummy = Ident::new(&format!("_IMPL_H5TYPE_FOR_{}", name), Span::call_site());
    // The impl is wrapped in a uniquely named `const` so the local
    // `extern crate hdf5 as _h5` alias does not leak into (or clash with) the
    // user's namespace.
    let expanded = quote! {
        #[allow(dead_code, unused_variables, unused_attributes)]
        const #dummy: () = {
            extern crate hdf5 as _h5;
            #[automatically_derived]
            unsafe impl #impl_generics _h5::types::H5Type for #name #ty_generics #where_clause {
                #[inline]
                fn type_descriptor() -> _h5::types::TypeDescriptor {
                    #body
                }
            }
        };
    };
    proc_macro::TokenStream::from(expanded)
}
/// Generates a `TypeDescriptor::Compound` body for a struct: parallel slices
/// of field accessors (`fields`), HDF5 member names (`names`), and Rust types
/// (`types`). Field offsets are computed at runtime with the null-base-pointer
/// "offset-of" idiom (this predates `mem::offset_of!`).
fn impl_compound<F>(
    ty: &Ident, ty_generics: &TypeGenerics, fields: &[F], names: &[String], types: &[Type],
) -> TokenStream
where
    F: ToTokens,
{
    quote! {
        // Null base pointer: `&(*origin).field as *const _` yields the field's
        // byte offset without constructing a value.
        let origin: *const #ty #ty_generics = ::std::ptr::null();
        let mut fields = vec![#(
            _h5::types::CompoundField {
                name: #names.to_owned(),
                ty: <#types as _h5::types::H5Type>::type_descriptor(),
                offset: unsafe { &((*origin).#fields) as *const _ as _ },
                index: 0,
            }
        ),*];
        // Record declaration order of each member.
        for i in 0..fields.len() {
            fields[i].index = i;
        }
        let size = ::std::mem::size_of::<#ty #ty_generics>();
        _h5::types::TypeDescriptor::Compound(_h5::types::CompoundType { fields, size })
    }
}
/// For `repr(transparent)` wrappers: the HDF5 type descriptor of the wrapper
/// is exactly that of its single non-phantom field.
fn impl_transparent(ty: &Type) -> TokenStream {
    quote! {
        <#ty as _h5::types::H5Type>::type_descriptor()
    }
}
/// Generates a `TypeDescriptor::Enum` body for a fieldless enum with an
/// explicit integer `repr` (e.g. `#[repr(u16)]`).
fn impl_enum(names: &[Ident], values: &[Expr], repr: &Ident) -> TokenStream {
    // Map the repr to a byte-size ident: `u16` -> `IntSize::U2`, etc.
    // `isize`/`usize` (no parsable bit count) fall back to the platform
    // pointer width.
    let size = Ident::new(
        &format!(
            "U{}",
            usize::from_str(&repr.to_string()[1..]).unwrap_or(mem::size_of::<usize>() * 8) / 8
        ),
        Span::call_site(),
    );
    let signed = repr.to_string().starts_with('i');
    // Repeat the repr ident so it can be interpolated once per member below.
    let repr = iter::repeat(repr);
    quote! {
        _h5::types::TypeDescriptor::Enum(
            _h5::types::EnumType {
                size: _h5::types::IntSize::#size,
                signed: #signed,
                members: vec![#(
                    _h5::types::EnumMember {
                        name: stringify!(#names).to_owned(),
                        value: (#values) as #repr as _,
                    }
                ),*],
            }
        )
    }
}
/// Returns `true` if `ty` is a path type whose final segment is `PhantomData`
/// (such fields are zero-sized and are skipped when building descriptors).
fn is_phantom_data(ty: &Type) -> bool {
    match *ty {
        Type::Path(TypePath { qself: None, ref path }) => {
            // `Punctuated::last` is O(1), unlike the previous `iter().last()`
            // which walked the whole segment list.
            path.segments.last().map_or(false, |seg| seg.ident == "PhantomData")
        }
        _ => false,
    }
}
/// Scans the outer attributes for `#[repr(...)]` and returns the first repr
/// ident that matches one of `expected` (e.g. `"C"`, `"packed"`, or an
/// integer type name); `None` if no matching repr is present.
fn find_repr(attrs: &[Attribute], expected: &[&str]) -> Option<Ident> {
    for attr in attrs.iter() {
        // Only outer attributes (`#[...]`) are considered.
        if attr.style != AttrStyle::Outer {
            continue;
        }
        // Only list-style attributes can be `repr(...)`.
        let list = match attr.parse_meta() {
            Ok(Meta::List(list)) => list,
            _ => continue,
        };
        if !list.path.get_ident().map_or(false, |ident| ident == "repr") {
            continue;
        }
        // Inspect each item inside `repr(...)`, e.g. `C` or `u16`.
        for item in list.nested.iter() {
            let path = match item {
                NestedMeta::Meta(Meta::Path(ref path)) => path,
                _ => continue,
            };
            let ident = match path.get_ident() {
                Some(ident) => ident,
                _ => continue,
            };
            if expected.iter().any(|&s| ident == s) {
                return Some(Ident::new(&ident.to_string(), Span::call_site()));
            }
        }
    }
    None
}
/// Maps `func` over a borrowing iterator and gathers the results into a `Vec`.
/// Small convenience used to extract parallel lists (names, types, …) from
/// `syn` field data.
fn pluck<'a, I, F, T, S>(iter: I, func: F) -> Vec<S>
where
    I: Iterator<Item = &'a T>,
    F: Fn(&'a T) -> S,
    T: 'a,
{
    // Reserve what the iterator already knows about its length, then fill.
    let (lower, _) = iter.size_hint();
    let mut out = Vec::with_capacity(lower);
    for item in iter {
        out.push(func(item));
    }
    out
}
/// Dispatches on the derive input's shape and produces the body of the
/// generated `type_descriptor()`. Unsupported shapes abort (via
/// `proc_macro_error`) with a span-targeted message on the type name.
fn impl_trait(
    ty: &Ident, data: &Data, attrs: &[Attribute], ty_generics: &TypeGenerics,
) -> TokenStream {
    match *data {
        Data::Struct(ref data) => match data.fields {
            Fields::Unit => {
                abort!(ty, "cannot derive `H5Type` for unit structs");
            }
            Fields::Named(ref fields) => {
                // `PhantomData` fields are zero-sized and skipped.
                let fields: Vec<_> =
                    fields.named.iter().filter(|f| !is_phantom_data(&f.ty)).collect();
                if fields.is_empty() {
                    abort!(ty, "cannot derive `H5Type` for empty structs");
                }
                // A fixed layout repr is required for compound offsets to be
                // meaningful.
                let repr =
                    find_repr(attrs, &["C", "packed", "transparent"]).unwrap_or_else(|| {
                        abort!(ty,
                            "`H5Type` requires repr(C), repr(packed) or repr(transparent) for structs")
                    });
                if repr == "transparent" {
                    // repr(transparent) implies exactly one non-phantom field.
                    assert_eq!(fields.len(), 1);
                    impl_transparent(&fields[0].ty)
                } else {
                    // Parallel lists: member types, field accessors, and names.
                    let types = pluck(fields.iter(), |f| f.ty.clone());
                    let fields = pluck(fields.iter(), |f| f.ident.clone().unwrap());
                    let names = fields.iter().map(ToString::to_string).collect::<Vec<_>>();
                    impl_compound(ty, ty_generics, &fields, &names, &types)
                }
            }
            Fields::Unnamed(ref fields) => {
                // Tuple structs: keep the original positional index for each
                // non-phantom field so offsets are taken from the right slot.
                let (index, fields): (Vec<Index>, Vec<_>) = fields
                    .unnamed
                    .iter()
                    .enumerate()
                    .filter(|&(_, f)| !is_phantom_data(&f.ty))
                    .map(|(i, f)| (Index::from(i), f))
                    .unzip();
                if fields.is_empty() {
                    abort!(ty, "cannot derive `H5Type` for empty tuple structs")
                }
                let repr = find_repr(attrs, &["C", "packed", "transparent"]).unwrap_or_else(|| {
                    abort!(ty,
                        "`H5Type` requires repr(C), repr(packed) or repr(transparent) for tuple structs")
                });
                if repr == "transparent" {
                    assert_eq!(fields.len(), 1);
                    impl_transparent(&fields[0].ty)
                } else {
                    // Member names are the positional indices ("0", "1", …).
                    let names = (0..fields.len()).map(|f| f.to_string()).collect::<Vec<_>>();
                    let types = pluck(fields.iter(), |f| f.ty.clone());
                    impl_compound(ty, ty_generics, &index, &names, &types)
                }
            }
        },
        Data::Enum(ref data) => {
            let variants = &data.variants;
            // Only C-like enums with explicit discriminants are supported.
            if variants.iter().any(|v| v.fields != Fields::Unit || v.discriminant.is_none()) {
                abort!(ty, "`H5Type` can only be derived for enums with scalar discriminants")
            }
            if variants.is_empty() {
                abort!(ty, "cannot derive `H5Type` for empty enums")
            }
            let enum_reprs =
                &["i8", "i16", "i32", "i64", "u8", "u16", "u32", "u64", "isize", "usize"];
            let repr = find_repr(attrs, enum_reprs).unwrap_or_else(|| {
                abort!(ty, "`H5Type` can only be derived for enums with explicit representation")
            });
            let names = pluck(variants.iter(), |v| v.ident.clone());
            let values = pluck(variants.iter(), |v| v.discriminant.clone().unwrap().1);
            impl_enum(&names, &values, &repr)
        }
        Data::Union(_) => {
            abort!(ty, "cannot derive `H5Type` for tagged unions");
        }
    }
}
| 35.52 | 101 | 0.490741 |
ff6f6d2c3e863324cb4d5eca731fa9f399310204
| 1,745 |
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//! Types related to parsing and encoding CoAP messages.
//!
use super::*;
/// Type for representing a CoAP message id.
pub type MsgId = u16;
mod read;
pub use read::AckMessage;
pub use read::MessageRead;
pub use read::ResetMessage;
mod write;
pub use write::MessageWrite;
mod msg_code;
pub use msg_code::MsgCode;
pub use msg_code::MsgCodeClass;
mod msg_type;
pub use msg_type::MsgType;
mod display;
pub use display::CoapByteDisplayFormatter;
pub use display::MessageDisplay;
mod null;
pub use null::NullMessageRead;
pub use null::NullMessageWrite;
mod std_encoder;
pub use std_encoder::BufferMessageEncoder;
pub use std_encoder::VecMessageEncoder;
mod std_parser;
pub use std_parser::OwnedImmutableMessage;
pub use std_parser::StandardMessageParser;
mod token;
pub use token::*;
pub mod codec;
// Layout of the first byte of a CoAP message header (RFC 7252 §3):
// `VVTTLLLL` — 2-bit version, 2-bit message type, 4-bit token length (TKL).
#[allow(dead_code)]
const COAP_MSG_VER_MASK: u8 = 0b11000000;
#[allow(dead_code)]
const COAP_MSG_VER_OFFS: u8 = 6;
// Message type field (CON/NON/ACK/RST).
#[allow(dead_code)]
const COAP_MSG_T_MASK: u8 = 0b00110000;
#[allow(dead_code)]
const COAP_MSG_T_OFFS: u8 = 4;
// Token length field; occupies the low nibble, hence zero offset.
#[allow(dead_code)]
const COAP_MSG_TKL_MASK: u8 = 0b00001111;
#[allow(dead_code)]
const COAP_MSG_TKL_OFFS: u8 = 0;
| 22.960526 | 75 | 0.75702 |
f70a3ec588a0028d20a5cc4b190183b9ed1b1709
| 931 |
#![no_std]
#![allow(dead_code)]
// #[macro_use(singleton)]
// extern crate cortex_m;
use cortex_m::asm::delay as delay_cycles;
use stm32h7xx_hal::time::{Hertz, MegaHertz};
// Milliseconds per second, used to derive per-millisecond quantities below.
pub const MILLI: u32 = 1_000;
// Audio timing: 48 kHz sample rate processed in blocks of 48 samples,
// i.e. one audio frame per millisecond (AUDIO_FRAME_RATE_HZ = 1000).
pub const AUDIO_FRAME_RATE_HZ: u32 = 1_000;
pub const AUDIO_BLOCK_SIZE: u16 = 48;
pub const AUDIO_SAMPLE_RATE: usize = 48_000;
pub const AUDIO_SAMPLE_HZ: Hertz = Hertz(48_000);
// Core clock (480 MHz) and the derived number of cycles per millisecond.
pub const CLOCK_RATE_HZ: Hertz = Hertz(480_000_000_u32);
pub const MILICYCLES: u32 = CLOCK_RATE_HZ.0 / MILLI;
// Type alias for the TIM2-based hardware timer (frame pacing).
pub type FrameTimer = stm32h7xx_hal::timer::Timer<stm32h7xx_hal::stm32::TIM2>;
pub use stm32h7xx_hal as hal;
pub mod audio;
pub mod flash;
pub mod gpio;
pub mod hid;
pub mod logger;
pub mod mpu;
pub mod prelude;
pub mod sdmmc;
pub mod sdram;
pub mod system;
// Delay for ms, note if interrupts are active delay time will extend
// NOTE(review): `ms * MILICYCLES` is u32 arithmetic; with MILICYCLES = 480_000
// this overflows for ms > ~8_947 (panic in debug, silent wrap in release) —
// confirm callers stay below that bound.
pub fn delay_ms(ms: u32) {
    delay_cycles(ms * MILICYCLES);
}
// pub fn ms_to_cycles(ms: u32) {
// }
| 21.651163 | 78 | 0.73362 |
4a2c5e2dbaad6028ff5525ffbf827321de542904
| 1,323 |
extern crate amethyst;
use amethyst::{
assets::{PrefabLoader, PrefabLoaderSystem, RonFormat},
core::transform::TransformBundle,
prelude::*,
renderer::{DrawPbm, PosNormTangTex},
utils::{application_root_dir, scene::BasicScenePrefab},
};
type MyPrefabData = BasicScenePrefab<Vec<PosNormTangTex>>;
/// Marker game state: loads the spotlights scene prefab on start and otherwise
/// relies on `SimpleState`'s default behavior.
struct Example;
impl SimpleState for Example {
    fn on_start(&mut self, data: StateData<GameData>) {
        // Load the scene prefab through the loader resource, then attach the
        // resulting handle to a fresh entity so the scene gets instantiated.
        let handle = data.world.exec(|loader: PrefabLoader<MyPrefabData>| {
            loader.load("prefab/spotlights_scene.ron", RonFormat, (), ())
        });
        data.world.create_entity().with(handle).build();
    }
}
/// Entry point: sets up logging, resolves asset paths relative to the crate
/// root, assembles the system dispatcher, and runs the Amethyst main loop.
fn main() -> amethyst::Result<()> {
    amethyst::start_logger(Default::default());
    // Paths are built from the application root so the example runs from any
    // working directory.
    let app_root = application_root_dir();
    let display_config_path =
        format!("{}/examples/spotlights/resources/display_config.ron", app_root);
    let assets_dir = format!("{}/examples/assets/", app_root);
    // Prefab loading, transforms, and the PBM renderer.
    let systems = GameDataBuilder::default()
        .with(PrefabLoaderSystem::<MyPrefabData>::default(), "", &[])
        .with_bundle(TransformBundle::new())?
        .with_basic_renderer(display_config_path, DrawPbm::<PosNormTangTex>::new(), false)?;
    Application::new(assets_dir, Example, systems)?.run();
    Ok(())
}
| 30.068182 | 92 | 0.662887 |
236b7f50e098732155a72ee97d197618f5e42637
| 4,588 |
use super::types::OdbcType;
use odbc_safe::AutocommitMode;
use super::types::EncodedValue;
use {super::super::ffi, super::super::Handle, super::super::Raii, Result, super::super::Return, super::super::Statement};
use std::error::Error;
impl<'a, 'b, S, R, AC: AutocommitMode> Statement<'a, 'b, S, R, AC> {
    /// Binds a parameter to a parameter marker in an SQL statement.
    ///
    /// # Result
    /// This method will destroy the statement and create a new one which may not outlive the bound
    /// parameter. This is to ensure that the statement will not dereference an invalid pointer
    /// during execution.
    ///
    /// # Arguments
    /// * `parameter_index` - Index of the marker to bind to the parameter. Starting at `1`
    /// * `value` - Reference to bind to the marker
    ///
    /// # Example
    /// ```
    /// # use odbc::*;
    /// # fn do_odbc_stuff() -> std::result::Result<(), Box<std::error::Error>> {
    // let env = create_environment_v3().map_err(|e| e.unwrap())?;
    /// let conn = env.connect("TestDataSource", "", "")?;
    /// let stmt = Statement::with_parent(&conn)?;
    /// let param = 1968;
    /// let stmt = stmt.bind_parameter(1, &param)?;
    /// let sql_text = "SELECT TITLE FROM MOVIES WHERE YEAR = ?";
    /// if let Data(mut stmt) = stmt.exec_direct(sql_text)? {
    ///     // ...
    /// }
    /// # Ok(())
    /// # }
    /// ```
    pub fn bind_parameter<'c, T>(
        mut self,
        parameter_index: u16,
        value: &'c T,
    ) -> Result<Statement<'a, 'c, S, R, AC>, Box<dyn Error>>
    where
        T: OdbcType<'c>,
        T: ?Sized,
        'b: 'c,
    {
        // A null value pointer signals SQL NULL; otherwise the indicator
        // carries the value's column size.
        let ind = if value.value_ptr() == 0 as *const Self as ffi::SQLPOINTER {
            ffi::SQL_NULL_DATA
        } else {
            value.column_size() as ffi::SQLLEN
        };
        // The indicator buffer is owned by the statement so the pointer the
        // driver stores stays valid until execution.
        let ind_ptr = self.param_ind_buffers.alloc(parameter_index as usize, ind);
        // `value_ptr` may return a different pointer on each call; the pointer
        // we bind and the buffer we keep alive must be the same, so encode
        // the value exactly once.
        let enc_value = value.encoded_value();
        self.raii
            .bind_input_parameter(parameter_index, value, ind_ptr, &enc_value)
            .into_result(&self)?;
        // Keep the encoded buffer alive so its memory is not reused before
        // the statement executes.
        if enc_value.has_value() {
            self.encoded_values.push(enc_value);
        }
        Ok(self)
    }
    /// Releasing all parameter buffers set by `bind_parameter`. This method consumes the statement
    /// and returns a new one those lifetime is no longer limited by the buffers bound.
    pub fn reset_parameters(mut self) -> Result<Statement<'a, 'a, S, R, AC>, Box<dyn Error>> {
        // Drop our owned indicator/encoded buffers, then tell the driver to
        // release its bindings before rebuilding the statement wrapper.
        self.param_ind_buffers.clear();
        self.encoded_values.clear();
        self.raii.reset_parameters().into_result(&mut self)?;
        Ok(Statement::with_raii(self.raii))
    }
}
impl<'p> Raii<'p, ffi::Stmt> {
    // Thin wrapper over `SQLBindParameter` for an input-only
    // (SQL_PARAM_INPUT) marker; translates the raw return code into `Return`.
    fn bind_input_parameter<'c, T>(
        &mut self,
        parameter_index: u16,
        value: &'c T,
        str_len_or_ind_ptr: *mut ffi::SQLLEN,
        enc_value: &EncodedValue,
    ) -> Return<()>
    where
        T: OdbcType<'c>,
        T: ?Sized,
    {
        //if encoded value exists, use it.
        let (column_size, value_ptr) = if enc_value.has_value() {
            (enc_value.column_size(), enc_value.value_ptr())
        } else {
            (value.column_size(), value.value_ptr())
        };
        match unsafe {
            ffi::SQLBindParameter(
                self.handle(),
                parameter_index,
                ffi::SQL_PARAM_INPUT,
                T::c_data_type(),
                T::sql_data_type(),
                column_size,
                value.decimal_digits(),
                value_ptr,
                0, // buffer length
                str_len_or_ind_ptr, // Note that this ptr has to be valid until statement is executed
            )
        } {
            ffi::SQL_SUCCESS => Return::Success(()),
            ffi::SQL_SUCCESS_WITH_INFO => Return::SuccessWithInfo(()),
            ffi::SQL_ERROR => Return::Error,
            r => panic!("Unexpected return from SQLBindParameter: {:?}", r),
        }
    }
    // Unbinds all parameters on the driver side via
    // `SQLFreeStmt(SQL_RESET_PARAMS)`.
    fn reset_parameters(&mut self) -> Return<()> {
        match unsafe { ffi::SQLFreeStmt(self.handle(), ffi::SQL_RESET_PARAMS) } {
            ffi::SQL_SUCCESS => Return::Success(()),
            ffi::SQL_SUCCESS_WITH_INFO => Return::SuccessWithInfo(()),
            ffi::SQL_ERROR => Return::Error,
            r => panic!("SQLFreeStmt returned unexpected result: {:?}", r),
        }
    }
}
| 35.84375 | 121 | 0.55667 |
50b0037a3a3d076232ed17f70babe9fbc5ada4a9
| 9,087 |
use super::*;
// Baseline app used by the plain `fish` test.
fn build_app() -> App<'static> {
    build_app_with_name("myapp")
}
// Shared builder: a positional file arg plus a `test` subcommand with a
// `--case` option; the binary name is parameterized so fixtures can differ.
fn build_app_with_name(s: &'static str) -> App<'static> {
    App::new(s)
        .version("3.0")
        .about("Tests completions")
        .arg(
            Arg::new("file")
                .value_hint(ValueHint::FilePath)
                .about("some input file"),
        )
        .subcommand(
            App::new("test").about("tests things").arg(
                Arg::new("case")
                    .long("case")
                    .takes_value(true)
                    .about("the case to test"),
            ),
        )
}
// Golden-file check: generated fish completions for the basic app must match
// the FISH fixture verbatim.
#[test]
fn fish() {
    let mut app = build_app();
    common::<Fish>(&mut app, "myapp", FISH);
}
// Expected fish completion script for `build_app` (compared byte-for-byte).
static FISH: &str = r#"complete -c myapp -n "__fish_use_subcommand" -s h -l help -d 'Print help information'
complete -c myapp -n "__fish_use_subcommand" -s V -l version -d 'Print version information'
complete -c myapp -n "__fish_use_subcommand" -f -a "test" -d 'tests things'
complete -c myapp -n "__fish_use_subcommand" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
complete -c myapp -n "__fish_seen_subcommand_from test" -l case -d 'the case to test' -r
complete -c myapp -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help information'
complete -c myapp -n "__fish_seen_subcommand_from test" -s V -l version -d 'Print version information'
complete -c myapp -n "__fish_seen_subcommand_from help" -s h -l help -d 'Print help information'
complete -c myapp -n "__fish_seen_subcommand_from help" -s V -l version -d 'Print version information'
"#;
// Checks completions for subcommand names with underscores and hyphens
// (including an aliased subcommand).
#[test]
fn fish_with_special_commands() {
    let mut app = build_app_special_commands();
    common::<Fish>(&mut app, "my_app", FISH_SPECIAL_CMDS);
}
// Extends the shared builder with `some_cmd` and a hyphenated subcommand.
fn build_app_special_commands() -> App<'static> {
    build_app_with_name("my_app")
        .subcommand(
            App::new("some_cmd").about("tests other things").arg(
                Arg::new("config")
                    .long("--config")
                    .takes_value(true)
                    .about("the other case to test"),
            ),
        )
        .subcommand(App::new("some-cmd-with-hypens").alias("hyphen"))
}
// Expected completions; note the alias does not appear as its own command.
static FISH_SPECIAL_CMDS: &str = r#"complete -c my_app -n "__fish_use_subcommand" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_use_subcommand" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_use_subcommand" -f -a "test" -d 'tests things'
complete -c my_app -n "__fish_use_subcommand" -f -a "some_cmd" -d 'tests other things'
complete -c my_app -n "__fish_use_subcommand" -f -a "some-cmd-with-hypens"
complete -c my_app -n "__fish_use_subcommand" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
complete -c my_app -n "__fish_seen_subcommand_from test" -l case -d 'the case to test' -r
complete -c my_app -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from test" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd" -l config -d 'the other case to test' -r
complete -c my_app -n "__fish_seen_subcommand_from some_cmd" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from some-cmd-with-hypens" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from some-cmd-with-hypens" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from help" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from help" -s V -l version -d 'Print version information'
"#;
#[test]
fn fish_with_special_help() {
let mut app = build_app_special_help();
common::<Fish>(&mut app, "my_app", FISH_SPECIAL_HELP);
}
fn build_app_special_help() -> App<'static> {
App::new("my_app")
.version("3.0")
.arg(
Arg::new("single-quotes")
.long("single-quotes")
.about("Can be 'always', 'auto', or 'never'"),
)
.arg(
Arg::new("double-quotes")
.long("double-quotes")
.about("Can be \"always\", \"auto\", or \"never\""),
)
.arg(
Arg::new("backticks")
.long("backticks")
.about("For more information see `echo test`"),
)
.arg(Arg::new("backslash").long("backslash").about("Avoid '\\n'"))
.arg(
Arg::new("brackets")
.long("brackets")
.about("List packages [filter]"),
)
.arg(
Arg::new("expansions")
.long("expansions")
.about("Execute the shell command with $SHELL"),
)
}
// Expected fish script for `build_app_special_help`: single quotes and
// backslashes are escaped inside the single-quoted `-d` description, while
// double quotes, backticks, brackets and `$` pass through verbatim.
static FISH_SPECIAL_HELP: &str = r#"complete -c my_app -s h -l help -d 'Print help information'
complete -c my_app -s V -l version -d 'Print version information'
complete -c my_app -l single-quotes -d 'Can be \'always\', \'auto\', or \'never\''
complete -c my_app -l double-quotes -d 'Can be "always", "auto", or "never"'
complete -c my_app -l backticks -d 'For more information see `echo test`'
complete -c my_app -l backslash -d 'Avoid \'\\n\''
complete -c my_app -l brackets -d 'List packages [filter]'
complete -c my_app -l expansions -d 'Execute the shell command with $SHELL'
"#;
#[test]
fn fish_with_aliases() {
    // Visible short/long aliases must each appear as their own `-s`/`-l`
    // spelling in the generated completions.
    let mut app = build_app_with_aliases();
    common::<Fish>(&mut app, "cmd", FISH_ALIASES);
}
/// Builds an app where both the flag and the option carry a visible short
/// alias and a visible long alias, plus one bare positional.
fn build_app_with_aliases() -> App<'static> {
    // Name the args up front instead of inlining them in the builder chain.
    let flag = Arg::new("flag")
        .short('f')
        .visible_short_alias('F')
        .long("flag")
        .visible_alias("flg")
        .about("cmd flag");
    let option = Arg::new("option")
        .short('o')
        .visible_short_alias('O')
        .long("option")
        .visible_alias("opt")
        .about("cmd option")
        .takes_value(true);
    App::new("cmd")
        .version("3.0")
        .about("testing bash completions")
        .arg(flag)
        .arg(option)
        .arg(Arg::new("positional"))
}
// Expected output for `build_app_with_aliases`: every alias spelling of a
// flag/option is emitted as an additional `-s`/`-l` argument on one line.
static FISH_ALIASES: &str = r#"complete -c cmd -s o -s O -l option -l opt -d 'cmd option' -r
complete -c cmd -s h -l help -d 'Print help information'
complete -c cmd -s V -l version -d 'Print version information'
complete -c cmd -s f -s F -l flag -l flg -d 'cmd flag'
"#;
#[test]
fn fish_with_sub_subcommands() {
    // Nested subcommands require compound `__fish_seen_subcommand_from`
    // conditions; compare the generated script verbatim.
    let mut app = build_app_sub_subcommands();
    common::<Fish>(&mut app, "my_app", FISH_SUB_SUBCMDS);
}
/// Extends the base app with a two-level subcommand tree
/// (`some_cmd` -> `sub_cmd --config <val>`).
fn build_app_sub_subcommands() -> App<'static> {
    build_app_with_name("my_app").subcommand(
        App::new("some_cmd")
            .about("top level subcommand")
            .subcommand(
                App::new("sub_cmd").about("sub-subcommand").arg(
                    Arg::new("config")
                        // `long` takes the bare name — clap strips a leading
                        // `--` anyway, so passing "--config" only obscured it.
                        .long("config")
                        .takes_value(true)
                        .about("the other case to test"),
                ),
            ),
    )
}
// Expected output for the nested-subcommand app: completions for `some_cmd`
// are guarded with "and not __fish_seen_subcommand_from sub_cmd", while
// `sub_cmd`'s own completions require both subcommands to have been seen.
static FISH_SUB_SUBCMDS: &str = r#"complete -c my_app -n "__fish_use_subcommand" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_use_subcommand" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_use_subcommand" -f -a "test" -d 'tests things'
complete -c my_app -n "__fish_use_subcommand" -f -a "some_cmd" -d 'top level subcommand'
complete -c my_app -n "__fish_use_subcommand" -f -a "help" -d 'Print this message or the help of the given subcommand(s)'
complete -c my_app -n "__fish_seen_subcommand_from test" -l case -d 'the case to test' -r
complete -c my_app -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from test" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and not __fish_seen_subcommand_from sub_cmd" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and not __fish_seen_subcommand_from sub_cmd" -s V -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and not __fish_seen_subcommand_from sub_cmd" -f -a "sub_cmd" -d 'sub-subcommand'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and __fish_seen_subcommand_from sub_cmd" -l config -d 'the other case to test' -r
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and __fish_seen_subcommand_from sub_cmd" -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from some_cmd; and __fish_seen_subcommand_from sub_cmd" -l version -d 'Print version information'
complete -c my_app -n "__fish_seen_subcommand_from help" -s h -l help -d 'Print help information'
complete -c my_app -n "__fish_seen_subcommand_from help" -s V -l version -d 'Print version information'
"#;
| 45.208955 | 152 | 0.634863 |
7a788f2de7f2f199936ebf371a3d44bbc07bc25b
| 35,420 |
use check::FnCtxt;
use rustc::infer::InferOk;
use rustc::traits::ObligationCause;
use syntax::ast;
use syntax::util::parser::PREC_POSTFIX;
use syntax_pos::Span;
use rustc::hir;
use rustc::hir::def::Def;
use rustc::hir::Node;
use rustc::hir::{Item, ItemKind, print};
use rustc::ty::{self, Ty, AssociatedItem};
use rustc::ty::adjustment::AllowTwoPhase;
use errors::{Applicability, DiagnosticBuilder, SourceMapper};
use super::method::probe;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// Requires that the two types unify, and prints an error message if
// they don't.
pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
self.demand_suptype_diag(sp, expected, actual).map(|mut e| e.emit());
}
pub fn demand_suptype_diag(&self,
sp: Span,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
let cause = &self.misc(sp);
match self.at(cause, self.param_env).sup(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
None
},
Err(e) => {
Some(self.report_mismatched_types(&cause, expected, actual, e))
}
}
}
    /// Requires that `expected` and `actual` unify exactly, emitting the
    /// mismatched-types error immediately on failure.
    pub fn demand_eqtype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) {
        if let Some(mut err) = self.demand_eqtype_diag(sp, expected, actual) {
            err.emit();
        }
    }
    /// Like `demand_eqtype`, but returns the diagnostic (unemitted) so the
    /// caller can annotate or cancel it. Uses a generic "misc" obligation
    /// cause at `sp`.
    pub fn demand_eqtype_diag(&self,
                             sp: Span,
                             expected: Ty<'tcx>,
                             actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
        self.demand_eqtype_with_origin(&self.misc(sp), expected, actual)
    }
pub fn demand_eqtype_with_origin(&self,
cause: &ObligationCause<'tcx>,
expected: Ty<'tcx>,
actual: Ty<'tcx>) -> Option<DiagnosticBuilder<'tcx>> {
match self.at(cause, self.param_env).eq(expected, actual) {
Ok(InferOk { obligations, value: () }) => {
self.register_predicates(obligations);
None
}
Err(e) => {
Some(self.report_mismatched_types(cause, expected, actual, e))
}
}
}
pub fn demand_coerce(&self,
expr: &hir::Expr,
checked_ty: Ty<'tcx>,
expected: Ty<'tcx>,
allow_two_phase: AllowTwoPhase)
-> Ty<'tcx> {
let (ty, err) = self.demand_coerce_diag(expr, checked_ty, expected, allow_two_phase);
if let Some(mut err) = err {
err.emit();
}
ty
}
    // Checks that the type of `expr` can be coerced to `expected`.
    //
    // N.B., this code relies on `self.diverges` to be accurate. In
    // particular, assignments to `!` will be permitted if the
    // diverges flag is currently "always".
    //
    // On success, returns the coerced type with no diagnostic; on failure,
    // returns `expected` (so type-checking can continue) together with a
    // not-yet-emitted diagnostic that may carry structured suggestions.
    pub fn demand_coerce_diag(&self,
                              expr: &hir::Expr,
                              checked_ty: Ty<'tcx>,
                              expected: Ty<'tcx>,
                              allow_two_phase: AllowTwoPhase)
                              -> (Ty<'tcx>, Option<DiagnosticBuilder<'tcx>>) {
        let expected = self.resolve_type_vars_with_obligations(expected);
        // Fast path: the coercion succeeds and there is nothing to report.
        let e = match self.try_coerce(expr, checked_ty, expected, allow_two_phase) {
            Ok(ty) => return (ty, None),
            Err(e) => e
        };
        let cause = self.misc(expr.span);
        let expr_ty = self.resolve_type_vars_with_obligations(checked_ty);
        let mut err = self.report_mismatched_types(&cause, expected, expr_ty, e);
        // If the expected type is an enum (Issue #55250) with any variants whose
        // sole field is of the found type, suggest such variants. (Issue #42764)
        if let ty::Adt(expected_adt, substs) = expected.sty {
            if expected_adt.is_enum() {
                let mut compatible_variants = expected_adt.variants
                    .iter()
                    .filter(|variant| variant.fields.len() == 1)
                    .filter_map(|variant| {
                        let sole_field = &variant.fields[0];
                        let sole_field_ty = sole_field.ty(self.tcx, substs);
                        if self.can_coerce(expr_ty, sole_field_ty) {
                            let variant_path = self.tcx.item_path_str(variant.did);
                            // FIXME #56861: DRYer prelude filtering
                            Some(variant_path.trim_start_matches("std::prelude::v1::").to_string())
                        } else {
                            None
                        }
                    }).peekable();
                if compatible_variants.peek().is_some() {
                    // Render the original expression once so it can be
                    // embedded in each `Variant(<expr>)` suggestion.
                    let expr_text = print::to_string(print::NO_ANN, |s| s.print_expr(expr));
                    let suggestions = compatible_variants
                        .map(|v| format!("{}({})", v, expr_text));
                    err.span_suggestions_with_applicability(
                        expr.span,
                        "try using a variant of the expected type",
                        suggestions,
                        Applicability::MaybeIncorrect,
                    );
                }
            }
        }
        self.suggest_ref_or_into(&mut err, expr, expected, expr_ty);
        (expected, Some(err))
    }
    /// Probes for nullary methods on `checked_ty` that return `expected` and
    /// are whitelisted as "identity-like" conversions, for use as
    /// `.method()` suggestions in type-mismatch diagnostics.
    pub fn get_conversion_methods(&self, span: Span, expected: Ty<'tcx>, checked_ty: Ty<'tcx>)
                              -> Vec<AssociatedItem> {
        let mut methods = self.probe_for_return_type(span,
                                                     probe::Mode::MethodCall,
                                                     expected,
                                                     checked_ty,
                                                     ast::DUMMY_NODE_ID);
        // Keep only methods with no arguments besides `self` that carry the
        // whitelisting attribute.
        methods.retain(|m| {
            self.has_no_input_arg(m) &&
                self.tcx.get_attrs(m.def_id).iter()
                // This special internal attribute is used to whitelist
                // "identity-like" conversion methods to be suggested here.
                //
                // FIXME (#46459 and #46460): ideally
                // `std::convert::Into::into` and `std::borrow:ToOwned` would
                // also be `#[rustc_conversion_suggestion]`, if not for
                // method-probing false-positives and -negatives (respectively).
                //
                // FIXME? Other potential candidate methods: `as_ref` and
                // `as_mut`?
                .find(|a| a.check_name("rustc_conversion_suggestion")).is_some()
        });
        methods
    }
// This function checks if the method isn't static and takes other arguments than `self`.
fn has_no_input_arg(&self, method: &AssociatedItem) -> bool {
match method.def() {
Def::Method(def_id) => {
self.tcx.fn_sig(def_id).inputs().skip_binder().len() == 1
}
_ => false,
}
}
    /// Identify some cases where `as_ref()` would be appropriate and suggest it.
    ///
    /// Given the following code:
    /// ```
    /// struct Foo;
    /// fn takes_ref(_: &Foo) {}
    /// let ref opt = Some(Foo);
    ///
    /// opt.map(|arg| takes_ref(arg));
    /// ```
    /// Suggest using `opt.as_ref().map(|arg| takes_ref(arg));` instead.
    ///
    /// It only checks for `Option` and `Result` and won't work with
    /// ```
    /// opt.map(|arg| { takes_ref(arg) });
    /// ```
    fn can_use_as_ref(&self, expr: &hir::Expr) -> Option<(Span, &'static str, String)> {
        if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.node {
            if let hir::def::Def::Local(id) = path.def {
                // Walk up the HIR: local use -> enclosing closure -> the
                // method call that closure is an argument of.
                let parent = self.tcx.hir().get_parent_node(id);
                if let Some(Node::Expr(hir::Expr {
                    id,
                    node: hir::ExprKind::Closure(_, decl, ..),
                    ..
                })) = self.tcx.hir().find(parent) {
                    let parent = self.tcx.hir().get_parent_node(*id);
                    if let (Some(Node::Expr(hir::Expr {
                        node: hir::ExprKind::MethodCall(path, span, expr),
                        ..
                    })), 1) = (self.tcx.hir().find(parent), decl.inputs.len()) {
                        // `expr[0]` is the method receiver; its type is
                        // stringified for the crude prefix checks below.
                        let self_ty = self.tables.borrow().node_id_to_type(expr[0].hir_id);
                        let self_ty = format!("{:?}", self_ty);
                        let name = path.ident.as_str();
                        let is_as_ref_able = (
                            self_ty.starts_with("&std::option::Option") ||
                            self_ty.starts_with("&std::result::Result") ||
                            self_ty.starts_with("std::option::Option") ||
                            self_ty.starts_with("std::result::Result")
                        ) && (name == "map" || name == "and_then");
                        if is_as_ref_able {
                            // Suggest inserting `as_ref().` just before the
                            // method name.
                            return Some((span.shrink_to_lo(),
                                         "consider using `as_ref` instead",
                                         "as_ref().".into()));
                        }
                    }
                }
            }
        }
        None
    }
    /// This function is used to determine potential "simple" improvements or users' errors and
    /// provide them useful help. For example:
    ///
    /// ```
    /// fn some_fn(s: &str) {}
    ///
    /// let x = "hey!".to_owned();
    /// some_fn(x); // error
    /// ```
    ///
    /// No need to find every potential function which could make a coercion to transform a
    /// `String` into a `&str` since a `&` would do the trick!
    ///
    /// In addition of this check, it also checks between references mutability state. If the
    /// expected is mutable but the provided isn't, maybe we could just say "Hey, try with
    /// `&mut`!".
    ///
    /// Returns `(span, help message, suggested replacement)` when a
    /// suggestion applies, `None` otherwise.
    pub fn check_ref(&self,
                     expr: &hir::Expr,
                     checked_ty: Ty<'tcx>,
                     expected: Ty<'tcx>)
                     -> Option<(Span, &'static str, String)> {
        let cm = self.sess().source_map();
        // Use the callsite's span if this is a macro call. #41858
        let sp = cm.call_span_if_macro(expr.span);
        if !cm.span_to_filename(sp).is_real() {
            return None;
        }
        match (&expected.sty, &checked_ty.sty) {
            // `&str` vs `&[u8]`/`&[u8; N]` literals: add or drop a leading `b`.
            (&ty::Ref(_, exp, _), &ty::Ref(_, check, _)) => match (&exp.sty, &check.sty) {
                (&ty::Str, &ty::Array(arr, _)) |
                (&ty::Str, &ty::Slice(arr)) if arr == self.tcx.types.u8 => {
                    if let hir::ExprKind::Lit(_) = expr.node {
                        if let Ok(src) = cm.span_to_snippet(sp) {
                            if src.starts_with("b\"") {
                                return Some((sp,
                                             "consider removing the leading `b`",
                                             src[1..].to_string()));
                            }
                        }
                    }
                },
                (&ty::Array(arr, _), &ty::Str) |
                (&ty::Slice(arr), &ty::Str) if arr == self.tcx.types.u8 => {
                    if let hir::ExprKind::Lit(_) = expr.node {
                        if let Ok(src) = cm.span_to_snippet(sp) {
                            if src.starts_with("\"") {
                                return Some((sp,
                                             "consider adding a leading `b`",
                                             format!("b{}", src)));
                            }
                        }
                    }
                }
                _ => {}
            },
            (&ty::Ref(_, _, mutability), _) => {
                // Check if it can work when put into a ref. For example:
                //
                // ```
                // fn bar(x: &mut i32) {}
                //
                // let x = 0u32;
                // bar(&x); // error, expected &mut
                // ```
                let ref_ty = match mutability {
                    hir::Mutability::MutMutable => self.tcx.mk_mut_ref(
                        self.tcx.mk_region(ty::ReStatic),
                        checked_ty),
                    hir::Mutability::MutImmutable => self.tcx.mk_imm_ref(
                        self.tcx.mk_region(ty::ReStatic),
                        checked_ty),
                };
                if self.can_coerce(ref_ty, expected) {
                    if let Ok(src) = cm.span_to_snippet(sp) {
                        let needs_parens = match expr.node {
                            // parenthesize if needed (Issue #46756)
                            hir::ExprKind::Cast(_, _) |
                            hir::ExprKind::Binary(_, _, _) => true,
                            // parenthesize borrows of range literals (Issue #54505)
                            _ if self.is_range_literal(expr) => true,
                            _ => false,
                        };
                        let sugg_expr = if needs_parens {
                            format!("({})", src)
                        } else {
                            src
                        };
                        // Prefer the `as_ref()` suggestion when applicable,
                        // since it addresses the underlying ownership issue.
                        if let Some(sugg) = self.can_use_as_ref(expr) {
                            return Some(sugg);
                        }
                        return Some(match mutability {
                            hir::Mutability::MutMutable => {
                                (sp, "consider mutably borrowing here", format!("&mut {}",
                                                                                sugg_expr))
                            }
                            hir::Mutability::MutImmutable => {
                                (sp, "consider borrowing here", format!("&{}", sugg_expr))
                            }
                        });
                    }
                }
            }
            (_, &ty::Ref(_, checked, _)) => {
                // We have `&T`, check if what was expected was `T`. If so,
                // we may want to suggest adding a `*`, or removing
                // a `&`.
                //
                // (But, also check check the `expn_info()` to see if this is
                // a macro; if so, it's hard to extract the text and make a good
                // suggestion, so don't bother.)
                if self.infcx.can_sub(self.param_env, checked, &expected).is_ok() &&
                    sp.ctxt().outer().expn_info().is_none() {
                    match expr.node {
                        // Maybe remove `&`?
                        hir::ExprKind::AddrOf(_, ref expr) => {
                            if !cm.span_to_filename(expr.span).is_real() {
                                return None;
                            }
                            if let Ok(code) = cm.span_to_snippet(expr.span) {
                                return Some((sp, "consider removing the borrow", code));
                            }
                        }
                        // Maybe add `*`? Only if `T: Copy`.
                        _ => {
                            if self.infcx.type_is_copy_modulo_regions(self.param_env,
                                                                      checked,
                                                                      sp) {
                                // do not suggest if the span comes from a macro (#52783)
                                if let (Ok(code),
                                        true) = (cm.span_to_snippet(sp), sp == expr.span) {
                                    return Some((
                                        sp,
                                        "consider dereferencing the borrow",
                                        format!("*{}", code),
                                    ));
                                }
                            }
                        }
                    }
                }
            }
            _ => {}
        }
        None
    }
    /// This function checks if the specified expression is a built-in range literal.
    /// (See: `LoweringContext::lower_expr()` in `src/librustc/hir/lowering.rs`).
    ///
    /// Used to decide whether a borrow of the expression must be
    /// parenthesized (`&(0..5)`), since range literals bind loosely.
    fn is_range_literal(&self, expr: &hir::Expr) -> bool {
        use hir::{Path, QPath, ExprKind, TyKind};
        // We support `::std::ops::Range` and `::core::ops::Range` prefixes
        let is_range_path = |path: &Path| {
            let mut segs = path.segments.iter()
                .map(|seg| seg.ident.as_str());
            // Exactly four segments: {{root}}::(std|core)::ops::Range*.
            if let (Some(root), Some(std_core), Some(ops), Some(range), None) =
                (segs.next(), segs.next(), segs.next(), segs.next(), segs.next())
            {
                // "{{root}}" is the equivalent of `::` prefix in Path
                root == "{{root}}" && (std_core == "std" || std_core == "core")
                    && ops == "ops" && range.starts_with("Range")
            } else {
                false
            }
        };
        let span_is_range_literal = |span: &Span| {
            // Check whether a span corresponding to a range expression
            // is a range literal, rather than an explicit struct or `new()` call.
            let source_map = self.tcx.sess.source_map();
            let end_point = source_map.end_point(*span);
            if let Ok(end_string) = source_map.span_to_snippet(end_point) {
                // A literal `a..b` never ends in `}` or `)`.
                !(end_string.ends_with("}") || end_string.ends_with(")"))
            } else {
                false
            }
        };
        match expr.node {
            // All built-in range literals but `..=` and `..` desugar to Structs
            ExprKind::Struct(QPath::Resolved(None, ref path), _, _) |
            // `..` desugars to its struct path
            ExprKind::Path(QPath::Resolved(None, ref path)) => {
                return is_range_path(&path) && span_is_range_literal(&expr.span);
            }
            // `..=` desugars into `::std::ops::RangeInclusive::new(...)`
            ExprKind::Call(ref func, _) => {
                if let ExprKind::Path(QPath::TypeRelative(ref ty, ref segment)) = func.node {
                    if let TyKind::Path(QPath::Resolved(None, ref path)) = ty.node {
                        let call_to_new = segment.ident.as_str() == "new";
                        return is_range_path(&path) && span_is_range_literal(&expr.span)
                            && call_to_new;
                    }
                }
            }
            _ => {}
        }
        false
    }
    /// When a type mismatch involves two numeric types, attach structured
    /// suggestions to `err`: change the literal's type suffix, call
    /// `.into()` for lossless widenings, or (when `can_cast` is enabled)
    /// cast with `as`. Returns `true` iff the pair of types was numeric and
    /// therefore handled here.
    pub fn check_for_cast(&self,
                          err: &mut DiagnosticBuilder<'tcx>,
                          expr: &hir::Expr,
                          checked_ty: Ty<'tcx>,
                          expected_ty: Ty<'tcx>)
                          -> bool {
        let parent_id = self.tcx.hir().get_parent_node(expr.id);
        if let Some(parent) = self.tcx.hir().find(parent_id) {
            // Shouldn't suggest `.into()` on `const`s.
            if let Node::Item(Item { node: ItemKind::Const(_, _), .. }) = parent {
                // FIXME(estebank): modify once we decide to suggest `as` casts
                return false;
            }
        };
        // Notes appended to the cast suggestions to describe their lossiness.
        let will_truncate = "will truncate the source value";
        let depending_on_isize = "will truncate or zero-extend depending on the bit width of \
                                  `isize`";
        let depending_on_usize = "will truncate or zero-extend depending on the bit width of \
                                  `usize`";
        let will_sign_extend = "will sign-extend the source value";
        let will_zero_extend = "will zero-extend the source value";
        // If casting this expression to a given numeric type would be appropriate in case of a type
        // mismatch.
        //
        // We want to minimize the amount of casting operations that are suggested, as it can be a
        // lossy operation with potentially bad side effects, so we only suggest when encountering
        // an expression that indicates that the original type couldn't be directly changed.
        //
        // For now, don't suggest casting with `as`.
        let can_cast = false;
        let needs_paren = expr.precedence().order() < (PREC_POSTFIX as i8);
        if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(expr.span) {
            let msg = format!("you can cast an `{}` to `{}`", checked_ty, expected_ty);
            let cast_suggestion = format!("{}{}{} as {}",
                                          if needs_paren { "(" } else { "" },
                                          src,
                                          if needs_paren { ")" } else { "" },
                                          expected_ty);
            let into_suggestion = format!(
                "{}{}{}.into()",
                if needs_paren { "(" } else { "" },
                src,
                if needs_paren { ")" } else { "" },
            );
            // True when the expression is a literal carrying an explicit type
            // suffix (e.g. `1u32`), which can simply be rewritten.
            let literal_is_ty_suffixed = |expr: &hir::Expr| {
                if let hir::ExprKind::Lit(lit) = &expr.node {
                    lit.node.is_suffixed()
                } else {
                    false
                }
            };
            let into_sugg = into_suggestion.clone();
            // Shared helper for lossless widenings: suggest rewriting the
            // literal suffix when possible, otherwise `.into()`.
            let suggest_to_change_suffix_or_into = |err: &mut DiagnosticBuilder,
                                                    note: Option<&str>| {
                let suggest_msg = if literal_is_ty_suffixed(expr) {
                    format!(
                        "change the type of the numeric literal from `{}` to `{}`",
                        checked_ty,
                        expected_ty,
                    )
                } else {
                    match note {
                        Some(note) => format!("{}, which {}", msg, note),
                        _ => format!("{} in a lossless way", msg),
                    }
                };
                let suffix_suggestion = format!(
                    "{}{}{}{}",
                    if needs_paren { "(" } else { "" },
                    src.trim_end_matches(&checked_ty.to_string()),
                    expected_ty,
                    if needs_paren { ")" } else { "" },
                );
                err.span_suggestion_with_applicability(
                    expr.span,
                    &suggest_msg,
                    if literal_is_ty_suffixed(expr) {
                        suffix_suggestion
                    } else {
                        into_sugg
                    },
                    Applicability::MachineApplicable,
                );
            };
            match (&expected_ty.sty, &checked_ty.sty) {
                // iN <- iM
                (&ty::Int(ref exp), &ty::Int(ref found)) => {
                    match (found.bit_width(), exp.bit_width()) {
                        (Some(found), Some(exp)) if found > exp => {
                            if can_cast {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_truncate),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                        }
                        (None, _) | (_, None) => {
                            if can_cast {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_isize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                        }
                        _ => {
                            suggest_to_change_suffix_or_into(
                                err,
                                Some(will_sign_extend),
                            );
                        }
                    }
                    true
                }
                // uN <- uM
                (&ty::Uint(ref exp), &ty::Uint(ref found)) => {
                    match (found.bit_width(), exp.bit_width()) {
                        (Some(found), Some(exp)) if found > exp => {
                            if can_cast {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_truncate),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                        }
                        (None, _) | (_, None) => {
                            if can_cast {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_usize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                        }
                        _ => {
                            suggest_to_change_suffix_or_into(
                                err,
                                Some(will_zero_extend),
                            );
                        }
                    }
                    true
                }
                // iN <- uM: only `as` casts apply (currently disabled).
                (&ty::Int(ref exp), &ty::Uint(ref found)) => {
                    if can_cast {
                        match (found.bit_width(), exp.bit_width()) {
                            (Some(found), Some(exp)) if found > exp - 1 => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_truncate),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            (None, None) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_truncate),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            (None, _) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_isize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            (_, None) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_usize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            _ => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_zero_extend),
                                    cast_suggestion,
                                    Applicability::MachineApplicable
                                );
                            }
                        }
                    }
                    true
                }
                // uN <- iM: only `as` casts apply (currently disabled).
                (&ty::Uint(ref exp), &ty::Int(ref found)) => {
                    if can_cast {
                        match (found.bit_width(), exp.bit_width()) {
                            (Some(found), Some(exp)) if found - 1 > exp => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_truncate),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            (None, None) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_sign_extend),
                                    cast_suggestion,
                                    Applicability::MachineApplicable // lossy conversion
                                );
                            }
                            (None, _) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_usize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            (_, None) => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, depending_on_isize),
                                    cast_suggestion,
                                    Applicability::MaybeIncorrect // lossy conversion
                                );
                            }
                            _ => {
                                err.span_suggestion_with_applicability(
                                    expr.span,
                                    &format!("{}, which {}", msg, will_sign_extend),
                                    cast_suggestion,
                                    Applicability::MachineApplicable
                                );
                            }
                        }
                    }
                    true
                }
                // fN <- fM: widening is lossless, narrowing needs a cast.
                (&ty::Float(ref exp), &ty::Float(ref found)) => {
                    if found.bit_width() < exp.bit_width() {
                        suggest_to_change_suffix_or_into(
                            err,
                            None,
                        );
                    } else if can_cast {
                        err.span_suggestion_with_applicability(
                            expr.span,
                            &format!("{}, producing the closest possible value", msg),
                            cast_suggestion,
                            Applicability::MaybeIncorrect // lossy conversion
                        );
                    }
                    true
                }
                // integer <- float: cast only, with a UB warning.
                (&ty::Uint(_), &ty::Float(_)) | (&ty::Int(_), &ty::Float(_)) => {
                    if can_cast {
                        err.span_suggestion_with_applicability(
                            expr.span,
                            &format!("{}, rounding the float towards zero", msg),
                            cast_suggestion,
                            Applicability::MaybeIncorrect // lossy conversion
                        );
                        err.warn("casting here will cause undefined behavior if the rounded value \
                                  cannot be represented by the target integer type, including \
                                  `Inf` and `NaN` (this is a bug and will be fixed)");
                    }
                    true
                }
                (&ty::Float(ref exp), &ty::Uint(ref found)) => {
                    // if `found` is `None` (meaning found is `usize`), don't suggest `.into()`
                    if exp.bit_width() > found.bit_width().unwrap_or(256) {
                        err.span_suggestion_with_applicability(
                            expr.span,
                            &format!("{}, producing the floating point representation of the \
                                      integer",
                                     msg),
                            into_suggestion,
                            Applicability::MachineApplicable
                        );
                    } else if can_cast {
                        err.span_suggestion_with_applicability(expr.span,
                            &format!("{}, producing the floating point representation of the \
                                      integer, rounded if necessary",
                                     msg),
                            cast_suggestion,
                            Applicability::MaybeIncorrect // lossy conversion
                        );
                    }
                    true
                }
                (&ty::Float(ref exp), &ty::Int(ref found)) => {
                    // if `found` is `None` (meaning found is `isize`), don't suggest `.into()`
                    if exp.bit_width() > found.bit_width().unwrap_or(256) {
                        err.span_suggestion_with_applicability(
                            expr.span,
                            &format!("{}, producing the floating point representation of the \
                                      integer",
                                     msg),
                            into_suggestion,
                            Applicability::MachineApplicable
                        );
                    } else if can_cast {
                        err.span_suggestion_with_applicability(
                            expr.span,
                            &format!("{}, producing the floating point representation of the \
                                      integer, rounded if necessary",
                                     msg),
                            cast_suggestion,
                            Applicability::MaybeIncorrect // lossy conversion
                        );
                    }
                    true
                }
                _ => false,
            }
        } else {
            false
        }
    }
}
| 46.059818 | 100 | 0.399068 |
7141e611ff2385db9e604973ee56bf5eff445580
| 1,116 |
use std::{
error,
fmt::{self, Debug, Display},
io, num,
};
#[derive(Debug)]
pub enum Error {
    /// Wrapped I/O failure.
    IoErr(io::Error),
    /// Failed to parse an integer from text.
    ParseNumError(num::ParseIntError),
    /// A narrowing integer conversion failed.
    TryFromIntError(num::TryFromIntError),
    /// Expected input, but none was available.
    NoAvailableInput,
    /// Input was present but not recognized.
    UnknownValue,
}
impl error::Error for Error {}
impl Display for Error {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Error::IoErr(e) => Display::fmt(&*e, f),
Error::ParseNumError(e) => Display::fmt(&*e, f),
Error::TryFromIntError(e) => Display::fmt(&*e, f),
Error::NoAvailableInput => f.write_str("no available input"),
Error::UnknownValue => f.write_str("unknown value"),
}
}
}
impl From<io::Error> for Error {
fn from(other: io::Error) -> Self {
Error::IoErr(other)
}
}
impl From<num::ParseIntError> for Error {
fn from(other: num::ParseIntError) -> Self {
Error::ParseNumError(other)
}
}
impl From<num::TryFromIntError> for Error {
fn from(other: num::TryFromIntError) -> Self {
Error::TryFromIntError(other)
}
}
| 23.744681 | 73 | 0.587814 |
088f0f1b514f9716cc1a589c2f2c1372ec409012
| 217 |
use std::thread;
fn main() {
    // Spawn a worker thread, wait for it to finish, then print its result
    // (the closure returns `()`).
    let worker = thread::spawn(move || {
        println!("Hello from the goblin in the spawned thread!");
    });
    let result = worker.join().unwrap();
    println!("{:?}", result);
}
| 24.111111 | 65 | 0.56682 |
1d0dead1c323743992bb190cc2cfb9b9a3599a54
| 2,354 |
mod config;
mod models;
mod routes;
use std::{io, path::Path, path::PathBuf};
use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{
web::{self, Data},
App, HttpServer,
};
use config::Config;
use sqlx::{migrate::Migrator, postgres::PgPoolOptions, PgPool};
use crate::routes::{
get_paste, get_raw_paste, get_stats, get_total_pastes_badge, get_version_badge, new_paste,
};
// Shared application state, cloned into each actix worker.
#[derive(Clone)]
pub struct AppState {
    // Parsed `config.json` settings.
    pub config: Config,
    // Postgres connection pool shared by all handlers.
    pub pool: PgPool,
}
/// Loads the SQL migrations from `./migrations` and applies any pending
/// ones against `pool`.
pub async fn migrations(pool: &PgPool) -> Result<(), sqlx::Error> {
    let migrator = Migrator::new(Path::new("./migrations")).await?;
    migrator.run(pool).await?;
    Ok(())
}
// Entry point: load config, connect to Postgres, run migrations, then serve.
#[actix_rt::main]
async fn main() -> io::Result<()> {
    let config = config::load(PathBuf::from("config.json"));
    let db_uri = &config.databases.postgres_uri.to_string();
    let pool = PgPoolOptions::new()
        .max_connections(100)
        .connect(db_uri)
        .await
        .expect("Failed to connect to database");
    // Fail fast if the schema cannot be brought up to date.
    migrations(&pool).await.expect("Failed to run migrations");
    let address = format!(
        "{}:{}",
        config.server.backend_host, config.server.backend_port
    );
    // Rate limiting applies only to the paste routes mounted under `/p`.
    let paste_governor = GovernorConfigBuilder::default()
        .per_second(config.ratelimits.seconds_in_between_pastes)
        .burst_size(config.ratelimits.allowed_pastes_before_ratelimit)
        .finish()
        .unwrap();
    let state = AppState { config, pool };
    println!("🚀 zer0bin is running on {address}");
    HttpServer::new(move || {
        // Fully permissive CORS: any origin/method/header, cached for 1h.
        let cors = Cors::default()
            .allow_any_header()
            .allow_any_method()
            .allow_any_origin()
            .send_wildcard()
            .max_age(3600);
        App::new()
            .wrap(cors)
            .app_data(Data::new(state.clone()))
            .service(get_stats)
            .service(
                web::scope("/p")
                    .wrap(Governor::new(&paste_governor))
                    .service(get_paste)
                    .service(new_paste)
                    .service(get_raw_paste),
            )
            .service(
                web::scope("/b")
                    .service(get_version_badge)
                    .service(get_total_pastes_badge),
            )
    })
    .bind(address)?
    .run()
    .await
}
| 25.042553 | 94 | 0.570943 |
eb5d006a32eef87a48fb676813db276fd6b116c5
| 4,617 |
use errors::debugger::Debugger;
use rowan::{GreenNode, GreenNodeBuilder, Language};
use super::event::Event;
use crate::{ast::BoltLanguage,
lexer::{SyntaxKind, Token}};
// Replays parser events over the lexed tokens to build the final green tree.
pub(super) struct Sink<'input, 'l> {
    // Accumulates the green tree as events are replayed.
    builder: GreenNodeBuilder<'static>,
    // The full token stream produced by the lexer.
    lexemes: &'l [Token<'input>],
    // Index of the next lexeme to consume from `lexemes`.
    cursor: usize,
    // Parser events to replay, in order.
    events: Vec<Event<'input>>,
    // Byte offset of the next lexeme within the source text.
    text_cursor: usize,
    // Identifier of the file being parsed, used for diagnostics.
    file: usize,
}
impl<'input, 'l> Sink<'input, 'l> {
pub(super) fn new(events: Vec<Event<'input>>, lexemes: &'l [Token<'input>], file: usize) -> Self {
Self { builder: GreenNodeBuilder::new(),
lexemes,
cursor: 0,
events,
text_cursor: 0,
file }
}
    /// Replays the recorded events in order, interleaving trivia tokens from
    /// the lexer, and returns the finished green tree. Parse errors are
    /// reported through `debugger` as they are encountered.
    pub(super) fn finish(mut self, debugger: &mut Debugger) -> GreenNode {
        for idx in 0..self.events.len() {
            // self.eat_trivia();
            // Take ownership of the event, leaving a placeholder so forward
            // parents can be consumed out of order below.
            let event = std::mem::replace(&mut self.events[idx], Event::Placeholder);
            match event {
                Event::StartNode { kind, forward_parent } => {
                    let mut kinds = vec![kind];
                    let mut idx = idx;
                    let mut forward_parent = forward_parent;
                    // Walk through the forward parent of the forward parent, and the forward parent
                    // of that, and of that, etc. until we reach a StartNode event without a forward
                    // parent.
                    while let Some(fp) = forward_parent {
                        idx += fp;
                        forward_parent = if let Event::StartNode { kind, forward_parent } = std::mem::replace(&mut self.events[idx], Event::Placeholder) {
                            kinds.push(kind);
                            forward_parent
                        } else {
                            unreachable!()
                        };
                    }
                    // Trivia before the root node stays outside the tree.
                    if kinds.first().map(|first| *first != SyntaxKind::Root).unwrap_or(true) {
                        self.eat_trivia();
                    }
                    // Outermost (last collected) parent is opened first.
                    for kind in kinds.into_iter().rev() {
                        self.builder.start_node(BoltLanguage::kind_to_raw(kind));
                    }
                }
                Event::AddToken { kind, text } => {
                    self.eat_trivia();
                    self.token(kind, text)
                }
                Event::FinishNode => {
                    self.builder.finish_node();
                    self.eat_trivia();
                }
                Event::Error(error) => {
                    self.eat_trivia();
                    // Point the diagnostic at the next unconsumed lexeme.
                    let description = format!("{error}, found {}", token_specific(self.peek()));
                    let span = self.next_span();
                    debugger.throw_parse(description, (self.file, span));
                }
                Event::Placeholder => {}
            }
        }
        self.builder.finish()
    }
fn next_span(&self) -> (usize, usize) {
let sz = self.lexemes
.get(self.cursor)
.map(|lexeme| lexeme.source.len())
.unwrap_or(0);
(self.text_cursor, self.text_cursor + sz)
}
    // Emits one token into the tree and advances both cursors past it.
    fn token(&mut self, kind: SyntaxKind, text: &str) {
        self.builder.token(BoltLanguage::kind_to_raw(kind), text);
        self.cursor += 1;
        self.text_cursor += text.len();
    }
fn peek(&self) -> Option<Token> { self.lexemes.get(self.cursor).cloned() }
fn eat_trivia(&mut self) {
while let Some(lexeme) = self.lexemes.get(self.cursor) {
if !lexeme.kind.is_trivia() {
break;
}
self.token(lexeme.kind, lexeme.source);
}
}
}
fn token_specific(token: Option<Token>) -> String {
let Some(token) = token else {
return "<eof>".to_string();
};
match &token.kind {
SyntaxKind::StructKw
| SyntaxKind::ImportKw
| SyntaxKind::FuncKw
| SyntaxKind::InitKw
| SyntaxKind::LetKw
| SyntaxKind::VarKw
| SyntaxKind::IfKw
| SyntaxKind::ElseKw
| SyntaxKind::ReturnKw
| SyntaxKind::StaticKw
| SyntaxKind::PublicKw
| SyntaxKind::InternalKw
| SyntaxKind::FilePrivateKw
| SyntaxKind::PrivateKw
| SyntaxKind::UnderscoreKw => format!("keyword `{}`", token.source),
SyntaxKind::Comment => "comment".to_string(),
SyntaxKind::Whitespace => "whitespace".to_string(),
SyntaxKind::Error => "error".to_string(),
_ => format!("`{}`", token.source),
}
}
| 33.456522 | 154 | 0.492094 |
f8ad1424b4f261c13da3d12037b6579d5c05780a
| 2,932 |
// Select the bindings implementation at compile time: with the `wasm`
// feature the wasm-specific generated bindings are re-exported, otherwise
// the native bindings plus their handwritten impls are used.
#[cfg(feature = "wasm")]
mod wasm_bindings;
#[cfg(feature = "wasm")]
pub use crate::wasm_bindings::*;
#[cfg(not(feature = "wasm"))]
mod bindings;
#[cfg(not(feature = "wasm"))]
pub use crate::bindings::*;
#[cfg(not(feature = "wasm"))]
mod bindings_impl;
#[cfg(not(feature = "wasm"))]
pub use crate::bindings_impl::*;
impl ImVec2 {
#[inline]
pub fn new(x: f32, y: f32) -> ImVec2 {
ImVec2 { x, y }
}
#[inline]
pub fn zero() -> ImVec2 {
ImVec2 { x: 0.0, y: 0.0 }
}
}
impl From<[f32; 2]> for ImVec2 {
    /// Converts `[x, y]` into a vector.
    #[inline]
    fn from(array: [f32; 2]) -> ImVec2 {
        let [x, y] = array;
        ImVec2::new(x, y)
    }
}
impl From<(f32, f32)> for ImVec2 {
    /// Converts `(x, y)` into a vector.
    #[inline]
    fn from((x, y): (f32, f32)) -> ImVec2 {
        ImVec2::new(x, y)
    }
}
// `From<ImVec2>` impls instead of hand-written `Into` impls (clippy
// `from_over_into`): the standard blanket `impl<T, U: From<T>> Into<U> for T`
// still provides `Into<[f32; 2]>` / `Into<(f32, f32)>` for `ImVec2`, so all
// existing `.into()` call sites keep compiling unchanged.
impl From<ImVec2> for [f32; 2] {
    #[inline]
    fn from(v: ImVec2) -> [f32; 2] {
        [v.x, v.y]
    }
}
impl From<ImVec2> for (f32, f32) {
    #[inline]
    fn from(v: ImVec2) -> (f32, f32) {
        (v.x, v.y)
    }
}
impl ImVec4 {
#[inline]
pub fn new(x: f32, y: f32, z: f32, w: f32) -> ImVec4 {
ImVec4 { x, y, z, w }
}
#[inline]
pub fn zero() -> ImVec4 {
ImVec4 {
x: 0.0,
y: 0.0,
z: 0.0,
w: 0.0,
}
}
}
impl From<[f32; 4]> for ImVec4 {
    /// Converts `[x, y, z, w]` into a vector.
    #[inline]
    fn from(array: [f32; 4]) -> ImVec4 {
        let [x, y, z, w] = array;
        ImVec4::new(x, y, z, w)
    }
}
impl From<(f32, f32, f32, f32)> for ImVec4 {
    /// Converts `(x, y, z, w)` into a vector.
    #[inline]
    fn from((x, y, z, w): (f32, f32, f32, f32)) -> ImVec4 {
        ImVec4::new(x, y, z, w)
    }
}
// `From<ImVec4>` impls instead of hand-written `Into` impls (clippy
// `from_over_into`); the blanket impl keeps every `.into()` call site working.
impl From<ImVec4> for [f32; 4] {
    #[inline]
    fn from(v: ImVec4) -> [f32; 4] {
        [v.x, v.y, v.z, v.w]
    }
}
impl From<ImVec4> for (f32, f32, f32, f32) {
    #[inline]
    fn from(v: ImVec4) -> (f32, f32, f32, f32) {
        (v.x, v.y, v.z, v.w)
    }
}
#[test]
fn test_imvec2_memory_layout() {
    // Pins ImVec2's layout (size, alignment, field order) to [f32; 2] —
    // presumably relied on when passing vectors across the C bindings;
    // TODO(review): confirm against the generated bindings.
    use std::mem;
    assert_eq!(mem::size_of::<ImVec2>(), mem::size_of::<[f32; 2]>());
    assert_eq!(mem::align_of::<ImVec2>(), mem::align_of::<[f32; 2]>());
    let test = ImVec2::new(1.0, 2.0);
    let ref_a: &ImVec2 = &test;
    // SAFETY: size and alignment equality are asserted above; the transmuted
    // reference is only used to take element addresses, never to read data.
    let ref_b: &[f32; 2] = unsafe { mem::transmute(&test) };
    assert_eq!(&ref_a.x as *const _, &ref_b[0] as *const _);
    assert_eq!(&ref_a.y as *const _, &ref_b[1] as *const _);
}
#[test]
fn test_imvec4_memory_layout() {
    // Same layout pin as test_imvec2_memory_layout, for the 4-component type.
    use std::mem;
    assert_eq!(mem::size_of::<ImVec4>(), mem::size_of::<[f32; 4]>());
    assert_eq!(mem::align_of::<ImVec4>(), mem::align_of::<[f32; 4]>());
    let test = ImVec4::new(1.0, 2.0, 3.0, 4.0);
    let ref_a: &ImVec4 = &test;
    // SAFETY: size/alignment asserted above; only element addresses are
    // compared through the transmuted reference.
    let ref_b: &[f32; 4] = unsafe { mem::transmute(&test) };
    assert_eq!(&ref_a.x as *const _, &ref_b[0] as *const _);
    assert_eq!(&ref_a.y as *const _, &ref_b[1] as *const _);
    assert_eq!(&ref_a.z as *const _, &ref_b[2] as *const _);
    assert_eq!(&ref_a.w as *const _, &ref_b[3] as *const _);
}
| 23.086614 | 71 | 0.518417 |
5b915e551c02a60c3c299a8e7041a29e4f85b317
| 2,936 |
use steam_tradeoffers::{
TradeOfferManager,
response::{TradeOffer, Asset},
enums::TradeOfferState,
error::Error,
SteamID,
chrono::Duration,
};
use dotenv::dotenv;
use std::env;
/// Collects the `market_hash_name` of every asset, for display/logging.
///
/// Takes `&[Asset]` instead of `&Vec<Asset>` (the more general borrow);
/// existing callers passing `&vec` still work via deref coercion, and the
/// explicit lifetime is now elided.
fn assets_item_names(assets: &[Asset]) -> Vec<&str> {
    assets
        .iter()
        .map(|item| item.classinfo.market_hash_name.as_ref())
        .collect()
}
/// Accepts `offer`, then confirms it on mobile when Steam requires that
/// extra step; returns Ok(()) once the offer is fully accepted.
async fn accept_offer(
    manager: &TradeOfferManager,
    offer: &mut TradeOffer,
) -> Result<(), Error> {
    let response = manager.accept_offer(offer).await?;
    if !response.needs_mobile_confirmation {
        // Nothing more to do — acceptance completed server-side.
        return Ok(());
    }
    manager.confirm_offer(&offer).await
}
/// Parses the `COOKIES` environment variable (format `k=v&k2=v2&…`) into the
/// Steam session id and the full cookie list (the sessionid cookie is kept in
/// the list as well).
///
/// # Panics
/// Panics when `COOKIES` is unset, when the `sessionid` cookie is absent, or
/// when it has no `=`-separated value.
fn get_session() -> (String, Vec<String>) {
    let cookies_str = env::var("COOKIES").expect("COOKIES missing");
    let mut sessionid = None;
    let mut cookies = Vec::new();
    for cookie in cookies_str.split('&') {
        let mut parts = cookie.split('=');
        if parts.next() == Some("sessionid") {
            let value = parts.next().expect("sessionid cookie has no value");
            sessionid = Some(value.to_string());
        }
        cookies.push(cookie.to_string());
    }
    (sessionid.expect("no sessionid cookie in COOKIES"), cookies)
}
/// Reads a 64-bit SteamID from the environment variable named `key`.
///
/// Uses `unwrap_or_else` instead of `expect(&format!(..))` so the panic
/// message is only built on the failure path (clippy `expect_fun_call`),
/// and gives the parse failure a descriptive message too.
///
/// # Panics
/// Panics when the variable is unset or is not a valid `u64`.
fn get_steamid(key: &str) -> SteamID {
    let sid_str = env::var(key)
        .unwrap_or_else(|_| panic!("{} missing", key));
    let sid = sid_str
        .parse::<u64>()
        .unwrap_or_else(|e| panic!("{} is not a valid u64: {}", key, e));
    SteamID::from(sid)
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Load credentials from .env, then build a manager that auto-cancels
    // our own offers after 30 minutes.
    dotenv().ok();
    let steamid = get_steamid("STEAMID");
    let key = env::var("API_KEY").expect("API_KEY missing");
    let manager = TradeOfferManager::builder(steamid, key)
        .identity_secret(String::from("secret"))
        .cancel_duration(Duration::minutes(30))
        .build();
    let (sessionid, cookies) = get_session();
    manager.set_session(&sessionid, &cookies)?;
    // gets changes to trade offers for account
    for (mut offer, old_state) in manager.do_poll(true).await? {
        if let Some(state) = old_state {
            // A previously-seen offer changed state; just report it.
            println!(
                "Offer {} changed state: {} -> {}",
                offer,
                state,
                offer.trade_offer_state
            );
        } else if
            offer.trade_offer_state == TradeOfferState::Active &&
            !offer.is_our_offer
        {
            // A brand-new incoming offer.
            println!("New offer {}", offer);
            println!("Receiving: {:?}", assets_item_names(&offer.items_to_receive));
            println!("Giving: {:?}", assets_item_names(&offer.items_to_give));
            // free items
            if offer.items_to_give.is_empty() {
                if let Err(error) = accept_offer(&manager, &mut offer).await {
                    println!("Error accepting offer {}: {}", offer, error);
                } else {
                    println!("Accepted offer {}", offer);
                }
            }
        }
    }
    Ok(())
}
| 28.504854 | 84 | 0.547343 |
11d00694334e73e379ba9a0961635096f673d649
| 4,377 |
#![warn(rust_2018_idioms, single_use_lifetimes)]
use find_crate::{Dependencies, Manifest};
use semver::{Version, VersionReq};
#[test]
fn dependencies() {
    // Verifies that `Manifest::dependencies` selects which dependency
    // section(s) `find` searches: Default, Dev, Build, or All.
    const MANIFEST: &str = r#"
    [dependencies]
    foo = "0.1"
    [dev-dependencies.foo]
    version = "0.1.1"
    [build-dependencies]
    bar = "0.2"
    "#;
    const NAME1: &str = "foo";
    const NAME2: &str = "bar";
    const NAME3: &str = "baz";
    let mut manifest = Manifest::from_toml(toml::from_str(MANIFEST).unwrap());
    // Default searches [dependencies] only.
    assert_eq!(Dependencies::Default, manifest.dependencies);
    assert_eq!(NAME1, manifest.find(|s| s == NAME1).unwrap().name);
    assert_eq!("0.1", manifest.find(|s| s == NAME1).unwrap().version);
    // Dev picks up the dev-dependencies version of foo instead.
    manifest.dependencies = Dependencies::Dev;
    assert_eq!(NAME1, manifest.find(|s| s == NAME1).unwrap().name);
    assert_eq!("0.1.1", manifest.find(|s| s == NAME1).unwrap().version);
    // Build sees bar but not foo.
    manifest.dependencies = Dependencies::Build;
    assert_eq!(None, manifest.find(|s| s == NAME1));
    assert_eq!(NAME2, manifest.find(|s| s == NAME2).unwrap().name);
    assert_eq!("0.2", manifest.find(|s| s == NAME2).unwrap().version);
    manifest.dependencies = Dependencies::Default;
    assert_eq!(None, manifest.find(|s| s == NAME2));
    // All searches every section; baz exists in none of them.
    manifest.dependencies = Dependencies::All;
    assert_eq!(NAME2, manifest.find(|s| s == NAME2).unwrap().name);
    assert_eq!("0.2", manifest.find(|s| s == NAME2).unwrap().version);
    assert_eq!(None, manifest.find(|s| s == NAME3));
}
#[test]
fn renamed() {
    // Dependencies renamed via `package = "..."` must be found under their
    // original crate name, while the reported name is the (dash-normalized)
    // rename.
    const MANIFEST: &str = r#"
    [dependencies]
    foo-renamed = { package = "foo", version = "0.1" }
    [dependencies.bar_renamed]
    package = "bar"
    version = "0.2"
    "#;
    let manifest = Manifest::from_toml(toml::from_str(MANIFEST).unwrap());
    let foo = manifest.find(|s| s == "foo").unwrap();
    assert_eq!("foo_renamed", foo.name);
    assert_eq!("0.1", foo.version);
    let bar = manifest.find(|s| s == "bar").unwrap();
    assert_eq!("bar_renamed", bar.name);
    assert_eq!("0.2", bar.version);
}
#[test]
fn target() {
    // Target-specific dependency tables ([target.'cfg(...)'] and
    // [target.<triple>]) are searched regardless of whether the cfg/triple
    // applies to the current platform.
    const MANIFEST: &str = r#"
    [target.'cfg(target_os = "linux")'.dependencies]
    foo = "0.1"
    [target.'cfg(target_os = "macos")'.dependencies]
    bar = { version = "0.2" }
    [target.x86_64-unknown-linux-gnu.dependencies.baz]
    version = "0.3"
    "#;
    const NAME1: &str = "foo";
    const NAME2: &str = "bar";
    const NAME3: &str = "baz";
    let manifest = Manifest::from_toml(toml::from_str(MANIFEST).unwrap());
    assert_eq!(NAME1, manifest.find(|s| s == NAME1).unwrap().name);
    assert_eq!("0.1", manifest.find(|s| s == NAME1).unwrap().version);
    assert_eq!(NAME2, manifest.find(|s| s == NAME2).unwrap().name);
    assert_eq!("0.2", manifest.find(|s| s == NAME2).unwrap().version);
    assert_eq!(NAME3, manifest.find(|s| s == NAME3).unwrap().name);
    assert_eq!("0.3", manifest.find(|s| s == NAME3).unwrap().version);
}
#[test]
fn find2() {
    // `find2` exposes the version string to the predicate; `check` treats it
    // as a semver requirement matched against a concrete version.
    fn check(req: &str, version: &Version) -> bool {
        VersionReq::parse(req).unwrap().matches(version)
    }
    const MANIFEST: &str = r#"
    [dependencies]
    foo = "0.1"
    bar = "0.2"
    baz = { path = ".." }
    "#;
    const NAME1: &str = "foo";
    const NAME2: &str = "bar";
    const NAME3: &str = "baz";
    let manifest = Manifest::from_toml(toml::from_str(MANIFEST).unwrap());
    let version = Version::parse("0.2.0").unwrap();
    // Requirement "0.1" does not match 0.2.0, so foo is filtered out.
    assert_eq!(None, manifest.find2(|s, v| s == NAME1 && check(v, &version)));
    assert_eq!(NAME2, manifest.find2(|s, v| s == NAME2 && check(v, &version)).unwrap().name);
    assert_eq!("0.2", manifest.find2(|s, v| s == NAME2 && check(v, &version)).unwrap().version);
    // Path dependencies report version "*", which matches any version.
    assert_eq!(NAME3, manifest.find2(|s, v| s == NAME3 && check(v, &version)).unwrap().name);
    assert_eq!("*", manifest.find2(|s, v| s == NAME3 && check(v, &version)).unwrap().version);
}
#[test]
fn crate_name() {
    // `crate_package` reports the current crate itself, with its name
    // normalized (dashes become underscores).
    const MANIFEST: &str = r#"
    [package]
    name = "crate-name"
    version = "0.1.0"
    "#;
    let manifest = Manifest::from_toml(toml::from_str(MANIFEST).unwrap());
    let package = manifest.crate_package().unwrap();
    assert_eq!(package.name, "crate_name");
    assert_eq!(package.version, "0.1.0");
}
| 30.186207 | 96 | 0.593329 |
750f69fa5088f6fa45898030bfca0d56ee486a81
| 3,697 |
// run-rustfix
#![warn(clippy::match_like_matches_macro)]
#![allow(unreachable_patterns, dead_code, clippy::equatable_if_let)]
// NOTE(review): this is a clippy UI-test fixture for the
// `match_like_matches_macro` lint; every `match` below is deliberately
// written long-form, and the `// Lint` / `// No lint` comments state the
// expected lint outcome. Do not "simplify" the matches — their exact shape
// is the test.
fn main() {
    let x = Some(5);
    // Lint
    let _y = match x {
        Some(0) => true,
        _ => false,
    };
    // Lint
    let _w = match x {
        Some(_) => true,
        _ => false,
    };
    // Turn into is_none
    let _z = match x {
        Some(_) => false,
        None => true,
    };
    // Lint
    let _zz = match x {
        Some(r) if r == 0 => false,
        _ => true,
    };
    // Lint
    let _zzz = if let Some(5) = x { true } else { false };
    // No lint
    let _a = match x {
        Some(_) => false,
        _ => false,
    };
    // No lint
    let _ab = match x {
        Some(0) => false,
        _ => true,
        // Arm after `_` is dead; permitted by allow(unreachable_patterns).
        None => false,
    };
    enum E {
        A(u32),
        B(i32),
        C,
        D,
    }
    let x = E::A(2);
    {
        // lint
        let _ans = match x {
            E::A(_) => true,
            E::B(_) => true,
            _ => false,
        };
    }
    {
        // lint
        let _ans = match x {
            E::B(_) => false,
            E::C => false,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(_) => false,
            E::B(_) => false,
            E::C => true,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(_) => true,
            E::B(_) => false,
            E::C => false,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(a) if a < 10 => false,
            E::B(a) if a < 10 => false,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(_) => false,
            E::B(a) if a < 10 => false,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(a) => a == 10,
            E::B(_) => false,
            _ => true,
        };
    }
    {
        // no lint
        let _ans = match x {
            E::A(_) => false,
            E::B(_) => true,
            _ => false,
        };
    }
    {
        // should print "z" in suggestion (#6503)
        let z = &Some(3);
        let _z = match &z {
            Some(3) => true,
            _ => false,
        };
    }
    {
        // this could also print "z" in suggestion..?
        let z = Some(3);
        let _z = match &z {
            Some(3) => true,
            _ => false,
        };
    }
    {
        enum AnEnum {
            X,
            Y,
        }
        fn foo(_x: AnEnum) {}
        fn main() {
            let z = AnEnum::X;
            // we can't remove the reference here!
            let _ = match &z {
                AnEnum::X => true,
                _ => false,
            };
            foo(z);
        }
    }
    {
        struct S(i32);
        fn fun(_val: Option<S>) {}
        let val = Some(S(42));
        // we need the reference here because later val is consumed by fun()
        let _res = match &val {
            &Some(ref _a) => true,
            _ => false,
        };
        fun(val);
    }
    {
        struct S(i32);
        fn fun(_val: Option<S>) {}
        let val = Some(S(42));
        let _res = match &val {
            &Some(ref _a) => true,
            _ => false,
        };
        fun(val);
    }
    {
        enum E {
            A,
            B,
            C,
        }
        let _ = match E::A {
            E::B => true,
            #[cfg(feature = "foo")]
            E::A => true,
            _ => false,
        };
    }
}
| 18.485 | 76 | 0.328645 |
bbd29ce7f8704d13a7833639c13e2498709535f1
| 637 |
//! Displays a [`TabBar`](TabBar) to select the content to be displayed.
//!
//! You have to manage the logic to show the content by yourself, or you may want
//! to use the [`Tabs`](super::tabs) widget instead.
//!
//! *This API requires the following crate features to be activated: `tab_bar`*
use crate::native::tab_bar;
pub use crate::style::tab_bar::{Style, StyleSheet};
use iced_graphics::Renderer;
pub use tab_bar::tab_label::TabLabel;
/// A tab bar to show tabs.
///
/// This is an alias of an `iced_native` `TabBar` with an
/// `iced_graphics::Renderer` for the chosen `Backend`.
pub type TabBar<Message, Backend> = tab_bar::TabBar<Message, Renderer<Backend>>;
| 39.8125 | 80 | 0.715856 |
f43c7b105595a06a8d494cde2208f1d471a7bb17
| 36,771 |
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod framed_stream;
pub(crate) use self::framed_stream::{
FrameType, FramedStreamReader, FramedStreamWriter, MessageStats, ReadNextFrame,
};
use crate::{
coding::{self, decode_fidl_with_context, encode_fidl_with_context},
future_help::Observer,
labels::{ConnectionId, Endpoint, NodeId, TransferKey},
link::{LinkRouting, OutputQueue, RoutingDestination, RoutingTarget},
router::{ForwardingTable, FoundTransfer, Router},
};
use anyhow::{bail, format_err, Context as _, Error};
use async_utils::mutex_ticket::MutexTicket;
use cutex::{CutexGuard, CutexTicket};
use fidl::{Channel, HandleBased};
use fidl_fuchsia_overnet::ConnectionInfo;
use fidl_fuchsia_overnet_protocol::{
ChannelHandle, ConfigRequest, ConfigResponse, ConnectToService, ConnectToServiceOptions,
OpenTransfer, PeerConnectionDiagnosticInfo, PeerDescription, PeerMessage, PeerReply, StreamId,
ZirconHandle,
};
use fuchsia_async::{Task, TimeoutExt};
use futures::{
channel::{mpsc, oneshot},
lock::Mutex,
prelude::*,
ready,
};
use quic::{
AsyncConnection, AsyncQuicStreamReader, AsyncQuicStreamWriter, ConnState, ReadExact,
StreamProperties,
};
use std::{
convert::TryInto,
sync::{
atomic::{AtomicBool, Ordering},
Arc, Weak,
},
task::{Context, Poll},
time::Duration,
};
/// Connection configuration negotiated between the two peers over the control
/// stream. Currently carries no options; the exchange exists so options can be
/// added compatibly later.
#[derive(Debug)]
struct Config {}
impl Config {
    /// Server side: consume the client's request and produce the response to
    /// send back. No negotiable options yet, so both sides are empty.
    fn negotiate(_request: ConfigRequest) -> (Self, ConfigResponse) {
        (Config {}, ConfigResponse::EMPTY)
    }
    /// Client side: build the local config from the server's response.
    fn from_response(_response: ConfigResponse) -> Self {
        Config {}
    }
}
/// Requests handed to the client connection's command loop, which serializes
/// them onto the control stream.
#[derive(Debug)]
enum ClientPeerCommand {
    /// Ask the remote node to connect a new stream to one of its services.
    ConnectToService(ConnectToService),
    /// Open a transfer: (stream id, transfer key, ack sender signalled once
    /// the request has been written to the control stream).
    OpenTransfer(u64, TransferKey, oneshot::Sender<()>),
}
/// Per-message-type send statistics for one peer connection, surfaced via
/// the diagnostics service.
#[derive(Default)]
pub struct PeerConnStats {
    pub config: MessageStats,
    pub connect_to_service: MessageStats,
    pub update_node_description: MessageStats,
    pub update_link_status: MessageStats,
    pub update_link_status_ack: MessageStats,
    pub open_transfer: MessageStats,
    pub ping: MessageStats,
    pub pong: MessageStats,
}
/// Owned handle pairing a peer's QUIC connection with that peer's node id.
#[derive(Clone)]
pub(crate) struct PeerConn {
    conn: Arc<AsyncConnection>,
    node_id: NodeId,
}
impl std::fmt::Debug for PeerConn {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Delegate to PeerConnRef so owned and borrowed handles render alike.
        self.as_ref().fmt(f)
    }
}
impl PeerConn {
    pub fn from_quic(conn: Arc<AsyncConnection>, node_id: NodeId) -> Self {
        PeerConn { conn, node_id }
    }
    /// Borrowed view of this connection (no refcount bump).
    pub fn as_ref(&self) -> PeerConnRef<'_> {
        PeerConnRef { conn: &self.conn, node_id: self.node_id }
    }
    /// Identifier used to correlate log lines for this connection.
    pub fn trace_id(&self) -> &str {
        self.conn.trace_id()
    }
}
/// Borrowed counterpart of [`PeerConn`]: a reference to the QUIC connection
/// plus the peer's node id. `Copy`, so it is freely passed by value.
#[derive(Clone, Copy)]
pub(crate) struct PeerConnRef<'a> {
    conn: &'a Arc<AsyncConnection>,
    node_id: NodeId,
}
impl<'a> std::fmt::Debug for PeerConnRef<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "PeerConn({}; {})", self.node_id.0, self.conn.trace_id())
    }
}
impl<'a> PeerConnRef<'a> {
    pub fn from_quic(conn: &'a Arc<AsyncConnection>, node_id: NodeId) -> Self {
        PeerConnRef { conn, node_id }
    }
    /// Upgrade to an owned handle (clones the inner `Arc`).
    /// NOTE(review): named `into_*` but takes `&self`; `to_peer_conn` would
    /// match Rust naming conventions — kept as-is for existing callers.
    pub fn into_peer_conn(&self) -> PeerConn {
        PeerConn { conn: self.conn.clone(), node_id: self.node_id }
    }
    pub fn trace_id(&self) -> &str {
        self.conn.trace_id()
    }
    /// Whether this end dialed (Client) or accepted (Server) the connection.
    pub fn endpoint(&self) -> Endpoint {
        self.conn.endpoint()
    }
    pub fn peer_node_id(&self) -> NodeId {
        self.node_id
    }
    /// Allocate a new outgoing unidirectional framed stream.
    pub fn alloc_uni(&self) -> FramedStreamWriter {
        FramedStreamWriter::from_quic(self.conn.alloc_uni(), self.node_id)
    }
    /// Allocate a new outgoing bidirectional framed stream pair.
    pub fn alloc_bidi(&self) -> (FramedStreamWriter, FramedStreamReader) {
        let (w, r) = self.conn.alloc_bidi();
        (
            FramedStreamWriter::from_quic(w, self.node_id),
            FramedStreamReader::from_quic(r, self.node_id),
        )
    }
    /// Bind a reader to an incoming unidirectional stream with a known id.
    pub fn bind_uni_id(&self, id: u64) -> FramedStreamReader {
        FramedStreamReader::from_quic(self.conn.bind_uni_id(id), self.node_id)
    }
    /// Bind a writer/reader pair to a bidirectional stream with a known id.
    pub fn bind_id(&self, id: u64) -> (FramedStreamWriter, FramedStreamReader) {
        let (w, r) = self.conn.bind_id(id);
        (
            FramedStreamWriter::from_quic(w, self.node_id),
            FramedStreamReader::from_quic(r, self.node_id),
        )
    }
}
/// One overnet peer: the QUIC connection to it plus the background task that
/// services it (QUIC driving, link routing, and the control stream).
pub(crate) struct Peer {
    node_id: NodeId,
    endpoint: Endpoint,
    conn_id: ConnectionId,
    /// The QUIC connection itself
    conn: Arc<AsyncConnection>,
    /// Client peers only: sender into the command loop that writes
    /// ConnectToService/OpenTransfer requests onto the control stream;
    /// None for server peers.
    commands: Option<mpsc::Sender<ClientPeerCommand>>,
    conn_stats: Arc<PeerConnStats>,
    channel_proxy_stats: Arc<MessageStats>,
    /// Owns the per-peer service future; dropped together with the Peer.
    _task: Task<()>,
    shutdown: AtomicBool,
}
impl std::fmt::Debug for Peer {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.debug_id().fmt(f)
    }
}
/// Future to perform one send from a peer to its current link.
struct OneSend<'a> {
    /// Link upon which to send.
    link: &'a LinkRouting,
    /// QUIC connection that is forming frames to send.
    conn: PeerConnRef<'a>,
    /// Current lock state.
    state: OneSendState<'a>,
}
/// Lock state to perform one send. This is trickier than I'd like.
/// To send we need to acquire both of the link and then the connection locks (in order).
/// The link lock needs to be acquired *only when* the link is ready to send a frame.
/// The connection lock may or may not yield a frame to be sent.
/// We don't want to occupy the link lock for longer than is necessary however, otherwise
/// other peers will be starved.
/// So:
///   1. we acquire a ticket to wait for the link lock (in a sendable state)
///   2. once we have the link locked, we try the same for the QUIC connection
///   3. once acquired, we try to pull a frame from it
///      on success - we update the link state and relinquish the locks
///      on failure - we're queued as a waiter against the connection for fresh data
///                   and we also relinquish the locks; when the connection is believed
///                   to have fresh data, we'll be awoken and enter stage 1 again.
#[derive(Debug)]
enum OneSendState<'a> {
    /// Not holding or awaiting any lock (initial state).
    Idle,
    /// Waiting for the link's output queue to become lockable and sendable.
    LockingLink(CutexTicket<'a, 'static, OutputQueue>),
    /// Holding the link lock; waiting to lock the QUIC connection state.
    LockingConn(CutexGuard<'a, OutputQueue>, MutexTicket<'a, ConnState>),
}
impl<'a> OneSend<'a> {
    /// Advance the lock state machine; resolves Ready(Ok) once one frame has
    /// been pulled from the connection and committed to the link.
    fn poll(&mut self, ctx: &mut Context<'_>) -> Poll<Result<(), Error>> {
        // Take the state out (leaving Idle) so each handler decides what
        // state to store back before returning Pending.
        match std::mem::replace(&mut self.state, OneSendState::Idle) {
            OneSendState::Idle => self.poll_idle(ctx),
            OneSendState::LockingLink(poll_cutex) => self.poll_locking_link(ctx, poll_cutex),
            OneSendState::LockingConn(cutex_guard, poll_mutex) => {
                self.poll_locking_conn(ctx, cutex_guard, poll_mutex)
            }
        }
    }
    /// We're being polled and we aren't currently acquiring a lock... begin to acquire the
    /// link lock and progress to the next state.
    fn poll_idle(&mut self, ctx: &mut Context<'_>) -> Poll<Result<(), Error>> {
        self.poll_locking_link(ctx, self.link.new_message_send_ticket())
    }
    /// We're being polled while trying to acquire the link lock - poll against that
    /// on ready - move on to polling the connection lock
    /// on pending - wait longer to lock the link
    fn poll_locking_link(
        &mut self,
        ctx: &mut Context<'_>,
        mut poll_cutex: CutexTicket<'a, 'static, OutputQueue>,
    ) -> Poll<Result<(), Error>> {
        match poll_cutex.poll(ctx) {
            Poll::Pending => {
                self.state = OneSendState::LockingLink(poll_cutex);
                Poll::Pending
            }
            Poll::Ready(cutex_guard) => {
                self.poll_locking_conn(ctx, cutex_guard, self.conn.conn.poll_lock_state())
            }
        }
    }
    /// We're being polled while holding the link lock and attempting to acquire the connection
    /// lock.
    /// Poll the connection lock.
    /// If we acquire it, try to pull a frame and send it via the link, then relinquish both locks
    /// and move back to being idle.
    /// If we cannot yet acquire the connection lock continue trying to do so.
    fn poll_locking_conn(
        &mut self,
        ctx: &mut Context<'_>,
        mut cutex_guard: CutexGuard<'a, OutputQueue>,
        mut poll_mutex: MutexTicket<'a, ConnState>,
    ) -> Poll<Result<(), Error>> {
        match poll_mutex.poll(ctx) {
            Poll::Pending => {
                self.state = OneSendState::LockingConn(cutex_guard, poll_mutex);
                Poll::Pending
            }
            Poll::Ready(mut mutex_guard) => {
                let mut send = cutex_guard.send(self.routing_target())?;
                // Some(n) => n bytes of frame were produced; None => the
                // QUIC connection has closed and this sender should stop.
                if let Some(n) = ready!(mutex_guard.poll_send(ctx, send.buffer()))? {
                    send.commit(n);
                    Poll::Ready(Ok(()))
                } else {
                    Poll::Ready(Err(format_err!(
                        "QUIC connection {:?} closed",
                        self.conn.trace_id()
                    )))
                }
            }
        }
    }
    /// Routing header for frames sent on behalf of this peer connection.
    fn routing_target(&self) -> RoutingTarget {
        RoutingTarget {
            src: self.link.own_node_id(),
            dst: RoutingDestination::Message(self.conn.node_id),
        }
    }
}
/// Task to send frames produced by a peer to a designated link.
/// Effectively an infinite loop around `OneSend`; exits on the first error.
async fn peer_to_link(conn: PeerConn, link: Arc<LinkRouting>) {
    loop {
        // OneSend is single-shot: build a fresh one per frame so the lock
        // state machine always starts from Idle.
        let mut one_send = OneSend { link: &*link, conn: conn.as_ref(), state: OneSendState::Idle };
        if let Err(e) = future::poll_fn(move |ctx| one_send.poll(ctx)).await {
            log::warn!(
                "Sender for {:?} on link {:?} failed: {:?}",
                conn.trace_id(),
                link.debug_id(),
                e
            );
            break;
        }
    }
}
/// Error from the run loops for a peer (client or server) - captures a little semantic detail
/// to help direct reactions to this peer disappearing.
#[derive(Debug)]
enum RunnerError {
    /// No route to the peer appeared within the grace period.
    NoRouteToPeer,
    /// The owning Router was dropped out from under the run loop.
    RouterGone,
    /// The remote end finished the control stream.
    ConnectionClosed,
    /// A frame type arrived that is not legal on the control stream.
    BadFrameType(FrameType),
    /// Failure during the initial header/config exchange.
    HandshakeError(Error),
    /// Failure while servicing control-stream traffic.
    ServiceError(Error),
    /// The underlying QUIC connection failed.
    ConnectionFailed(Error),
}
/// Returns the next link a peer should send to (as forwarding tables change).
/// If a peer becomes unroutable, returns an error (after verifying for a short time).
async fn next_link(
    router: &Weak<Router>,
    peer: NodeId,
    observer: &mut Observer<ForwardingTable>,
) -> Result<Arc<LinkRouting>, RunnerError> {
    // Helper: pulls the next forwarding table, and returns Some(link) if the peer is routable,
    // or None if it is not.
    async fn maybe_next_link(
        router: &Weak<Router>,
        peer: NodeId,
        observer: &mut Observer<ForwardingTable>,
    ) -> Result<Option<Arc<LinkRouting>>, RunnerError> {
        let get_router = move || Weak::upgrade(&router).ok_or(RunnerError::RouterGone);
        let forwarding_table = observer.next().await.ok_or(RunnerError::RouterGone)?;
        if let Some(node_link_id) = forwarding_table.route_for(peer) {
            return Ok(get_router()?.get_link(node_link_id).await);
        }
        Ok(None)
    }
    if let Some(new_link) = maybe_next_link(router, peer, observer).await? {
        return Ok(new_link);
    }
    // Route flap prevention: if we observe no new route, keep looking for a short time to see if
    // one reappears before dropping the peer (and consequently any channels it carries).
    async move {
        loop {
            if let Some(new_link) = maybe_next_link(router, peer, observer).await? {
                return Ok(new_link);
            }
        }
    }
    // Give the route 5 seconds to reappear before declaring the peer gone.
    .on_timeout(Duration::from_secs(5), || Err(RunnerError::NoRouteToPeer))
    .await
}
/// Ensure connectivity to a peer.
/// Update the peer with a new link whenever needed.
/// Fail if there's no connectivity to a peer.
async fn check_connectivity(router: Weak<Router>, conn: PeerConn) -> Result<(), RunnerError> {
    // Current sender task paired with the link it sends on; replacing the
    // tuple drops (and thereby retires) the previous sender task.
    let mut sender_and_current_link: Option<(Task<()>, Arc<LinkRouting>)> = None;
    let mut observer = Weak::upgrade(&router)
        .ok_or_else(|| RunnerError::RouterGone)?
        .new_forwarding_table_observer();
    loop {
        let new_link = next_link(&router, conn.node_id, &mut observer).await?;
        // Only restart the sender when the routed link actually changed.
        if sender_and_current_link
            .as_ref()
            .map(|sender_and_current_link| !Arc::ptr_eq(&sender_and_current_link.1, &new_link))
            .unwrap_or(true)
        {
            log::trace!(
                "Peer {:?} route set to {:?} from {:?}",
                conn,
                new_link.debug_id(),
                sender_and_current_link.map(|s_and_l| s_and_l.1.debug_id())
            );
            sender_and_current_link =
                Some((Task::spawn(peer_to_link(conn.clone(), new_link.clone())), new_link));
        }
    }
}
impl Peer {
    pub(crate) fn node_id(&self) -> NodeId {
        self.node_id
    }
    pub(crate) fn endpoint(&self) -> Endpoint {
        self.endpoint
    }
    /// Compact (node, endpoint, connection) triple for logging and equality.
    pub(crate) fn debug_id(&self) -> impl std::fmt::Debug + std::cmp::PartialEq {
        (self.node_id, self.endpoint, self.conn_id)
    }
    /// Construct a new client peer - spawns tasks to handle making control stream requests, and
    /// publishing link metadata
    pub(crate) fn new_client(
        node_id: NodeId,
        conn_id: ConnectionId,
        config: &mut quiche::Config,
        service_observer: Observer<Vec<String>>,
        router: &Arc<Router>,
    ) -> Result<Arc<Self>, Error> {
        log::trace!(
            "[{:?}] NEW CLIENT: peer={:?} conn_id={:?}",
            router.node_id(),
            node_id,
            conn_id,
        );
        let (command_sender, command_receiver) = mpsc::channel(1);
        let conn = AsyncConnection::connect(None, &conn_id.to_array(), config)?;
        let conn_stats = Arc::new(PeerConnStats::default());
        // The first client-allocated bidirectional stream becomes the control
        // stream; it must receive id 0.
        let (conn_stream_writer, conn_stream_reader) = conn.alloc_bidi();
        assert_eq!(conn_stream_writer.id(), 0);
        Ok(Arc::new(Self {
            endpoint: Endpoint::Client,
            node_id,
            conn_id,
            commands: Some(command_sender.clone()),
            conn_stats: conn_stats.clone(),
            channel_proxy_stats: Arc::new(MessageStats::default()),
            shutdown: AtomicBool::new(false),
            // One owned task drives the QUIC connection, routes its frames to
            // links, and services the client control stream concurrently.
            _task: Task::spawn(Peer::runner(
                Endpoint::Client,
                Arc::downgrade(router),
                conn_id,
                futures::future::try_join3(
                    conn.clone().run().map_err(RunnerError::ConnectionFailed),
                    check_connectivity(
                        Arc::downgrade(router),
                        PeerConn::from_quic(conn.clone(), node_id),
                    ),
                    client_conn_stream(
                        Arc::downgrade(router),
                        node_id,
                        conn_stream_writer,
                        conn_stream_reader,
                        command_receiver,
                        service_observer,
                        conn_stats,
                    ),
                )
                .map_ok(drop),
            )),
            conn,
        }))
    }
    /// Construct a new server peer - spawns tasks to handle responding to control stream requests
    pub(crate) fn new_server(
        node_id: NodeId,
        conn_id: ConnectionId,
        config: &mut quiche::Config,
        router: &Arc<Router>,
    ) -> Result<Arc<Self>, Error> {
        log::trace!(
            "[{:?}] NEW SERVER: peer={:?} conn_id={:?}",
            router.node_id(),
            node_id,
            conn_id,
        );
        let conn = AsyncConnection::accept(&conn_id.to_array(), config)?;
        let conn_stats = Arc::new(PeerConnStats::default());
        // The server binds to (rather than allocates) control stream id 0.
        let (conn_stream_writer, conn_stream_reader) = conn.bind_id(0);
        let channel_proxy_stats = Arc::new(MessageStats::default());
        Ok(Arc::new(Self {
            endpoint: Endpoint::Server,
            node_id,
            conn_id,
            // Server peers never issue client commands.
            commands: None,
            conn_stats: conn_stats.clone(),
            channel_proxy_stats: channel_proxy_stats.clone(),
            shutdown: AtomicBool::new(false),
            _task: Task::spawn(Peer::runner(
                Endpoint::Server,
                Arc::downgrade(router),
                conn_id,
                futures::future::try_join3(
                    conn.clone().run().map_err(RunnerError::ConnectionFailed),
                    check_connectivity(
                        Arc::downgrade(router),
                        PeerConn::from_quic(conn.clone(), node_id),
                    ),
                    server_conn_stream(
                        node_id,
                        conn_stream_writer,
                        conn_stream_reader,
                        Arc::downgrade(router),
                        conn_stats,
                        channel_proxy_stats,
                    ),
                )
                .map_ok(drop),
            )),
            conn,
        }))
    }
    /// Wraps the peer's composite service future: logs its outcome and
    /// removes the peer from the router once it completes.
    async fn runner(
        endpoint: Endpoint,
        router: Weak<Router>,
        conn_id: ConnectionId,
        f: impl Future<Output = Result<(), RunnerError>>,
    ) {
        let result = f.await;
        let get_router_node_id = || {
            Weak::upgrade(&router).map(|r| format!("{:?}", r.node_id())).unwrap_or_else(String::new)
        };
        if let Err(e) = &result {
            log::info!(
                "[{} conn:{:?}] {:?} runner error: {:?}",
                get_router_node_id(),
                conn_id,
                endpoint,
                e
            );
        } else {
            log::trace!(
                "[{} conn:{:?}] {:?} finished successfully",
                get_router_node_id(),
                conn_id,
                endpoint
            );
        }
        if let Some(router) = Weak::upgrade(&router) {
            // The bool tells the router whether the peer died for lack of a
            // route (NoRouteToPeer) as opposed to any other failure.
            router.remove_peer(conn_id, matches!(result, Err(RunnerError::NoRouteToPeer))).await;
        }
    }
    /// Mark the peer as shutting down, then close the QUIC connection.
    pub async fn shutdown(&self) {
        self.shutdown.store(true, Ordering::Release);
        self.conn.close().await
    }
    /// Feed one received datagram into the QUIC connection.
    pub async fn receive_frame(&self, frame: &mut [u8]) -> Result<(), Error> {
        self.conn.recv(frame).await
    }
    /// Proxy `chan` over this connection and ask the remote node to attach it
    /// to `service`. Client peers only (relies on the command channel).
    pub async fn new_stream(
        &self,
        service: &str,
        chan: Channel,
        router: &Arc<Router>,
    ) -> Result<(), Error> {
        if let ZirconHandle::Channel(ChannelHandle { stream_ref, rights }) = router
            .send_proxied(
                chan.into_handle(),
                self.peer_conn_ref(),
                self.channel_proxy_stats.clone(),
            )
            .await?
        {
            self.commands
                .as_ref()
                .unwrap()
                .clone()
                .send(ClientPeerCommand::ConnectToService(ConnectToService {
                    service_name: service.to_string(),
                    stream_ref,
                    rights,
                    options: ConnectToServiceOptions::EMPTY,
                }))
                .await?;
            Ok(())
        } else {
            // send_proxied of a channel handle always yields a channel handle.
            unreachable!();
        }
    }
    /// Allocate a stream pair and ask the remote node (via the command loop)
    /// to open a transfer keyed by `transfer_key` on it. Returns None if the
    /// command loop has shut down.
    pub async fn send_open_transfer(
        &self,
        transfer_key: TransferKey,
    ) -> Option<(FramedStreamWriter, FramedStreamReader)> {
        let io = self.peer_conn_ref().alloc_bidi();
        let (tx, rx) = oneshot::channel();
        self.commands
            .as_ref()
            .unwrap()
            .clone()
            .send(ClientPeerCommand::OpenTransfer(io.0.id(), transfer_key, tx))
            .await
            .ok()?;
        // Wait until the request has been written to the control stream
        // before handing the streams to the caller.
        rx.await.ok()?;
        Some(io)
    }
    fn peer_conn_ref(&self) -> PeerConnRef<'_> {
        PeerConnRef::from_quic(&self.conn, self.node_id)
    }
    /// Snapshot of connection statistics for the diagnostics service.
    pub async fn diagnostics(&self, source_node_id: NodeId) -> PeerConnectionDiagnosticInfo {
        let stats = self.conn.stats().await;
        PeerConnectionDiagnosticInfo {
            source: Some(source_node_id.into()),
            destination: Some(self.node_id.into()),
            is_client: Some(self.endpoint == Endpoint::Client),
            is_established: Some(self.conn.is_established().await),
            received_packets: Some(stats.recv as u64),
            sent_packets: Some(stats.sent as u64),
            lost_packets: Some(stats.lost as u64),
            messages_sent: Some(self.channel_proxy_stats.sent_messages()),
            bytes_sent: Some(self.channel_proxy_stats.sent_bytes()),
            connect_to_service_sends: Some(self.conn_stats.connect_to_service.sent_messages()),
            connect_to_service_send_bytes: Some(self.conn_stats.connect_to_service.sent_bytes()),
            update_node_description_sends: Some(
                self.conn_stats.update_node_description.sent_messages(),
            ),
            update_node_description_send_bytes: Some(
                self.conn_stats.update_node_description.sent_bytes(),
            ),
            update_link_status_sends: Some(self.conn_stats.update_link_status.sent_messages()),
            update_link_status_send_bytes: Some(self.conn_stats.update_link_status.sent_bytes()),
            update_link_status_ack_sends: Some(
                self.conn_stats.update_link_status_ack.sent_messages(),
            ),
            update_link_status_ack_send_bytes: Some(
                self.conn_stats.update_link_status_ack.sent_bytes(),
            ),
            round_trip_time_microseconds: Some(
                stats.rtt.as_micros().try_into().unwrap_or(std::u64::MAX),
            ),
            congestion_window_bytes: Some(stats.cwnd as u64),
            ..PeerConnectionDiagnosticInfo::EMPTY
        }
    }
}
/// Client side of the control-stream handshake: exchange FIDL magic headers
/// and a ConfigRequest/ConfigResponse pair, then return the stream wrapped in
/// framed reader/writer form. The first send is allowed 60s (the QUIC
/// connection may still be establishing); the rest of the handshake gets 20s.
async fn client_handshake(
    my_node_id: NodeId,
    peer_node_id: NodeId,
    mut conn_stream_writer: AsyncQuicStreamWriter,
    mut conn_stream_reader: AsyncQuicStreamReader,
    conn_stats: Arc<PeerConnStats>,
) -> Result<(FramedStreamWriter, FramedStreamReader), Error> {
    log::trace!("[{:?} clipeer:{:?}] client connection stream started", my_node_id, peer_node_id);
    // Send FIDL header
    log::trace!("[{:?} clipeer:{:?}] send fidl header", my_node_id, peer_node_id);
    conn_stream_writer
        .send(&mut [0, 0, 0, fidl::encoding::MAGIC_NUMBER_INITIAL], false)
        .on_timeout(Duration::from_secs(60), || {
            Err(format_err!("timeout initializing quic connection"))
        })
        .await?;
    async move {
        log::trace!("[{:?} clipeer:{:?}] send config request", my_node_id, peer_node_id);
        // Send config request
        let mut conn_stream_writer =
            FramedStreamWriter::from_quic(conn_stream_writer, peer_node_id);
        let coding_context = coding::DEFAULT_CONTEXT;
        conn_stream_writer
            .send(
                FrameType::Data(coding_context),
                &encode_fidl_with_context(coding_context, &mut ConfigRequest::EMPTY.clone())?,
                false,
                &conn_stats.config,
            )
            .await?;
        // Receive FIDL header
        log::trace!("[{:?} clipeer:{:?}] read fidl header", my_node_id, peer_node_id);
        let mut fidl_hdr = [0u8; 4];
        conn_stream_reader.read_exact(&mut fidl_hdr).await.context("reading FIDL header")?;
        // Await config response
        log::trace!("[{:?} clipeer:{:?}] read config", my_node_id, peer_node_id);
        let mut conn_stream_reader =
            FramedStreamReader::from_quic(conn_stream_reader, peer_node_id);
        // The negotiated config currently carries no data, so the result is
        // discarded — decoding it still validates the server's response.
        let _ = Config::from_response(
            if let (FrameType::Data(coding_context), mut bytes, false) =
                conn_stream_reader.next().await?
            {
                decode_fidl_with_context(coding_context, &mut bytes)?
            } else {
                bail!("Failed to read config response")
            },
        );
        log::trace!("[{:?} clipeer:{:?}] handshake completed", my_node_id, peer_node_id);
        Ok((conn_stream_writer, conn_stream_reader))
    }
    .on_timeout(Duration::from_secs(20), || Err(format_err!("timeout performing handshake")))
    .await
}
/// RAII registration of a client connection in the router's service map;
/// deregistration happens on drop (via a spawned task, since Drop is sync).
struct TrackClientConnection {
    router: Weak<Router>,
    node_id: NodeId,
}
impl TrackClientConnection {
    async fn new(router: &Arc<Router>, node_id: NodeId) -> TrackClientConnection {
        router.service_map().add_client_connection(node_id).await;
        TrackClientConnection { router: Arc::downgrade(router), node_id }
    }
}
impl Drop for TrackClientConnection {
    fn drop(&mut self) {
        if let Some(router) = Weak::upgrade(&self.router) {
            let node_id = self.node_id;
            // Drop cannot await, so detach a task to do the async removal.
            Task::spawn(
                async move { router.service_map().remove_client_connection(node_id).await },
            )
            .detach();
        }
    }
}
/// Service loop for the client side of a peer's control stream: performs the
/// handshake, then concurrently (1) writes queued commands, (2) reads and
/// dispatches incoming frames, and (3) publishes local service-list updates.
/// Returns when any of the three arms fails or the stream is finished.
async fn client_conn_stream(
    router: Weak<Router>,
    peer_node_id: NodeId,
    conn_stream_writer: AsyncQuicStreamWriter,
    conn_stream_reader: AsyncQuicStreamReader,
    mut commands: mpsc::Receiver<ClientPeerCommand>,
    mut services: Observer<Vec<String>>,
    conn_stats: Arc<PeerConnStats>,
) -> Result<(), RunnerError> {
    let get_router = move || Weak::upgrade(&router).ok_or_else(|| RunnerError::RouterGone);
    let my_node_id = get_router()?.node_id();
    let (conn_stream_writer, mut conn_stream_reader) = client_handshake(
        my_node_id,
        peer_node_id,
        conn_stream_writer,
        conn_stream_reader,
        conn_stats.clone(),
    )
    .map_err(RunnerError::HandshakeError)
    .await?;
    // Registers this connection in the service map for the lifetime of the
    // loop; dropping the guard deregisters it.
    let _track_connection = TrackClientConnection::new(&get_router()?, peer_node_id).await;
    let on_link_status_ack = &Mutex::new(None);
    // The writer is shared (behind a mutex) between the command arm and the
    // service-description arm below.
    let conn_stream_writer = &Mutex::new(conn_stream_writer);
    let cmd_conn_stats = conn_stats.clone();
    let svc_conn_stats = conn_stats;
    let _: ((), (), ()) = futures::future::try_join3(
        // Arm 1: serialize queued ClientPeerCommands onto the stream.
        async move {
            while let Some(command) = commands.next().await {
                log::trace!(
                    "[{:?} clipeer:{:?}] handle command: {:?}",
                    my_node_id,
                    peer_node_id,
                    command
                );
                client_conn_handle_command(
                    command,
                    &mut *conn_stream_writer.lock().await,
                    cmd_conn_stats.clone(),
                )
                .await?;
            }
            log::trace!("[{:?} clipeer:{:?}] done commands", my_node_id, peer_node_id);
            Ok(())
        }
        .map_err(RunnerError::ServiceError),
        // Arm 2: read incoming frames; only Data frames are legal here.
        async move {
            loop {
                let (frame_type, mut bytes, fin) =
                    conn_stream_reader.next().await.map_err(RunnerError::ServiceError)?;
                match frame_type {
                    FrameType::Hello | FrameType::Control(_) | FrameType::Signal(_) => {
                        return Err(RunnerError::BadFrameType(frame_type));
                    }
                    FrameType::Data(coding_context) => {
                        client_conn_handle_incoming_frame(
                            my_node_id,
                            peer_node_id,
                            &mut bytes,
                            on_link_status_ack,
                            coding_context,
                        )
                        .await
                        .map_err(RunnerError::ServiceError)?;
                    }
                }
                if fin {
                    return Err(RunnerError::ConnectionClosed);
                }
            }
        },
        // Arm 3: push UpdateNodeDescription whenever the local service list
        // observed via `services` changes.
        async move {
            loop {
                let services = services.next().await;
                log::trace!(
                    "[{:?} clipeer:{:?}] Send update node description with services: {:?}",
                    my_node_id,
                    peer_node_id,
                    services
                );
                let coding_context = coding::DEFAULT_CONTEXT;
                conn_stream_writer
                    .lock()
                    .await
                    .send(
                        FrameType::Data(coding_context),
                        &encode_fidl_with_context(
                            coding_context,
                            &mut PeerMessage::UpdateNodeDescription(PeerDescription {
                                services,
                                ..PeerDescription::EMPTY
                            }),
                        )?,
                        false,
                        &svc_conn_stats.update_node_description,
                    )
                    .await?;
            }
        }
        .map_err(RunnerError::ServiceError),
    )
    .await?;
    Ok(())
}
/// Serializes one `ClientPeerCommand` as a `PeerMessage` data frame and
/// writes it to the peer connection stream, updating the matching send stats.
///
/// For `OpenTransfer`, the `sent` oneshot is signalled once the frame has
/// been written; the receiver may already be gone, so the result is ignored.
async fn client_conn_handle_command(
    command: ClientPeerCommand,
    conn_stream_writer: &mut FramedStreamWriter,
    conn_stats: Arc<PeerConnStats>,
) -> Result<(), Error> {
    match command {
        ClientPeerCommand::ConnectToService(conn) => {
            let coding_context = coding::DEFAULT_CONTEXT;
            conn_stream_writer
                .send(
                    FrameType::Data(coding_context),
                    &encode_fidl_with_context(
                        coding_context,
                        &mut PeerMessage::ConnectToService(conn),
                    )?,
                    false,
                    &conn_stats.connect_to_service,
                )
                .await?;
        }
        ClientPeerCommand::OpenTransfer(stream_id, transfer_key, sent) => {
            let coding_context = coding::DEFAULT_CONTEXT;
            conn_stream_writer
                .send(
                    FrameType::Data(coding_context),
                    &encode_fidl_with_context(
                        coding_context,
                        &mut PeerMessage::OpenTransfer(OpenTransfer {
                            stream_id: StreamId { id: stream_id },
                            transfer_key,
                        }),
                    )?,
                    false,
                    &conn_stats.open_transfer,
                )
                .await?;
            // Best-effort notification that the frame went out.
            let _ = sent.send(());
        }
    }
    Ok(())
}
/// Decodes one incoming data frame as a `PeerReply` and dispatches it.
///
/// The only reply currently defined is `UpdateLinkStatusAck`, which completes
/// the oneshot stored in `on_link_status_ack`; an ack arriving with no
/// pending sender is reported as an error.
async fn client_conn_handle_incoming_frame(
    my_node_id: NodeId,
    peer_node_id: NodeId,
    bytes: &mut [u8],
    on_link_status_ack: &Mutex<Option<oneshot::Sender<()>>>,
    coding_context: coding::Context,
) -> Result<(), Error> {
    let msg: PeerReply = decode_fidl_with_context(coding_context, bytes)?;
    log::trace!("[{:?} clipeer:{:?}] got reply {:?}", my_node_id, peer_node_id, msg);
    match msg {
        PeerReply::UpdateLinkStatusAck(_) => {
            // Take the pending sender out of the mutex (leaving None) and
            // complete it.
            on_link_status_ack
                .lock()
                .await
                .take()
                .ok_or_else(|| format_err!("Got link status ack without sending link status"))?
                .send(())
                .map_err(|_| format_err!("Failed to send link status ack"))?;
        }
    }
    Ok(())
}
/// Server half of the peer handshake: exchange the 4-byte FIDL magic header,
/// read the client's config request, and answer with the negotiated config.
/// Returns the framed writer/reader pair used for the rest of the connection.
async fn server_handshake(
    my_node_id: NodeId,
    node_id: NodeId,
    mut conn_stream_writer: AsyncQuicStreamWriter,
    mut conn_stream_reader: AsyncQuicStreamReader,
    conn_stats: Arc<PeerConnStats>,
) -> Result<(FramedStreamWriter, FramedStreamReader), Error> {
    // Receive FIDL header
    log::trace!("[{:?} svrpeer:{:?}] read fidl header", my_node_id, node_id);
    let mut fidl_hdr = [0u8; 4];
    conn_stream_reader.read_exact(&mut fidl_hdr).await.context("reading FIDL header")?;
    let mut conn_stream_reader = FramedStreamReader::from_quic(conn_stream_reader, node_id);
    // Send FIDL header
    log::trace!("[{:?} svrpeer:{:?}] send fidl header", my_node_id, node_id);
    conn_stream_writer.send(&mut [0, 0, 0, fidl::encoding::MAGIC_NUMBER_INITIAL], false).await?;
    let mut conn_stream_writer = FramedStreamWriter::from_quic(conn_stream_writer, node_id);
    // Await config request
    log::trace!("[{:?} svrpeer:{:?}] read config", my_node_id, node_id);
    let (_, mut response) = Config::negotiate(
        // The request must arrive as a single, non-final data frame.
        if let (FrameType::Data(coding_context), mut bytes, false) =
            conn_stream_reader.next().await?
        {
            decode_fidl_with_context(coding_context, &mut bytes)?
        } else {
            bail!("Failed to read config response")
        },
    );
    // Send config response
    log::trace!("[{:?} svrpeer:{:?}] send config", my_node_id, node_id);
    let coding_context = coding::Context { use_persistent_header: false };
    conn_stream_writer
        .send(
            FrameType::Data(coding_context),
            &encode_fidl_with_context(coding_context, &mut response)?,
            false,
            &conn_stats.config,
        )
        .await?;
    Ok((conn_stream_writer, conn_stream_reader))
}
/// Drives the server side of a peer connection.
///
/// After `server_handshake`, loops reading `PeerMessage` data frames and
/// dispatching them (service connection, node-description update, transfer
/// setup) until the stream reports `fin` or an error occurs.
async fn server_conn_stream(
    node_id: NodeId,
    conn_stream_writer: AsyncQuicStreamWriter,
    conn_stream_reader: AsyncQuicStreamReader,
    router: Weak<Router>,
    conn_stats: Arc<PeerConnStats>,
    channel_proxy_stats: Arc<MessageStats>,
) -> Result<(), RunnerError> {
    let my_node_id = Weak::upgrade(&router).ok_or_else(|| RunnerError::RouterGone)?.node_id();
    let (conn_stream_writer, mut conn_stream_reader) =
        server_handshake(my_node_id, node_id, conn_stream_writer, conn_stream_reader, conn_stats)
            .map_err(RunnerError::HandshakeError)
            .await?;
    loop {
        log::trace!("[{:?} svrpeer:{:?}] await message", my_node_id, node_id);
        let (frame_type, mut bytes, fin) =
            conn_stream_reader.next().map_err(RunnerError::ServiceError).await?;
        // Re-upgrade each iteration so a dropped router ends the loop cleanly.
        let router = Weak::upgrade(&router).ok_or_else(|| RunnerError::RouterGone)?;
        match frame_type {
            FrameType::Hello | FrameType::Control(_) | FrameType::Signal(_) => {
                return Err(RunnerError::BadFrameType(frame_type));
            }
            FrameType::Data(coding_context) => {
                let msg: PeerMessage = decode_fidl_with_context(coding_context, &mut bytes)
                    .map_err(RunnerError::ServiceError)?;
                log::trace!("[{:?} svrpeer:{:?}] Got peer request: {:?}", my_node_id, node_id, msg);
                match msg {
                    // Client wants a channel to a named local service.
                    PeerMessage::ConnectToService(ConnectToService {
                        service_name,
                        stream_ref,
                        rights,
                        options: _,
                    }) => {
                        let app_channel = Channel::from_handle(
                            router
                                .recv_proxied(
                                    ZirconHandle::Channel(ChannelHandle { stream_ref, rights }),
                                    conn_stream_writer.conn(),
                                    channel_proxy_stats.clone(),
                                )
                                .map_err(RunnerError::ServiceError)
                                .await?,
                        );
                        router
                            .service_map()
                            .connect(
                                &service_name,
                                app_channel,
                                ConnectionInfo {
                                    peer: Some(node_id.into()),
                                    ..ConnectionInfo::EMPTY
                                },
                            )
                            .map_err(RunnerError::ServiceError)
                            .await?;
                    }
                    // Client republished its service list.
                    PeerMessage::UpdateNodeDescription(PeerDescription { services, .. }) => {
                        router
                            .service_map()
                            .update_node(node_id, services.unwrap_or(vec![]))
                            .map_err(RunnerError::ServiceError)
                            .await?;
                    }
                    // Client opened a transfer stream under a pre-agreed key.
                    PeerMessage::OpenTransfer(OpenTransfer {
                        stream_id: StreamId { id: stream_id },
                        transfer_key,
                    }) => {
                        let (tx, rx) = conn_stream_writer.conn().bind_id(stream_id);
                        router
                            .post_transfer(transfer_key, FoundTransfer::Remote(tx, rx))
                            .map_err(RunnerError::ServiceError)
                            .await?;
                    }
                }
            }
        }
        // `fin` marks the end of the stream; treat it as a closed connection.
        if fin {
            return Err(RunnerError::ConnectionClosed);
        }
    }
}
| 36.443013 | 100 | 0.556281 |
673f502c3061b8993e0d9512778518fc4002e8d0
| 5,696 |
//
// Copyright 2021 Patrick Flynn
// This file is part of the Ida compiler.
// Ida is licensed under the BSD-3 license. See the COPYING file for more information.
//
use std::collections::HashMap;
use parser::ltac;
use parser::ltac::{LtacFile, LtacType, LtacArg};
use parser::Arch;
// Import any local modules
mod risc;
mod riscv;
use risc::*;
use riscv::*;
// Architectures
// 1-> x86-64
// 2-> AArch64
// 3-> Riscv64
/// Entry point of the transformation layer: lowers builtin instructions, then
/// applies the generic RISC pass and the RISC-V-specific pass as required by
/// the flags and target architecture.
pub fn run(file : &LtacFile, arch : Arch, use_c : bool, risc_mode : bool) -> Result<LtacFile, ()> {
    // Builtin lowering happens unconditionally.
    let mut transformed = check_builtins(file, use_c).map_err(|_| ())?;
    // The generic RISC pass runs when requested explicitly or when the
    // target architecture requires it.
    if risc_mode || arch == Arch::Riscv64 {
        transformed = risc_optimize(&transformed).map_err(|_| ())?;
    }
    // RISC-V gets one extra architecture-specific pass.
    if arch == Arch::Riscv64 {
        transformed = riscv_optimize(&transformed).map_err(|_| ())?;
    }
    Ok(transformed)
}
// Scans the code for malloc, free, and exit instructions.
// If we are using the C libraries, these are simply transformed into a
// function call; otherwise, we must transform them to a system call.
//
// Returns a new LtacFile with the rewritten instruction stream; the input
// file is never modified.
fn check_builtins(file : &LtacFile, use_c : bool) -> Result<LtacFile, ()> {
    let mut file2 = LtacFile {
        name : file.name.clone(),
        data : file.data.clone(),
        code : Vec::new(),
    };
    let code = file.code.clone();
    // Maps an allocation's memory position to its size so the matching
    // free() can pass the segment size to the system call.
    let mut mm_map : HashMap<i32, i32> = HashMap::new();
    // enumerate() replaces the old hand-maintained index counter; direct
    // indexing replaces O(n) `iter().nth()` neighbor lookups.
    for (index, line) in code.iter().enumerate() {
        match &line.instr_type {
            // We have a separate exit type for two reasons
            // First, when we exit, we want to make sure to de-allocate everything
            // Second, because "exit" is a keyword, the corelib function has a different name
            LtacType::Exit => {
                let mut instr = ltac::create_instr(LtacType::PushArg);
                instr.arg1 = line.arg1.clone();
                instr.arg2_val = 1;
                file2.code.push(instr);
                instr = ltac::create_instr(LtacType::Call);
                // With the C library we call exit() directly; otherwise use the
                // corelib wrapper ("exit" is a keyword, hence "sys_exit").
                instr.name = if use_c { "exit".to_string() } else { "sys_exit".to_string() };
                file2.code.push(instr);
            },
            LtacType::Malloc => {
                if use_c {
                    let mut instr = ltac::create_instr(LtacType::Call);
                    instr.name = "malloc".to_string();
                    file2.code.push(instr);
                } else {
                    // The previous instruction pushed the allocation size; the
                    // next one stores the result. Panics on out-of-range, same
                    // as the old `.nth().unwrap()` did on malformed input.
                    let size_instr = &code[index - 1];
                    let pos_instr = &code[index + 1];
                    file2.code.pop();
                    // Record the destination position and size for later free() calls.
                    let pos = match &pos_instr.arg1 {
                        LtacArg::Mem(pos) => *pos,
                        LtacArg::Ptr(pos) => *pos,
                        _ => 0,
                    };
                    let size = match &size_instr.arg1 {
                        LtacArg::I32(val) => *val,
                        _ => 0,
                    };
                    mm_map.insert(pos, size);
                    // Make the call
                    let mut instr = ltac::create_instr(LtacType::PushArg);
                    instr.arg1 = size_instr.arg1.clone();
                    instr.arg2_val = 1;
                    file2.code.push(instr.clone());
                    instr = ltac::create_instr(LtacType::Call);
                    instr.name = "malloc".to_string();
                    file2.code.push(instr);
                }
            },
            LtacType::Free => {
                if use_c {
                    let mut instr = ltac::create_instr(LtacType::Call);
                    instr.name = "free".to_string();
                    file2.code.push(instr);
                } else {
                    // The previous instruction loaded the address being freed.
                    let addr_instr = &code[index - 1];
                    file2.code.pop();
                    // Address
                    let mut instr = ltac::create_instr(LtacType::PushArg);
                    instr.arg1 = addr_instr.arg1.clone();
                    instr.arg2_val = 1;
                    file2.code.push(instr.clone());
                    // Memory segment size, looked up from the matching malloc;
                    // defaults to 0 if the address was never recorded.
                    let pos = match &addr_instr.arg1 {
                        LtacArg::Ptr(pos) => *pos,
                        _ => 0,
                    };
                    instr.arg1 = LtacArg::I32(mm_map.get(&pos).copied().unwrap_or(0));
                    instr.arg2_val = 2;
                    file2.code.push(instr.clone());
                    // The system call
                    instr = ltac::create_instr(LtacType::Call);
                    instr.name = "free".to_string();
                    file2.code.push(instr);
                }
            },
            _ => file2.code.push(line.clone()),
        }
    }
    Ok(file2)
}
| 34.107784 | 99 | 0.440485 |
481d84ea40b448930a1f62e31e4f4a8ce0519bd7
| 2,308 |
//! Low level access to Cortex-M processors
//!
//! This crate provides:
//!
//! - Access to core peripherals like NVIC, SCB and SysTick.
//! - Access to core registers like CONTROL, MSP and PSR.
//! - Interrupt manipulation mechanisms
//! - Safe wrappers around Cortex-M specific instructions like `bkpt`
//!
//! # Optional features
//!
//! ## `inline-asm`
//!
//! When this feature is enabled the implementation of all the functions inside the `asm` and
//! `register` modules use inline assembly (`asm!`) instead of external assembly (FFI into separate
//! assembly files pre-compiled using `arm-none-eabi-gcc`). The advantages of enabling `inline-asm`
//! are:
//!
//! - Reduced overhead. FFI eliminates the possibility of inlining so all operations include a
//! function call overhead when `inline-asm` is not enabled.
//!
//! - Some of the `register` API becomes available only when `inline-asm` is enabled. Check the
//!   API docs for details.
//!
//! The disadvantage is that `inline-asm` requires a nightly toolchain.
//!
//! # Minimum Supported Rust Version (MSRV)
//!
//! This crate is guaranteed to compile on stable Rust 1.31 and up. It *might*
//! compile with older versions but that may change in any new patch release.
#![cfg_attr(feature = "inline-asm", feature(asm))]
#![deny(missing_docs)]
#![no_std]
#![allow(clippy::identity_op)]
#![allow(clippy::missing_safety_doc)]
// This makes clippy warn about public functions which are not #[inline].
//
// Almost all functions in this crate result in trivial or even no assembly.
// These functions should be #[inline].
//
// If you do add a function that's not supposed to be #[inline], you can add
// #[allow(clippy::missing_inline_in_public_items)] in front of it to add an
// exception to clippy's rules.
//
// This should be done in case of:
// - A function containing non-trivial logic (such as itm::write_all); or
// - A generated #[derive(Debug)] function (in which case the attribute needs
// to be applied to the struct).
#![deny(clippy::missing_inline_in_public_items)]
extern crate aligned;
extern crate bare_metal;
extern crate volatile_register;
#[macro_use]
mod macros;
pub mod asm;
pub mod interrupt;
#[cfg(not(armv6m))]
pub mod itm;
pub mod peripheral;
pub mod register;
pub use crate::peripheral::Peripherals;
| 33.941176 | 100 | 0.719237 |
d6848e885c8938717bcf7d9c8d21f6e6be590355
| 2,507 |
//! Types and utilities for working with `BufStream`.
mod chain;
mod collect;
mod from;
mod iter;
mod limit;
mod stream;
pub use self::chain::Chain;
pub use self::collect::Collect;
pub use self::from::FromBufStream;
pub use self::iter::iter;
pub use self::limit::Limit;
pub use self::stream::{stream, IntoStream};
pub mod error {
    //! Error types
    //!
    //! Re-exports the error types produced by the `collect`, `from`, and
    //! `limit` combinators so callers can name them from a single place.
    pub use super::collect::CollectError;
    pub use super::from::{CollectBytesError, CollectVecError};
    pub use super::limit::LimitError;
}
use crate::BufStream;
// Blanket implementation: every `BufStream` automatically gains the
// `BufStreamExt` adapter methods.
impl<T> BufStreamExt for T where T: BufStream {}
/// An extension trait for `BufStream`'s that provides a variety of convenient
/// adapters.
///
/// A blanket implementation exists for every type implementing `BufStream`,
/// so these methods are available automatically. Each adapter simply
/// constructs the corresponding combinator type.
pub trait BufStreamExt: BufStream {
    /// Takes two buf streams and creates a new buf stream over both in
    /// sequence.
    ///
    /// `chain()` returns a new `BufStream` value which will first yield all
    /// data from `self` then all data from `other`.
    ///
    /// In other words, it links two buf streams together, in a chain.
    fn chain<T>(self, other: T) -> Chain<Self, T>
    where
        Self: Sized,
        T: BufStream<Error = Self::Error>,
    {
        // Delegates to the `Chain` combinator.
        Chain::new(self, other)
    }
    /// Consumes all data from `self`, storing it in byte storage of type `T`.
    ///
    /// `collect()` returns a future that buffers all data yielded from `self`
    /// into storage of type of `T`. The future completes once `self` yield
    /// `None`, returning the buffered data.
    ///
    /// The collect future will yield an error if `self` yields an error or if
    /// the collect operation errors. The collect error cases are dependent on
    /// the target storage type.
    fn collect<T>(self) -> Collect<Self, T>
    where
        Self: Sized,
        T: FromBufStream<Self::Item>,
    {
        // Delegates to the `Collect` combinator.
        Collect::new(self)
    }
    /// Limit the number of bytes that the stream can yield.
    ///
    /// `limit()` returns a new `BufStream` value which yields all the data from
    /// `self` while ensuring that at most `amount` bytes are yielded.
    ///
    /// If `self` can yield greater than `amount` bytes, the returned stream
    /// will yield an error.
    fn limit(self, amount: u64) -> Limit<Self>
    where
        Self: Sized,
    {
        // Delegates to the `Limit` combinator.
        Limit::new(self, amount)
    }
    /// Creates a `Stream` from a `BufStream`.
    ///
    /// This produces a `Stream` of `BufStream::Items`.
    fn into_stream(self) -> IntoStream<Self>
    where
        Self: Sized,
    {
        // Delegates to the `IntoStream` combinator.
        IntoStream::new(self)
    }
}
acfa0f733ab7c14af021fb1de1223a72af0f7372
| 4,045 |
use egui_backend::{
egui,
fltk::{enums::*, prelude::*, *},
gl, DpiScaling,
};
use fltk_egui as egui_backend;
use std::rc::Rc;
use std::{cell::RefCell, time::Instant};
// Initial dimensions of the main application window, in pixels.
const SCREEN_WIDTH: u32 = 800;
const SCREEN_HEIGHT: u32 = 600;
/// Demo entry point: embeds an egui UI inside an FLTK `GlWindow`, alongside
/// native FLTK widgets (a frame label and a slider) that are refreshed from
/// the egui state on every pass of the event loop.
fn main() {
    let a = app::App::default().with_scheme(app::Scheme::Gtk);
    app::get_system_colors();
    app::set_font_size(20);
    // Main window hosts the GL window (left) and a native widget column (right).
    let mut main_win = window::Window::new(100, 100, SCREEN_WIDTH as _, SCREEN_HEIGHT as _, None);
    let mut glut_win = window::GlWindow::new(5, 5, main_win.w() - 200, main_win.h() - 10, None);
    glut_win.set_mode(Mode::Opengl3);
    glut_win.end();
    let mut col = group::Flex::default()
        .column()
        .with_size(185, 590)
        .right_of(&glut_win, 5);
    col.set_frame(FrameType::DownBox);
    let mut frm = frame::Frame::default();
    frm.set_color(Color::Red.inactive());
    frm.set_frame(FrameType::FlatBox);
    let mut slider = valuator::Slider::default().with_type(valuator::SliderType::HorizontalFill);
    slider.set_slider_frame(FrameType::RFlatBox);
    slider.set_slider_size(0.20);
    slider.set_color(Color::Blue.inactive());
    slider.set_selection_color(Color::Red);
    col.set_size(&mut slider, 20);
    col.end();
    main_win.end();
    main_win.make_resizable(true);
    main_win.show();
    glut_win.make_current();
    // Set up the egui painter and input state against the GL window.
    let (painter, egui_input_state) =
        egui_backend::with_fltk(&mut glut_win, DpiScaling::Custom(1.5));
    let mut egui_ctx = egui::CtxRef::default();
    // Shared between the event-handler closure and the main loop below.
    let state_rc = Rc::from(RefCell::from(egui_input_state));
    let painter_rc = Rc::from(RefCell::from(painter));
    let state = state_rc.clone();
    let painter = painter_rc.clone();
    // Forward the relevant FLTK events into egui's input state.
    main_win.handle({
        let mut w = glut_win.clone();
        move |_, ev| match ev {
            enums::Event::Push
            | enums::Event::Released
            | enums::Event::KeyDown
            | enums::Event::KeyUp
            | enums::Event::MouseWheel
            | enums::Event::Resize
            | enums::Event::Move
            | enums::Event::Drag => {
                let mut state = state.borrow_mut();
                state.fuse_input(&mut w, ev, &mut painter.borrow_mut());
                true
            }
            _ => false,
        }
    });
    let start_time = Instant::now();
    let mut name = String::new();
    let mut age = 0;
    let mut quit = false;
    // Main loop: one egui frame per FLTK wait cycle.
    while a.wait() {
        let mut state = state_rc.borrow_mut();
        let mut painter = painter_rc.borrow_mut();
        state.input.time = Some(start_time.elapsed().as_secs_f64());
        egui_ctx.begin_frame(state.input.take());
        // Mirror the egui-side state into the native FLTK widgets.
        frm.set_label(&format!("Hello {}", &name));
        slider.set_value(age as f64 / 120.);
        unsafe {
            // Clear the framebuffer to a reddish-brown background color.
            gl::ClearColor(0.6, 0.3, 0.3, 1.0);
            gl::Clear(gl::COLOR_BUFFER_BIT);
        }
        egui::CentralPanel::default().show(&egui_ctx, |ui| {
            ui.heading("My egui Application");
            ui.horizontal(|ui| {
                ui.label("Your name: ");
                ui.text_edit_singleline(&mut name);
            });
            ui.add(egui::Slider::new(&mut age, 0..=120).text("age"));
            if ui.button("Click each year").clicked() {
                age += 1;
            }
            ui.label(format!("Hello '{}', age {}", name, age));
            ui.separator();
            if ui
                .button("Quit?")
                .on_hover_cursor(egui::CursorIcon::PointingHand)
                .clicked()
            {
                quit = true;
            }
        });
        let (egui_output, paint_cmds) = egui_ctx.end_frame();
        state.fuse_output(&mut glut_win, &egui_output);
        let paint_jobs = egui_ctx.tessellate(paint_cmds);
        //Draw egui texture
        painter.paint_jobs(None, paint_jobs, &egui_ctx.texture());
        glut_win.swap_buffers();
        glut_win.flush();
        // Throttle, then wake the FLTK scheduler for the next frame.
        app::sleep(0.006);
        app::awake();
        if quit {
            break;
        }
    }
}
| 32.36 | 98 | 0.557726 |
abac196ef191093ab6a7b226db00ad24cb4cc144
| 30,765 |
// NOTE(review): appears to be svd2rust-generated accessors for the DMA
// channel 5 control register (DMA5CTL); `R`/`W` wrap the raw 16-bit value.
#[doc = "Reader of register DMA5CTL"]
pub type R = crate::R<u16, super::DMA5CTL>;
#[doc = "Writer for register DMA5CTL"]
pub type W = crate::W<u16, super::DMA5CTL>;
#[doc = "Register DMA5CTL `reset()`'s with value 0"]
impl crate::ResetValue for super::DMA5CTL {
    type Type = u16;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        // The register resets to all-zeros.
        0
    }
}
// --- Bit 0: DMAREQ (DMA request) — generated enum/reader/writer accessors ---
#[doc = "0:0\\]
DMA request\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMAREQ_A {
    #[doc = "0: No DMA start"]
    DMAREQ_0 = 0,
    #[doc = "1: Start DMA"]
    DMAREQ_1 = 1,
}
impl From<DMAREQ_A> for bool {
    #[inline(always)]
    fn from(variant: DMAREQ_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMAREQ`"]
pub type DMAREQ_R = crate::R<bool, DMAREQ_A>;
impl DMAREQ_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMAREQ_A {
        match self.bits {
            false => DMAREQ_A::DMAREQ_0,
            true => DMAREQ_A::DMAREQ_1,
        }
    }
    #[doc = "Checks if the value of the field is `DMAREQ_0`"]
    #[inline(always)]
    pub fn is_dmareq_0(&self) -> bool {
        *self == DMAREQ_A::DMAREQ_0
    }
    #[doc = "Checks if the value of the field is `DMAREQ_1`"]
    #[inline(always)]
    pub fn is_dmareq_1(&self) -> bool {
        *self == DMAREQ_A::DMAREQ_1
    }
}
#[doc = "Write proxy for field `DMAREQ`"]
pub struct DMAREQ_W<'a> {
    w: &'a mut W,
}
impl<'a> DMAREQ_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMAREQ_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No DMA start"]
    #[inline(always)]
    pub fn dmareq_0(self) -> &'a mut W {
        self.variant(DMAREQ_A::DMAREQ_0)
    }
    #[doc = "Start DMA"]
    #[inline(always)]
    pub fn dmareq_1(self) -> &'a mut W {
        self.variant(DMAREQ_A::DMAREQ_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Bit 0: no shift needed.
        self.w.bits = (self.w.bits & !0x01) | ((value as u16) & 0x01);
        self.w
    }
}
// --- Bit 1: DMAABORT (transfer interrupted by NMI) — generated accessors ---
#[doc = "1:1\\]
DMA abort\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMAABORT_A {
    #[doc = "0: DMA transfer not interrupted"]
    DMAABORT_0 = 0,
    #[doc = "1: DMA transfer interrupted by NMI"]
    DMAABORT_1 = 1,
}
impl From<DMAABORT_A> for bool {
    #[inline(always)]
    fn from(variant: DMAABORT_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMAABORT`"]
pub type DMAABORT_R = crate::R<bool, DMAABORT_A>;
impl DMAABORT_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMAABORT_A {
        match self.bits {
            false => DMAABORT_A::DMAABORT_0,
            true => DMAABORT_A::DMAABORT_1,
        }
    }
    #[doc = "Checks if the value of the field is `DMAABORT_0`"]
    #[inline(always)]
    pub fn is_dmaabort_0(&self) -> bool {
        *self == DMAABORT_A::DMAABORT_0
    }
    #[doc = "Checks if the value of the field is `DMAABORT_1`"]
    #[inline(always)]
    pub fn is_dmaabort_1(&self) -> bool {
        *self == DMAABORT_A::DMAABORT_1
    }
}
#[doc = "Write proxy for field `DMAABORT`"]
pub struct DMAABORT_W<'a> {
    w: &'a mut W,
}
impl<'a> DMAABORT_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMAABORT_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "DMA transfer not interrupted"]
    #[inline(always)]
    pub fn dmaabort_0(self) -> &'a mut W {
        self.variant(DMAABORT_A::DMAABORT_0)
    }
    #[doc = "DMA transfer interrupted by NMI"]
    #[inline(always)]
    pub fn dmaabort_1(self) -> &'a mut W {
        self.variant(DMAABORT_A::DMAABORT_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u16) & 0x01) << 1);
        self.w
    }
}
// --- Bit 2: DMAIE (DMA interrupt enable) — generated accessors ---
#[doc = "2:2\\]
DMA interrupt enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMAIE_A {
    #[doc = "0: Disabled"]
    DISABLE = 0,
    #[doc = "1: Enabled"]
    ENABLE = 1,
}
impl From<DMAIE_A> for bool {
    #[inline(always)]
    fn from(variant: DMAIE_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMAIE`"]
pub type DMAIE_R = crate::R<bool, DMAIE_A>;
impl DMAIE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMAIE_A {
        match self.bits {
            false => DMAIE_A::DISABLE,
            true => DMAIE_A::ENABLE,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLE`"]
    #[inline(always)]
    pub fn is_disable(&self) -> bool {
        *self == DMAIE_A::DISABLE
    }
    #[doc = "Checks if the value of the field is `ENABLE`"]
    #[inline(always)]
    pub fn is_enable(&self) -> bool {
        *self == DMAIE_A::ENABLE
    }
}
#[doc = "Write proxy for field `DMAIE`"]
pub struct DMAIE_W<'a> {
    w: &'a mut W,
}
impl<'a> DMAIE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMAIE_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Disabled"]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(DMAIE_A::DISABLE)
    }
    #[doc = "Enabled"]
    #[inline(always)]
    pub fn enable(self) -> &'a mut W {
        self.variant(DMAIE_A::ENABLE)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u16) & 0x01) << 2);
        self.w
    }
}
// --- Bit 3: DMAIFG (DMA interrupt flag) — generated accessors ---
#[doc = "3:3\\]
DMA interrupt flag\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMAIFG_A {
    #[doc = "0: No interrupt pending"]
    DMAIFG_0 = 0,
    #[doc = "1: Interrupt pending"]
    DMAIFG_1 = 1,
}
impl From<DMAIFG_A> for bool {
    #[inline(always)]
    fn from(variant: DMAIFG_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMAIFG`"]
pub type DMAIFG_R = crate::R<bool, DMAIFG_A>;
impl DMAIFG_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMAIFG_A {
        match self.bits {
            false => DMAIFG_A::DMAIFG_0,
            true => DMAIFG_A::DMAIFG_1,
        }
    }
    #[doc = "Checks if the value of the field is `DMAIFG_0`"]
    #[inline(always)]
    pub fn is_dmaifg_0(&self) -> bool {
        *self == DMAIFG_A::DMAIFG_0
    }
    #[doc = "Checks if the value of the field is `DMAIFG_1`"]
    #[inline(always)]
    pub fn is_dmaifg_1(&self) -> bool {
        *self == DMAIFG_A::DMAIFG_1
    }
}
#[doc = "Write proxy for field `DMAIFG`"]
pub struct DMAIFG_W<'a> {
    w: &'a mut W,
}
impl<'a> DMAIFG_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMAIFG_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "No interrupt pending"]
    #[inline(always)]
    pub fn dmaifg_0(self) -> &'a mut W {
        self.variant(DMAIFG_A::DMAIFG_0)
    }
    #[doc = "Interrupt pending"]
    #[inline(always)]
    pub fn dmaifg_1(self) -> &'a mut W {
        self.variant(DMAIFG_A::DMAIFG_1)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u16) & 0x01) << 3);
        self.w
    }
}
// --- Bit 4: DMAEN (DMA enable) — generated accessors ---
#[doc = "4:4\\]
DMA enable\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMAEN_A {
    #[doc = "0: Disabled"]
    DISABLE = 0,
    #[doc = "1: Enabled"]
    ENABLE = 1,
}
impl From<DMAEN_A> for bool {
    #[inline(always)]
    fn from(variant: DMAEN_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMAEN`"]
pub type DMAEN_R = crate::R<bool, DMAEN_A>;
impl DMAEN_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMAEN_A {
        match self.bits {
            false => DMAEN_A::DISABLE,
            true => DMAEN_A::ENABLE,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLE`"]
    #[inline(always)]
    pub fn is_disable(&self) -> bool {
        *self == DMAEN_A::DISABLE
    }
    #[doc = "Checks if the value of the field is `ENABLE`"]
    #[inline(always)]
    pub fn is_enable(&self) -> bool {
        *self == DMAEN_A::ENABLE
    }
}
#[doc = "Write proxy for field `DMAEN`"]
pub struct DMAEN_W<'a> {
    w: &'a mut W,
}
impl<'a> DMAEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMAEN_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Disabled"]
    #[inline(always)]
    pub fn disable(self) -> &'a mut W {
        self.variant(DMAEN_A::DISABLE)
    }
    #[doc = "Enabled"]
    #[inline(always)]
    pub fn enable(self) -> &'a mut W {
        self.variant(DMAEN_A::ENABLE)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u16) & 0x01) << 4);
        self.w
    }
}
// --- Bit 5: DMALEVEL (edge- vs. level-sensitive trigger) — generated accessors ---
#[doc = "5:5\\]
DMA level\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMALEVEL_A {
    #[doc = "0: Edge sensitive (rising edge)"]
    EDGE = 0,
    #[doc = "1: Level sensitive (high level)"]
    LEVEL = 1,
}
impl From<DMALEVEL_A> for bool {
    #[inline(always)]
    fn from(variant: DMALEVEL_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMALEVEL`"]
pub type DMALEVEL_R = crate::R<bool, DMALEVEL_A>;
impl DMALEVEL_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMALEVEL_A {
        match self.bits {
            false => DMALEVEL_A::EDGE,
            true => DMALEVEL_A::LEVEL,
        }
    }
    #[doc = "Checks if the value of the field is `EDGE`"]
    #[inline(always)]
    pub fn is_edge(&self) -> bool {
        *self == DMALEVEL_A::EDGE
    }
    #[doc = "Checks if the value of the field is `LEVEL`"]
    #[inline(always)]
    pub fn is_level(&self) -> bool {
        *self == DMALEVEL_A::LEVEL
    }
}
#[doc = "Write proxy for field `DMALEVEL`"]
pub struct DMALEVEL_W<'a> {
    w: &'a mut W,
}
impl<'a> DMALEVEL_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMALEVEL_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Edge sensitive (rising edge)"]
    #[inline(always)]
    pub fn edge(self) -> &'a mut W {
        self.variant(DMALEVEL_A::EDGE)
    }
    #[doc = "Level sensitive (high level)"]
    #[inline(always)]
    pub fn level(self) -> &'a mut W {
        self.variant(DMALEVEL_A::LEVEL)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u16) & 0x01) << 5);
        self.w
    }
}
// --- Bit 6: DMASRCBYTE (source access width: word/byte) — generated accessors ---
#[doc = "6:6\\]
DMA source byte\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMASRCBYTE_A {
    #[doc = "0: Word"]
    WORD = 0,
    #[doc = "1: Byte"]
    BYTE = 1,
}
impl From<DMASRCBYTE_A> for bool {
    #[inline(always)]
    fn from(variant: DMASRCBYTE_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMASRCBYTE`"]
pub type DMASRCBYTE_R = crate::R<bool, DMASRCBYTE_A>;
impl DMASRCBYTE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMASRCBYTE_A {
        match self.bits {
            false => DMASRCBYTE_A::WORD,
            true => DMASRCBYTE_A::BYTE,
        }
    }
    #[doc = "Checks if the value of the field is `WORD`"]
    #[inline(always)]
    pub fn is_word(&self) -> bool {
        *self == DMASRCBYTE_A::WORD
    }
    #[doc = "Checks if the value of the field is `BYTE`"]
    #[inline(always)]
    pub fn is_byte(&self) -> bool {
        *self == DMASRCBYTE_A::BYTE
    }
}
#[doc = "Write proxy for field `DMASRCBYTE`"]
pub struct DMASRCBYTE_W<'a> {
    w: &'a mut W,
}
impl<'a> DMASRCBYTE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMASRCBYTE_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Word"]
    #[inline(always)]
    pub fn word(self) -> &'a mut W {
        self.variant(DMASRCBYTE_A::WORD)
    }
    #[doc = "Byte"]
    #[inline(always)]
    pub fn byte(self) -> &'a mut W {
        self.variant(DMASRCBYTE_A::BYTE)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u16) & 0x01) << 6);
        self.w
    }
}
// --- Bit 7: DMADSTBYTE (destination access width: word/byte) — generated accessors ---
#[doc = "7:7\\]
DMA destination byte\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum DMADSTBYTE_A {
    #[doc = "0: Word"]
    WORD = 0,
    #[doc = "1: Byte"]
    BYTE = 1,
}
impl From<DMADSTBYTE_A> for bool {
    #[inline(always)]
    fn from(variant: DMADSTBYTE_A) -> Self {
        variant as u8 != 0
    }
}
#[doc = "Reader of field `DMADSTBYTE`"]
pub type DMADSTBYTE_R = crate::R<bool, DMADSTBYTE_A>;
impl DMADSTBYTE_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMADSTBYTE_A {
        match self.bits {
            false => DMADSTBYTE_A::WORD,
            true => DMADSTBYTE_A::BYTE,
        }
    }
    #[doc = "Checks if the value of the field is `WORD`"]
    #[inline(always)]
    pub fn is_word(&self) -> bool {
        *self == DMADSTBYTE_A::WORD
    }
    #[doc = "Checks if the value of the field is `BYTE`"]
    #[inline(always)]
    pub fn is_byte(&self) -> bool {
        *self == DMADSTBYTE_A::BYTE
    }
}
#[doc = "Write proxy for field `DMADSTBYTE`"]
pub struct DMADSTBYTE_W<'a> {
    w: &'a mut W,
}
impl<'a> DMADSTBYTE_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMADSTBYTE_A) -> &'a mut W {
        {
            self.bit(variant.into())
        }
    }
    #[doc = "Word"]
    #[inline(always)]
    pub fn word(self) -> &'a mut W {
        self.variant(DMADSTBYTE_A::WORD)
    }
    #[doc = "Byte"]
    #[inline(always)]
    pub fn byte(self) -> &'a mut W {
        self.variant(DMADSTBYTE_A::BYTE)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u16) & 0x01) << 7);
        self.w
    }
}
// Auto-generated accessors for the two-bit DMASRCINCR field (bits 9:8 per
// the shift/mask in `bits()` below): source-address increment mode.
#[doc = "9:8\\]
DMA source increment\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DMASRCINCR_A {
    #[doc = "0: Source address is unchanged"]
    DMASRCINCR_0 = 0,
    #[doc = "1: Source address is unchanged"]
    DMASRCINCR_1 = 1,
    #[doc = "2: Source address is decremented"]
    DMASRCINCR_2 = 2,
    #[doc = "3: Source address is incremented"]
    DMASRCINCR_3 = 3,
}
impl From<DMASRCINCR_A> for u8 {
    #[inline(always)]
    fn from(variant: DMASRCINCR_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `DMASRCINCR`"]
pub type DMASRCINCR_R = crate::R<u8, DMASRCINCR_A>;
impl DMASRCINCR_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMASRCINCR_A {
        // Reader only ever holds the 2 masked bits, so 0..=3 is exhaustive.
        match self.bits {
            0 => DMASRCINCR_A::DMASRCINCR_0,
            1 => DMASRCINCR_A::DMASRCINCR_1,
            2 => DMASRCINCR_A::DMASRCINCR_2,
            3 => DMASRCINCR_A::DMASRCINCR_3,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `DMASRCINCR_0`"]
    #[inline(always)]
    pub fn is_dmasrcincr_0(&self) -> bool {
        *self == DMASRCINCR_A::DMASRCINCR_0
    }
    #[doc = "Checks if the value of the field is `DMASRCINCR_1`"]
    #[inline(always)]
    pub fn is_dmasrcincr_1(&self) -> bool {
        *self == DMASRCINCR_A::DMASRCINCR_1
    }
    #[doc = "Checks if the value of the field is `DMASRCINCR_2`"]
    #[inline(always)]
    pub fn is_dmasrcincr_2(&self) -> bool {
        *self == DMASRCINCR_A::DMASRCINCR_2
    }
    #[doc = "Checks if the value of the field is `DMASRCINCR_3`"]
    #[inline(always)]
    pub fn is_dmasrcincr_3(&self) -> bool {
        *self == DMASRCINCR_A::DMASRCINCR_3
    }
}
// Write proxy: all setters read-modify-write only bits 9:8 of the staged value.
#[doc = "Write proxy for field `DMASRCINCR`"]
pub struct DMASRCINCR_W<'a> {
    w: &'a mut W,
}
impl<'a> DMASRCINCR_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMASRCINCR_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "Source address is unchanged"]
    #[inline(always)]
    pub fn dmasrcincr_0(self) -> &'a mut W {
        self.variant(DMASRCINCR_A::DMASRCINCR_0)
    }
    #[doc = "Source address is unchanged"]
    #[inline(always)]
    pub fn dmasrcincr_1(self) -> &'a mut W {
        self.variant(DMASRCINCR_A::DMASRCINCR_1)
    }
    #[doc = "Source address is decremented"]
    #[inline(always)]
    pub fn dmasrcincr_2(self) -> &'a mut W {
        self.variant(DMASRCINCR_A::DMASRCINCR_2)
    }
    #[doc = "Source address is incremented"]
    #[inline(always)]
    pub fn dmasrcincr_3(self) -> &'a mut W {
        self.variant(DMASRCINCR_A::DMASRCINCR_3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 9:8, then OR in the 2-bit value at position 8.
        self.w.bits = (self.w.bits & !(0x03 << 8)) | (((value as u16) & 0x03) << 8);
        self.w
    }
}
// Auto-generated accessors for the two-bit DMADSTINCR field (bits 11:10 per
// the shift/mask in `bits()` below): destination-address increment mode.
#[doc = "11:10\\]
DMA destination increment\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DMADSTINCR_A {
    #[doc = "0: Destination address is unchanged"]
    DMADSTINCR_0 = 0,
    #[doc = "1: Destination address is unchanged"]
    DMADSTINCR_1 = 1,
    #[doc = "2: Destination address is decremented"]
    DMADSTINCR_2 = 2,
    #[doc = "3: Destination address is incremented"]
    DMADSTINCR_3 = 3,
}
impl From<DMADSTINCR_A> for u8 {
    #[inline(always)]
    fn from(variant: DMADSTINCR_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `DMADSTINCR`"]
pub type DMADSTINCR_R = crate::R<u8, DMADSTINCR_A>;
impl DMADSTINCR_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMADSTINCR_A {
        // Reader only ever holds the 2 masked bits, so 0..=3 is exhaustive.
        match self.bits {
            0 => DMADSTINCR_A::DMADSTINCR_0,
            1 => DMADSTINCR_A::DMADSTINCR_1,
            2 => DMADSTINCR_A::DMADSTINCR_2,
            3 => DMADSTINCR_A::DMADSTINCR_3,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `DMADSTINCR_0`"]
    #[inline(always)]
    pub fn is_dmadstincr_0(&self) -> bool {
        *self == DMADSTINCR_A::DMADSTINCR_0
    }
    #[doc = "Checks if the value of the field is `DMADSTINCR_1`"]
    #[inline(always)]
    pub fn is_dmadstincr_1(&self) -> bool {
        *self == DMADSTINCR_A::DMADSTINCR_1
    }
    #[doc = "Checks if the value of the field is `DMADSTINCR_2`"]
    #[inline(always)]
    pub fn is_dmadstincr_2(&self) -> bool {
        *self == DMADSTINCR_A::DMADSTINCR_2
    }
    #[doc = "Checks if the value of the field is `DMADSTINCR_3`"]
    #[inline(always)]
    pub fn is_dmadstincr_3(&self) -> bool {
        *self == DMADSTINCR_A::DMADSTINCR_3
    }
}
// Write proxy: all setters read-modify-write only bits 11:10 of the staged value.
#[doc = "Write proxy for field `DMADSTINCR`"]
pub struct DMADSTINCR_W<'a> {
    w: &'a mut W,
}
impl<'a> DMADSTINCR_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMADSTINCR_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "Destination address is unchanged"]
    #[inline(always)]
    pub fn dmadstincr_0(self) -> &'a mut W {
        self.variant(DMADSTINCR_A::DMADSTINCR_0)
    }
    #[doc = "Destination address is unchanged"]
    #[inline(always)]
    pub fn dmadstincr_1(self) -> &'a mut W {
        self.variant(DMADSTINCR_A::DMADSTINCR_1)
    }
    #[doc = "Destination address is decremented"]
    #[inline(always)]
    pub fn dmadstincr_2(self) -> &'a mut W {
        self.variant(DMADSTINCR_A::DMADSTINCR_2)
    }
    #[doc = "Destination address is incremented"]
    #[inline(always)]
    pub fn dmadstincr_3(self) -> &'a mut W {
        self.variant(DMADSTINCR_A::DMADSTINCR_3)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 11:10, then OR in the 2-bit value at position 10.
        self.w.bits = (self.w.bits & !(0x03 << 10)) | (((value as u16) & 0x03) << 10);
        self.w
    }
}
// Auto-generated accessors for the three-bit DMADT field (bits 14:12 per
// the shift/mask in `bits()` below): DMA transfer mode selection.
#[doc = "14:12\\]
DMA transfer mode\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(u8)]
pub enum DMADT_A {
    #[doc = "0: Single transfer"]
    DMADT_0 = 0,
    #[doc = "1: Block transfer"]
    DMADT_1 = 1,
    #[doc = "2: Burst-block transfer"]
    DMADT_2 = 2,
    #[doc = "3: Burst-block transfer"]
    DMADT_3 = 3,
    #[doc = "4: Repeated single transfer"]
    DMADT_4 = 4,
    #[doc = "5: Repeated block transfer"]
    DMADT_5 = 5,
    #[doc = "6: Repeated burst-block transfer"]
    DMADT_6 = 6,
    #[doc = "7: Repeated burst-block transfer"]
    DMADT_7 = 7,
}
impl From<DMADT_A> for u8 {
    #[inline(always)]
    fn from(variant: DMADT_A) -> Self {
        variant as _
    }
}
#[doc = "Reader of field `DMADT`"]
pub type DMADT_R = crate::R<u8, DMADT_A>;
impl DMADT_R {
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> DMADT_A {
        // Reader only ever holds the 3 masked bits, so 0..=7 is exhaustive.
        match self.bits {
            0 => DMADT_A::DMADT_0,
            1 => DMADT_A::DMADT_1,
            2 => DMADT_A::DMADT_2,
            3 => DMADT_A::DMADT_3,
            4 => DMADT_A::DMADT_4,
            5 => DMADT_A::DMADT_5,
            6 => DMADT_A::DMADT_6,
            7 => DMADT_A::DMADT_7,
            _ => unreachable!(),
        }
    }
    #[doc = "Checks if the value of the field is `DMADT_0`"]
    #[inline(always)]
    pub fn is_dmadt_0(&self) -> bool {
        *self == DMADT_A::DMADT_0
    }
    #[doc = "Checks if the value of the field is `DMADT_1`"]
    #[inline(always)]
    pub fn is_dmadt_1(&self) -> bool {
        *self == DMADT_A::DMADT_1
    }
    #[doc = "Checks if the value of the field is `DMADT_2`"]
    #[inline(always)]
    pub fn is_dmadt_2(&self) -> bool {
        *self == DMADT_A::DMADT_2
    }
    #[doc = "Checks if the value of the field is `DMADT_3`"]
    #[inline(always)]
    pub fn is_dmadt_3(&self) -> bool {
        *self == DMADT_A::DMADT_3
    }
    #[doc = "Checks if the value of the field is `DMADT_4`"]
    #[inline(always)]
    pub fn is_dmadt_4(&self) -> bool {
        *self == DMADT_A::DMADT_4
    }
    #[doc = "Checks if the value of the field is `DMADT_5`"]
    #[inline(always)]
    pub fn is_dmadt_5(&self) -> bool {
        *self == DMADT_A::DMADT_5
    }
    #[doc = "Checks if the value of the field is `DMADT_6`"]
    #[inline(always)]
    pub fn is_dmadt_6(&self) -> bool {
        *self == DMADT_A::DMADT_6
    }
    #[doc = "Checks if the value of the field is `DMADT_7`"]
    #[inline(always)]
    pub fn is_dmadt_7(&self) -> bool {
        *self == DMADT_A::DMADT_7
    }
}
// Write proxy: all setters read-modify-write only bits 14:12 of the staged value.
#[doc = "Write proxy for field `DMADT`"]
pub struct DMADT_W<'a> {
    w: &'a mut W,
}
impl<'a> DMADT_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: DMADT_A) -> &'a mut W {
        {
            self.bits(variant.into())
        }
    }
    #[doc = "Single transfer"]
    #[inline(always)]
    pub fn dmadt_0(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_0)
    }
    #[doc = "Block transfer"]
    #[inline(always)]
    pub fn dmadt_1(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_1)
    }
    #[doc = "Burst-block transfer"]
    #[inline(always)]
    pub fn dmadt_2(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_2)
    }
    #[doc = "Burst-block transfer"]
    #[inline(always)]
    pub fn dmadt_3(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_3)
    }
    #[doc = "Repeated single transfer"]
    #[inline(always)]
    pub fn dmadt_4(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_4)
    }
    #[doc = "Repeated block transfer"]
    #[inline(always)]
    pub fn dmadt_5(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_5)
    }
    #[doc = "Repeated burst-block transfer"]
    #[inline(always)]
    pub fn dmadt_6(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_6)
    }
    #[doc = "Repeated burst-block transfer"]
    #[inline(always)]
    pub fn dmadt_7(self) -> &'a mut W {
        self.variant(DMADT_A::DMADT_7)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // Clear bits 14:12, then OR in the 3-bit value at position 12.
        self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u16) & 0x07) << 12);
        self.w
    }
}
// Register reader: each method extracts one field from the cached 16-bit
// register value `self.bits` (shift + mask) and wraps it in that field's
// typed reader proxy.
impl R {
    #[doc = "Bit 0 - 0:0\\]
DMA request"]
    #[inline(always)]
    pub fn dmareq(&self) -> DMAREQ_R {
        DMAREQ_R::new((self.bits & 0x01) != 0)
    }
    #[doc = "Bit 1 - 1:1\\]
DMA abort"]
    #[inline(always)]
    pub fn dmaabort(&self) -> DMAABORT_R {
        DMAABORT_R::new(((self.bits >> 1) & 0x01) != 0)
    }
    #[doc = "Bit 2 - 2:2\\]
DMA interrupt enable"]
    #[inline(always)]
    pub fn dmaie(&self) -> DMAIE_R {
        DMAIE_R::new(((self.bits >> 2) & 0x01) != 0)
    }
    #[doc = "Bit 3 - 3:3\\]
DMA interrupt flag"]
    #[inline(always)]
    pub fn dmaifg(&self) -> DMAIFG_R {
        DMAIFG_R::new(((self.bits >> 3) & 0x01) != 0)
    }
    #[doc = "Bit 4 - 4:4\\]
DMA enable"]
    #[inline(always)]
    pub fn dmaen(&self) -> DMAEN_R {
        DMAEN_R::new(((self.bits >> 4) & 0x01) != 0)
    }
    #[doc = "Bit 5 - 5:5\\]
DMA level"]
    #[inline(always)]
    pub fn dmalevel(&self) -> DMALEVEL_R {
        DMALEVEL_R::new(((self.bits >> 5) & 0x01) != 0)
    }
    #[doc = "Bit 6 - 6:6\\]
DMA source byte"]
    #[inline(always)]
    pub fn dmasrcbyte(&self) -> DMASRCBYTE_R {
        DMASRCBYTE_R::new(((self.bits >> 6) & 0x01) != 0)
    }
    #[doc = "Bit 7 - 7:7\\]
DMA destination byte"]
    #[inline(always)]
    pub fn dmadstbyte(&self) -> DMADSTBYTE_R {
        DMADSTBYTE_R::new(((self.bits >> 7) & 0x01) != 0)
    }
    #[doc = "Bits 8:9 - 9:8\\]
DMA source increment"]
    #[inline(always)]
    pub fn dmasrcincr(&self) -> DMASRCINCR_R {
        DMASRCINCR_R::new(((self.bits >> 8) & 0x03) as u8)
    }
    #[doc = "Bits 10:11 - 11:10\\]
DMA destination increment"]
    #[inline(always)]
    pub fn dmadstincr(&self) -> DMADSTINCR_R {
        DMADSTINCR_R::new(((self.bits >> 10) & 0x03) as u8)
    }
    #[doc = "Bits 12:14 - 14:12\\]
DMA transfer mode"]
    #[inline(always)]
    pub fn dmadt(&self) -> DMADT_R {
        DMADT_R::new(((self.bits >> 12) & 0x07) as u8)
    }
}
// Register writer: each method hands out the matching field's write proxy,
// which mutates only that field's bits in the staged value `self.bits`.
impl W {
    #[doc = "Bit 0 - 0:0\\]
DMA request"]
    #[inline(always)]
    pub fn dmareq(&mut self) -> DMAREQ_W {
        DMAREQ_W { w: self }
    }
    #[doc = "Bit 1 - 1:1\\]
DMA abort"]
    #[inline(always)]
    pub fn dmaabort(&mut self) -> DMAABORT_W {
        DMAABORT_W { w: self }
    }
    #[doc = "Bit 2 - 2:2\\]
DMA interrupt enable"]
    #[inline(always)]
    pub fn dmaie(&mut self) -> DMAIE_W {
        DMAIE_W { w: self }
    }
    #[doc = "Bit 3 - 3:3\\]
DMA interrupt flag"]
    #[inline(always)]
    pub fn dmaifg(&mut self) -> DMAIFG_W {
        DMAIFG_W { w: self }
    }
    #[doc = "Bit 4 - 4:4\\]
DMA enable"]
    #[inline(always)]
    pub fn dmaen(&mut self) -> DMAEN_W {
        DMAEN_W { w: self }
    }
    #[doc = "Bit 5 - 5:5\\]
DMA level"]
    #[inline(always)]
    pub fn dmalevel(&mut self) -> DMALEVEL_W {
        DMALEVEL_W { w: self }
    }
    #[doc = "Bit 6 - 6:6\\]
DMA source byte"]
    #[inline(always)]
    pub fn dmasrcbyte(&mut self) -> DMASRCBYTE_W {
        DMASRCBYTE_W { w: self }
    }
    #[doc = "Bit 7 - 7:7\\]
DMA destination byte"]
    #[inline(always)]
    pub fn dmadstbyte(&mut self) -> DMADSTBYTE_W {
        DMADSTBYTE_W { w: self }
    }
    #[doc = "Bits 8:9 - 9:8\\]
DMA source increment"]
    #[inline(always)]
    pub fn dmasrcincr(&mut self) -> DMASRCINCR_W {
        DMASRCINCR_W { w: self }
    }
    #[doc = "Bits 10:11 - 11:10\\]
DMA destination increment"]
    #[inline(always)]
    pub fn dmadstincr(&mut self) -> DMADSTINCR_W {
        DMADSTINCR_W { w: self }
    }
    #[doc = "Bits 12:14 - 14:12\\]
DMA transfer mode"]
    #[inline(always)]
    pub fn dmadt(&mut self) -> DMADT_W {
        DMADT_W { w: self }
    }
}
| 28.1989 | 86 | 0.551276 |
167ca1ce668079cec031595b2c5368d01465c598
| 11,883 |
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use proc_macro::TokenStream;
use proc_macro2::Span;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro_crate::crate_name;
use proc_macro_crate::FoundCrate;
use quote::quote;
use quote::ToTokens;
use syn::punctuated::Punctuated;
use syn::token::Comma;
use syn::FnArg;
use syn::GenericParam;
use syn::Ident;
// Identifier to the `deno_core` crate.
//
// If macro called in deno_core, `crate` is used.
// If macro called outside deno_core, `deno_core` OR the renamed
// version from Cargo.toml is used.
/// Resolves the token path used to reference `deno_core` from generated code:
/// `crate` when this macro expands inside `deno_core` itself, otherwise the
/// (possibly renamed) dependency name found in the consumer's `Cargo.toml`.
///
/// Panics if `deno_core` is not a dependency of the calling crate.
fn core_import() -> TokenStream2 {
  let found_crate =
    crate_name("deno_core").expect("deno_core not present in `Cargo.toml`");

  match found_crate {
    FoundCrate::Itself => {
      // TODO(@littledivy): This won't work for `deno_core` examples
      // since `crate` does not refer to `deno_core`.
      // examples must re-export deno_core to make this work
      // until Span inspection APIs are stabalized.
      //
      // https://github.com/rust-lang/rust/issues/54725
      quote!(crate)
    }
    FoundCrate::Name(name) => {
      let ident = Ident::new(&name, Span::call_site());
      quote!(#ident)
    }
  }
}
/// Flags parsed from the `#[op(...)]` attribute argument list.
#[derive(Copy, Clone, Debug, Default)]
struct MacroArgs {
  // Set by `#[op(unstable)]`: marks the op as requiring --unstable.
  is_unstable: bool,
  // Set by `#[op(v8)]`: the op receives a raw v8 HandleScope argument.
  is_v8: bool,
}
impl syn::parse::Parse for MacroArgs {
  /// Parses the comma-separated identifier list inside `#[op(...)]`.
  ///
  /// Recognized flags are `unstable` and `v8` (in any combination); any
  /// other identifier is rejected with a compile error at the attribute's
  /// span so typos surface immediately.
  fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
    let vars =
      syn::punctuated::Punctuated::<Ident, syn::Token![,]>::parse_terminated(
        input,
      )?;
    let vars: Vec<_> = vars.iter().map(Ident::to_string).collect();
    let vars: Vec<_> = vars.iter().map(String::as_str).collect();
    for var in vars.iter() {
      if !["unstable", "v8"].contains(var) {
        // Bug fix: the old message ("Ops expect #[op] or #[op(unstable)]")
        // omitted the `v8` flag even though it is accepted above, which made
        // the diagnostic misleading. List every accepted form.
        return Err(syn::Error::new(
          input.span(),
          "Ops expect #[op], #[op(unstable)] and/or #[op(v8)]",
        ));
      }
    }
    Ok(Self {
      is_unstable: vars.contains(&"unstable"),
      is_v8: vars.contains(&"v8"),
    })
  }
}
#[proc_macro_attribute]
/// Entry point of the `#[op]` attribute macro.
///
/// Wraps the annotated function in a zero-sized struct of the same name
/// exposing:
///   * `call(...)`   - the original function, unchanged,
///   * `decl()`      - an `OpDecl` for registration in an extension,
///   * `v8_func(...)`- the generated v8-facing glue (sync or async).
pub fn op(attr: TokenStream, item: TokenStream) -> TokenStream {
  // Parse `#[op(...)]` flags (`unstable`, `v8`).
  let margs = syn::parse_macro_input!(attr as MacroArgs);
  let MacroArgs { is_unstable, is_v8 } = margs;
  let func = syn::parse::<syn::ItemFn>(item).expect("expected a function");
  let name = &func.sig.ident;
  let generics = &func.sig.generics;
  // Lifetimes are dropped: only type params are forwarded through the
  // generated `decl::<...>()` / `v8_func::<...>` turbofish.
  let type_params = exclude_lifetime_params(&func.sig.generics.params);
  let where_clause = &func.sig.generics.where_clause;

  // Preserve the original func as op_foo::call()
  let original_func = {
    let mut func = func.clone();
    func.sig.ident = quote::format_ident!("call");
    func
  };

  let core = core_import();

  // An op is async if declared `async fn` OR if it returns `impl Future`.
  let asyncness = func.sig.asyncness.is_some();
  let is_async = asyncness || is_future(&func.sig.output);
  let v8_body = if is_async {
    codegen_v8_async(&core, &func, margs, asyncness)
  } else {
    codegen_v8_sync(&core, &func, margs)
  };

  let docline = format!("Use `{name}::decl()` to get an op-declaration");
  // Generate wrapper
  quote! {
    #[allow(non_camel_case_types)]
    #[doc="Auto-generated by `deno_ops`, i.e: `#[op]`"]
    #[doc=""]
    #[doc=#docline]
    #[doc="you can include in a `deno_core::Extension`."]
    pub struct #name;

    #[doc(hidden)]
    impl #name {
      pub fn name() -> &'static str {
        stringify!(#name)
      }

      pub fn v8_fn_ptr #generics () -> #core::v8::FunctionCallback #where_clause {
        use #core::v8::MapFnTo;
        Self::v8_func::<#type_params>.map_fn_to()
      }

      pub fn decl #generics () -> #core::OpDecl #where_clause {
        #core::OpDecl {
          name: Self::name(),
          v8_fn_ptr: Self::v8_fn_ptr::<#type_params>(),
          enabled: true,
          is_async: #is_async,
          is_unstable: #is_unstable,
          is_v8: #is_v8,
        }
      }

      #[inline]
      #[allow(clippy::too_many_arguments)]
      #original_func

      pub fn v8_func #generics (
        scope: &mut #core::v8::HandleScope,
        args: #core::v8::FunctionCallbackArguments,
        mut rv: #core::v8::ReturnValue,
      ) #where_clause {
        #v8_body
      }
    }
  }.into()
}
/// Generate the body of a v8 func for an async op
///
/// Layout of the JS-side arguments: `args[0]` is always the promise id,
/// so user args start at v8 index 1 (the `1` passed to `codegen_args`).
/// On the Rust side, an optional leading `Rc<RefCell<OpState>>` parameter
/// is filled in from the op context rather than deserialized.
fn codegen_v8_async(
  core: &TokenStream2,
  f: &syn::ItemFn,
  _margs: MacroArgs,
  asyncness: bool,
) -> TokenStream2 {
  let arg0 = f.sig.inputs.first();
  let uses_opstate = arg0.map(is_rc_refcell_opstate).unwrap_or_default();
  let args_head = if uses_opstate {
    quote! { state, }
  } else {
    quote! {}
  };
  let rust_i0 = if uses_opstate { 1 } else { 0 };
  let (arg_decls, args_tail) = codegen_args(core, f, rust_i0, 1);
  let type_params = exclude_lifetime_params(&f.sig.generics.params);

  // For `async fn` the call expression is itself the future; for an
  // `fn -> impl Future` the call is evaluated eagerly (outside the spawned
  // async block) and only the returned future is awaited.
  let (pre_result, result_fut) = match asyncness {
    true => (
      quote! {},
      quote! { Self::call::<#type_params>(#args_head #args_tail) },
    ),
    false => (
      quote! { let result_fut = Self::call::<#type_params>(#args_head #args_tail); },
      quote! { result_fut },
    ),
  };
  // Non-Result ops get wrapped in Ok(..) so both shapes feed to_op_result.
  let result_wrapper = match is_result(&f.sig.output) {
    true => quote! {},
    false => quote! { let result = Ok(result); },
  };

  quote! {
    use #core::futures::FutureExt;
    // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
    let ctx = unsafe {
      &*(#core::v8::Local::<#core::v8::External>::cast(args.data().unwrap_unchecked()).value()
        as *const #core::_ops::OpCtx)
    };
    let op_id = ctx.id;

    let promise_id = args.get(0);
    let promise_id = #core::v8::Local::<#core::v8::Integer>::try_from(promise_id)
      .map(|l| l.value() as #core::PromiseId)
      .map_err(#core::anyhow::Error::from);
    // Fail if promise id invalid (not an int)
    let promise_id: #core::PromiseId = match promise_id {
      Ok(promise_id) => promise_id,
      Err(err) => {
        #core::_ops::throw_type_error(scope, format!("invalid promise id: {}", err));
        return;
      }
    };

    #arg_decls

    let state = ctx.state.clone();

    // Track async call & get copy of get_error_class_fn
    let get_class = {
      let state = state.borrow();
      state.tracker.track_async(op_id);
      state.get_error_class_fn
    };

    #pre_result
    #core::_ops::queue_async_op(scope, async move {
      let result = #result_fut.await;
      #result_wrapper
      (promise_id, op_id, #core::_ops::to_op_result(get_class, result))
    });
  }
}
/// Generate the body of a v8 func for a sync op
///
/// Leading "special" parameters (a `&mut v8::HandleScope` when the op is
/// marked `v8`, plus `OpState` in either `Rc<RefCell<..>>` or `&mut` form)
/// are supplied from the op context; the remaining parameters are
/// deserialized from the JS arguments starting at v8 index 0.
fn codegen_v8_sync(
  core: &TokenStream2,
  f: &syn::ItemFn,
  margs: MacroArgs,
) -> TokenStream2 {
  let MacroArgs { is_v8, .. } = margs;
  let scope_arg = |arg: &FnArg| {
    if is_handle_scope(arg) {
      Some(quote! { scope, })
    } else {
      None
    }
  };
  let opstate_arg = |arg: &FnArg| match arg {
    arg if is_rc_refcell_opstate(arg) => Some(quote! { ctx.state.clone(), }),
    arg if is_mut_ref_opstate(arg) => {
      Some(quote! { &mut ctx.state.borrow_mut(), })
    }
    _ => None,
  };
  // map_while: special args must be a contiguous prefix; the first
  // non-special parameter ends the scan.
  let special_args = f
    .sig
    .inputs
    .iter()
    .map_while(|a| {
      (if is_v8 { scope_arg(a) } else { None }).or_else(|| opstate_arg(a))
    })
    .collect::<Vec<_>>();
  let rust_i0 = special_args.len();
  let args_head = special_args.into_iter().collect::<TokenStream2>();
  let (arg_decls, args_tail) = codegen_args(core, f, rust_i0, 0);
  let ret = codegen_sync_ret(core, &f.sig.output);
  let type_params = exclude_lifetime_params(&f.sig.generics.params);

  quote! {
    // SAFETY: #core guarantees args.data() is a v8 External pointing to an OpCtx for the isolates lifetime
    let ctx = unsafe {
      &*(#core::v8::Local::<#core::v8::External>::cast(args.data().unwrap_unchecked()).value()
        as *const #core::_ops::OpCtx)
    };

    #arg_decls

    let result = Self::call::<#type_params>(#args_head #args_tail);

    let op_state = &mut ctx.state.borrow();
    op_state.tracker.track_sync(ctx.id);

    #ret
  }
}
/// Generates (a) the deserialization statements for every non-special Rust
/// parameter and (b) the comma-separated `arg_0, arg_1, ...` identifier list
/// used to pass them to `Self::call`. `rust_i0`/`v8_i0` offset the Rust
/// parameter list and the JS argument indices respectively.
fn codegen_args(
  core: &TokenStream2,
  f: &syn::ItemFn,
  rust_i0: usize, // Index of first generic arg in rust
  v8_i0: usize,   // Index of first generic arg in v8/js
) -> (TokenStream2, TokenStream2) {
  let inputs = &f.sig.inputs.iter().skip(rust_i0).enumerate();
  let ident_seq: TokenStream2 = inputs
    .clone()
    .map(|(i, _)| format!("arg_{i}"))
    .collect::<Vec<_>>()
    .join(", ")
    .parse()
    .unwrap()
;
  let decls: TokenStream2 = inputs
    .clone()
    .map(|(i, arg)| {
      codegen_arg(core, arg, format!("arg_{i}").as_ref(), v8_i0 + i)
    })
    .collect();
  (decls, ident_seq)
}
/// Emits the `let arg_N = ...;` statement that deserializes JS argument
/// `idx` into local `name` via serde_v8, throwing a TypeError on failure.
/// A `_` wildcard parameter is bound to `()` without touching the JS args.
fn codegen_arg(
  core: &TokenStream2,
  arg: &syn::FnArg,
  name: &str,
  idx: usize,
) -> TokenStream2 {
  let ident = quote::format_ident!("{name}");
  let pat = match arg {
    syn::FnArg::Typed(pat) => &pat.pat,
    // `self` receivers never occur: ops are free functions.
    _ => unreachable!(),
  };
  // Fast path if arg should be skipped
  if matches!(**pat, syn::Pat::Wild(_)) {
    return quote! { let #ident = (); };
  }
  // Otherwise deserialize it via serde_v8
  quote! {
    let #ident = args.get(#idx as i32);
    let #ident = match #core::serde_v8::from_v8(scope, #ident) {
      Ok(v) => v,
      Err(err) => {
        let msg = format!("Error parsing args at position {}: {}", #idx, #core::anyhow::Error::from(err));
        return #core::_ops::throw_type_error(scope, msg);
      }
    };
  }
}
/// Emits the return-value handling for a sync op:
/// - no return type: nothing,
/// - `Result<(), E>`: skip serialization on Ok,
/// - `Result<T, E>`: serialize Ok(T), convert Err into an OpError,
/// - plain `T`: serialize directly.
fn codegen_sync_ret(
  core: &TokenStream2,
  output: &syn::ReturnType,
) -> TokenStream2 {
  if is_void(output) {
    return quote! {};
  }

  // Optimize Result<(), Err> to skip serde_v8 when Ok(...)
  let ok_block = if is_unit_result(output) {
    quote! {}
  } else {
    quote! {
      match #core::serde_v8::to_v8(scope, result) {
        Ok(ret) => rv.set(ret),
        Err(err) => #core::_ops::throw_type_error(
          scope,
          format!("Error serializing return: {}", #core::anyhow::Error::from(err)),
        ),
      };
    }
  };

  if !is_result(output) {
    return ok_block;
  }

  quote! {
    match result {
      Ok(result) => {
        #ok_block
      },
      Err(err) => {
        let err = #core::OpError::new(op_state.get_error_class_fn, err);
        rv.set(#core::serde_v8::to_v8(scope, err).unwrap());
      },
    };
  }
}
/// A return type is "void" when it renders to no tokens at all,
/// i.e. the function signature has no `->` clause.
fn is_void(ty: impl ToTokens) -> bool {
  let rendered = tokens(ty);
  rendered.is_empty()
}
/// Heuristically detects a `Result` return type from its token rendering.
fn is_result(ty: impl ToTokens) -> bool {
  let rendered = tokens(ty);
  // Plain `Result <...>`, possibly behind a leading `-> `.
  if rendered.trim_start_matches("-> ").starts_with("Result <") {
    return true;
  }
  // Qualified aliases such as `io :: Result <...>` or `anyhow :: Result <...>`
  // are "opaque" at macro time; accept them only when no `<` opens before the
  // path (which would mean the Result is nested inside another generic type).
  rendered
    .find(":: Result <")
    .map_or(false, |idx| !rendered[..idx].contains('<'))
}
/// Detects if a type is of the form Result<(), Err>
fn is_unit_result(ty: impl ToTokens) -> bool {
  let rendered = tokens(&ty);
  is_result(&ty) && rendered.contains("Result < ()")
}
/// True when the parameter is `&mut OpState` (bare or `deno_core`-qualified).
fn is_mut_ref_opstate(arg: &syn::FnArg) -> bool {
  // Render the token stream once; the previous version rendered it twice
  // (each `tokens()` call allocates a fresh String).
  let rendered = tokens(arg);
  rendered.ends_with(": & mut OpState")
    || rendered.ends_with(": & mut deno_core :: OpState")
}
/// True when the parameter is `Rc<RefCell<OpState>>` (bare or qualified).
fn is_rc_refcell_opstate(arg: &syn::FnArg) -> bool {
  // Render the token stream once; the previous version rendered it twice.
  let rendered = tokens(arg);
  rendered.ends_with(": Rc < RefCell < OpState > >")
    || rendered.ends_with(": Rc < RefCell < deno_core :: OpState > >")
}
/// True when the parameter is `&mut v8::HandleScope` (with or without a
/// `deno_core::` prefix and/or an explicit `<'a>` lifetime argument).
fn is_handle_scope(arg: &syn::FnArg) -> bool {
  // Render the token stream once; the previous version re-rendered the
  // argument for every candidate suffix (up to four allocations).
  let rendered = tokens(arg);
  [
    ": & mut v8 :: HandleScope",
    ": & mut v8 :: HandleScope < 'a >",
    ": & mut deno_core :: v8 :: HandleScope",
    ": & mut deno_core :: v8 :: HandleScope < 'a >",
  ]
  .iter()
  .any(|suffix| rendered.ends_with(suffix))
}
/// True when the rendered return type contains `impl Future < Output =`,
/// i.e. a non-`async fn` op that still returns a future.
fn is_future(ty: impl ToTokens) -> bool {
  tokens(&ty).contains("impl Future < Output =")
}
/// Renders any AST node to its canonical space-separated token string.
/// All the string-matching helpers above rely on this spacing convention.
fn tokens(x: impl ToTokens) -> String {
  x.to_token_stream().to_string()
}
fn exclude_lifetime_params(
generic_params: &Punctuated<GenericParam, Comma>,
) -> Punctuated<GenericParam, Comma> {
generic_params
.iter()
.filter(|t| !tokens(t).starts_with('\''))
.cloned()
.collect::<Punctuated<GenericParam, Comma>>()
}
| 28.292857 | 107 | 0.609105 |
7639700eac9aa157751ea2e1ec9bac7ecb2a3f60
| 14,367 |
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations on tuples
//!
//! To access a single element of a tuple one can use the following
//! methods:
//!
//! * `valN` - returns a value of _N_-th element
//! * `refN` - returns a reference to _N_-th element
//! * `mutN` - returns a mutable reference to _N_-th element
//!
//! Indexing starts from zero, so `val0` returns first value, `val1`
//! returns second value, and so on. In general, a tuple with _S_
//! elements provides aforementioned methods suffixed with numbers
//! from `0` to `S-1`. Traits which contain these methods are
//! implemented for tuples with up to 12 elements.
//!
//! If every type inside a tuple implements one of the following
//! traits, then a tuple itself also implements it.
//!
//! * `Clone`
//! * `PartialEq`
//! * `TotalEq`
//! * `PartialOrd`
//! * `TotalOrd`
//! * `Default`
//!
//! # Examples
//!
//! Using methods:
//!
//! ```
//! let pair = ("pi", 3.14);
//! assert_eq!(pair.val0(), "pi");
//! assert_eq!(pair.val1(), 3.14);
//! ```
//!
//! Using traits implemented for tuples:
//!
//! ```
//! use std::default::Default;
//!
//! let a = (1, 2);
//! let b = (3, 4);
//! assert!(a != b);
//!
//! let c = b.clone();
//! assert!(b == c);
//!
//! let d : (u32, f32) = Default::default();
//! assert_eq!(d, (0u32, 0.0f32));
//! ```
use clone::Clone;
#[cfg(not(test))] use cmp::*;
#[cfg(not(test))] use default::Default;
// macro for implementing n-ary tuple functions and operations
// macro for implementing n-ary tuple functions and operations
//
// For each arity it generates a `TupleN` accessor trait (valN/refN/mutN per
// element) plus its impl for the tuple type, and blanket impls of `Clone`,
// `PartialEq`, `TotalEq`, `PartialOrd`, `TotalOrd` and `Default` when every
// element type implements them.
// NOTE(review): the comparison/Default impls are gated on `cfg(not(test))` —
// presumably to avoid clashing with the real core impls in test builds
// (matching the gated `use` items at the top of the file); confirm.
macro_rules! tuple_impls {
    ($(
        $Tuple:ident {
            $(($valN:ident, $refN:ident, $mutN:ident) -> $T:ident {
                ($($x:ident),+) => $ret:expr
            })+
        }
    )+) => {
        $(
            #[allow(missing_doc)]
            pub trait $Tuple<$($T),+> {
                // By-value, by-ref and by-mut-ref access to each element.
                $(fn $valN(self) -> $T;)+
                $(fn $refN<'a>(&'a self) -> &'a $T;)+
                $(fn $mutN<'a>(&'a mut self) -> &'a mut $T;)+
            }

            impl<$($T),+> $Tuple<$($T),+> for ($($T,)+) {
                $(
                    #[inline]
                    #[allow(unused_variable)]
                    fn $valN(self) -> $T {
                        // Destructure the whole tuple; $ret names one binding.
                        let ($($x,)+) = self; $ret
                    }

                    #[inline]
                    #[allow(unused_variable)]
                    fn $refN<'a>(&'a self) -> &'a $T {
                        let ($(ref $x,)+) = *self; $ret
                    }

                    #[inline]
                    #[allow(unused_variable)]
                    fn $mutN<'a>(&'a mut self) -> &'a mut $T {
                        let ($(ref mut $x,)+) = *self; $ret
                    }
                )+
            }

            impl<$($T:Clone),+> Clone for ($($T,)+) {
                fn clone(&self) -> ($($T,)+) {
                    ($(self.$refN().clone(),)+)
                }
            }

            #[cfg(not(test))]
            impl<$($T:PartialEq),+> PartialEq for ($($T,)+) {
                #[inline]
                fn eq(&self, other: &($($T,)+)) -> bool {
                    $(*self.$refN() == *other.$refN())&&+
                }
                #[inline]
                fn ne(&self, other: &($($T,)+)) -> bool {
                    $(*self.$refN() != *other.$refN())||+
                }
            }

            #[cfg(not(test))]
            impl<$($T:TotalEq),+> TotalEq for ($($T,)+) {}

            #[cfg(not(test))]
            impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+) {
                // Lexicographic ordering, delegated to the lexical_ord! macro.
                #[inline]
                fn lt(&self, other: &($($T,)+)) -> bool {
                    lexical_ord!(lt, $(self.$refN(), other.$refN()),+)
                }
                #[inline]
                fn le(&self, other: &($($T,)+)) -> bool {
                    lexical_ord!(le, $(self.$refN(), other.$refN()),+)
                }
                #[inline]
                fn ge(&self, other: &($($T,)+)) -> bool {
                    lexical_ord!(ge, $(self.$refN(), other.$refN()),+)
                }
                #[inline]
                fn gt(&self, other: &($($T,)+)) -> bool {
                    lexical_ord!(gt, $(self.$refN(), other.$refN()),+)
                }
            }

            #[cfg(not(test))]
            impl<$($T:TotalOrd),+> TotalOrd for ($($T,)+) {
                #[inline]
                fn cmp(&self, other: &($($T,)+)) -> Ordering {
                    lexical_cmp!($(self.$refN(), other.$refN()),+)
                }
            }

            #[cfg(not(test))]
            impl<$($T:Default),+> Default for ($($T,)+) {
                #[inline]
                fn default() -> ($($T,)+) {
                    ($({ let x: $T = Default::default(); x},)+)
                }
            }
        )+
    }
}
// Constructs an expression that performs a lexical ordering using method $rel.
// The values are interleaved, so the macro invocation for
// `(a1, a2, a3) < (b1, b2, b3)` would be `lexical_ord!(lt, a1, b1, a2, b2,
// a3, b3)` (and similarly for `lexical_cmp`)
macro_rules! lexical_ord {
    ($rel: ident, $a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
        // Decide on the first pair that compares unequal (via PartialEq);
        // equal pairs recurse to the remaining elements. The final pair
        // applies $rel directly.
        if *$a != *$b { lexical_ord!($rel, $a, $b) }
        else { lexical_ord!($rel, $($rest_a, $rest_b),+) }
    };
    ($rel: ident, $a:expr, $b:expr) => { (*$a) . $rel ($b) };
}
macro_rules! lexical_cmp {
    ($a:expr, $b:expr, $($rest_a:expr, $rest_b:expr),+) => {
        // Total-order analogue of lexical_ord!: recurse only while pairs
        // compare Equal; the first non-Equal ordering wins.
        match ($a).cmp($b) {
            Equal => lexical_cmp!($($rest_a, $rest_b),+),
            ordering   => ordering
        }
    };
    ($a:expr, $b:expr) => { ($a).cmp($b) };
}
// Instantiate the accessor traits and trait impls for arities 1 through 12.
// Each row maps an accessor triple to the tuple element it projects out.
tuple_impls! {
    Tuple1 {
        (val0, ref0, mut0) -> A { (a) => a }
    }
    Tuple2 {
        (val0, ref0, mut0) -> A { (a, b) => a }
        (val1, ref1, mut1) -> B { (a, b) => b }
    }
    Tuple3 {
        (val0, ref0, mut0) -> A { (a, b, c) => a }
        (val1, ref1, mut1) -> B { (a, b, c) => b }
        (val2, ref2, mut2) -> C { (a, b, c) => c }
    }
    Tuple4 {
        (val0, ref0, mut0) -> A { (a, b, c, d) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d) => d }
    }
    Tuple5 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e) => e }
    }
    Tuple6 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e, f) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e, f) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e, f) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e, f) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e, f) => e }
        (val5, ref5, mut5) -> F { (a, b, c, d, e, f) => f }
    }
    Tuple7 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g) => e }
        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g) => f }
        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g) => g }
    }
    Tuple8 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h) => e }
        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h) => f }
        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h) => g }
        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h) => h }
    }
    Tuple9 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i) => e }
        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i) => f }
        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i) => g }
        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i) => h }
        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i) => i }
    }
    Tuple10 {
        (val0, ref0, mut0) -> A { (a, b, c, d, e, f, g, h, i, j) => a }
        (val1, ref1, mut1) -> B { (a, b, c, d, e, f, g, h, i, j) => b }
        (val2, ref2, mut2) -> C { (a, b, c, d, e, f, g, h, i, j) => c }
        (val3, ref3, mut3) -> D { (a, b, c, d, e, f, g, h, i, j) => d }
        (val4, ref4, mut4) -> E { (a, b, c, d, e, f, g, h, i, j) => e }
        (val5, ref5, mut5) -> F { (a, b, c, d, e, f, g, h, i, j) => f }
        (val6, ref6, mut6) -> G { (a, b, c, d, e, f, g, h, i, j) => g }
        (val7, ref7, mut7) -> H { (a, b, c, d, e, f, g, h, i, j) => h }
        (val8, ref8, mut8) -> I { (a, b, c, d, e, f, g, h, i, j) => i }
        (val9, ref9, mut9) -> J { (a, b, c, d, e, f, g, h, i, j) => j }
    }
    Tuple11 {
        (val0,  ref0,  mut0)  -> A { (a, b, c, d, e, f, g, h, i, j, k) => a }
        (val1,  ref1,  mut1)  -> B { (a, b, c, d, e, f, g, h, i, j, k) => b }
        (val2,  ref2,  mut2)  -> C { (a, b, c, d, e, f, g, h, i, j, k) => c }
        (val3,  ref3,  mut3)  -> D { (a, b, c, d, e, f, g, h, i, j, k) => d }
        (val4,  ref4,  mut4)  -> E { (a, b, c, d, e, f, g, h, i, j, k) => e }
        (val5,  ref5,  mut5)  -> F { (a, b, c, d, e, f, g, h, i, j, k) => f }
        (val6,  ref6,  mut6)  -> G { (a, b, c, d, e, f, g, h, i, j, k) => g }
        (val7,  ref7,  mut7)  -> H { (a, b, c, d, e, f, g, h, i, j, k) => h }
        (val8,  ref8,  mut8)  -> I { (a, b, c, d, e, f, g, h, i, j, k) => i }
        (val9,  ref9,  mut9)  -> J { (a, b, c, d, e, f, g, h, i, j, k) => j }
        (val10, ref10, mut10) -> K { (a, b, c, d, e, f, g, h, i, j, k) => k }
    }
    Tuple12 {
        (val0,  ref0,  mut0)  -> A { (a, b, c, d, e, f, g, h, i, j, k, l) => a }
        (val1,  ref1,  mut1)  -> B { (a, b, c, d, e, f, g, h, i, j, k, l) => b }
        (val2,  ref2,  mut2)  -> C { (a, b, c, d, e, f, g, h, i, j, k, l) => c }
        (val3,  ref3,  mut3)  -> D { (a, b, c, d, e, f, g, h, i, j, k, l) => d }
        (val4,  ref4,  mut4)  -> E { (a, b, c, d, e, f, g, h, i, j, k, l) => e }
        (val5,  ref5,  mut5)  -> F { (a, b, c, d, e, f, g, h, i, j, k, l) => f }
        (val6,  ref6,  mut6)  -> G { (a, b, c, d, e, f, g, h, i, j, k, l) => g }
        (val7,  ref7,  mut7)  -> H { (a, b, c, d, e, f, g, h, i, j, k, l) => h }
        (val8,  ref8,  mut8)  -> I { (a, b, c, d, e, f, g, h, i, j, k, l) => i }
        (val9,  ref9,  mut9)  -> J { (a, b, c, d, e, f, g, h, i, j, k, l) => j }
        (val10, ref10, mut10) -> K { (a, b, c, d, e, f, g, h, i, j, k, l) => k }
        (val11, ref11, mut11) -> L { (a, b, c, d, e, f, g, h, i, j, k, l) => l }
    }
}
#[cfg(test)]
mod tests {
    // NOTE(review): this module targets a pre-1.0 Rust toolchain (integer
    // literal suffixes like `4u`/`9i`, the `realstd` facade, paren-form
    // `macro_rules!`); it exercises the tuple accessor/comparison/formatting
    // impls generated by the macro invocations above.
    use super::*;
    use clone::Clone;
    use cmp::*;
    use realstd::str::Str;
    // Tuples clone element-wise and compare equal to their clone.
    #[test]
    fn test_clone() {
        let a = (1, "2");
        let b = a.clone();
        assert_eq!(a, b);
    }
    // The generated valN/refN/mutN accessors return, borrow, and mutably
    // borrow the n-th element, respectively.
    #[test]
    fn test_getters() {
        macro_rules! test_getter(
            ($x:expr, $valN:ident, $refN:ident, $mutN:ident,
             $init:expr, $incr:expr, $result:expr) => ({
                assert_eq!($x.$valN(), $init);
                assert_eq!(*$x.$refN(), $init);
                *$x.$mutN() += $incr;
                assert_eq!(*$x.$refN(), $result);
            })
        )
        let mut x = (0u8, 1u16, 2u32, 3u64, 4u, 5i8, 6i16, 7i32, 8i64, 9i, 10f32, 11f64);
        test_getter!(x, val0, ref0, mut0, 0, 1, 1);
        test_getter!(x, val1, ref1, mut1, 1, 1, 2);
        test_getter!(x, val2, ref2, mut2, 2, 1, 3);
        test_getter!(x, val3, ref3, mut3, 3, 1, 4);
        test_getter!(x, val4, ref4, mut4, 4, 1, 5);
        test_getter!(x, val5, ref5, mut5, 5, 1, 6);
        test_getter!(x, val6, ref6, mut6, 6, 1, 7);
        test_getter!(x, val7, ref7, mut7, 7, 1, 8);
        test_getter!(x, val8, ref8, mut8, 8, 1, 9);
        test_getter!(x, val9, ref9, mut9, 9, 1, 10);
        test_getter!(x, val10, ref10, mut10, 10.0, 1.0, 11.0);
        test_getter!(x, val11, ref11, mut11, 11.0, 1.0, 12.0);
    }
    // Lexicographic comparison of tuples, including NaN's refusal to order
    // under the partial comparison operators.
    #[test]
    fn test_tuple_cmp() {
        let (small, big) = ((1u, 2u, 3u), (3u, 2u, 1u));
        let nan = 0.0/0.0;
        // PartialEq
        assert_eq!(small, small);
        assert_eq!(big, big);
        assert!(small != big);
        assert!(big != small);
        // PartialOrd
        assert!(small < big);
        assert!(!(small < small));
        assert!(!(big < small));
        assert!(!(big < big));
        assert!(small <= small);
        assert!(big <= big);
        assert!(big > small);
        assert!(small >= small);
        assert!(big >= small);
        assert!(big >= big);
        // Any comparison involving NaN in the first differing position is false.
        assert!(!((1.0, 2.0) < (nan, 3.0)));
        assert!(!((1.0, 2.0) <= (nan, 3.0)));
        assert!(!((1.0, 2.0) > (nan, 3.0)));
        assert!(!((1.0, 2.0) >= (nan, 3.0)));
        // The NaN in the second position is never reached: 1.0 < 2.0 decides.
        assert!(((1.0, 2.0) < (2.0, nan)));
        assert!(!((2.0, 2.0) < (2.0, nan)));
        // TotalOrd
        assert!(small.cmp(&small) == Equal);
        assert!(big.cmp(&big) == Equal);
        assert!(small.cmp(&big) == Less);
        assert!(big.cmp(&small) == Greater);
    }
    // `Show` formatting of tuples, including the trailing comma for 1-tuples.
    #[test]
    fn test_show() {
        let s = format!("{}", (1,));
        assert_eq!(s.as_slice(), "(1,)");
        let s = format!("{}", (1, true));
        assert_eq!(s.as_slice(), "(1, true)");
        let s = format!("{}", (1, "hi", true));
        assert_eq!(s.as_slice(), "(1, hi, true)");
    }
}
| 37.609948 | 89 | 0.395002 |
22d7c22d05ee99fa467005ff539e1181a944ff78
| 23,286 |
use std::io::Write;
use std::vec::Vec;
use ansi_term::Colour::{Fixed, Green, Red, Yellow};
use ansi_term::Style;
use console::AnsiCodeIterator;
use syntect::easy::HighlightLines;
use syntect::highlighting::Color;
use syntect::highlighting::Theme;
use syntect::parsing::SyntaxSet;
use content_inspector::ContentType;
use encoding::all::{UTF_16BE, UTF_16LE};
use encoding::{DecoderTrap, Encoding};
use unicode_width::UnicodeWidthChar;
use crate::assets::HighlightingAssets;
use crate::config::Config;
#[cfg(feature = "git")]
use crate::decorations::LineChangesDecoration;
use crate::decorations::{Decoration, GridBorderDecoration, LineNumberDecoration};
#[cfg(feature = "git")]
use crate::diff::LineChanges;
use crate::error::*;
use crate::input::OpenedInput;
use crate::line_range::RangeCheckResult;
use crate::preprocessor::{expand_tabs, replace_nonprintable};
use crate::terminal::{as_terminal_escaped, to_ansi_color};
use crate::wrapping::WrappingMode;
/// Common interface for the output backends (`SimplePrinter` for plain
/// output, `InteractivePrinter` for decorated/highlighted output).
pub(crate) trait Printer {
    /// Print whatever precedes the content of `input` (e.g. a rule, file
    /// name line, or grid opening), depending on the style configuration.
    fn print_header(
        &mut self,
        handle: &mut dyn Write,
        input: &OpenedInput,
        add_header_padding: bool,
    ) -> Result<()>;
    /// Print whatever follows the content of `input` (e.g. a closing grid line).
    fn print_footer(&mut self, handle: &mut dyn Write, input: &OpenedInput) -> Result<()>;
    /// Print a separator between non-adjacent line ranges.
    fn print_snip(&mut self, handle: &mut dyn Write) -> Result<()>;
    /// Print a single input line. `out_of_range` lines must produce no
    /// output (implementations may still need to observe them).
    fn print_line(
        &mut self,
        out_of_range: bool,
        handle: &mut dyn Write,
        line_number: usize,
        line_buffer: &[u8],
    ) -> Result<()>;
}
/// Printer without decorations: header/footer/snip are no-ops and in-range
/// lines are written through unchanged (see the `Printer` impl below).
pub struct SimplePrinter<'a> {
    config: &'a Config<'a>,
}
impl<'a> SimplePrinter<'a> {
pub fn new(config: &'a Config) -> Self {
SimplePrinter { config }
}
}
impl<'a> Printer for SimplePrinter<'a> {
    /// No header is printed in simple mode.
    fn print_header(
        &mut self,
        _handle: &mut dyn Write,
        _input: &OpenedInput,
        _add_header_padding: bool,
    ) -> Result<()> {
        Ok(())
    }

    /// No footer is printed in simple mode.
    fn print_footer(&mut self, _handle: &mut dyn Write, _input: &OpenedInput) -> Result<()> {
        Ok(())
    }

    /// No snip marker is printed in simple mode.
    fn print_snip(&mut self, _handle: &mut dyn Write) -> Result<()> {
        Ok(())
    }

    /// Write the raw line bytes, optionally with non-printable characters
    /// made visible; out-of-range lines are skipped entirely.
    fn print_line(
        &mut self,
        out_of_range: bool,
        handle: &mut dyn Write,
        _line_number: usize,
        line_buffer: &[u8],
    ) -> Result<()> {
        if out_of_range {
            return Ok(());
        }
        if self.config.show_nonprintable {
            write!(
                handle,
                "{}",
                replace_nonprintable(line_buffer, self.config.tab_width)
            )?;
        } else {
            handle.write_all(line_buffer)?;
        }
        Ok(())
    }
}
/// Printer producing the full decorated output: syntax highlighting, line
/// decorations (numbers, git changes, grid) and line wrapping.
pub(crate) struct InteractivePrinter<'a> {
    colors: Colors,
    config: &'a Config<'a>,
    decorations: Vec<Box<dyn Decoration>>,
    // Total width of the decoration panel; 0 when decorations are disabled.
    panel_width: usize,
    // Accumulated ANSI SGR escape sequences seen in the input, re-applied
    // after line wraps so styling survives the break (see `print_line`).
    ansi_prefix_sgr: String,
    content_type: Option<ContentType>,
    #[cfg(feature = "git")]
    pub line_changes: &'a Option<LineChanges>,
    // `None` for binary content, which is not highlighted.
    highlighter: Option<HighlightLines<'a>>,
    syntax_set: &'a SyntaxSet,
    // Theme's line-highlight color, used as background for lines selected
    // by `config.highlighted_lines`.
    background_color_highlight: Option<Color>,
}
impl<'a> InteractivePrinter<'a> {
    /// Build an interactive printer for one input: resolves the theme and
    /// colors, assembles the decorations, computes the panel width, and —
    /// for non-binary content — sets up the syntax highlighter.
    pub(crate) fn new(
        config: &'a Config,
        assets: &'a HighlightingAssets,
        input: &mut OpenedInput,
        #[cfg(feature = "git")] line_changes: &'a Option<LineChanges>,
    ) -> Result<Self> {
        let theme = assets.get_theme(&config.theme);
        let background_color_highlight = theme.settings.line_highlight;

        let colors = if config.colored_output {
            Colors::colored(theme, config.true_color)
        } else {
            Colors::plain()
        };

        // Create decorations.
        let mut decorations: Vec<Box<dyn Decoration>> = Vec::new();

        if config.style_components.numbers() {
            decorations.push(Box::new(LineNumberDecoration::new(&colors)));
        }

        #[cfg(feature = "git")]
        {
            if config.style_components.changes() {
                decorations.push(Box::new(LineChangesDecoration::new(&colors)));
            }
        }

        // Each decoration contributes its own width plus one separator column.
        let mut panel_width: usize =
            decorations.len() + decorations.iter().fold(0, |a, x| a + x.width());

        // The grid border decoration isn't added until after the panel_width calculation, since the
        // print_horizontal_line, print_header, and print_footer functions all assume the panel
        // width is without the grid border.
        if config.style_components.grid() && !decorations.is_empty() {
            decorations.push(Box::new(GridBorderDecoration::new(&colors)));
        }

        // Disable the panel if the terminal is too small (i.e. can't fit 5 characters with the
        // panel showing).
        if config.term_width
            < (decorations.len() + decorations.iter().fold(0, |a, x| a + x.width())) + 5
        {
            decorations.clear();
            panel_width = 0;
        }

        let highlighter = if input
            .reader
            .content_type
            .map_or(false, |c| c.is_binary() && !config.show_nonprintable)
        {
            // Binary content is never highlighted (it is not printed at all
            // unless `--show-all` is in effect).
            None
        } else {
            // Determine the type of syntax for highlighting
            let syntax = match assets.get_syntax(config.language, input, &config.syntax_mapping) {
                Ok(syntax) => syntax,
                Err(Error(ErrorKind::UndetectedSyntax(_), _)) => {
                    assets.get_syntax_set()?.find_syntax_plain_text()
                }
                Err(e) => return Err(e),
            };

            Some(HighlightLines::new(syntax, theme))
        };

        Ok(InteractivePrinter {
            panel_width,
            colors,
            config,
            decorations,
            content_type: input.reader.content_type,
            ansi_prefix_sgr: String::new(),
            #[cfg(feature = "git")]
            line_changes,
            highlighter,
            syntax_set: assets.get_syntax_set()?,
            background_color_highlight,
        })
    }

    /// Draw a horizontal rule spanning the whole terminal width in `style`.
    fn print_horizontal_line_term(&mut self, handle: &mut dyn Write, style: Style) -> Result<()> {
        writeln!(
            handle,
            "{}",
            style.paint("─".repeat(self.config.term_width))
        )?;
        Ok(())
    }

    /// Draw a horizontal grid line, placing `grid_char` where the panel
    /// border crosses it (or a plain rule when there is no panel).
    fn print_horizontal_line(&mut self, handle: &mut dyn Write, grid_char: char) -> Result<()> {
        if self.panel_width == 0 {
            self.print_horizontal_line_term(handle, self.colors.grid)?;
        } else {
            let hline = "─".repeat(self.config.term_width - (self.panel_width + 1));
            let hline = format!("{}{}{}", "─".repeat(self.panel_width), grid_char, hline);
            writeln!(handle, "{}", self.colors.grid.paint(hline))?;
        }

        Ok(())
    }

    /// Render `text` left-aligned in a panel-width column (used for the
    /// " ..." snip marker), including the grid separator when active.
    fn create_fake_panel(&self, text: &str) -> String {
        if self.panel_width == 0 {
            "".to_string()
        } else {
            let text_truncated: String = text.chars().take(self.panel_width - 1).collect();
            // Fix: pad by *character* count, not byte length. `len()` counts
            // bytes, so non-ASCII text could exceed `panel_width - 1` bytes
            // even after the char-based truncation above, making this
            // subtraction underflow (panic in debug builds).
            let text_filled: String = format!(
                "{}{}",
                text_truncated,
                " ".repeat(self.panel_width - 1 - text_truncated.chars().count())
            );
            if self.config.style_components.grid() {
                format!("{} │ ", text_filled)
            } else {
                text_filled
            }
        }
    }

    /// Expand tabs when a tab width is configured; in both cases `cursor`
    /// is advanced by the length of the processed text.
    fn preprocess(&self, text: &str, cursor: &mut usize) -> String {
        if self.config.tab_width > 0 {
            expand_tabs(text, self.config.tab_width, cursor)
        } else {
            *cursor += text.len();
            text.to_string()
        }
    }
}
impl<'a> Printer for InteractivePrinter<'a> {
    /// Print the header: an optional rule, a warning for binary content,
    /// the "kind: title <MODE>" line and the surrounding grid lines — all
    /// gated on the configured style components.
    fn print_header(
        &mut self,
        handle: &mut dyn Write,
        input: &OpenedInput,
        add_header_padding: bool,
    ) -> Result<()> {
        if add_header_padding && self.config.style_components.rule() {
            self.print_horizontal_line_term(handle, self.colors.rule)?;
        }
        if !self.config.style_components.header() {
            // Even without a header we warn about skipped binary content,
            // or open the grid if one is being drawn.
            if Some(ContentType::BINARY) == self.content_type && !self.config.show_nonprintable {
                writeln!(
                    handle,
                    "{}: Binary content from {} will not be printed to the terminal \
                     (but will be present if the output of 'bat' is piped). You can use 'bat -A' \
                     to show the binary file contents.",
                    Yellow.paint("[bat warning]"),
                    input.description.summary(),
                )?;
            } else if self.config.style_components.grid() {
                self.print_horizontal_line(handle, '┬')?;
            }
            return Ok(());
        }
        if self.config.style_components.grid() {
            self.print_horizontal_line(handle, '┬')?;
            write!(
                handle,
                "{}{}",
                " ".repeat(self.panel_width),
                self.colors
                    .grid
                    .paint(if self.panel_width > 0 { "│ " } else { "" }),
            )?;
        } else {
            // Only pad space between files, if we haven't already drawn a horizontal rule
            if add_header_padding && !self.config.style_components.rule() {
                writeln!(handle)?;
            }
            write!(handle, "{}", " ".repeat(self.panel_width))?;
        }
        // Mode tag appended after the title for non-UTF-8 / empty inputs.
        let mode = match self.content_type {
            Some(ContentType::BINARY) => " <BINARY>",
            Some(ContentType::UTF_16LE) => " <UTF-16LE>",
            Some(ContentType::UTF_16BE) => " <UTF-16BE>",
            None => " <EMPTY>",
            _ => "",
        };
        let description = &input.description;
        writeln!(
            handle,
            "{}{}{}",
            description
                .kind()
                .map(|kind| format!("{}: ", kind))
                .unwrap_or_else(|| "".into()),
            self.colors.filename.paint(description.title()),
            mode
        )?;
        if self.config.style_components.grid() {
            // '┼' keeps the panel border going into the content; '┴' closes
            // it when no content will follow (binary without --show-all).
            if self.content_type.map_or(false, |c| c.is_text()) || self.config.show_nonprintable {
                self.print_horizontal_line(handle, '┼')?;
            } else {
                self.print_horizontal_line(handle, '┴')?;
            }
        }
        Ok(())
    }
    /// Close the grid below the content, if a grid is drawn and there was
    /// any printable content.
    fn print_footer(&mut self, handle: &mut dyn Write, _input: &OpenedInput) -> Result<()> {
        if self.config.style_components.grid()
            && (self.content_type.map_or(false, |c| c.is_text()) || self.config.show_nonprintable)
        {
            self.print_horizontal_line(handle, '┴')
        } else {
            Ok(())
        }
    }
    /// Print the "8<" snip marker between non-contiguous line ranges.
    fn print_snip(&mut self, handle: &mut dyn Write) -> Result<()> {
        let panel = self.create_fake_panel(" ...");
        let panel_count = panel.chars().count();
        let title = "8<";
        let title_count = title.chars().count();
        // "─ " is two characters wide, hence the divisors below.
        let snip_left = "─ ".repeat((self.config.term_width - panel_count - (title_count / 2)) / 4);
        let snip_left_count = snip_left.chars().count(); // Can't use .len() with Unicode.
        let snip_right =
            " ─".repeat((self.config.term_width - panel_count - snip_left_count - title_count) / 2);
        writeln!(
            handle,
            "{}",
            self.colors
                .grid
                .paint(format!("{}{}{}{}", panel, snip_left, title, snip_right))
        )?;
        Ok(())
    }
    /// Decode, highlight, decorate and (optionally) wrap a single line.
    ///
    /// Out-of-range lines are still fed through the highlighter so its
    /// parser state stays in sync, but produce no output.
    fn print_line(
        &mut self,
        out_of_range: bool,
        handle: &mut dyn Write,
        line_number: usize,
        line_buffer: &[u8],
    ) -> Result<()> {
        // Decode the raw bytes according to the detected content type.
        let line = if self.config.show_nonprintable {
            replace_nonprintable(&line_buffer, self.config.tab_width)
        } else {
            match self.content_type {
                Some(ContentType::BINARY) | None => {
                    return Ok(());
                }
                Some(ContentType::UTF_16LE) => UTF_16LE
                    .decode(&line_buffer, DecoderTrap::Replace)
                    .map_err(|_| "Invalid UTF-16LE")?,
                Some(ContentType::UTF_16BE) => UTF_16BE
                    .decode(&line_buffer, DecoderTrap::Replace)
                    .map_err(|_| "Invalid UTF-16BE")?,
                _ => String::from_utf8_lossy(&line_buffer).to_string(),
            }
        };
        // Highlight unconditionally (before the out_of_range check) to keep
        // the highlighter's state consistent across skipped lines.
        let regions = {
            let highlighter = match self.highlighter {
                Some(ref mut highlighter) => highlighter,
                _ => {
                    return Ok(());
                }
            };
            highlighter.highlight(line.as_ref(), self.syntax_set)
        };
        if out_of_range {
            return Ok(());
        }
        let mut cursor: usize = 0;
        let mut cursor_max: usize = self.config.term_width;
        let mut cursor_total: usize = 0;
        let mut panel_wrap: Option<String> = None;
        // Line highlighting
        let highlight_this_line =
            self.config.highlighted_lines.0.check(line_number) == RangeCheckResult::InRange;
        let background_color = self
            .background_color_highlight
            .filter(|_| highlight_this_line);
        // Line decorations.
        if self.panel_width > 0 {
            let decorations = self
                .decorations
                .iter()
                .map(|ref d| d.generate(line_number, false, self))
                .collect::<Vec<_>>();
            for deco in decorations {
                write!(handle, "{} ", deco.text)?;
                cursor_max -= deco.width + 1;
            }
        }
        // Line contents.
        if matches!(self.config.wrapping_mode, WrappingMode::NoWrapping(_)) {
            // No wrapping: emit each highlighted region as-is (minus the
            // trailing newline, re-appended after any highlight padding).
            let true_color = self.config.true_color;
            let colored_output = self.config.colored_output;
            let italics = self.config.use_italic_text;
            for &(style, region) in regions.iter() {
                let text = &*self.preprocess(region, &mut cursor_total);
                let text_trimmed = text.trim_end_matches(|c| c == '\r' || c == '\n');
                write!(
                    handle,
                    "{}",
                    as_terminal_escaped(
                        style,
                        text_trimmed,
                        true_color,
                        colored_output,
                        italics,
                        background_color
                    )
                )?;
                if text.len() != text_trimmed.len() {
                    // Pad a highlighted line with background color up to the
                    // terminal edge before restoring the line ending.
                    if let Some(background_color) = background_color {
                        let ansi_style = Style {
                            background: to_ansi_color(background_color, true_color),
                            ..Default::default()
                        };
                        let width = if cursor_total <= cursor_max {
                            cursor_max - cursor_total + 1
                        } else {
                            0
                        };
                        write!(handle, "{}", ansi_style.paint(" ".repeat(width)))?;
                    }
                    write!(handle, "{}", &text[text_trimmed.len()..])?;
                }
            }
            if !self.config.style_components.plain() && line.bytes().next_back() != Some(b'\n') {
                writeln!(handle)?;
            }
        } else {
            // Wrapping mode: walk each region character by character,
            // breaking when the displayed width would exceed the panel-
            // adjusted terminal width, and passing ANSI escapes through.
            for &(style, region) in regions.iter() {
                let ansi_iterator = AnsiCodeIterator::new(region);
                let mut ansi_prefix: String = String::new();
                for chunk in ansi_iterator {
                    match chunk {
                        // ANSI escape passthrough.
                        (text, true) => {
                            let is_ansi_csi = text.starts_with("\x1B[");
                            if is_ansi_csi && text.ends_with('m') {
                                // It's an ANSI SGR sequence.
                                // We should be mostly safe to just append these together.
                                ansi_prefix.push_str(text);
                                if text == "\x1B[0m" {
                                    self.ansi_prefix_sgr = "\x1B[0m".to_owned();
                                } else {
                                    self.ansi_prefix_sgr.push_str(text);
                                }
                            } else if is_ansi_csi {
                                // It's a regular CSI sequence.
                                // We should be mostly safe to just append these together.
                                ansi_prefix.push_str(text);
                            } else {
                                // It's probably a VT100 code.
                                // Passing it through is the safest bet.
                                write!(handle, "{}", text)?;
                            }
                        }
                        // Regular text.
                        (text, false) => {
                            let text = self.preprocess(
                                text.trim_end_matches(|c| c == '\r' || c == '\n'),
                                &mut cursor_total,
                            );
                            let mut max_width = cursor_max - cursor;
                            // line buffer (avoid calling write! for every character)
                            let mut line_buf = String::with_capacity(max_width * 4);
                            // Displayed width of line_buf
                            let mut current_width = 0;
                            for c in text.chars() {
                                // calculate the displayed width for next character
                                let cw = c.width().unwrap_or(0);
                                current_width += cw;
                                // if next character cannot be printed on this line,
                                // flush the buffer.
                                if current_width > max_width {
                                    // Generate wrap padding if not already generated.
                                    if panel_wrap.is_none() {
                                        panel_wrap = if self.panel_width > 0 {
                                            Some(format!(
                                                "{} ",
                                                self.decorations
                                                    .iter()
                                                    .map(|ref d| d
                                                        .generate(line_number, true, self)
                                                        .text)
                                                    .collect::<Vec<String>>()
                                                    .join(" ")
                                            ))
                                        } else {
                                            Some("".to_string())
                                        }
                                    }
                                    // It wraps.
                                    write!(
                                        handle,
                                        "{}\n{}",
                                        as_terminal_escaped(
                                            style,
                                            &*format!(
                                                "{}{}{}",
                                                self.ansi_prefix_sgr, ansi_prefix, line_buf
                                            ),
                                            self.config.true_color,
                                            self.config.colored_output,
                                            self.config.use_italic_text,
                                            background_color
                                        ),
                                        panel_wrap.clone().unwrap()
                                    )?;
                                    cursor = 0;
                                    max_width = cursor_max;
                                    line_buf.clear();
                                    current_width = cw;
                                }
                                line_buf.push(c);
                            }
                            // flush the buffer
                            cursor += current_width;
                            write!(
                                handle,
                                "{}",
                                as_terminal_escaped(
                                    style,
                                    &*format!(
                                        "{}{}{}",
                                        self.ansi_prefix_sgr, ansi_prefix, line_buf
                                    ),
                                    self.config.true_color,
                                    self.config.colored_output,
                                    self.config.use_italic_text,
                                    background_color
                                )
                            )?;
                            // Clear the ANSI prefix buffer.
                            ansi_prefix.clear();
                        }
                    }
                }
            }
            // Pad a highlighted line to the right edge with the highlight
            // background color.
            if let Some(background_color) = background_color {
                let ansi_style = Style {
                    background: to_ansi_color(background_color, self.config.true_color),
                    ..Default::default()
                };
                write!(
                    handle,
                    "{}",
                    ansi_style.paint(" ".repeat(cursor_max - cursor))
                )?;
            }
            writeln!(handle)?;
        }
        Ok(())
    }
}
// 256-color palette index (a gray) used for the gutter when the theme does
// not provide a gutter foreground color.
const DEFAULT_GUTTER_COLOR: u8 = 238;
/// Styles for the non-content parts of the output (grid, rules, file name,
/// git change markers, line numbers).
#[derive(Debug, Default)]
pub struct Colors {
    pub grid: Style,
    pub rule: Style,
    pub filename: Style,
    pub git_added: Style,
    pub git_removed: Style,
    pub git_modified: Style,
    pub line_number: Style,
}
impl Colors {
    /// Styles that apply no coloring at all (plain output).
    fn plain() -> Self {
        Colors::default()
    }

    /// Derive the decoration styles from the syntax highlighting theme.
    fn colored(theme: &Theme, true_color: bool) -> Self {
        // Prefer the theme's gutter foreground. Note that `to_ansi_color`
        // may map the special value #00000001 to `None`, leaving the
        // terminal's default foreground in effect. Themes without a gutter
        // color fall back to a fixed gray.
        let gutter_foreground = match theme.settings.gutter_foreground {
            Some(color) => to_ansi_color(color, true_color),
            None => Some(Fixed(DEFAULT_GUTTER_COLOR)),
        };
        let gutter_style = Style {
            foreground: gutter_foreground,
            ..Style::default()
        };
        Colors {
            grid: gutter_style,
            rule: gutter_style,
            filename: Style::new().bold(),
            git_added: Green.normal(),
            git_removed: Red.normal(),
            git_modified: Yellow.normal(),
            line_number: gutter_style,
        }
    }
}
| 35.605505 | 100 | 0.454264 |
9ce86a0861b367e5ce07a5133ba9d006cf5216a4
| 6,629 |
use crate::api::TwitterApi;
use crate::api_result::ApiResult;
use crate::authorization::Authorization;
use crate::data::{ReplySettings, Tweet};
use crate::id::{IntoNumericId, IntoStringId, StringId};
use reqwest::Method;
use serde::{Deserialize, Serialize};
use std::time::Duration;
use url::Url;
// Geo information attached to a draft tweet (a tagged place id).
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
struct DraftTweetGeo {
    pub place_id: StringId,
}
// Media attachments for a draft tweet, plus users tagged in the media.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
struct DraftTweetMedia {
    pub media_ids: Vec<String>,
    pub tagged_user_ids: Vec<String>,
}
// Poll attached to a draft tweet.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
struct DraftTweetPoll {
    pub options: Vec<String>,
    // Poll lifetime; `TweetBuilder::poll` converts a `Duration` to whole minutes.
    pub duration_minutes: u64,
}
// Reply metadata for a draft tweet.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, PartialEq)]
struct DraftTweetReply {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exclude_reply_user_ids: Option<Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub in_reply_to_tweet_id: Option<String>,
}
// The request body serialized and POSTed by `TweetBuilder::send`. Fields
// left as `None` are omitted from the serialized JSON.
#[derive(Clone, Default, Debug, Serialize, Deserialize, Eq, PartialEq)]
struct DraftTweet {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub direct_message_deep_link: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub for_super_followers_only: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub geo: Option<DraftTweetGeo>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub media: Option<DraftTweetMedia>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub poll: Option<DraftTweetPoll>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub quote_tweet_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reply: Option<DraftTweetReply>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub reply_settings: Option<ReplySettings>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub text: Option<String>,
}
/// Incrementally assembles a draft tweet and posts it via [`TweetBuilder::send`].
#[derive(Debug)]
pub struct TweetBuilder<A> {
    client: TwitterApi<A>,
    // Endpoint the finished draft is POSTed to.
    url: Url,
    tweet: DraftTweet,
}
impl<A> TweetBuilder<A>
where
A: Authorization,
{
pub(crate) fn new(client: &TwitterApi<A>, url: Url) -> Self {
Self {
client: client.clone(),
url,
tweet: Default::default(),
}
}
pub fn text(&mut self, text: String) -> &mut Self {
self.tweet.text = Some(text);
self
}
pub fn direct_message_deep_link(&mut self, direct_message_deep_link: String) -> &mut Self {
self.tweet.direct_message_deep_link = Some(direct_message_deep_link);
self
}
pub fn for_super_followers_only(&mut self, for_super_followers_only: bool) -> &mut Self {
self.tweet.for_super_followers_only = Some(for_super_followers_only);
self
}
pub fn place_id(&mut self, place_id: impl IntoStringId) -> &mut Self {
if let Some(geo) = self.tweet.geo.as_mut() {
geo.place_id = place_id.into_id();
} else {
self.tweet.geo = Some(DraftTweetGeo {
place_id: place_id.into_id(),
});
}
self
}
pub fn add_media(
&mut self,
media_ids: impl IntoIterator<Item = impl IntoNumericId>,
tagged_user_ids: impl IntoIterator<Item = impl IntoNumericId>,
) -> &mut Self {
if let Some(media) = self.tweet.media.as_mut() {
media
.media_ids
.extend(media_ids.into_iter().map(|id| id.to_string()));
media
.tagged_user_ids
.extend(tagged_user_ids.into_iter().map(|id| id.to_string()));
} else {
self.tweet.media = Some(DraftTweetMedia {
media_ids: media_ids.into_iter().map(|id| id.to_string()).collect(),
tagged_user_ids: tagged_user_ids
.into_iter()
.map(|id| id.to_string())
.collect(),
});
}
self
}
pub fn poll(
&mut self,
options: impl IntoIterator<Item = impl ToString>,
duration: Duration,
) -> &mut Self {
self.tweet.poll = Some(DraftTweetPoll {
options: options
.into_iter()
.map(|option| option.to_string())
.collect::<Vec<_>>(),
duration_minutes: duration.as_secs() / 60,
});
self
}
pub fn quote_tweet_id(&mut self, id: impl IntoNumericId) -> &mut Self {
self.tweet.quote_tweet_id = Some(id.to_string());
self
}
pub fn add_exclude_reply_user_id(&mut self, user_id: impl IntoNumericId) -> &mut Self {
self.add_exclude_reply_user_ids([user_id])
}
pub fn add_exclude_reply_user_ids(
&mut self,
user_ids: impl IntoIterator<Item = impl IntoNumericId>,
) -> &mut Self {
let mut user_ids = user_ids
.into_iter()
.map(|id| id.to_string())
.collect::<Vec<_>>();
if let Some(reply) = self.tweet.reply.as_mut() {
if let Some(exclude_reply_user_ids) = reply.exclude_reply_user_ids.as_mut() {
exclude_reply_user_ids.append(&mut user_ids)
} else {
reply.exclude_reply_user_ids = Some(user_ids);
}
} else {
self.tweet.reply = Some(DraftTweetReply {
exclude_reply_user_ids: Some(user_ids),
in_reply_to_tweet_id: None,
});
}
self
}
pub fn in_reply_to_tweet_id(&mut self, user_id: impl IntoNumericId) -> &mut Self {
if let Some(reply) = self.tweet.reply.as_mut() {
reply.in_reply_to_tweet_id = Some(user_id.to_string());
} else {
self.tweet.reply = Some(DraftTweetReply {
exclude_reply_user_ids: None,
in_reply_to_tweet_id: Some(user_id.to_string()),
});
}
self
}
pub fn reply_settings(&mut self, reply_settings: ReplySettings) -> &mut Self {
self.tweet.reply_settings = Some(reply_settings);
self
}
pub async fn send(&self) -> ApiResult<A, Tweet, ()> {
self.client
.send(
self.client
.request(Method::POST, self.url.clone())
.json(&self.tweet),
)
.await
}
}
impl<A> Clone for TweetBuilder<A> {
    // Hand-written so `TweetBuilder<A>` is cloneable without requiring
    // `A: Clone` (which `#[derive(Clone)]` would impose); `TwitterApi<A>`
    // itself is cloneable under the same conditions (see `new`).
    fn clone(&self) -> Self {
        Self {
            client: self.client.clone(),
            url: self.url.clone(),
            tweet: self.tweet.clone(),
        }
    }
}
| 33.311558 | 95 | 0.593755 |
50eb8ae30b042c0fef5e6ec60f7fb13355e53201
| 10,789 |
use std::{
collections::HashMap,
fmt::Debug,
io::{self, BufRead, BufReader, Read},
path::Path,
};
use thiserror::Error;
use zip::{read::ZipFile, result::ZipError};
/// Types of entries in a GedcomxFile.
#[derive(Debug)]
pub enum GedcomxFileEntry<R: Read> {
    /// A JSON or XML document in GEDCOM X format that has been deserialized.
    Gedcomx(gedcomx::Gedcomx),
    /// A GedcomxFile manifest.
    Manifest(GedcomxManifest),
    /// Any other filetypes are returned as a type implementing Read.
    Reader(R),
}
// Zip-archive path of the manifest entry.
const MANIFEST_STR: &str = "META-INF/MANIFEST.MF";
/// Manifest section name under which the attributes of the GEDCOM X file
/// itself (as opposed to an individual resource) are stored.
pub const GEDCOMX_RESOURCE_NAME: &str = "main";
/// A file containing a bundle of genealogical resources.
#[derive(Debug)]
pub struct GedcomxFile<R> {
    // The underlying zip archive; every resource is a zip entry.
    inner: zip::ZipArchive<R>,
}
impl<R: io::Read + io::Seek> GedcomxFile<R> {
    /// Create from a reader. This is usually a std::fs::File.
    pub fn from_reader(reader: R) -> Result<Self, GedcomxFileError> {
        let zip = zip::ZipArchive::new(reader)?;
        Ok(Self { inner: zip })
    }
    /// Number of files contained in this GedcomxFile.
    pub fn len(&self) -> usize {
        self.inner.len()
    }
    /// Whether this GedcomxFile contains no files.
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
    /// Get the resource entries by index in this GEDCOM X file.
    pub fn by_index(
        &mut self,
        file_number: usize,
    ) -> Result<GedcomxFileEntry<impl Read + '_>, GedcomxFileError> {
        let entry = self.inner.by_index(file_number)?;
        Self::file_entry_from_entry(entry)
    }
    /// Get the names of the files in this GedcomxFile. These can be used as arguments for `by_name`.
    pub fn file_names(&self) -> impl Iterator<Item = &str> {
        self.inner.file_names()
    }
    /// Get the resource entries by name in this GEDCOM X file.
    pub fn by_name(
        &mut self,
        name: &str,
    ) -> Result<GedcomxFileEntry<impl Read + '_>, GedcomxFileError> {
        let entry = self.inner.by_name(name)?;
        Self::file_entry_from_entry(entry)
    }
    // Classify a zip entry: the manifest path parses as `Manifest`,
    // `.json`/`.xml` entries parse as GEDCOM X documents, and anything else
    // is handed back unparsed as a raw reader.
    fn file_entry_from_entry(
        mut entry: ZipFile,
    ) -> Result<GedcomxFileEntry<impl Read + '_>, GedcomxFileError> {
        if entry.enclosed_name() == Some(Path::new(MANIFEST_STR)) {
            return Ok(GedcomxFileEntry::Manifest(GedcomxManifest::from_reader(
                entry,
            )?));
        }
        // Dispatch on the (sanitized, enclosed) file extension.
        match entry
            .enclosed_name()
            .and_then(|n| n.extension())
            .and_then(|e| e.to_str())
        {
            Some("json") => match gedcomx::Gedcomx::from_json_reader(&mut entry) {
                Ok(gx) => Ok(GedcomxFileEntry::Gedcomx(gx)),
                Err(e) => Err(GedcomxFileError::GedcomxError(e)),
            },
            Some("xml") => match gedcomx::Gedcomx::from_xml_reader(&mut entry) {
                Ok(gx) => Ok(GedcomxFileEntry::Gedcomx(gx)),
                Err(e) => Err(GedcomxFileError::GedcomxError(e)),
            },
            _ => Ok(GedcomxFileEntry::Reader(entry)),
        }
    }
    /// Get the manifest, or return an error if it's missing or unreadable.
    pub fn manifest(&mut self) -> Result<GedcomxManifest, GedcomxFileError> {
        match self.by_name(MANIFEST_STR)? {
            GedcomxFileEntry::Manifest(m) => Ok(m),
            // `by_name(MANIFEST_STR)` always classifies a found entry as
            // Manifest, so this arm is only a defensive fallback.
            _ => Err(GedcomxFileError::MissingManifest),
        }
    }
    /// Get the attributes for a resource specified by name. Use `GEDCOMX_RESOURCE_NAME` to get the attributes for the GEDCOM X file itself.
    pub fn attributes_by_name(
        &mut self,
        name: &str,
    ) -> Result<HashMap<String, String>, GedcomxFileError> {
        let manifest = self.manifest()?;
        // A manifest section missing for `name` surfaces as the zip
        // `FileNotFound` error.
        manifest
            .attributes_by_name(name)
            .ok_or(GedcomxFileError::ZipError(ZipError::FileNotFound))
    }
    /// Get the attributes for a resource specified by index.
    pub fn attributes_by_index(
        &mut self,
        file_number: usize,
    ) -> Result<HashMap<String, String>, GedcomxFileError> {
        // Resolve the entry name in a scope of its own: the `ZipFile`
        // borrow of `inner` must end before `attributes_by_name`
        // re-borrows `self` mutably.
        let name = {
            let entry = self.inner.by_index(file_number)?;
            let name = entry.name().to_string();
            name
        };
        self.attributes_by_name(&name)
    }
}
/// Required entry in a GEDCOM X file that provides metadata about the file and each of the resources in the file.
#[derive(Debug)]
pub struct GedcomxManifest {
    // Section name (each section's `Name` attribute) -> attribute map.
    inner: HashMap<String, HashMap<String, String>>,
}
impl GedcomxManifest {
    /// Parse a `META-INF/MANIFEST.MF`-style manifest: blank-line-separated
    /// sections of `Key: Value` pairs, each keyed by its `Name` attribute.
    /// The first ("main") section describes the GEDCOM X file itself and is
    /// stored under `GEDCOMX_RESOURCE_NAME`.
    ///
    /// Returns `GedcomxFileError::InvalidManifest` when a line cannot be
    /// read or a section is missing its `Name` attribute.
    fn from_reader<R>(reader: R) -> Result<Self, GedcomxFileError>
    where
        R: Read,
    {
        // Saves a finished section under its `Name` attribute and clears it.
        // A section with entries but no `Name` is an invalid manifest.
        fn flush_section(
            sections: &mut HashMap<String, HashMap<String, String>>,
            current_section: &mut HashMap<String, String>,
        ) -> Result<(), GedcomxFileError> {
            if !current_section.is_empty() {
                let name = current_section
                    .get("Name")
                    .ok_or(GedcomxFileError::InvalidManifest)?
                    .to_string();
                sections.insert(name, current_section.clone());
                current_section.clear();
            }
            Ok(())
        }

        let mut sections = HashMap::new();
        // The main section carries no explicit `Name` attribute, so seed it.
        let mut current_section = {
            let mut m = HashMap::new();
            m.insert("Name".to_string(), GEDCOMX_RESOURCE_NAME.to_string());
            m
        };
        let buf_reader = BufReader::new(reader);
        for line in buf_reader.lines() {
            let line = line.map_err(|_| GedcomxFileError::InvalidManifest)?;
            if line.is_empty() {
                // Blank line terminates the current section.
                flush_section(&mut sections, &mut current_section)?;
            } else if let Some((key, value)) = line.split_once(":") {
                current_section.insert(key.trim().to_string(), value.trim().to_string());
            }
        }
        // Bug fix: a manifest that does not end with a blank line used to
        // drop its final section silently; flush any pending section here.
        flush_section(&mut sections, &mut current_section)?;
        Ok(Self { inner: sections })
    }

    /// Attributes in the form of key -> value mappings for a given resource name. Use `GEDCOMX_RESOURCE_NAME` as the name to get the attributes of the GEDCOM X file itself.
    pub fn attributes_by_name(&self, name: &str) -> Option<HashMap<String, String>> {
        self.inner.get(name).cloned()
    }
}
/// Errors produced by the crate.
#[derive(Error, Debug)]
pub enum GedcomxFileError {
    /// Error while zipping / unzipping a file.
    #[error("zip error")]
    ZipError(#[from] zip::result::ZipError),
    /// Error while parsing the contents of a GEDCOM X file.
    #[error("gedcomx error")]
    GedcomxError(#[from] gedcomx::GedcomxError),
    /// No manifest file was found in this GedcomxFile.
    #[error("no manifest in gedcomx file")]
    MissingManifest,
    /// The manifest did not have the correct format (an unreadable line, or
    /// a section without a `Name` attribute).
    #[error("invalid manifest")]
    InvalidManifest,
}
#[cfg(test)]
mod tests {
    // NOTE: these tests read the checked-in fixture at `data/sample.gedx`.
    use super::*;
    use std::{fs::File, iter::FromIterator, path::Path};
    // Every entry can be opened by name and classified without error.
    #[test]
    fn read_by_name() {
        let fp = Path::new("data/sample.gedx");
        let f = File::open(fp).unwrap();
        let mut gxf = GedcomxFile::from_reader(f).unwrap();
        // Collect names up front: `by_name` needs `&mut self`, so the
        // `file_names` borrow may not be held while calling it.
        let names: Vec<_> = gxf.file_names().map(|s| s.to_string()).collect();
        for name in names {
            let gx = gxf.by_name(name.as_str()).unwrap();
            match gx {
                GedcomxFileEntry::Gedcomx(g) => println!("{:?}", g),
                GedcomxFileEntry::Manifest(m) => println!("Manifest: {:?}", m),
                GedcomxFileEntry::Reader(_) => println!("Reader"),
            }
        }
    }
    // Every entry can be opened by index and classified without error.
    #[test]
    fn read_by_index() {
        let fp = Path::new("data/sample.gedx");
        let f = File::open(fp).unwrap();
        let mut gxf = GedcomxFile::from_reader(f).unwrap();
        for index in 0..gxf.len() {
            let gx = gxf.by_index(index).unwrap();
            match gx {
                GedcomxFileEntry::Gedcomx(g) => println!("Gedcomx {:?}", g),
                GedcomxFileEntry::Manifest(m) => println!("Manifest: {:?}", m),
                GedcomxFileEntry::Reader(_) => println!("Reader"),
            }
        }
    }
    // The main section is reachable through `GEDCOMX_RESOURCE_NAME` and
    // records its own name as "main".
    #[test]
    fn gedcomx_resource_constant() {
        let fp = Path::new("data/sample.gedx");
        let f = File::open(fp).unwrap();
        let mut gxf = GedcomxFile::from_reader(f).unwrap();
        let manifest = gxf.manifest().unwrap();
        let name = manifest.attributes_by_name(GEDCOMX_RESOURCE_NAME).unwrap();
        assert_eq!(name.get("Name").unwrap(), "main");
    }
    // The parsed manifest matches the fixture's expected sections exactly;
    // keys and values are compared in both directions.
    #[test]
    fn manifest() {
        let fp = Path::new("data/sample.gedx");
        let f = File::open(fp).unwrap();
        let mut gxf = GedcomxFile::from_reader(f).unwrap();
        let expected = {
            let main = HashMap::<_, _>::from_iter([
                ("Name".to_string(), GEDCOMX_RESOURCE_NAME.to_string()),
                ("Manifest-Version".to_string(), "1.0".to_string()),
                (
                    "Created-By".to_string(),
                    "FamilySearch Platform API 0.1".to_string(),
                ),
            ]);
            let person1 = HashMap::<_, _>::from_iter([
                ("Name".to_string(), "person1.png".to_string()),
                ("Content-Type".to_string(), "image/png".to_string()),
                (
                    "X-DC-modified".to_string(),
                    "2014-10-07T21:15:57.161Z".to_string(),
                ),
            ]);
            let person2 = HashMap::<_, _>::from_iter([
                ("Name".to_string(), "person2.png".to_string()),
                ("Content-Type".to_string(), "image/png".to_string()),
                (
                    "X-DC-modified".to_string(),
                    "2014-10-07T21:15:57.162Z".to_string(),
                ),
            ]);
            let tree = HashMap::<_, _>::from_iter([
                ("Name".to_string(), "tree.xml".to_string()),
                (
                    "Content-Type".to_string(),
                    "application/x-gedcomx-v1+xml".to_string(),
                ),
                (
                    "X-DC-modified".to_string(),
                    "2014-10-07T21:15:57.148Z".to_string(),
                ),
            ]);
            HashMap::<_, _>::from_iter([
                (GEDCOMX_RESOURCE_NAME.to_string(), main),
                ("person1.png".to_string(), person1),
                ("person2.png".to_string(), person2),
                ("tree.xml".to_string(), tree),
            ])
        };
        let actual = gxf.manifest().unwrap().inner;
        // Outer keys
        assert!(actual.keys().all(|k| expected.contains_key(k)));
        assert!(expected.keys().all(|k| actual.contains_key(k)));
        // Inner keys and values
        for (name, section) in &actual {
            for (k, v) in section {
                assert_eq!(v, expected.get(name).unwrap().get(k).unwrap());
            }
        }
        for (name, section) in &expected {
            for (k, v) in section {
                assert_eq!(v, actual.get(name).unwrap().get(k).unwrap());
            }
        }
    }
}
| 34.359873 | 173 | 0.548151 |
0e257ebf42d3483971f3ec70d47a063ea6ff2a78
| 17,371 |
// Copyright 2018-2022 the Deno authors. All rights reserved. MIT license.
use super::client::Client;
use super::logging::lsp_log;
use crate::fs_util;
use deno_core::error::AnyError;
use deno_core::serde::Deserialize;
use deno_core::serde::Serialize;
use deno_core::serde_json;
use deno_core::serde_json::Value;
use deno_core::ModuleSpecifier;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::sync::Arc;
use tower_lsp::lsp_types as lsp;
// Name of the configuration/settings section this server's settings live
// under.
pub const SETTINGS_SECTION: &str = "deno";
/// Client capability flags this server cares about. NOTE(review): field
/// names mirror LSP (or Deno-specific) capability names; presumably
/// populated from the `initialize` params — confirm at the call site.
#[derive(Debug, Clone, Default)]
pub struct ClientCapabilities {
  pub code_action_disabled_support: bool,
  pub line_folding_only: bool,
  pub status_notification: bool,
  /// The client provides the `experimental.testingApi` capability, which is
  /// built around VSCode's testing API. It indicates that the server should
  /// send notifications about tests discovered in modules.
  pub testing_api: bool,
  pub workspace_configuration: bool,
  pub workspace_did_change_watched_files: bool,
}
fn is_true() -> bool {
true
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct CodeLensSettings {
  /// Flag for providing implementation code lenses.
  #[serde(default)]
  pub implementations: bool,
  /// Flag for providing reference code lenses.
  #[serde(default)]
  pub references: bool,
  /// Flag for providing reference code lens on all functions. For this to have
  /// an impact, the `references` flag needs to be `true`.
  #[serde(default)]
  pub references_all_functions: bool,
  /// Flag for providing test code lens on `Deno.test` statements. There is
  /// also the `test_args` setting, but this is not used by the server.
  #[serde(default = "is_true")]
  pub test: bool,
}
impl Default for CodeLensSettings {
  // Mirrors the serde field defaults above: everything off except `test`.
  fn default() -> Self {
    Self {
      implementations: false,
      references: false,
      references_all_functions: false,
      test: true,
    }
  }
}
#[derive(Debug, Clone, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct CodeLensSpecifierSettings {
  /// Flag for providing test code lens on `Deno.test` statements. There is
  /// also the `test_args` setting, but this is not used by the server.
  #[serde(default = "is_true")]
  pub test: bool,
}
impl Default for CodeLensSpecifierSettings {
  // Matches the serde default: test code lenses are on by default.
  fn default() -> Self {
    Self { test: true }
  }
}
/// Settings that control completion suggestions; surfaced as the `suggest`
/// section of the workspace settings.
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct CompletionSettings {
  #[serde(default)]
  pub complete_function_calls: bool,
  /// Defaults to `true` when omitted from the client settings.
  #[serde(default = "is_true")]
  pub names: bool,
  /// Defaults to `true` when omitted from the client settings.
  #[serde(default = "is_true")]
  pub paths: bool,
  /// Defaults to `true` when omitted from the client settings.
  #[serde(default = "is_true")]
  pub auto_imports: bool,
  /// Nested settings for import completions.
  #[serde(default)]
  pub imports: ImportCompletionSettings,
}
impl Default for CompletionSettings {
  // Keep in sync with the serde field defaults declared on the struct.
  fn default() -> Self {
    Self {
      complete_function_calls: false,
      names: true,
      paths: true,
      auto_imports: true,
      imports: ImportCompletionSettings::default(),
    }
  }
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct ImportCompletionSettings {
  /// A flag that indicates if non-explicitly set origins should be checked for
  /// supporting import suggestions.
  #[serde(default = "is_true")]
  pub auto_discover: bool,
  /// A map of origins which have had explicitly set if import suggestions are
  /// enabled.
  #[serde(default)]
  pub hosts: HashMap<String, bool>,
}
impl Default for ImportCompletionSettings {
  // Keep in sync with the serde field defaults declared on the struct.
  fn default() -> Self {
    Self {
      auto_discover: true,
      hosts: HashMap::default(),
    }
  }
}
/// Deno language server specific settings that can be applied uniquely to a
/// specifier.
#[derive(Debug, Default, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SpecifierSettings {
  /// A flag that indicates if Deno is enabled for this specifier or not.
  // NOTE(review): unlike the other fields, `enable` carries no
  // #[serde(default)], so deserialization requires it — confirm intended.
  pub enable: bool,
  /// A list of paths, using the workspace folder as a base that should be Deno
  /// enabled.
  #[serde(default)]
  pub enable_paths: Vec<String>,
  /// Code lens specific settings for the resource.
  #[serde(default)]
  pub code_lens: CodeLensSpecifierSettings,
}
#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct TestingSettings {
  /// A vector of arguments which should be used when running the tests for
  /// a workspace.
  #[serde(default)]
  pub args: Vec<String>,
  /// Enable or disable the testing API if the client is capable of supporting
  /// the testing API.
  #[serde(default = "is_true")]
  pub enable: bool,
}
impl Default for TestingSettings {
  // Default test run arguments; `enable` matches the serde default above.
  fn default() -> Self {
    Self {
      args: vec!["--allow-all".to_string(), "--no-check".to_string()],
      enable: true,
    }
  }
}
/// Deno language server specific settings that are applied to a workspace.
#[derive(Debug, Default, Clone, Deserialize, Serialize, PartialEq, Eq)]
#[serde(rename_all = "camelCase")]
pub struct WorkspaceSettings {
  /// A flag that indicates if Deno is enabled for the workspace.
  #[serde(default)]
  pub enable: bool,
  /// A list of paths, using the root_uri as a base that should be Deno enabled.
  #[serde(default)]
  pub enable_paths: Vec<String>,
  /// An option that points to a path string of the path to utilise as the
  /// cache/DENO_DIR for the language server.
  pub cache: Option<String>,
  /// Override the default stores used to validate certificates. This overrides
  /// the environment variable `DENO_TLS_CA_STORE` if present.
  pub certificate_stores: Option<Vec<String>>,
  /// An option that points to a path string of the config file to apply to
  /// code within the workspace.
  pub config: Option<String>,
  /// An option that points to a path string of the import map to apply to the
  /// code within the workspace.
  pub import_map: Option<String>,
  /// Code lens specific settings for the workspace.
  #[serde(default)]
  pub code_lens: CodeLensSettings,
  /// A flag that indicates if internal debug logging should be made available.
  #[serde(default)]
  pub internal_debug: bool,
  /// A flag that indicates if linting is enabled for the workspace.
  #[serde(default)]
  pub lint: bool,
  /// Completion (suggest) settings for the workspace.
  // NOTE(review): the doc comment previously attached here described the
  // unstable-API flag; it has been moved onto `unstable` below where it
  // belongs.
  #[serde(default)]
  pub suggest: CompletionSettings,
  /// Testing settings for the workspace.
  #[serde(default)]
  pub testing: TestingSettings,
  /// An option which sets the cert file to use when attempting to fetch remote
  /// resources. This overrides `DENO_CERT` if present.
  pub tls_certificate: Option<String>,
  /// An option, if set, will unsafely ignore certificate errors when fetching
  /// remote resources.
  #[serde(default)]
  pub unsafely_ignore_certificate_errors: Option<Vec<String>>,
  /// A flag that indicates if Deno should validate code against the unstable
  /// APIs for the workspace.
  #[serde(default)]
  pub unstable: bool,
}
impl WorkspaceSettings {
  /// Determine if any code lenses are enabled at all. This allows short
  /// circuiting when there are no code lenses enabled.
  pub fn enabled_code_lens(&self) -> bool {
    // NOTE(review): `code_lens.test` is not considered here — confirm that
    // test code lenses are intentionally handled through a separate path.
    self.code_lens.implementations || self.code_lens.references
  }
}
/// A point-in-time, immutable copy of the configuration, shared across
/// threads via `Arc` (see `Config::snapshot`).
#[derive(Debug, Clone, Default)]
pub struct ConfigSnapshot {
  pub client_capabilities: ClientCapabilities,
  /// Map of workspace specifier (as a string) to resolved enabled path URLs.
  pub enabled_paths: HashMap<String, Vec<String>>,
  pub settings: Settings,
}
impl ConfigSnapshot {
/// Determine if the provided specifier is enabled or not.
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool {
if !self.enabled_paths.is_empty() {
let specifier_str = specifier.to_string();
for (workspace, enabled_paths) in self.enabled_paths.iter() {
if specifier_str.starts_with(workspace) {
return enabled_paths
.iter()
.any(|path| specifier_str.starts_with(path));
}
}
}
if let Some((_, SpecifierSettings { enable, .. })) =
self.settings.specifiers.get(specifier)
{
*enable
} else {
self.settings.workspace.enable
}
}
}
/// Pairs a specifier with the client URI it was registered under.
#[derive(Debug, Clone)]
pub struct SpecifierWithClientUri {
  pub specifier: ModuleSpecifier,
  pub client_uri: ModuleSpecifier,
}
/// All settings: per-specifier overrides plus the workspace-wide settings.
#[derive(Debug, Default, Clone)]
pub struct Settings {
  /// Per-specifier settings keyed by specifier; the value also carries the
  /// client URI the settings were requested with.
  pub specifiers:
    BTreeMap<ModuleSpecifier, (ModuleSpecifier, SpecifierSettings)>,
  pub workspace: WorkspaceSettings,
}
/// Mutable configuration state for the language server.
#[derive(Debug)]
pub struct Config {
  pub client_capabilities: ClientCapabilities,
  /// Map of workspace specifier (as a string) to resolved enabled path URLs;
  /// maintained by `update_enabled_paths`.
  enabled_paths: HashMap<String, Vec<String>>,
  pub root_uri: Option<ModuleSpecifier>,
  settings: Settings,
  pub workspace_folders: Option<Vec<(ModuleSpecifier, lsp::WorkspaceFolder)>>,
}
impl Config {
pub fn new() -> Self {
Self {
client_capabilities: ClientCapabilities::default(),
enabled_paths: Default::default(),
/// Root provided by the initialization parameters.
root_uri: None,
settings: Default::default(),
workspace_folders: None,
}
}
  /// Returns a clone of the current workspace settings.
  pub fn get_workspace_settings(&self) -> WorkspaceSettings {
    self.settings.workspace.clone()
  }
  /// Set the workspace settings directly, which occurs during initialization
  /// and when the client does not support workspace configuration requests
  ///
  /// # Errors
  ///
  /// Returns an error if `value` cannot be deserialized into
  /// `WorkspaceSettings`.
  pub fn set_workspace_settings(
    &mut self,
    value: Value,
  ) -> Result<(), AnyError> {
    let workspace_settings = serde_json::from_value(value)?;
    self.settings.workspace = workspace_settings;
    Ok(())
  }
  /// Produce an immutable, shareable snapshot of the current configuration.
  pub fn snapshot(&self) -> Arc<ConfigSnapshot> {
    Arc::new(ConfigSnapshot {
      client_capabilities: self.client_capabilities.clone(),
      enabled_paths: self.enabled_paths.clone(),
      settings: self.settings.clone(),
    })
  }
  /// Returns `true` if per-specifier settings have been registered for
  /// `specifier`.
  pub fn has_specifier_settings(&self, specifier: &ModuleSpecifier) -> bool {
    self.settings.specifiers.contains_key(specifier)
  }
pub fn specifier_enabled(&self, specifier: &ModuleSpecifier) -> bool {
if !self.enabled_paths.is_empty() {
let specifier_str = specifier.to_string();
for (workspace, enabled_paths) in self.enabled_paths.iter() {
if specifier_str.starts_with(workspace) {
return enabled_paths
.iter()
.any(|path| specifier_str.starts_with(path));
}
}
}
self
.settings
.specifiers
.get(specifier)
.map(|(_, s)| s.enable)
.unwrap_or_else(|| self.settings.workspace.enable)
}
pub fn specifier_code_lens_test(&self, specifier: &ModuleSpecifier) -> bool {
let value = self
.settings
.specifiers
.get(specifier)
.map(|(_, s)| s.code_lens.test)
.unwrap_or_else(|| self.settings.workspace.code_lens.test);
value
}
pub fn update_capabilities(
&mut self,
capabilities: &lsp::ClientCapabilities,
) {
if let Some(experimental) = &capabilities.experimental {
self.client_capabilities.status_notification = experimental
.get("statusNotification")
.and_then(|it| it.as_bool())
== Some(true);
self.client_capabilities.testing_api =
experimental.get("testingApi").and_then(|it| it.as_bool())
== Some(true);
}
if let Some(workspace) = &capabilities.workspace {
self.client_capabilities.workspace_configuration =
workspace.configuration.unwrap_or(false);
self.client_capabilities.workspace_did_change_watched_files = workspace
.did_change_watched_files
.and_then(|it| it.dynamic_registration)
.unwrap_or(false);
}
if let Some(text_document) = &capabilities.text_document {
self.client_capabilities.line_folding_only = text_document
.folding_range
.as_ref()
.and_then(|it| it.line_folding_only)
.unwrap_or(false);
self.client_capabilities.code_action_disabled_support = text_document
.code_action
.as_ref()
.and_then(|it| it.disabled_support)
.unwrap_or(false);
}
}
  /// Given the configured workspaces or root URI and their settings,
  /// update and resolve any paths that should be enabled
  ///
  /// Returns `true` when the set of enabled paths changed. Workspace
  /// folders, when present, take precedence over `root_uri`; each folder's
  /// configuration is requested from the client sequentially.
  pub async fn update_enabled_paths(&mut self, client: Client) -> bool {
    if let Some(workspace_folders) = self.workspace_folders.clone() {
      let mut touched = false;
      for (workspace, folder) in workspace_folders {
        // Folders whose configuration request fails are skipped without
        // marking anything as touched.
        if let Ok(settings) = client.specifier_configuration(&folder.uri).await
        {
          if self.update_enabled_paths_entry(&workspace, settings.enable_paths)
          {
            touched = true;
          }
        }
      }
      touched
    } else if let Some(root_uri) = self.root_uri.clone() {
      // No workspace folders: fall back to the root URI with the
      // workspace-level `enable_paths` already held in settings.
      self.update_enabled_paths_entry(
        &root_uri,
        self.settings.workspace.enable_paths.clone(),
      )
    } else {
      false
    }
  }
/// Update a specific entry in the enabled paths for a given workspace.
fn update_enabled_paths_entry(
&mut self,
workspace: &ModuleSpecifier,
enabled_paths: Vec<String>,
) -> bool {
let workspace = fs_util::ensure_directory_specifier(workspace.clone());
let key = workspace.to_string();
let mut touched = false;
if !enabled_paths.is_empty() {
if let Ok(workspace_path) = fs_util::specifier_to_file_path(&workspace) {
let mut paths = Vec::new();
for path in &enabled_paths {
let fs_path = workspace_path.join(path);
match ModuleSpecifier::from_file_path(fs_path) {
Ok(path_uri) => {
paths.push(path_uri.to_string());
}
Err(_) => {
lsp_log!("Unable to resolve a file path for `deno.enablePath` from \"{}\" for workspace \"{}\".", path, workspace);
}
}
}
if !paths.is_empty() {
touched = true;
self.enabled_paths.insert(key, paths);
}
}
} else {
touched = true;
self.enabled_paths.remove(&key);
}
touched
}
pub fn get_specifiers_with_client_uris(&self) -> Vec<SpecifierWithClientUri> {
self
.settings
.specifiers
.iter()
.map(|(s, (u, _))| SpecifierWithClientUri {
specifier: s.clone(),
client_uri: u.clone(),
})
.collect()
}
  /// Register (or replace) per-specifier settings, recording alongside them
  /// the client URI the settings were obtained for.
  pub fn set_specifier_settings(
    &mut self,
    specifier: ModuleSpecifier,
    client_uri: ModuleSpecifier,
    settings: SpecifierSettings,
  ) {
    self
      .settings
      .specifiers
      .insert(specifier, (client_uri, settings));
  }
}
#[cfg(test)]
mod tests {
use super::*;
use deno_core::resolve_url;
use deno_core::serde_json::json;
#[test]
fn test_config_specifier_enabled() {
let mut config = Config::new();
let specifier = resolve_url("file:///a.ts").unwrap();
assert!(!config.specifier_enabled(&specifier));
config
.set_workspace_settings(json!({
"enable": true
}))
.expect("could not update");
assert!(config.specifier_enabled(&specifier));
}
#[test]
fn test_config_snapshot_specifier_enabled() {
let mut config = Config::new();
let specifier = resolve_url("file:///a.ts").unwrap();
assert!(!config.specifier_enabled(&specifier));
config
.set_workspace_settings(json!({
"enable": true
}))
.expect("could not update");
let config_snapshot = config.snapshot();
assert!(config_snapshot.specifier_enabled(&specifier));
}
#[test]
fn test_config_specifier_enabled_path() {
let mut config = Config::new();
let specifier_a = resolve_url("file:///project/worker/a.ts").unwrap();
let specifier_b = resolve_url("file:///project/other/b.ts").unwrap();
assert!(!config.specifier_enabled(&specifier_a));
assert!(!config.specifier_enabled(&specifier_b));
let mut enabled_paths = HashMap::new();
enabled_paths.insert(
"file:///project/".to_string(),
vec!["file:///project/worker/".to_string()],
);
config.enabled_paths = enabled_paths;
assert!(config.specifier_enabled(&specifier_a));
assert!(!config.specifier_enabled(&specifier_b));
let config_snapshot = config.snapshot();
assert!(config_snapshot.specifier_enabled(&specifier_a));
assert!(!config_snapshot.specifier_enabled(&specifier_b));
}
#[test]
fn test_set_workspace_settings_defaults() {
let mut config = Config::new();
config
.set_workspace_settings(json!({}))
.expect("could not update");
assert_eq!(
config.get_workspace_settings(),
WorkspaceSettings {
enable: false,
enable_paths: Vec::new(),
cache: None,
certificate_stores: None,
config: None,
import_map: None,
code_lens: CodeLensSettings {
implementations: false,
references: false,
references_all_functions: false,
test: true,
},
internal_debug: false,
lint: false,
suggest: CompletionSettings {
complete_function_calls: false,
names: true,
paths: true,
auto_imports: true,
imports: ImportCompletionSettings {
auto_discover: true,
hosts: HashMap::new(),
}
},
testing: TestingSettings {
args: vec!["--allow-all".to_string(), "--no-check".to_string()],
enable: true
},
tls_certificate: None,
unsafely_ignore_certificate_errors: None,
unstable: false,
}
);
}
}
| 30.105719 | 129 | 0.667204 |
485406be6adab8f19f99e5108e3c2454eb0795a0
| 15,635 |
use crate::blob::generate_blob_uri;
use crate::blob::responses::PutBlockResponse;
use azure_sdk_core::errors::{check_status_extract_headers_and_body, AzureError};
use azure_sdk_core::lease::LeaseId;
use azure_sdk_core::modify_conditions::IfMatchCondition;
use azure_sdk_core::{
AppendPositionOption, AppendPositionSupport, BlobNameRequired, BlobNameSupport, BodyRequired,
BodySupport, ClientRequestIdOption, ClientRequestIdSupport, ContainerNameRequired,
ContainerNameSupport, ContentMD5Option, ContentMD5Support, IfMatchConditionOption,
IfMatchConditionSupport, LeaseIdOption, LeaseIdSupport, No, TimeoutOption, TimeoutSupport,
ToAssign, Yes,
};
use azure_sdk_storage_core::client::Client;
use azure_sdk_storage_core::ClientRequired;
use hyper::{Method, StatusCode};
use std::marker::PhantomData;
/// Type-state builder for the blob "append block" operation
/// (`comp=appendblock`).
///
/// The three type parameters track, at compile time, whether the container
/// name, blob name and body have been supplied; `finalize` is only available
/// once all three are `Yes`.
#[derive(Debug, Clone)]
pub struct PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    client: &'a Client,
    p_container_name: PhantomData<ContainerNameSet>,
    p_blob_name: PhantomData<BlobNameSet>,
    p_body: PhantomData<BodySet>,
    // Required values, tracked by the type-state parameters above.
    container_name: Option<&'a str>,
    blob_name: Option<&'a str>,
    body: Option<&'a [u8]>,
    // Optional request parameters.
    timeout: Option<u64>,
    content_md5: Option<&'a [u8]>,
    lease_id: Option<&'a LeaseId>,
    if_match_condition: Option<IfMatchCondition<'a>>,
    client_request_id: Option<&'a str>,
    append_position: Option<u32>,
}
impl<'a> PutAppendBlockBuilder<'a, No, No, No> {
    /// Creates an empty builder; every type-state parameter starts at `No`
    /// and all stored values start unset.
    #[inline]
    pub(crate) fn new(client: &'a Client) -> PutAppendBlockBuilder<'a, No, No, No> {
        PutAppendBlockBuilder {
            client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_body: PhantomData {},
            container_name: None,
            blob_name: None,
            body: None,
            timeout: None,
            content_md5: None,
            lease_id: None,
            if_match_condition: None,
            client_request_id: None,
            append_position: None,
        }
    }
}
// ---------------------------------------------------------------------------
// Accessor traits. The `*Required` impls are only provided for builders whose
// corresponding type-state parameter is `Yes` — which can only be reached via
// the matching `with_*` setter — so the `unwrap()` calls cannot panic. The
// `*Option` impls are available in every state and return the optional value.
// ---------------------------------------------------------------------------
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ClientRequired<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn client(&self) -> &'a Client {
        self.client
    }
}
impl<'a, BlobNameSet, BodySet> ContainerNameRequired<'a>
    for PutAppendBlockBuilder<'a, Yes, BlobNameSet, BodySet>
where
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn container_name(&self) -> &'a str {
        self.container_name.unwrap()
    }
}
impl<'a, ContainerNameSet, BodySet> BlobNameRequired<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, Yes, BodySet>
where
    ContainerNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn blob_name(&self) -> &'a str {
        self.blob_name.unwrap()
    }
}
impl<'a, ContainerNameSet, BlobNameSet> BodyRequired<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, Yes>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
{
    #[inline]
    fn body(&self) -> &'a [u8] {
        self.body.unwrap()
    }
}
// Optional values below: always accessible, independent of type-state.
impl<'a, ContainerNameSet, BlobNameSet, BodySet> TimeoutOption
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn timeout(&self) -> Option<u64> {
        self.timeout
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ContentMD5Option<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn content_md5(&self) -> Option<&'a [u8]> {
        self.content_md5
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> LeaseIdOption<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn lease_id(&self) -> Option<&'a LeaseId> {
        self.lease_id
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> IfMatchConditionOption<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn if_match_condition(&self) -> Option<IfMatchCondition<'a>> {
        self.if_match_condition
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ClientRequestIdOption<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn client_request_id(&self) -> Option<&'a str> {
        self.client_request_id
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> AppendPositionOption
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    #[inline]
    fn append_position(&self) -> Option<u32> {
        self.append_position
    }
}
// ---------------------------------------------------------------------------
// Setters for the three required values. Each consumes the builder and
// returns one whose corresponding type-state parameter is `Yes`. Because the
// output type differs from `Self`, functional struct update syntax cannot be
// used and every field must be rebuilt explicitly.
// ---------------------------------------------------------------------------
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ContainerNameSupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, Yes, BlobNameSet, BodySet>;
    #[inline]
    fn with_container_name(self, container_name: &'a str) -> Self::O {
        PutAppendBlockBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_body: PhantomData {},
            container_name: Some(container_name),
            blob_name: self.blob_name,
            body: self.body,
            timeout: self.timeout,
            content_md5: self.content_md5,
            lease_id: self.lease_id,
            if_match_condition: self.if_match_condition,
            client_request_id: self.client_request_id,
            append_position: self.append_position,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> BlobNameSupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, Yes, BodySet>;
    #[inline]
    fn with_blob_name(self, blob_name: &'a str) -> Self::O {
        PutAppendBlockBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_body: PhantomData {},
            container_name: self.container_name,
            blob_name: Some(blob_name),
            body: self.body,
            timeout: self.timeout,
            content_md5: self.content_md5,
            lease_id: self.lease_id,
            if_match_condition: self.if_match_condition,
            client_request_id: self.client_request_id,
            append_position: self.append_position,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> BodySupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, Yes>;
    #[inline]
    fn with_body(self, body: &'a [u8]) -> Self::O {
        PutAppendBlockBuilder {
            client: self.client,
            p_container_name: PhantomData {},
            p_blob_name: PhantomData {},
            p_body: PhantomData {},
            container_name: self.container_name,
            blob_name: self.blob_name,
            body: Some(body),
            timeout: self.timeout,
            content_md5: self.content_md5,
            lease_id: self.lease_id,
            if_match_condition: self.if_match_condition,
            client_request_id: self.client_request_id,
            append_position: self.append_position,
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> TimeoutSupport
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the request timeout value and returns the updated builder.
    #[inline]
    fn with_timeout(self, timeout: u64) -> Self::O {
        // `Self::O` carries the same type-state as `Self`, so functional
        // struct update syntax replaces the hand-written copy of every field
        // (which is easy to get wrong when fields are added or renamed).
        PutAppendBlockBuilder {
            timeout: Some(timeout),
            ..self
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ContentMD5Support<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the Content-MD5 value sent with the request.
    #[inline]
    fn with_content_md5(self, content_md5: &'a [u8]) -> Self::O {
        // Same type-state in and out: use struct update syntax instead of
        // manually copying all thirteen fields.
        PutAppendBlockBuilder {
            content_md5: Some(content_md5),
            ..self
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> LeaseIdSupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the lease id sent with the request.
    #[inline]
    fn with_lease_id(self, lease_id: &'a LeaseId) -> Self::O {
        // Same type-state in and out: use struct update syntax instead of
        // manually copying all thirteen fields.
        PutAppendBlockBuilder {
            lease_id: Some(lease_id),
            ..self
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> IfMatchConditionSupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the If-Match condition sent with the request.
    #[inline]
    fn with_if_match_condition(self, if_match_condition: IfMatchCondition<'a>) -> Self::O {
        // Same type-state in and out: use struct update syntax instead of
        // manually copying all thirteen fields.
        PutAppendBlockBuilder {
            if_match_condition: Some(if_match_condition),
            ..self
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> ClientRequestIdSupport<'a>
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the client request id sent with the request.
    #[inline]
    fn with_client_request_id(self, client_request_id: &'a str) -> Self::O {
        // Same type-state in and out: use struct update syntax instead of
        // manually copying all thirteen fields.
        PutAppendBlockBuilder {
            client_request_id: Some(client_request_id),
            ..self
        }
    }
}
impl<'a, ContainerNameSet, BlobNameSet, BodySet> AppendPositionSupport
    for PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
    type O = PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>;
    /// Sets the append position condition sent with the request.
    #[inline]
    fn with_append_position(self, append_position: u32) -> Self::O {
        // Same type-state in and out: use struct update syntax instead of
        // manually copying all thirteen fields.
        PutAppendBlockBuilder {
            append_position: Some(append_position),
            ..self
        }
    }
}
// methods callable regardless
// (Intentionally empty: a placeholder impl block for methods that should be
// available in any type-state; none are currently defined.)
impl<'a, ContainerNameSet, BlobNameSet, BodySet>
    PutAppendBlockBuilder<'a, ContainerNameSet, BlobNameSet, BodySet>
where
    ContainerNameSet: ToAssign,
    BlobNameSet: ToAssign,
    BodySet: ToAssign,
{
}
impl<'a> PutAppendBlockBuilder<'a, Yes, Yes, Yes> {
    /// Performs the "append block" request (`comp=appendblock`).
    ///
    /// Only available once the container name, blob name and body have all
    /// been supplied. Optional values (content MD5, lease id, if-match
    /// condition, client request id and append position) are added as
    /// headers when set; the timeout is appended as a URI parameter.
    ///
    /// # Errors
    ///
    /// Returns an `AzureError` if the request fails or the service responds
    /// with a status other than `201 Created`.
    #[inline]
    pub async fn finalize(self) -> Result<PutBlockResponse, AzureError> {
        let mut uri = generate_blob_uri(&self, Some("comp=appendblock"));
        if let Some(timeout) = TimeoutOption::to_uri_parameter(&self) {
            uri = format!("{}&{}", uri, timeout);
        }
        trace!("uri == {:?}", uri);
        let future_response = self.client().perform_request(
            &uri,
            &Method::PUT,
            |mut request| {
                request = ContentMD5Option::add_header(&self, request);
                request = LeaseIdOption::add_header(&self, request);
                request = IfMatchConditionOption::add_header(&self, request);
                request = ClientRequestIdOption::add_header(&self, request);
                request = AppendPositionOption::add_header(&self, request);
                request
            },
            Some(self.body()),
        )?;
        let (headers, _body) =
            check_status_extract_headers_and_body(future_response, StatusCode::CREATED).await?;
        PutBlockResponse::from_headers(&headers)
    }
}
| 31.973415 | 97 | 0.650208 |
0e8c533415edfa462624c7f20f9cd332c7049f08
| 520 |
// clippy1.rs
// The Clippy tool is a collection of lints to analyze your code
// so you can catch common mistakes and improve your Rust code.
//
// For these exercises the code will fail to compile when there are clippy warnings
// check clippy's suggestions from the output to solve the exercise.
// Execute `rustlings hint clippy1` for hints :)
/// Tolerance for approximate floating-point comparison.
const ERROR_MARGIN: f64 = 0.00001;

fn main() {
    let x = 1.2331f64;
    let y = 1.2332f64;
    // Clippy's `float_cmp` lint: compare floats through an epsilon rather
    // than with `==`.
    let difference = (y - x).abs();
    if difference > ERROR_MARGIN {
        println!("Success!");
    }
}
| 27.368421 | 83 | 0.678846 |
1eff64f9e391933e4bcfa849cf6e541d8d582af3
| 4,713 |
// svd2rust-generated accessors for the RXEN register. `R` wraps a captured
// read of the register; `W` accumulates bits to be written back. Both deref
// to the generic reader/writer so the shared helper methods are available.
#[doc = "Register `RXEN` reader"]
pub struct R(crate::R<RXEN_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<RXEN_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl From<crate::R<RXEN_SPEC>> for R {
    #[inline(always)]
    fn from(reader: crate::R<RXEN_SPEC>) -> Self {
        R(reader)
    }
}
#[doc = "Register `RXEN` writer"]
pub struct W(crate::W<RXEN_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<RXEN_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl From<crate::W<RXEN_SPEC>> for W {
    #[inline(always)]
    fn from(writer: crate::W<RXEN_SPEC>) -> Self {
        W(writer)
    }
}
#[doc = "Reception (RX) enable.\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum RXEN_A {
    #[doc = "0: Reception disabled and now data will be written to the RXD.PTR address."]
    DISABLED = 0,
    #[doc = "1: Reception enabled."]
    ENABLED = 1,
}
// Allows a variant to be used directly wherever a raw bit value is expected.
impl From<RXEN_A> for bool {
    #[inline(always)]
    fn from(variant: RXEN_A) -> Self {
        variant as u8 != 0
    }
}
// Field reader: interprets the captured bit as an `RXEN_A` variant.
#[doc = "Field `RXEN` reader - Reception (RX) enable."]
pub struct RXEN_R(crate::FieldReader<bool, RXEN_A>);
impl RXEN_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        RXEN_R(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> RXEN_A {
        match self.bits {
            false => RXEN_A::DISABLED,
            true => RXEN_A::ENABLED,
        }
    }
    #[doc = "Checks if the value of the field is `DISABLED`"]
    #[inline(always)]
    pub fn is_disabled(&self) -> bool {
        **self == RXEN_A::DISABLED
    }
    #[doc = "Checks if the value of the field is `ENABLED`"]
    #[inline(always)]
    pub fn is_enabled(&self) -> bool {
        **self == RXEN_A::ENABLED
    }
}
impl core::ops::Deref for RXEN_R {
    type Target = crate::FieldReader<bool, RXEN_A>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Field writer: proxies writes of the RXEN field back onto the register
// writer `W` it borrows.
#[doc = "Field `RXEN` writer - Reception (RX) enable."]
pub struct RXEN_W<'a> {
    w: &'a mut W,
}
impl<'a> RXEN_W<'a> {
    #[doc = r"Writes `variant` to the field"]
    #[inline(always)]
    pub fn variant(self, variant: RXEN_A) -> &'a mut W {
        self.bit(variant.into())
    }
    #[doc = "Reception disabled and now data will be written to the RXD.PTR address."]
    #[inline(always)]
    pub fn disabled(self) -> &'a mut W {
        self.variant(RXEN_A::DISABLED)
    }
    #[doc = "Reception enabled."]
    #[inline(always)]
    pub fn enabled(self) -> &'a mut W {
        self.variant(RXEN_A::ENABLED)
    }
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 0, then OR in the new value (field mask 0x01).
        self.w.bits = (self.w.bits & !0x01) | (value as u32 & 0x01);
        self.w
    }
}
// Field accessors: the RXEN field occupies bit 0 of the register.
impl R {
    #[doc = "Bit 0 - Reception (RX) enable."]
    #[inline(always)]
    pub fn rxen(&self) -> RXEN_R {
        RXEN_R::new((self.bits & 0x01) != 0)
    }
}
impl W {
    #[doc = "Bit 0 - Reception (RX) enable."]
    #[inline(always)]
    pub fn rxen(&mut self) -> RXEN_W {
        RXEN_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
#[doc = "Reception (RX) enable.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [rxen](index.html) module"]
pub struct RXEN_SPEC;
// Marker type tying the 32-bit register to the generic read/write API.
impl crate::RegisterSpec for RXEN_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [rxen::R](R) reader structure"]
impl crate::Readable for RXEN_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [rxen::W](W) writer structure"]
impl crate::Writable for RXEN_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets RXEN to value 0"]
impl crate::Resettable for RXEN_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 29.273292 | 407 | 0.577339 |
394a0166bddcf2d963f1fc29cabb2a9657502698
| 8,598 |
use super::*;
use std::sync::Arc;
use proptest::strategy::{BoxedStrategy, Just, Strategy};
use liblumen_alloc::erts::process::Process;
use liblumen_alloc::erts::term::prelude::*;
use crate::otp::erlang::charlist_to_string::charlist_to_string;
use crate::otp::erlang::float_to_list_1;
#[test]
fn with_20_digits_is_the_same_as_float_to_list_1() {
    with_process_arc(|arc_process| {
        // `{:scientific, 20}` should match float_to_list/1's output exactly.
        let twenty = arc_process.integer(20).unwrap();
        let option = arc_process.tuple_from_slice(&[tag(), twenty]).unwrap();
        let options = arc_process.list_from_slice(&[option]).unwrap();

        let zero = arc_process.float(0.0).unwrap();
        assert_eq!(
            native(&arc_process, zero, options).unwrap(),
            float_to_list_1::native(&arc_process, zero).unwrap()
        );

        let one_tenth = arc_process.float(0.1).unwrap();
        assert_eq!(
            native(&arc_process, one_tenth, options).unwrap(),
            float_to_list_1::native(&arc_process, one_tenth).unwrap()
        );
    });
}
#[test]
fn returns_list_with_coefficient_e_exponent() {
    with_process_arc(|arc_process| {
        let float = arc_process.float(1234567890.0987654321).unwrap();

        // Pairs of (`{:scientific, digits}` option value, expected charlist).
        // Beyond ~17 digits the expectations reflect what the formatter emits
        // for the nearest representable f64, not the decimal literal above.
        let expectations: &[(u8, &str)] = &[
            (0, "1e+09"),
            (1, "1.2e+09"),
            (2, "1.23e+09"),
            (3, "1.235e+09"),
            (4, "1.2346e+09"),
            (5, "1.23457e+09"),
            (6, "1.234568e+09"),
            (7, "1.2345679e+09"),
            (8, "1.23456789e+09"),
            (9, "1.234567890e+09"),
            (10, "1.2345678901e+09"),
            (11, "1.23456789010e+09"),
            (12, "1.234567890099e+09"),
            (13, "1.2345678900988e+09"),
            (14, "1.23456789009877e+09"),
            (15, "1.234567890098765e+09"),
            (16, "1.2345678900987654e+09"),
            (17, "1.23456789009876537e+09"),
            (18, "1.234567890098765373e+09"),
            (19, "1.2345678900987653732e+09"),
            (20, "1.23456789009876537323e+09"),
            (21, "1.234567890098765373230e+09"),
        ];

        // Table-driven form replaces 22 copy-pasted assert_eq! blocks.
        for &(digits, expected) in expectations {
            assert_eq!(
                native(&arc_process, float, options(&arc_process, digits)),
                Ok(arc_process.charlist_from_str(expected).unwrap()),
                "scientific digits = {}",
                digits
            );
        }
    });
}
#[test]
fn always_includes_e() {
    // Scientific notation output always contains an 'e' separator,
    // regardless of the requested digit count.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                strategy::term::float(arc_process.clone()),
                digits(arc_process.clone()).prop_map(move |digits| {
                    let option = arc_process.tuple_from_slice(&[tag(), digits]).unwrap();
                    arc_process.list_from_slice(&[option]).unwrap()
                }),
            )
        },
        |(arc_process, float, options)| {
            let result = native(&arc_process, float, options);
            prop_assert!(result.is_ok());

            let text: String = charlist_to_string(result.unwrap()).unwrap();
            prop_assert!(text.contains('e'));

            Ok(())
        },
    );
}
#[test]
fn always_includes_sign_of_exponent() {
    // The exponent following 'e' always carries an explicit '+' or '-' sign.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                strategy::term::float(arc_process.clone()),
                digits(arc_process.clone()).prop_map(move |digits| {
                    let option = arc_process.tuple_from_slice(&[tag(), digits]).unwrap();
                    arc_process.list_from_slice(&[option]).unwrap()
                }),
            )
        },
        |(arc_process, float, options)| {
            let result = native(&arc_process, float, options);
            prop_assert!(result.is_ok());

            let text: String = charlist_to_string(result.unwrap()).unwrap();
            let parts: Vec<&str> = text.splitn(2, 'e').collect();
            prop_assert_eq!(parts.len(), 2);

            let sign = parts[1].chars().next().unwrap();
            prop_assert!(sign == '+' || sign == '-');

            Ok(())
        },
    );
}
#[test]
fn exponent_is_at_least_2_digits() {
    // After the sign, the exponent is padded to at least two digits.
    run!(
        |arc_process| {
            (
                Just(arc_process.clone()),
                strategy::term::float(arc_process.clone()),
                digits(arc_process.clone()).prop_map(move |digits| {
                    let option = arc_process.tuple_from_slice(&[tag(), digits]).unwrap();
                    arc_process.list_from_slice(&[option]).unwrap()
                }),
            )
        },
        |(arc_process, float, options)| {
            let result = native(&arc_process, float, options);
            prop_assert!(result.is_ok());

            let text: String = charlist_to_string(result.unwrap()).unwrap();
            let parts: Vec<&str> = text.splitn(2, 'e').collect();
            prop_assert_eq!(parts.len(), 2);

            // Skip the sign character, then count what remains.
            let digit_count = parts[1].chars().skip(1).count();
            prop_assert!(digit_count >= 2);

            Ok(())
        },
    );
}
/// Strategy producing a digits term in the range supported by the option
/// (0 through 249 inclusive).
fn digits(arc_process: Arc<Process>) -> BoxedStrategy<Term> {
    (Just(arc_process), 0..=249)
        .prop_map(|(arc_process, digits)| arc_process.integer(digits).unwrap())
        .boxed()
}
/// Builds the option list `[{:scientific, digits}]` on the process heap.
fn options(process: &Process, digits: u8) -> Term {
    let digits_term = process.integer(digits).unwrap();
    let option = process.tuple_from_slice(&[tag(), digits_term]).unwrap();
    process.list_from_slice(&[option]).unwrap()
}
// The option tuple's tag: the atom `:scientific`.
fn tag() -> Term {
    Atom::str_to_term("scientific")
}
| 32.692015 | 100 | 0.535241 |
79605381a21415f90984e74cb0172b766790caa1
| 503 |
use crate::List::*;
/// A singly linked list of `i32` values: a chain of `Cons` cells ending in `Nil`.
#[derive(Debug)]
pub enum List {
    /// One element followed by the rest of the list.
    Cons(i32, Box<List>),
    /// The empty list / end-of-list marker.
    Nil,
}

impl List {
    /// Creates an empty list.
    pub fn new() -> List {
        List::Nil
    }

    /// Returns a new list with `value` pushed on the front, consuming `self`.
    pub fn prepend(self, value: i32) -> List {
        List::Cons(value, Box::new(self))
    }

    /// Returns the number of elements, consuming the list.
    ///
    /// Fixes the original body, which referenced an undefined variable `i`
    /// and dereferenced the by-value receiver (`match *self`), neither of
    /// which compiles; matching on `self` directly moves the tail out of its
    /// `Box` instead.
    pub fn len(self) -> i32 {
        match self {
            // Each `Cons` cell contributes one element plus its tail's count.
            List::Cons(_, tail) => 1 + tail.len(),
            List::Nil => 0,
        }
    }

    /// Renders the list as `"v1, v2, ..., Nil"`, consuming the list.
    pub fn stringfy(self) -> String {
        match self {
            List::Cons(value, tail) => format!("{}, {}", value, tail.stringfy()),
            List::Nil => String::from("Nil"),
        }
    }
}
| 17.344828 | 74 | 0.500994 |
d52ec10f7d63c87702c3d2aa02e32e736f1aec40
| 599 |
use crate::blockchain::proof_of_space::ProofOfSpace;
use crate::blockchain::sized_bytes::{Bytes32, Bytes96};
use crate::blockchain::vdf_info::VdfInfo;
use serde::{Deserialize, Serialize};
// An unfinished reward-chain block as exchanged/serialized by the node.
// Field meanings below are inferred from names — TODO confirm against the
// Chia consensus spec; field order is kept as-is since some serde formats
// (e.g. bincode) are order-sensitive.
#[derive(Serialize, Deserialize, Debug)]
pub struct RewardChainBlockUnfinished {
    // Presumably cumulative VDF iterations up to this block — verify.
    pub total_iters: u128,
    // Index of the signage point within its slot — assumed from the name.
    pub signage_point_index: u8,
    // Challenge hash for the proof-of-space signage point, per the name.
    pub pos_ss_cc_challenge_hash: Bytes32,
    pub proof_of_space: ProofOfSpace,
    // Optional VDF proofs/signatures for the challenge and reward chains;
    // `None` presumably when the corresponding VDF is absent — confirm.
    pub challenge_chain_sp_vdf: Option<VdfInfo>,
    pub challenge_chain_sp_signature: Bytes96,
    pub reward_chain_sp_vdf: Option<VdfInfo>,
    pub reward_chain_sp_signature: Bytes96,
}
| 35.235294 | 55 | 0.776294 |
1d5e35ae3691128e2c630d59fef6dab63f5d7b12
| 1,213 |
/// Counts how many indentation units prefix `text`.
///
/// One unit is either a single tab or `tab_size` consecutive spaces; tab and
/// space units may be mixed. Counting stops at the first character that does
/// not complete a full unit.
///
/// Fixes over the original: matching is done on raw bytes, so a multi-byte
/// (non-ASCII) leading character ends the indent instead of panicking on a
/// non-char-boundary slice, and `tab_size == 0` counts only leading tabs
/// instead of looping forever on a zero-width space unit.
pub fn get_indent_level(text: &str, tab_size: u32) -> u32 {
    let tab_size = tab_size as usize;
    let bytes = text.as_bytes();

    let mut indent_level = 0;
    let mut curr: usize = 0;
    loop {
        if bytes.get(curr) == Some(&b'\t') {
            indent_level += 1;
            curr += 1;
        } else if tab_size > 0
            && curr + tab_size <= bytes.len()
            && bytes[curr..curr + tab_size].iter().all(|&b| b == b' ')
        {
            indent_level += 1;
            curr += tab_size;
        } else {
            break;
        }
    }

    indent_level
}
/// Removes `indent_level` indentation units from the start of `text`.
///
/// One unit is a single tab or `tab_size` consecutive spaces. Blank
/// (whitespace-only) lines are returned unchanged. When `text` carries fewer
/// than `indent_level` units, `die!` is invoked with an explanatory message.
///
/// Fixes over the original: indentation is matched on raw bytes, so the
/// function takes the `die!` path instead of panicking when the text is
/// shorter than the requested indent (`text[offset..=offset]` out of bounds)
/// or starts with a multi-byte character (non-char-boundary slice).
pub fn trim_indent(text: &str, indent_level: u32, tab_size: u32) -> String {
    // Allow empty line.
    if text.trim().is_empty() {
        return text.to_owned();
    }

    let tab_size = tab_size as usize;
    let bytes = text.as_bytes();

    let mut offset: usize = 0;
    for _ in 0..indent_level {
        if bytes.get(offset) == Some(&b'\t') {
            offset += 1;
        } else if offset + tab_size <= bytes.len()
            && bytes[offset..offset + tab_size].iter().all(|&b| b == b' ')
        {
            offset += tab_size;
        } else {
            die!("\"{}\" isn't indented enough.", text);
        }
    }

    // `offset` only ever advanced past ASCII bytes, so it is a char boundary.
    text[offset..].to_owned()
}
| 24.26 | 76 | 0.480627 |
01d0cd060b94de9e624314416a3da0df11320301
| 8,633 |
use crate::color::{ColoredString, Colors, Elem};
use crate::flags::{DateFlag, Flags};
use chrono::{DateTime, Duration, Local};
use chrono_humanize::HumanTime;
use std::fs::Metadata;
// Newtype over a local-timezone timestamp; built from a file's modification
// time via the `From<&Metadata>` impl below.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Date(DateTime<Local>);
impl<'a> From<&'a Metadata> for Date {
    /// Wraps the file's modification time, converted to the local timezone.
    fn from(meta: &'a Metadata) -> Self {
        // `modified()` can fail on platforms/filesystems without mtime support;
        // that is treated as unrecoverable here.
        let system_time = meta.modified().expect("failed to retrieve modified date");
        Date(system_time.into())
    }
}
impl Date {
    /// Formats the date per `flags` and colors it by how recent it is.
    pub fn render(&self, colors: &Colors, flags: &Flags) -> ColoredString {
        let now = Local::now();
        // Bucket the timestamp's age: under an hour, under a day, or older.
        let age_elem = match () {
            _ if self.0 > now - Duration::hours(1) => Elem::HourOld,
            _ if self.0 > now - Duration::days(1) => Elem::DayOld,
            _ => Elem::Older,
        };
        colors.colorize(self.date_string(flags), &age_elem)
    }

    /// Renders the timestamp as a plain string according to the date flag.
    pub fn date_string(&self, flags: &Flags) -> String {
        match &flags.date {
            DateFlag::Date => self.0.format("%c").to_string(),
            DateFlag::Relative => HumanTime::from(self.0 - Local::now()).to_string(),
            DateFlag::ISO => {
                // 365.2425 * 24 * 60 * 60 = 31556952 seconds per year
                // 15778476 seconds are 6 months
                let six_months_ago = Local::now() - Duration::seconds(15_778_476);
                if self.0 > six_months_ago {
                    // Recent: short month-day hour:minute form.
                    self.0.format("%m-%d %R").to_string()
                } else {
                    // Older: full year-month-day form.
                    self.0.format("%F").to_string()
                }
            }
            DateFlag::Formatted(format) => self.0.format(format).to_string(),
        }
    }
}
#[cfg(test)]
mod test {
    use super::Date;
    use crate::color::{Colors, ThemeOption};
    use crate::flags::{DateFlag, Flags};
    use chrono::{DateTime, Duration, Local};
    use crossterm::style::{Color, Stylize};
    use std::io;
    use std::path::{Path, PathBuf};
    use std::process::{Command, ExitStatus};
    use std::{env, fs};

    #[cfg(unix)]
    fn cross_platform_touch(path: &Path, date: &DateTime<Local>) -> io::Result<ExitStatus> {
        Command::new("touch")
            .arg("-t")
            .arg(date.format("%Y%m%d%H%M.%S").to_string())
            .arg(path)
            .status()
    }

    #[cfg(windows)]
    fn cross_platform_touch(path: &Path, date: &DateTime<Local>) -> io::Result<ExitStatus> {
        use std::process::Stdio;

        let copy_success = Command::new("cmd")
            .arg("/C")
            .arg("copy")
            .arg("NUL")
            .arg(path)
            .stdout(Stdio::null()) // Windows doesn't have a quiet flag
            .status()?
            .success();
        assert!(copy_success, "failed to create empty file");

        Command::new("powershell")
            .arg("-Command")
            .arg(format!(
                r#"$(Get-Item {}).lastwritetime=$(Get-Date "{}")"#,
                path.display(),
                date.to_rfc3339()
            ))
            .status()
    }

    /// Creates (or truncates) a file in the temp dir whose modification time
    /// is `date`, returning its path. Panics if touching the file fails.
    fn temp_file_with_mtime(name: &str, date: &DateTime<Local>) -> PathBuf {
        let mut file_path = env::temp_dir();
        file_path.push(name);

        let success = cross_platform_touch(&file_path, date).unwrap().success();
        assert!(success, "failed to exec touch");

        file_path
    }

    #[test]
    fn test_an_hour_old_file_color() {
        let creation_date = Local::now() - Duration::seconds(4);
        let file_path = temp_file_with_mtime("test_an_hour_old_file_color.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let flags = Flags::default();

        // Files modified under an hour ago render with ANSI color 40.
        assert_eq!(
            creation_date
                .format("%c")
                .to_string()
                .with(Color::AnsiValue(40)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_a_day_old_file_color() {
        let creation_date = Local::now() - Duration::hours(4);
        let file_path = temp_file_with_mtime("test_a_day_old_file_color.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let flags = Flags::default();

        // Files between one hour and one day old render with ANSI color 42.
        assert_eq!(
            creation_date
                .format("%c")
                .to_string()
                .with(Color::AnsiValue(42)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_a_several_days_old_file_color() {
        let creation_date = Local::now() - Duration::days(2);
        let file_path =
            temp_file_with_mtime("test_a_several_days_old_file_color.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let flags = Flags::default();

        // Files older than a day render with ANSI color 36.
        assert_eq!(
            creation_date
                .format("%c")
                .to_string()
                .with(Color::AnsiValue(36)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_with_relative_date() {
        let creation_date = Local::now() - Duration::days(2);
        let file_path = temp_file_with_mtime("test_with_relative_date.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let mut flags = Flags::default();
        flags.date = DateFlag::Relative;

        assert_eq!(
            "2 days ago".to_string().with(Color::AnsiValue(36)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_with_relative_date_now() {
        let creation_date = Local::now();
        let file_path = temp_file_with_mtime("test_with_relative_date_now.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let mut flags = Flags::default();
        flags.date = DateFlag::Relative;

        assert_eq!(
            "now".to_string().with(Color::AnsiValue(40)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_iso_format_now() {
        let creation_date = Local::now();
        let file_path = temp_file_with_mtime("test_iso_format_now.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let mut flags = Flags::default();
        flags.date = DateFlag::ISO;

        // Recent files use the short month-day hour:minute ISO form.
        assert_eq!(
            creation_date
                .format("%m-%d %R")
                .to_string()
                .with(Color::AnsiValue(40)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }

    #[test]
    fn test_iso_format_year_old() {
        let creation_date = Local::now() - Duration::days(400);
        let file_path = temp_file_with_mtime("test_iso_format_year_old.tmp", &creation_date);

        let colors = Colors::new(ThemeOption::Default);
        let date = Date::from(&file_path.metadata().unwrap());
        let mut flags = Flags::default();
        flags.date = DateFlag::ISO;

        // Files older than ~6 months use the full year-month-day ISO form.
        assert_eq!(
            creation_date
                .format("%F")
                .to_string()
                .with(Color::AnsiValue(36)),
            date.render(&colors, &flags)
        );

        fs::remove_file(file_path).unwrap();
    }
}
| 30.080139 | 92 | 0.542569 |
624f2dc527c25196df975910e5c19b3a10f431c2
| 1,015 |
use crate::Result;
pub use self::dir::Directory;
mod dir;
/// A trait for asset sources, which provides
/// methods for loading bytes.
pub trait Source: Send + Sync + 'static {
/// This is called to check if an asset has been modified.
///
/// Returns the modification time as seconds since `UNIX_EPOCH`.
fn modified(&self, path: &str) -> Result<u64>;
/// Loads the bytes given a path.
///
/// The id should always use `/` as separator in paths.
fn load(&self, path: &str) -> Result<Vec<u8>>;
/// Returns both the result of `load` and `modified` as a tuple.
/// There's a default implementation which just calls both methods,
/// but you may be able to provide a more optimized version yourself.
fn load_with_metadata(&self, path: &str) -> Result<(Vec<u8>, u64)> {
#[cfg(feature = "profiler")]
profile_scope!("source_load_asset_with_metadata");
let m = self.modified(path)?;
let b = self.load(path)?;
Ok((b, m))
}
}
| 30.757576 | 73 | 0.626601 |
fc9a17b63598200b68cda64870c889fd2262f177
| 2,187 |
use std::fmt::Display;
use std::path::{Path, PathBuf};
use clap::Parser;
use dialoguer::{theme::ColorfulTheme, Select};
use crate::error::{Error, Result};
use crate::subcommand::Subcommand;
use crate::DEFAULT_CONFIG;
// Raw, possibly-partial configuration as deserialized from the config file
// and `CHANGES_*` environment variables; converted into a fully-populated
// `Config` via the `From<ConfigFile>` impl below.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct ConfigFile {
    // `None` when neither the file nor the environment sets a prompt.
    #[serde(default)]
    prompt: Option<String>,
}
impl ConfigFile {
    /// Reads configuration from the file at `path` combined with `CHANGES_*`
    /// environment variables, and deserializes the result.
    pub fn parse(path: &Path) -> Result<ConfigFile> {
        let settings = config::Config::builder()
            .add_source(config::File::from(path))
            .add_source(config::Environment::with_prefix("CHANGES").separator("_"))
            .build()?;

        Ok(settings.try_deserialize()?)
    }
}
// Fully-resolved runtime configuration; every field has a value, supplied by
// `Default` or by `From<ConfigFile>`.
pub struct Config {
    // Prompt text shown when asking the user which change type to record.
    pub prompt: String,
}
impl ::std::default::Default for Config {
fn default() -> Self {
Self {
prompt: "What type of change is this?".to_string(),
}
}
}
impl From<ConfigFile> for Config {
    /// Fills any value the parsed file omitted with its default.
    fn from(c: ConfigFile) -> Self {
        Config {
            // `unwrap_or_else` (not `unwrap_or`) so the default `Config` —
            // and its String allocation — is only built when the file
            // actually omits the prompt (clippy: or_fun_call).
            prompt: c.prompt.unwrap_or_else(|| Config::default().prompt),
        }
    }
}
// Command-line interface definition, parsed by clap's derive API.
#[derive(Parser, Debug)]
#[clap(name = env!("CARGO_PKG_NAME"), about, version, author)]
pub struct Cli {
    // Which subcommand to run (add / generate).
    #[clap(subcommand)]
    command: Subcommand,
    // Path to the configuration file; defaults to DEFAULT_CONFIG.
    #[clap(
        name = "config",
        short,
        long,
        value_name = "PATH",
        default_value = DEFAULT_CONFIG,
    )]
    config_path: PathBuf,
}
impl Cli {
    /// Loads the configuration and dispatches to the selected subcommand.
    pub fn execute(&mut self) -> Result<()> {
        if !self.config_path.exists() {
            // TODO: display error that config file is missing
            todo!()
        }

        let config = Config::from(ConfigFile::parse(&self.config_path)?);

        match &self.command {
            Subcommand::Add(cmd) => cmd.execute(&config.prompt),
            Subcommand::Generate(cmd) => cmd.execute(),
        }
    }
}
pub fn select_input<T: Display>(choices: &[T], prompt: String) -> Result<&T> {
let idx = Select::with_theme(&ColorfulTheme::default())
.with_prompt(prompt)
.items(choices)
.interact()?;
choices
.get(idx)
.ok_or_else(|| Error::InvalidChangeType("None".to_owned()))
}
| 24.852273 | 83 | 0.585277 |
08238595429d251773bd09282322d188a14b6304
| 33,778 |
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// All possible error types for this service.
// Generated by smithy-rs (see the file header); regeneration overwrites
// manual changes. Variants mirror the service's modeled exceptions.
#[non_exhaustive]
#[derive(std::fmt::Debug)]
pub enum Error {
    /// <p>The operation failed because of a system problem, even though the request was valid. Retry
    /// your request.</p>
    InternalErrorException(crate::error::InternalErrorException),
    /// <p>The parameters of the request were invalid.</p>
    InvalidInputException(crate::error::InvalidInputException),
    /// <p>The operation failed because there was nothing to do or the operation wasn't possible. For example, you might have
    /// submitted an <code>AssociateAdminAccount</code> request for an account ID that
    /// was already set as the Firewall Manager administrator. Or you might have tried to access a Region
    /// that's disabled by default, and that you need to enable for the Firewall Manager
    /// administrator account and for Organizations before you can access it.</p>
    InvalidOperationException(crate::error::InvalidOperationException),
    /// <p>The value of the <code>Type</code> parameter is invalid.</p>
    InvalidTypeException(crate::error::InvalidTypeException),
    /// <p>The operation exceeds a resource limit, for example, the maximum number of
    /// <code>policy</code> objects that you can create for an Amazon Web Services account. For more information,
    /// see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/fms-limits.html">Firewall
    /// Manager Limits</a> in the <i>WAF Developer Guide</i>.</p>
    LimitExceededException(crate::error::LimitExceededException),
    /// <p>The specified resource was not found.</p>
    ResourceNotFoundException(crate::error::ResourceNotFoundException),
    /// An unhandled error occurred.
    Unhandled(Box<dyn std::error::Error + Send + Sync + 'static>),
}
impl std::fmt::Display for Error {
    // Forwards to the Display impl of whichever concrete error is wrapped.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Error::InternalErrorException(inner) => inner.fmt(f),
            Error::InvalidInputException(inner) => inner.fmt(f),
            Error::InvalidOperationException(inner) => inner.fmt(f),
            Error::InvalidTypeException(inner) => inner.fmt(f),
            Error::LimitExceededException(inner) => inner.fmt(f),
            Error::ResourceNotFoundException(inner) => inner.fmt(f),
            Error::Unhandled(inner) => inner.fmt(f),
        }
    }
}
// The `From<SdkError<...>>` impls below are machine-generated by smithy-rs;
// each maps an operation-specific error kind onto the matching service-wide
// `Error` variant, with transport-level failures and unmodeled errors
// falling through to `Error::Unhandled`.
impl<R> From<aws_smithy_http::result::SdkError<crate::error::AssociateAdminAccountError, R>>
    for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::AssociateAdminAccountError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::AssociateAdminAccountErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::AssociateAdminAccountErrorKind::InvalidInputException(inner) => {
                    Error::InvalidInputException(inner)
                }
                crate::error::AssociateAdminAccountErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::AssociateAdminAccountErrorKind::LimitExceededException(inner) => {
                    Error::LimitExceededException(inner)
                }
                crate::error::AssociateAdminAccountErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::AssociateAdminAccountErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteAppsListError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::DeleteAppsListError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DeleteAppsListErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::DeleteAppsListErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::DeleteAppsListErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::DeleteAppsListErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteNotificationChannelError, R>>
    for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::DeleteNotificationChannelError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DeleteNotificationChannelErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::DeleteNotificationChannelErrorKind::InvalidOperationException(
                    inner,
                ) => Error::InvalidOperationException(inner),
                crate::error::DeleteNotificationChannelErrorKind::ResourceNotFoundException(
                    inner,
                ) => Error::ResourceNotFoundException(inner),
                crate::error::DeleteNotificationChannelErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeletePolicyError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::DeletePolicyError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DeletePolicyErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::DeletePolicyErrorKind::InvalidInputException(inner) => {
                    Error::InvalidInputException(inner)
                }
                crate::error::DeletePolicyErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::DeletePolicyErrorKind::LimitExceededException(inner) => {
                    Error::LimitExceededException(inner)
                }
                crate::error::DeletePolicyErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::DeletePolicyErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DeleteProtocolsListError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::DeleteProtocolsListError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DeleteProtocolsListErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::DeleteProtocolsListErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::DeleteProtocolsListErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::DeleteProtocolsListErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::DisassociateAdminAccountError, R>>
    for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::DisassociateAdminAccountError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::DisassociateAdminAccountErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::DisassociateAdminAccountErrorKind::InvalidOperationException(
                    inner,
                ) => Error::InvalidOperationException(inner),
                crate::error::DisassociateAdminAccountErrorKind::ResourceNotFoundException(
                    inner,
                ) => Error::ResourceNotFoundException(inner),
                crate::error::DisassociateAdminAccountErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetAdminAccountError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::GetAdminAccountError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetAdminAccountErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetAdminAccountErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetAdminAccountErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetAdminAccountErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetAppsListError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::GetAppsListError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetAppsListErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetAppsListErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetAppsListErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetAppsListErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetComplianceDetailError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::GetComplianceDetailError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetComplianceDetailErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetComplianceDetailErrorKind::InvalidInputException(inner) => {
                    Error::InvalidInputException(inner)
                }
                crate::error::GetComplianceDetailErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetComplianceDetailErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetComplianceDetailErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetNotificationChannelError, R>>
    for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::GetNotificationChannelError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetNotificationChannelErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetNotificationChannelErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetNotificationChannelErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetNotificationChannelErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetPolicyError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(err: aws_smithy_http::result::SdkError<crate::error::GetPolicyError, R>) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetPolicyErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetPolicyErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetPolicyErrorKind::InvalidTypeException(inner) => {
                    Error::InvalidTypeException(inner)
                }
                crate::error::GetPolicyErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetPolicyErrorKind::Unhandled(inner) => Error::Unhandled(inner),
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetProtectionStatusError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::GetProtectionStatusError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetProtectionStatusErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetProtectionStatusErrorKind::InvalidInputException(inner) => {
                    Error::InvalidInputException(inner)
                }
                crate::error::GetProtectionStatusErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetProtectionStatusErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetProtocolsListError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::GetProtocolsListError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetProtocolsListErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetProtocolsListErrorKind::InvalidOperationException(inner) => {
                    Error::InvalidOperationException(inner)
                }
                crate::error::GetProtocolsListErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetProtocolsListErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::GetViolationDetailsError, R>> for Error
where
    R: Send + Sync + std::fmt::Debug + 'static,
{
    fn from(
        err: aws_smithy_http::result::SdkError<crate::error::GetViolationDetailsError, R>,
    ) -> Self {
        match err {
            aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
                crate::error::GetViolationDetailsErrorKind::InternalErrorException(inner) => {
                    Error::InternalErrorException(inner)
                }
                crate::error::GetViolationDetailsErrorKind::InvalidInputException(inner) => {
                    Error::InvalidInputException(inner)
                }
                crate::error::GetViolationDetailsErrorKind::ResourceNotFoundException(inner) => {
                    Error::ResourceNotFoundException(inner)
                }
                crate::error::GetViolationDetailsErrorKind::Unhandled(inner) => {
                    Error::Unhandled(inner)
                }
            },
            _ => Error::Unhandled(err.into()),
        }
    }
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListAppsListsError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::ListAppsListsError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListAppsListsErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListAppsListsErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::ListAppsListsErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::ListAppsListsErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListAppsListsErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListComplianceStatusError, R>>
for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::ListComplianceStatusError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListComplianceStatusErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListComplianceStatusErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListComplianceStatusErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListMemberAccountsError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::ListMemberAccountsError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListMemberAccountsErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListMemberAccountsErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListMemberAccountsErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListPoliciesError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::ListPoliciesError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListPoliciesErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListPoliciesErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::ListPoliciesErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::ListPoliciesErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListPoliciesErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListProtocolsListsError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::ListProtocolsListsError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListProtocolsListsErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListProtocolsListsErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::ListProtocolsListsErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListProtocolsListsErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::ListTagsForResourceErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::ListTagsForResourceErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::ListTagsForResourceErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::ListTagsForResourceErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::ListTagsForResourceErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutAppsListError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::PutAppsListError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::PutAppsListErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::PutAppsListErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::PutAppsListErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::PutAppsListErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::PutAppsListErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::PutAppsListErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutNotificationChannelError, R>>
for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::PutNotificationChannelError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::PutNotificationChannelErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::PutNotificationChannelErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::PutNotificationChannelErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::PutNotificationChannelErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutPolicyError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::PutPolicyError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::PutPolicyErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::PutPolicyErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::PutPolicyErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::PutPolicyErrorKind::InvalidTypeException(inner) => {
Error::InvalidTypeException(inner)
}
crate::error::PutPolicyErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::PutPolicyErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::PutPolicyErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::PutProtocolsListError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(
err: aws_smithy_http::result::SdkError<crate::error::PutProtocolsListError, R>,
) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::PutProtocolsListErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::PutProtocolsListErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::PutProtocolsListErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::PutProtocolsListErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::PutProtocolsListErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::PutProtocolsListErrorKind::Unhandled(inner) => {
Error::Unhandled(inner)
}
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::TagResourceError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::TagResourceError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::TagResourceErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::TagResourceErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::TagResourceErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::TagResourceErrorKind::LimitExceededException(inner) => {
Error::LimitExceededException(inner)
}
crate::error::TagResourceErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::TagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
impl<R> From<aws_smithy_http::result::SdkError<crate::error::UntagResourceError, R>> for Error
where
R: Send + Sync + std::fmt::Debug + 'static,
{
fn from(err: aws_smithy_http::result::SdkError<crate::error::UntagResourceError, R>) -> Self {
match err {
aws_smithy_http::result::SdkError::ServiceError { err, .. } => match err.kind {
crate::error::UntagResourceErrorKind::InternalErrorException(inner) => {
Error::InternalErrorException(inner)
}
crate::error::UntagResourceErrorKind::InvalidInputException(inner) => {
Error::InvalidInputException(inner)
}
crate::error::UntagResourceErrorKind::InvalidOperationException(inner) => {
Error::InvalidOperationException(inner)
}
crate::error::UntagResourceErrorKind::ResourceNotFoundException(inner) => {
Error::ResourceNotFoundException(inner)
}
crate::error::UntagResourceErrorKind::Unhandled(inner) => Error::Unhandled(inner),
},
_ => Error::Unhandled(err.into()),
}
}
}
// Marker impl so `Error` can be used as `dyn std::error::Error`
// (requires Display + Debug, presumably implemented elsewhere in this
// generated file — not visible in this chunk).
impl std::error::Error for Error {}
| 46.144809 | 125 | 0.58319 |
33c5ff42a5a2f520ae3b7d077d0e0755657e6d3d
| 1,166 |
use super::*;
use serde::{Deserialize, Serialize};
/// A field declaration in the schema AST: a named, typed member together
/// with its arity, default, directives, docs and source location.
#[derive(Debug, Clone)]
pub struct Field {
    /// The field's type.
    pub field_type: Identifier,
    /// The name of the field.
    pub name: Identifier,
    /// The arity of the field.
    pub arity: FieldArity,
    /// The default value of the field.
    pub default_value: Option<Expression>,
    /// The directives of this field.
    pub directives: Vec<Directive>,
    /// The comments for this field.
    pub documentation: Option<Comment>,
    /// The location of this field in the text representation.
    pub span: Span,
}
impl WithIdentifier for Field {
    /// A field is identified by its name.
    fn identifier(&self) -> &Identifier {
        &self.name
    }
}
impl WithSpan for Field {
    /// Source location of this field in the original text.
    fn span(&self) -> &Span {
        &self.span
    }
}
impl WithDirectives for Field {
    /// Directives attached to this field.
    fn directives(&self) -> &Vec<Directive> {
        &self.directives
    }
}
impl WithDocumentation for Field {
    /// Doc comment attached to this field, if any.
    fn documentation(&self) -> &Option<Comment> {
        &self.documentation
    }
}
/// Cardinality of a field's values.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum FieldArity {
    /// Exactly one value must be present.
    Required,
    /// Zero or one value.
    Optional,
    /// Zero or more values.
    List,
}
| 22 | 62 | 0.635506 |
50cd815aa0a324ba4237bbe079d76431ac889b68
| 355 |
/// Request builder for `GET /milestones/{milestone-public-id}`.
pub struct ClubhouseGetMilestoneMilestonePublicId {
    // Fully-built request path, constructed by the API client.
    pub(crate) path: burgundy::Path,
}
impl ClubhouseGetMilestoneMilestonePublicId {
    /// See https://clubhouse.io/api/rest/v2/#Get-Milestone
    ///
    /// Executes the request (no request body, hence `None`) and
    /// deserializes the JSON response into a `Milestone`.
    pub fn run(self) -> crate::Result<crate::types::Milestone> {
        self.path
            .execute_as_json::<(), crate::types::Milestone>(None)
    }
}
| 29.583333 | 65 | 0.678873 |
f7be2225983f1c31d86bb487a2060bbb8ac67350
| 4,572 |
use std::fs::File;
use std::io;
use std::io::Read;
use std::mem;
use std::os::unix::io::AsRawFd;
use crate::ctx::Ctx;
use calloop::LoopHandle;
/// Raw Linux `input_event` as read from a /dev/input device node.
/// `#[repr(C)]` so the byte layout matches what the kernel writes,
/// allowing a direct transmute from the read buffer.
#[derive(Debug)]
#[repr(C)]
pub struct InputEvent {
    // NOTE(review): the kernel declares these as a `struct timeval`
    // (time_t / suseconds_t); `isize` only matches that layout on
    // 64-bit targets — confirm for the intended platform.
    tv_sec: isize, // from timeval struct
    tv_usec: isize, // from timeval struct
    pub type_: u16,
    pub code: u16,
    pub value: i32,
}
/// Number of scancodes covered by the lookup tables below (exclusive bound).
const MAX_KEYS: u16 = 112;
/// Placeholder legend for scancodes with no known mapping.
const UK: &str = "<UK>";

/// Legends for scancodes 0..MAX_KEYS without the shift modifier, indexed by
/// Linux evdev scancode.
const KEY_NAMES: [&'static str; MAX_KEYS as usize] = [
    UK, "<ESC>", "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "-", "=",
    "<Backspace>", "<Tab>", "q", "w", "e", "r", "t", "y", "u", "i", "o", "p",
    "[", "]", "<Enter>", "<LCtrl>", "a", "s", "d", "f", "g", "h", "j", "k",
    "l", ";", "'", "`", "<LShift>", "\\", "z", "x", "c", "v", "b", "n", "m",
    ",", ".", "/", "<RShift>", "<KP*>", "<LAlt>", " ", "<CapsLock>",
    "<F1>", "<F2>", "<F3>", "<F4>", "<F5>", "<F6>", "<F7>", "<F8>", "<F9>", "<F10>",
    "<NumLock>", "<ScrollLock>", "<KP7>", "<KP8>", "<KP9>", "<KP->",
    "<KP4>", "<KP5>", "<KP6>", "<KP+>", "<KP1>", "<KP2>", "<KP3>", "<KP0>", "<KP.>",
    UK, UK, UK, "<F11>", "<F12>", UK, UK, UK, UK, UK, UK, UK,
    "<KPEnter>", "<RCtrl>", "<KP/>", "<SysRq>", "<RAlt>", UK,
    "<Home>", "<Up>", "<PageUp>", "<Left>", "<Right>", "<End>", "<Down>",
    "<PageDown>", "<Insert>", "<Delete>",
];

/// Legends for the same scancodes with the shift modifier held.
const SHIFT_KEY_NAMES: [&'static str; MAX_KEYS as usize] = [
    UK, "<ESC>", "!", "@", "#", "$", "%", "^", "&", "*", "(", ")", "_", "+",
    "<Backspace>", "<Tab>", "Q", "W", "E", "R", "T", "Y", "U", "I", "O", "P",
    "{", "}", "<Enter>", "<LCtrl>", "A", "S", "D", "F", "G", "H", "J", "K",
    "L", ":", "\"", "~", "<LShift>", "|", "Z", "X", "C", "V", "B", "N", "M",
    "<", ">", "?", "<RShift>", "<KP*>", "<LAlt>", " ", "<CapsLock>",
    "<F1>", "<F2>", "<F3>", "<F4>", "<F5>", "<F6>", "<F7>", "<F8>", "<F9>", "<F10>",
    "<NumLock>", "<ScrollLock>", "<KP7>", "<KP8>", "<KP9>", "<KP->",
    "<KP4>", "<KP5>", "<KP6>", "<KP+>", "<KP1>", "<KP2>", "<KP3>", "<KP0>", "<KP.>",
    UK, UK, UK, "<F11>", "<F12>", UK, UK, UK, UK, UK, UK, UK,
    "<KPEnter>", "<RCtrl>", "<KP/>", "<SysRq>", "<RAlt>", UK,
    "<Home>", "<Up>", "<PageUp>", "<Left>", "<Right>", "<End>", "<Down>",
    "<PageDown>", "<Insert>", "<Delete>",
];

/// Translates an evdev scancode into its printable legend, honouring the
/// shift modifier. Codes at or beyond `MAX_KEYS` map to `<UK>`.
pub fn get_key_text(code: u16, shift_pressed: u8) -> &'static str {
    let table = if shift_pressed == 0 {
        &KEY_NAMES
    } else {
        &SHIFT_KEY_NAMES
    };
    table.get(code as usize).copied().unwrap_or(UK)
}
/// Event type code for keyboard events (EV_KEY in linux/input-event-codes.h).
const EV_KEY: u16 = 1;

/// Returns true when the raw event type denotes a key event.
pub fn is_key_event(type_: u16) -> bool {
    matches!(type_, EV_KEY)
}

/// Event value reported for a key press (as opposed to release/repeat).
const KEY_PRESS: i32 = 1;

/// Returns true when the event value denotes a key press.
pub fn is_key_press(value: i32) -> bool {
    matches!(value, KEY_PRESS)
}
/// Spawns a reader thread over the built-in i8042 keyboard device node and
/// returns the receiving end of a channel carrying every raw `InputEvent`.
///
/// `loop_handle` is currently unused; it is kept for the calloop-based
/// integration sketched in the commented-out block below.
pub fn init(loop_handle: LoopHandle<Ctx>) -> std::sync::mpsc::Receiver<InputEvent> {
    println!("╠══ kbd init");
    let (tx, rx) = std::sync::mpsc::channel::<InputEvent>();
    // Fixed path for the PS/2 (i8042) keyboard on this platform.
    let devnode = "/dev/input/by-path/platform-i8042-serio-0-event-kbd";
    let file = File::open(devnode).expect("Couldn't Open Keyboard");
    // Plain zero-initialised byte buffer; no `unsafe mem::zeroed()` needed.
    let mut buf = [0u8; mem::size_of::<InputEvent>()];
    let mut buf_reader = io::BufReader::new(file);
    std::thread::spawn(move || loop {
        // read_exact returns io::Result<()>; the previous code bound that
        // unit value to a misleading (and unused) `count` variable.
        buf_reader.read_exact(&mut buf).expect("Read Failed");
        // SAFETY: InputEvent is #[repr(C)] and `buf` holds exactly
        // size_of::<InputEvent>() bytes read from the kernel, which emits
        // structs of that same C layout.
        let event: InputEvent = unsafe { mem::transmute(buf) };
        // Receiver may have been dropped; ignore send failures.
        let _ = tx.send(event);
    });
    /* loop_handle.insert_source(
        calloop::generic::Generic::from_fd(
            file.as_raw_fd(),
            calloop::Interest::Readable,
            calloop::Mode::Level,
        ),
        {
            move |_, _, ctx: &mut Ctx<W>| {
                println!("called");
                Ok(())
            }
        },
    );*/
    rx
}
| 14.514286 | 84 | 0.369641 |
feffe40b633eb28a7a059089b31eb33fb39722e5
| 1,858 |
//! modifies build to dynamically link in a) indy-sdk
use std::env;
use std::path::Path;
fn main() {
    // Directory containing the prebuilt libindy library; mandatory.
    let libindy_lib_path = env::var("LIBINDY_DIR")
        .unwrap_or_else(|_| panic!("Missing required environment variable LIBINDY_DIR"));

    println!("cargo:rustc-link-search=native={}", libindy_lib_path);
    // LIBINDY_STATIC toggles static vs. dynamic linkage of libindy.
    if env::var("LIBINDY_STATIC").is_ok() {
        println!("cargo:rustc-link-lib=static=indy");
    } else {
        println!("cargo:rustc-link-lib=dylib=indy");
    }

    let target = env::var("TARGET").unwrap();
    println!("target={}", target);

    if target.contains("linux-android") {
        // Android additionally needs openssl and libsodium search paths.
        let openssl = match env::var("OPENSSL_LIB_DIR") {
            Ok(val) => val,
            Err(..) => match env::var("OPENSSL_DIR") {
                // BUG FIX: the old code joined with "/lib"; `Path::join`
                // replaces the whole path when given an absolute component,
                // so OPENSSL_DIR was silently discarded and "/lib" used
                // instead. Joining the relative "lib" appends as intended.
                Ok(dir) => Path::new(&dir).join("lib").to_string_lossy().into_owned(),
                Err(..) => panic!("Missing required environment variables OPENSSL_DIR or OPENSSL_LIB_DIR")
            }
        };
        let sodium = env::var("SODIUM_LIB_DIR")
            .unwrap_or_else(|_| panic!("Missing required environment variable SODIUM_LIB_DIR"));
        println!("cargo:rustc-link-search=native={}", openssl);
        println!("cargo:rustc-link-lib=dylib=crypto");
        println!("cargo:rustc-link-lib=dylib=ssl");
        println!("cargo:rustc-link-search=native={}", sodium);
        println!("cargo:rustc-link-lib=static=sodium");
    } else if target.contains("-windows-") {
        // Windows links against prebuilt DLLs shipped separately.
        println!("cargo:rustc-link-lib=dylib=ssleay32");
        println!("cargo:rustc-link-lib=dylib=zmq");
        println!("cargo:rustc-link-lib=dylib=sodium");
        let prebuilt_dir = env::var("INDY_PREBUILT_DEPS_DIR").unwrap();
        println!("cargo:rustc-flags=-L {}\\lib", prebuilt_dir);
    }
}
| 35.056604 | 106 | 0.587191 |
e2230aaacb41992e64f4db71e4d86bffb75ab14d
| 4,212 |
use actix_web::{App, HttpResponse, HttpServer, post, get, middleware, web::{self, Data, ServiceConfig}};
use crate::{config::{app_config::AppConfig, database_config::DBConn}, extensions::json::Jsonify, health_check::{domain::HealthCheck, use_cases::HealthcheckUseCases}, oauth::{domain::{NewClientRequest, NewClientResponseError}, use_cases::register_client}};
/// POST /oauth/clients — registers a new OAuth client.
#[post("/clients")]
async fn create_client(req_body: web::Json<NewClientRequest>, data: Data<AppState>) -> HttpResponse {
    let outcome = register_client(&data.db_conn, req_body.into_inner()).await;
    match outcome {
        Ok(created) => HttpResponse::Accepted()
            .content_type("application/json")
            .body(created.to_json().to_string()),
        Err(NewClientResponseError::Unknown(reason)) => {
            HttpResponse::InternalServerError().body(reason)
        }
    }
}
/// GET /health — 200 when the database is reachable, 503 otherwise.
#[get("/health")]
async fn get_health_status(data: Data<AppState>) -> HttpResponse {
    let status = HealthCheck::is_ready(&data.db_conn).await;
    let mut builder = if status.database_ready {
        HttpResponse::Ok()
    } else {
        HttpResponse::ServiceUnavailable()
    };
    builder
        .content_type("application/json")
        .body(status.to_json().to_string())
}
/// Builds the route configuration shared by the real server and the tests.
fn get_service_config() -> Box<dyn Fn(&mut ServiceConfig)> {
    Box::new(move |cfg: &mut ServiceConfig| {
        let oauth_scope = web::scope("/oauth").service(create_client);
        cfg.service(get_health_status).service(oauth_scope);
    })
}
/// Shared application state injected into every request handler.
struct AppState { db_conn: DBConn }
/// Starts the HTTP server on 127.0.0.1 at the configured port and runs it
/// until shutdown.
pub async fn run_service(app_config: AppConfig) -> std::io::Result<()> {
    let shared_state = web::Data::new(AppState {
        db_conn: app_config.db_conn,
    });
    let address = format!("127.0.0.1:{}", app_config.port);
    HttpServer::new(move || {
        App::new()
            .wrap(middleware::Compress::default())
            .app_data(shared_state.clone())
            .configure(get_service_config())
    })
    .bind(address)?
    .run()
    .await
}
#[cfg(test)]
mod tests {
    use super::*;
    use actix_web::{test, App};
    use test::{init_service, call_service, TestRequest, read_body_json};
    use crate::config::{app_config::get_app_config};
    // Integration test: requires a reachable database (uses the real app
    // config), so it will fail in environments without one.
    #[actix_rt::test]
    async fn when_user_requests_for_healthcheck_and_db_is_up_it_should_return_healthy() {
        let app_state = web::Data::new(AppState {
            db_conn: get_app_config().await.unwrap().db_conn
        });
        let mut app = init_service(App::new().app_data(app_state).configure(get_service_config())).await;
        let request = TestRequest::get().uri("/health").to_request();
        let response = call_service(&mut app, request).await;
        assert!(response.status().is_success());
        let body: HealthCheck = read_body_json(response).await;
        assert_eq!(body, HealthCheck { database_ready: true })
    }
    // Disabled client-registration test, kept for reference until the
    // endpoint is stable enough to assert on.
    // #[actix_rt::test]
    // async fn when_user_creates_a_new_client_and_db_is_up_it_should_return_client_id() {
    //     let app_state = web::Data::new(AppState {
    //         db_conn: get_app_config().await.unwrap().db_conn
    //     });
    //     let mut app = init_service(App::new().app_data(app_state).configure(get_service_config())).await;
    //     let payload = json!({
    //         "application_type": "web",
    //         "redirect_uris": vec![ "https://client.example.org/callback", "https://client.example.org/callback2" ],
    //         "client_name": "My Cool App",
    //         "logo_uri": "https://client.example.org/logo.png",
    //         "token_endpoint_auth_method": "client_secret_basic",
    //         "contacts": vec![ "[email protected]" ]
    //     });
    //     let request = TestRequest::post().uri("/oauth/clients").set_json(&payload).to_request();
    //     let response = call_service(&mut app, request).await;
    //     assert!(response.status().is_success());
    //     let body = read_body(response).await;
    //     assert_eq!(body, "{\"database_ready\":true}")
    // }
}
| 39.735849 | 255 | 0.609687 |
297a0e0dd39ab07cbc909c1e64d035121ebb56c4
| 6,317 |
use super::*;
use super::ecc::{
MontgomeryPoint,
EdwardsPoint
};
use super::boolean::Boolean;
use ::jubjub::*;
use bellman::{
ConstraintSystem
};
use super::lookup::*;
pub use pedersen_hash::Personalization;
impl Personalization {
    /// Lifts the fixed personalization bit pattern into constant circuit
    /// booleans.
    fn get_constant_bools(&self) -> Vec<Boolean> {
        self.get_bits().into_iter().map(Boolean::constant).collect()
    }
}
/// Synthesises the Pedersen hash circuit over `personalization ++ bits`.
///
/// Bits are consumed three at a time through windowed lookups; each segment
/// is accumulated in Montgomery form, converted to twisted Edwards form, and
/// added into the running Edwards accumulator, which is returned.
pub fn pedersen_hash<E: JubjubEngine, CS>(
    mut cs: CS,
    personalization: Personalization,
    bits: &[Boolean],
    params: &E::Params
) -> Result<EdwardsPoint<E>, SynthesisError>
    where CS: ConstraintSystem<E>
{
    let personalization = personalization.get_constant_bools();
    // The personalization prefix is always exactly six bits.
    assert_eq!(personalization.len(), 6);

    // Running hash accumulator in Edwards form; None until the first
    // segment completes.
    let mut edwards_result = None;
    // Hash input = personalization bits followed by the payload bits.
    let mut bits = personalization.iter().chain(bits.iter());
    let mut segment_generators = params.pedersen_circuit_generators().iter();
    let boolean_false = Boolean::constant(false);

    let mut segment_i = 0;
    loop {
        let mut segment_result = None;
        let mut segment_windows = &segment_generators.next()
            .expect("enough segments")[..];

        let mut window_i = 0;
        while let Some(a) = bits.next() {
            // Missing trailing bits of the final chunk are padded with
            // constant false.
            let b = bits.next().unwrap_or(&boolean_false);
            let c = bits.next().unwrap_or(&boolean_false);

            // 3-bit windowed lookup with conditional negation yields the
            // next point in Montgomery coordinates.
            let tmp = lookup3_xy_with_conditional_negation(
                cs.namespace(|| format!("segment {}, window {}", segment_i, window_i)),
                &[a.clone(), b.clone(), c.clone()],
                &segment_windows[0]
            )?;

            let tmp = MontgomeryPoint::interpret_unchecked(tmp.0, tmp.1);

            match segment_result {
                None => {
                    segment_result = Some(tmp);
                },
                Some(ref mut segment_result) => {
                    *segment_result = tmp.add(
                        cs.namespace(|| format!("addition of segment {}, window {}", segment_i, window_i)),
                        segment_result,
                        params
                    )?;
                }
            }

            segment_windows = &segment_windows[1..];

            if segment_windows.len() == 0 {
                break;
            }

            window_i += 1;
        }

        match segment_result {
            Some(segment_result) => {
                // Convert this segment into twisted Edwards form.
                let segment_result = segment_result.into_edwards(
                    cs.namespace(|| format!("conversion of segment {} into edwards", segment_i)),
                    params
                )?;

                match edwards_result {
                    Some(ref mut edwards_result) => {
                        *edwards_result = segment_result.add(
                            cs.namespace(|| format!("addition of segment {} to accumulator", segment_i)),
                            edwards_result,
                            params
                        )?;
                    },
                    None => {
                        edwards_result = Some(segment_result);
                    }
                }
            },
            None => {
                // We didn't process any new bits.
                break;
            }
        }

        segment_i += 1;
    }

    Ok(edwards_result.unwrap())
}
#[cfg(test)]
mod test {
    use rand::{SeedableRng, Rng, XorShiftRng};
    use super::*;
    use ::circuit::test::*;
    use ::circuit::boolean::{Boolean, AllocatedBit};
    use ff::PrimeField;
    use pairing::bls12_381::{Bls12, Fr};

    /// Pins the constraint count of the circuit for a fixed-size input.
    #[test]
    fn test_pedersen_hash_constraints() {
        let mut rng = XorShiftRng::from_seed([0x3dbe6259, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        let params = &JubjubBls12::new();
        let mut cs = TestConstraintSystem::<Bls12>::new();

        let input: Vec<bool> = (0..(Fr::NUM_BITS * 2)).map(|_| rng.gen()).collect();

        let input_bools: Vec<Boolean> = input.iter().enumerate().map(|(i, b)| {
            Boolean::from(
                AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)).unwrap()
            )
        }).collect();

        pedersen_hash(
            cs.namespace(|| "pedersen hash"),
            Personalization::NoteCommitment,
            &input_bools,
            params
        ).unwrap();

        assert!(cs.is_satisfied());
        assert_eq!(cs.num_constraints(), 1377);
    }

    /// Cross-checks the circuit output against the native (non-circuit)
    /// Pedersen hash for every input length, and confirms that a different
    /// personalization yields a different digest.
    #[test]
    fn test_pedersen_hash() {
        let mut rng = XorShiftRng::from_seed([0x3dbe6259, 0x8d313d76, 0x3237db17, 0xe5bc0654]);
        let params = &JubjubBls12::new();

        for length in 0..751 {
            for _ in 0..5 {
                // Fix: this binding was `let mut input`, but the vector is
                // never mutated (only cloned/consumed), causing an
                // unused_mut warning.
                let input: Vec<bool> = (0..length).map(|_| rng.gen()).collect();

                let mut cs = TestConstraintSystem::<Bls12>::new();

                let input_bools: Vec<Boolean> = input.iter().enumerate().map(|(i, b)| {
                    Boolean::from(
                        AllocatedBit::alloc(cs.namespace(|| format!("input {}", i)), Some(*b)).unwrap()
                    )
                }).collect();

                let res = pedersen_hash(
                    cs.namespace(|| "pedersen hash"),
                    Personalization::MerkleTree(1),
                    &input_bools,
                    params
                ).unwrap();

                assert!(cs.is_satisfied());

                let expected = ::pedersen_hash::pedersen_hash::<Bls12, _>(
                    Personalization::MerkleTree(1),
                    input.clone().into_iter(),
                    params
                ).into_xy();

                assert_eq!(res.get_x().get_value().unwrap(), expected.0);
                assert_eq!(res.get_y().get_value().unwrap(), expected.1);

                // Test against the output of a different personalization
                let unexpected = ::pedersen_hash::pedersen_hash::<Bls12, _>(
                    Personalization::MerkleTree(0),
                    input.into_iter(),
                    params
                ).into_xy();

                assert!(res.get_x().get_value().unwrap() != unexpected.0);
                assert!(res.get_y().get_value().unwrap() != unexpected.1);
            }
        }
    }
}
| 32.394872 | 107 | 0.498654 |
645912e9aa41dbacccf223ecab618846c786dd59
| 701 |
//! In CHIP-8, instruction set has opcodes that allow the program to jump to a certain address or call a subroutine.
//! The stack is used to remember the current location before a jump is performed.
//! So anytime you perform a jump or call a subroutine, store the program counter (PC) in the stack before proceeding.
//! The system has 16 levels of stack and in order to remember which level of the stack is used,
//! you need to implement a stack pointer (sp).
/// The CHIP-8 call stack: 16 levels of 16-bit return addresses.
#[derive(Debug)]
pub struct Stack {
    pub cells: Vec<u16>,
}

impl Stack {
    /// Builds a stack with 16 cells, each initialized to zero.
    pub fn new() -> Stack {
        let cells = vec![0u16; 16];
        Stack { cells }
    }
}
| 38.944444 | 118 | 0.693295 |
bfc011203e8c4885f346dbb75eb0f9e5f4f36c25
| 1,818 |
use bitset_fixed::BitSet;
use ddo::{Problem, Relaxation, Decision};
use crate::model::{Minla, State};
use std::cmp::Reverse;
/// Relaxation operator for the MinLA (minimum linear arrangement) DDO model.
#[derive(Debug, Clone)]
pub struct MinlaRelax<'a> {
    // The problem instance the bounds are computed against.
    pb: &'a Minla
}
impl <'a> MinlaRelax<'a> {
    /// Wraps a problem instance for use by the relaxation callbacks.
    pub fn new(pb: &'a Minla) -> MinlaRelax<'a> {
        MinlaRelax { pb }
    }

    /// Optimistic lower bound on the cost contributed by the `m` edges still
    /// internal to the `n` free vertices: each edge greedily takes the
    /// shortest distance still available.
    fn edge_lb(&self, n: isize, m: isize) -> isize {
        let mut bound = 0;
        let mut remaining = m;
        for k in 1..n {
            if remaining <= 0 {
                break;
            }
            bound += remaining;
            remaining -= n - k;
        }
        bound
    }

    /// Optimistic lower bound on the cost of cut edges: the heaviest cut
    /// weights are paired with the smallest distances.
    fn cut_lb(&self, state: &State) -> isize {
        let mut weights = state.cut.clone();
        // Sort heaviest-first so index == distance in the optimistic pairing.
        weights.sort_unstable_by_key(|&w| Reverse(w));
        weights
            .iter()
            .enumerate()
            .take_while(|&(_, &w)| w != 0)
            .map(|(dist, &w)| dist as isize * w)
            .sum()
    }

    /// Upper bound (maximisation form) for a node: the negated sum of both
    /// lower bounds, or 0 when no vertex remains free.
    fn ub(&self, vertices: &BitSet, state: &State) -> isize {
        let n = vertices.count_ones() as isize;
        if n == 0 {
            return 0;
        }
        -(self.cut_lb(state) + self.edge_lb(n, state.m))
    }
}
impl <'a> Relaxation<State> for MinlaRelax<'a> {
    /// Merged (relaxed) node: no free vertices and zero cut weights — an
    /// over-approximation admissible for any merged set of states.
    fn merge_states(&self, _states: &mut dyn Iterator<Item=&State>) -> State {
        State {
            free: BitSet::new(0),
            cut: vec![0; self.pb.nb_vars()],
            m: 0
        }
    }
    /// Relaxed edge cost: the original cost plus an optimistic bound on the
    /// remainder reachable from the destination state.
    fn relax_edge(&self, _src: &State, dst: &State, _relaxed: &State, _decision: Decision, cost: isize) -> isize {
        cost + self.ub(&dst.free, &dst)
    }
    /// Rough upper bound used for node selection/pruning.
    fn estimate(&self, state: &State) -> isize {
        self.ub(&state.free, &state)
    }
}
| 23.012658 | 114 | 0.484048 |
1cd5591c44145770a0b40c8d7d126ac70803e103
| 6,278 |
#![allow(
unused_parens,
clippy::excessive_precision,
clippy::missing_safety_doc,
clippy::not_unsafe_ptr_arg_deref,
clippy::should_implement_trait,
clippy::too_many_arguments,
clippy::unused_unit,
)]
//! # Phase Unwrapping API
//!
//! Two-dimensional phase unwrapping is found in different applications like terrain elevation estimation
//! in synthetic aperture radar (SAR), field mapping in magnetic resonance imaging or as a way of finding
//! corresponding pixels in structured light reconstruction with sinusoidal patterns.
//!
//! Given a phase map, wrapped between [-pi; pi], phase unwrapping aims at finding the "true" phase map
//! by adding the right number of 2*pi to each pixel.
//!
//! The problem is straightforward for perfect wrapped phase map, but real data are usually not noise-free.
//! Among the different algorithms that were developed, quality-guided phase unwrapping methods are fast
//! and efficient. They follow a path that unwraps high quality pixels first,
//! avoiding error propagation from the start.
//!
//! In this module, a quality-guided phase unwrapping is implemented following the approach described in [histogramUnwrapping](https://docs.opencv.org/4.5.3/d0/de3/citelist.html#CITEREF_histogramUnwrapping) .
use crate::{mod_prelude::*, core, sys, types};
/// Glob-importable re-exports of this module's traits.
pub mod prelude {
	pub use { super::PhaseUnwrappingConst, super::PhaseUnwrapping, super::HistogramPhaseUnwrappingConst, super::HistogramPhaseUnwrapping };
}
/// Class implementing two-dimensional phase unwrapping based on [histogramUnwrapping](https://docs.opencv.org/4.5.3/d0/de3/citelist.html#CITEREF_histogramUnwrapping)
/// This algorithm belongs to the quality-guided phase unwrapping methods.
/// First, it computes a reliability map from second differences between a pixel and its eight neighbours.
/// Reliability values lie between 0 and 16*pi*pi. Then, this reliability map is used to compute
/// the reliabilities of "edges". An edge is an entity defined by two pixels that are connected
/// horizontally or vertically. Its reliability is found by adding the reliabilities of the
/// two pixels connected through it. Edges are sorted in a histogram based on their reliability values.
/// This histogram is then used to unwrap pixels, starting from the highest quality pixel.
///
/// The wrapped phase map and the unwrapped result are stored in CV_32FC1 Mat.
// Immutable half of the generated binding: exposes the raw OpenCV pointer.
pub trait HistogramPhaseUnwrappingConst: crate::phase_unwrapping::PhaseUnwrappingConst {
	// Raw const pointer to the underlying C++ object (generated accessor).
	fn as_raw_HistogramPhaseUnwrapping(&self) -> *const c_void;
}
pub trait HistogramPhaseUnwrapping: crate::phase_unwrapping::HistogramPhaseUnwrappingConst + crate::phase_unwrapping::PhaseUnwrapping {
	/// Raw mutable pointer to the underlying C++ object, used by the generated FFI calls.
	fn as_raw_mut_HistogramPhaseUnwrapping(&mut self) -> *mut c_void;
	/// Get the reliability map computed from the wrapped phase map.
	///
	/// ## Parameters
	/// * reliabilityMap: Image where the reliability map is stored.
	fn get_inverse_reliability_map(&mut self, reliability_map: &mut dyn core::ToOutputArray) -> Result<()> {
		// Convert the Rust wrapper into an OpenCV OutputArray handle.
		output_array_arg!(reliability_map);
		// Call into the generated C shim; `into_result` maps OpenCV-side errors to `Err`.
		unsafe { sys::cv_phase_unwrapping_HistogramPhaseUnwrapping_getInverseReliabilityMap_const__OutputArrayR(self.as_raw_mut_HistogramPhaseUnwrapping(), reliability_map.as_raw__OutputArray()) }.into_result()
	}
}
impl dyn HistogramPhaseUnwrapping + '_ {
	/// Constructor
	///
	/// ## Parameters
	/// * parameters: HistogramPhaseUnwrapping parameters HistogramPhaseUnwrapping::Params: width,height of the phase map and histogram characteristics.
	///
	/// ## C++ default parameters
	/// * parameters: HistogramPhaseUnwrapping::Params()
	pub fn create(parameters: crate::phase_unwrapping::HistogramPhaseUnwrapping_Params) -> Result<core::Ptr<dyn crate::phase_unwrapping::HistogramPhaseUnwrapping>> {
		// Construct the C++ object, then wrap the returned extern pointer in a smart `Ptr`.
		unsafe { sys::cv_phase_unwrapping_HistogramPhaseUnwrapping_create_const_ParamsR(&parameters) }.into_result().map(|r| unsafe { core::Ptr::<dyn crate::phase_unwrapping::HistogramPhaseUnwrapping>::opencv_from_extern(r) } )
	}
}
/// Parameters of phaseUnwrapping constructor.
///
/// ## Parameters
/// * width: Phase map width.
/// * height: Phase map height.
/// * histThresh: Bins in the histogram are not of equal size. Default value is 3*pi*pi. The one before "histThresh" value are smaller.
/// * nbrOfSmallBins: Number of bins between 0 and "histThresh". Default value is 10.
/// * nbrOfLargeBins: Number of bins between "histThresh" and 32*pi*pi (highest edge reliability value). Default value is 5.
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct HistogramPhaseUnwrapping_Params {
	/// Phase map width in pixels.
	pub width: i32,
	/// Phase map height in pixels.
	pub height: i32,
	/// Histogram bin threshold; bins before this value are smaller (default 3*pi*pi).
	pub hist_thresh: f32,
	/// Number of bins between 0 and `hist_thresh` (default 10).
	pub nbr_of_small_bins: i32,
	/// Number of bins between `hist_thresh` and 32*pi*pi (default 5).
	pub nbr_of_large_bins: i32,
}
opencv_type_simple! { crate::phase_unwrapping::HistogramPhaseUnwrapping_Params }
impl HistogramPhaseUnwrapping_Params {
	/// Construct the parameter struct with the OpenCV-side default values.
	pub fn default() -> Result<crate::phase_unwrapping::HistogramPhaseUnwrapping_Params> {
		unsafe { sys::cv_phase_unwrapping_HistogramPhaseUnwrapping_Params_Params() }.into_result()
	}
}
/// Abstract base class for phase unwrapping.
pub trait PhaseUnwrappingConst: core::AlgorithmTraitConst {
	/// Raw const pointer to the underlying C++ `PhaseUnwrapping` object.
	fn as_raw_PhaseUnwrapping(&self) -> *const c_void;
}
pub trait PhaseUnwrapping: core::AlgorithmTrait + crate::phase_unwrapping::PhaseUnwrappingConst {
	/// Raw mutable pointer to the underlying C++ object, used by the generated FFI calls.
	fn as_raw_mut_PhaseUnwrapping(&mut self) -> *mut c_void;
	/// Unwraps a 2D phase map.
	///
	/// ## Parameters
	/// * wrappedPhaseMap: The wrapped phase map of type CV_32FC1 that needs to be unwrapped.
	/// * unwrappedPhaseMap: The unwrapped phase map.
	/// * shadowMask: Optional CV_8UC1 mask image used when some pixels do not hold any phase information in the wrapped phase map.
	///
	/// ## C++ default parameters
	/// * shadow_mask: noArray()
	fn unwrap_phase_map(&mut self, wrapped_phase_map: &dyn core::ToInputArray, unwrapped_phase_map: &mut dyn core::ToOutputArray, shadow_mask: &dyn core::ToInputArray) -> Result<()> {
		// Convert the Rust wrappers into OpenCV Input/OutputArray handles.
		input_array_arg!(wrapped_phase_map);
		output_array_arg!(unwrapped_phase_map);
		input_array_arg!(shadow_mask);
		// Call straight into the generated C shim; errors are mapped by `into_result`.
		unsafe { sys::cv_phase_unwrapping_PhaseUnwrapping_unwrapPhaseMap_const__InputArrayR_const__OutputArrayR_const__InputArrayR(self.as_raw_mut_PhaseUnwrapping(), wrapped_phase_map.as_raw__InputArray(), unwrapped_phase_map.as_raw__OutputArray(), shadow_mask.as_raw__InputArray()) }.into_result()
	}
}
| 50.224 | 292 | 0.776043 |
d7e243c11fec7b998aa5e04b84ba53f46e35ee48
| 3,156 |
#[doc = "Register `DMMU_TABLE10` reader"]
pub struct R(crate::R<DMMU_TABLE10_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DMMU_TABLE10_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<DMMU_TABLE10_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<DMMU_TABLE10_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `DMMU_TABLE10` writer"]
pub struct W(crate::W<DMMU_TABLE10_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<DMMU_TABLE10_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<DMMU_TABLE10_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<DMMU_TABLE10_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `DMMU_TABLE10` reader - "]
pub struct DMMU_TABLE10_R(crate::FieldReader<u8, u8>);
impl DMMU_TABLE10_R {
    /// Wrap a raw field value in a typed reader (crate-internal constructor).
    #[inline(always)]
    pub(crate) fn new(bits: u8) -> Self {
        DMMU_TABLE10_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for DMMU_TABLE10_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `DMMU_TABLE10` writer - "]
pub struct DMMU_TABLE10_W<'a> {
w: &'a mut W,
}
impl<'a> DMMU_TABLE10_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // The field occupies bits 0:6, hence the 0x7f mask on both the cleared
        // register image and the incoming value.
        self.w.bits = (self.w.bits & !0x7f) | (value as u32 & 0x7f);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:6"]
    #[inline(always)]
    pub fn dmmu_table10(&self) -> DMMU_TABLE10_R {
        // Mask out the 7-bit field before handing it to the typed reader.
        DMMU_TABLE10_R::new((self.bits & 0x7f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:6"]
    #[inline(always)]
    pub fn dmmu_table10(&mut self) -> DMMU_TABLE10_W {
        // Proxy writer that masks the value into bits 0:6 on commit.
        DMMU_TABLE10_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Unsafe because arbitrary bit patterns bypass the typed field writers.
        self.0.bits(bits);
        self
    }
}
#[doc = "\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dmmu_table10](index.html) module"]
pub struct DMMU_TABLE10_SPEC;
impl crate::RegisterSpec for DMMU_TABLE10_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [dmmu_table10::R](R) reader structure"]
impl crate::Readable for DMMU_TABLE10_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [dmmu_table10::W](W) writer structure"]
impl crate::Writable for DMMU_TABLE10_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets DMMU_TABLE10 to value 0x0a"]
impl crate::Resettable for DMMU_TABLE10_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        // Hardware reset value for this register (see the SPEC doc above).
        0x0a
    }
}
| 30.346154 | 393 | 0.616603 |
f8e131ac902edfdb0fe82b297cad532d218f1c7f
| 16,340 |
use std::net::SocketAddrV6;
use std::ops::{Deref, DerefMut};
use std::sync::Arc;
use std::{io, str};
use blake2::{
digest::{Input, VariableOutput},
Blake2b,
};
use bytes::{Buf, BufMut, BytesMut};
use ring::aead;
use ring::digest;
use ring::hkdf;
use ring::hmac::SigningKey;
use rustls::quic::{ClientQuicExt, ServerQuicExt};
pub use rustls::{Certificate, NoClientAuth, PrivateKey, TLSError};
pub use rustls::{ClientConfig, ClientSession, ServerConfig, ServerSession, Session};
use rustls::{KeyLogFile, ProtocolVersion};
use webpki::DNSNameRef;
use webpki_roots;
use endpoint::EndpointError;
use packet::{ConnectionId, AEAD_TAG_SIZE};
use transport_parameters::TransportParameters;
use {Side, RESET_TOKEN_SIZE};
/// The TLS session for one QUIC connection, wrapping either endpoint role.
pub enum TlsSession {
    Client(ClientSession),
    Server(ServerSession),
}
impl TlsSession {
    /// Start a client-side TLS session for `hostname`, embedding our QUIC
    /// transport parameters into the handshake.
    pub fn new_client(
        config: &Arc<ClientConfig>,
        hostname: &str,
        params: &TransportParameters,
    ) -> Result<TlsSession, EndpointError> {
        // Reject hostnames that are not valid DNS names before rustls sees them.
        let pki_server_name = DNSNameRef::try_from_ascii_str(hostname)
            .map_err(|_| EndpointError::InvalidDnsName(hostname.into()))?;
        Ok(TlsSession::Client(ClientSession::new_quic(
            &config,
            pki_server_name,
            to_vec(Side::Client, params),
        )))
    }
    /// Start a server-side TLS session advertising our transport parameters.
    pub fn new_server(config: &Arc<ServerConfig>, params: &TransportParameters) -> TlsSession {
        TlsSession::Server(ServerSession::new_quic(
            config,
            to_vec(Side::Server, params),
        ))
    }
    /// The SNI hostname requested by the peer, if any.
    pub fn get_sni_hostname(&self) -> Option<&str> {
        match *self {
            // Clients send SNI; they never receive one.
            TlsSession::Client(_) => None,
            TlsSession::Server(ref session) => session.get_sni_hostname(),
        }
    }
}
impl Deref for TlsSession {
    type Target = dyn Session;
    /// Borrow the underlying rustls session, erasing the client/server distinction.
    fn deref(&self) -> &Self::Target {
        match self {
            TlsSession::Client(session) => session,
            TlsSession::Server(session) => session,
        }
    }
}
impl DerefMut for TlsSession {
    /// Mutably borrow the underlying rustls session regardless of role.
    fn deref_mut(&mut self) -> &mut (dyn Session + 'static) {
        match self {
            TlsSession::Client(session) => session,
            TlsSession::Server(session) => session,
        }
    }
}
/// Build a rustls `ClientConfig` for use with QUIC: webpki trust roots,
/// TLS 1.3 only, the QUIC ALPN protocol, and key logging enabled.
pub fn build_client_config() -> ClientConfig {
    let mut config = ClientConfig::new();
    config
        .root_store
        .add_server_trust_anchors(&webpki_roots::TLS_SERVER_ROOTS);
    // Restrict the offered protocol versions to TLS 1.3, the only one used here.
    config.versions = vec![ProtocolVersion::TLSv1_3];
    config.alpn_protocols = vec![ALPN_PROTOCOL.into()];
    // rustls `KeyLogFile` writes session secrets for debugging (e.g. with Wireshark).
    config.key_log = Arc::new(KeyLogFile::new());
    config
}
/// Build a rustls `ServerConfig` for use with QUIC: no client authentication,
/// the QUIC ALPN protocol, and key logging enabled.
pub fn build_server_config() -> ServerConfig {
    let mut config = ServerConfig::new(NoClientAuth::new());
    config.set_protocols(&[ALPN_PROTOCOL.into()]);
    // rustls `KeyLogFile` writes session secrets for debugging (e.g. with Wireshark).
    config.key_log = Arc::new(KeyLogFile::new());
    config
}
/// Serialize `params` for the given side into a freshly allocated buffer.
fn to_vec(side: Side, params: &TransportParameters) -> Vec<u8> {
    let mut buf = Vec::new();
    params.write(side, &mut buf);
    buf
}
/// Value used in ACKs we transmit
pub const ACK_DELAY_EXPONENT: u8 = 3;
/// Magic value used to indicate 0-RTT support in NewSessionTicket
//pub const TLS_MAX_EARLY_DATA: u32 = 0xffff_ffff;
/// Compute the stateless-reset token for a connection ID as a keyed BLAKE2b
/// MAC over the ID, truncated to `RESET_TOKEN_SIZE` bytes.
pub fn reset_token_for(key: &[u8], id: &ConnectionId) -> [u8; RESET_TOKEN_SIZE] {
    let mut mac = Blake2b::new_keyed(key, RESET_TOKEN_SIZE);
    mac.process(id);
    // TODO: Server ID??
    let mut result = [0; RESET_TOKEN_SIZE];
    mac.variable_result(&mut result).unwrap();
    result
}
#[derive(Clone)]
pub enum Crypto {
// ZeroRtt(ZeroRttCrypto),
Handshake(CryptoContext),
OneRtt(CryptoContext),
}
impl Crypto {
/*
pub fn new_0rtt(tls: &TlsSide) -> Self {
let suite = tls.get_negotiated_ciphersuite().unwrap();
let tls_cipher = tls.current_cipher().unwrap();
let digest = tls_cipher.handshake_digest().unwrap();
let cipher = Cipher::from_nid(tls_cipher.cipher_nid().unwrap()).unwrap();
const LABEL: &str = "EXPORTER-QUIC 0rtt";
let mut secret = vec![0; digest.size()];
tls.export_keying_material_early(&mut secret, &LABEL, b"")
.unwrap();
Crypto::ZeroRtt(ZeroRttCrypto {
state: CryptoState::new(digest, cipher, secret.into()),
cipher,
})
}
*/
pub fn new_handshake(id: &ConnectionId, side: Side) -> Self {
let (digest, cipher) = (&digest::SHA256, &aead::AES_128_GCM);
let (local_label, remote_label) = if side == Side::Client {
(b"client hs", b"server hs")
} else {
(b"server hs", b"client hs")
};
let hs_secret = handshake_secret(id);
let local = CryptoState::new(
digest,
cipher,
expanded_handshake_secret(&hs_secret, local_label),
);
let remote = CryptoState::new(
digest,
cipher,
expanded_handshake_secret(&hs_secret, remote_label),
);
Crypto::Handshake(CryptoContext {
local,
remote,
digest,
cipher,
})
}
pub fn new_1rtt(tls: &TlsSession, side: Side) -> Self {
let suite = tls.get_negotiated_ciphersuite().unwrap();
let (cipher, digest) = (suite.get_aead_alg(), suite.get_hash());
const SERVER_LABEL: &[u8] = b"EXPORTER-QUIC server 1rtt";
const CLIENT_LABEL: &[u8] = b"EXPORTER-QUIC client 1rtt";
let (local_label, remote_label) = if side == Side::Client {
(CLIENT_LABEL, SERVER_LABEL)
} else {
(SERVER_LABEL, CLIENT_LABEL)
};
let mut local_secret = vec![0; digest.output_len];
tls.export_keying_material(&mut local_secret, local_label, None)
.unwrap();
let local = CryptoState::new(digest, cipher, local_secret);
let mut remote_secret = vec![0; digest.output_len];
tls.export_keying_material(&mut remote_secret, remote_label, None)
.unwrap();
let remote = CryptoState::new(digest, cipher, remote_secret);
Crypto::OneRtt(CryptoContext {
local,
remote,
digest,
cipher,
})
}
/*
pub fn is_0rtt(&self) -> bool {
match *self {
Crypto::ZeroRtt(_) => true,
_ => false,
}
}
*/
pub fn is_handshake(&self) -> bool {
match *self {
Crypto::Handshake(_) => true,
_ => false,
}
}
pub fn is_1rtt(&self) -> bool {
match *self {
Crypto::OneRtt(_) => true,
_ => false,
}
}
pub fn write_nonce(&self, state: &CryptoState, number: u64, out: &mut [u8]) {
let out = {
let mut write = io::Cursor::new(out);
write.put_u32_be(0);
write.put_u64_be(number);
debug_assert_eq!(write.remaining(), 0);
write.into_inner()
};
debug_assert_eq!(out.len(), state.iv.len());
for (out, inp) in out.iter_mut().zip(state.iv.iter()) {
*out ^= inp;
}
}
pub fn encrypt(&self, packet: u64, buf: &mut Vec<u8>, header_len: usize) {
// FIXME: retain crypter
let (cipher, state) = match *self {
//Crypto::ZeroRtt(ref crypto) => (crypto.cipher, &crypto.state),
Crypto::Handshake(ref crypto) | Crypto::OneRtt(ref crypto) => {
(crypto.cipher, &crypto.local)
}
};
let mut nonce_buf = [0u8; aead::MAX_TAG_LEN];
let nonce = &mut nonce_buf[..cipher.nonce_len()];
self.write_nonce(&state, packet, nonce);
let tag = vec![0; cipher.tag_len()];
buf.extend(tag);
let key = aead::SealingKey::new(cipher, &state.key).unwrap();
let (header, payload) = buf.split_at_mut(header_len);
aead::seal_in_place(&key, &*nonce, header, payload, cipher.tag_len()).unwrap();
}
pub fn decrypt(&self, packet: u64, header: &[u8], payload: &mut BytesMut) -> Result<(), ()> {
if payload.len() < AEAD_TAG_SIZE {
return Err(());
}
let (cipher, state) = match *self {
//Crypto::ZeroRtt(ref crypto) => (crypto.cipher, &crypto.state),
Crypto::Handshake(ref crypto) | Crypto::OneRtt(ref crypto) => {
(crypto.cipher, &crypto.remote)
}
};
let mut nonce_buf = [0u8; aead::MAX_TAG_LEN];
let nonce = &mut nonce_buf[..cipher.nonce_len()];
self.write_nonce(&state, packet, nonce);
let payload_len = payload.len();
let key = aead::OpeningKey::new(cipher, &state.key).unwrap();
aead::open_in_place(&key, &*nonce, header, 0, payload.as_mut()).map_err(|_| ())?;
payload.split_off(payload_len - cipher.tag_len());
Ok(())
}
pub fn update(&self, side: Side) -> Crypto {
match *self {
Crypto::OneRtt(ref crypto) => Crypto::OneRtt(CryptoContext {
local: crypto.local.update(crypto.digest, crypto.cipher, side),
remote: crypto.local.update(crypto.digest, crypto.cipher, !side),
digest: crypto.digest,
cipher: crypto.cipher,
}),
_ => unreachable!(),
}
}
}
/*
pub struct CookieFactory {
mac_key: [u8; 64],
}
const COOKIE_MAC_BYTES: usize = 64;
impl CookieFactory {
fn new(mac_key: [u8; 64]) -> Self {
Self { mac_key }
}
fn generate(&self, conn: &ConnectionInfo, out: &mut [u8]) -> usize {
let mac = self.generate_mac(conn);
out[0..COOKIE_MAC_BYTES].copy_from_slice(&mac);
COOKIE_MAC_BYTES
}
fn generate_mac(&self, conn: &ConnectionInfo) -> [u8; COOKIE_MAC_BYTES] {
let mut mac = Blake2b::new_keyed(&self.mac_key, COOKIE_MAC_BYTES);
mac.process(&conn.remote.ip().octets());
{
let mut buf = [0; 2];
BigEndian::write_u16(&mut buf, conn.remote.port());
mac.process(&buf);
}
let mut result = [0; COOKIE_MAC_BYTES];
mac.variable_result(&mut result).unwrap();
result
}
fn verify(&self, conn: &ConnectionInfo, cookie_data: &[u8]) -> bool {
let expected = self.generate_mac(conn);
if !constant_time_eq(cookie_data, &expected) {
return false;
}
true
}
}
*/
/// Addressing information identifying a peer connection.
#[derive(Clone)]
pub struct ConnectionInfo {
    /// The connection ID in use for this peer.
    pub(crate) id: ConnectionId,
    /// The peer's socket address (stored as `SocketAddrV6`).
    pub(crate) remote: SocketAddrV6,
}
const HANDSHAKE_SALT: [u8; 20] = [
0x9c, 0x10, 0x8f, 0x98, 0x52, 0x0a, 0x5c, 0x5c, 0x32, 0x96, 0x8e, 0x95, 0x0e, 0x8a, 0x2c, 0x5f,
0xe0, 0x6d, 0x6c, 0x38,
];
/// Keying material for one direction of a connection: the expansion secret
/// plus the packet-protection key and IV derived from it.
#[derive(Clone)]
pub struct CryptoState {
    // Retained so the state can be ratcheted forward in `update`.
    secret: Vec<u8>,
    key: Vec<u8>,
    iv: Vec<u8>,
}
impl CryptoState {
    /// Derive the packet-protection key and IV from `secret` using
    /// QHKDF-Expand with the "key" and "iv" labels.
    fn new(
        digest: &'static digest::Algorithm,
        cipher: &'static aead::Algorithm,
        secret: Vec<u8>,
    ) -> Self {
        let secret_key = SigningKey::new(digest, &secret);
        let mut key = vec![0; cipher.key_len()];
        qhkdf_expand(&secret_key, b"key", &mut key);
        let mut iv = vec![0; cipher.nonce_len()];
        qhkdf_expand(&secret_key, b"iv", &mut iv);
        Self { secret, key, iv }
    }
    /// Ratchet to the next-generation state for a key update: the current
    /// secret is expanded with the side-specific "client 1rtt"/"server 1rtt"
    /// label, and fresh key/IV are derived from the new secret.
    fn update(
        &self,
        digest: &'static digest::Algorithm,
        cipher: &'static aead::Algorithm,
        side: Side,
    ) -> CryptoState {
        let secret_key = SigningKey::new(digest, &self.secret);
        let mut new_secret = vec![0; digest.output_len];
        qhkdf_expand(
            &secret_key,
            if side == Side::Client {
                b"client 1rtt"
            } else {
                b"server 1rtt"
            },
            &mut new_secret,
        );
        Self::new(digest, cipher, new_secret)
    }
}
#[derive(Clone)]
pub struct ZeroRttCrypto {
state: CryptoState,
cipher: &'static aead::Algorithm,
}
#[derive(Clone)]
pub struct CryptoContext {
local: CryptoState,
remote: CryptoState,
digest: &'static digest::Algorithm,
cipher: &'static aead::Algorithm,
}
#[derive(Debug, Fail)]
pub enum ConnectError {
#[fail(display = "session ticket was malformed")]
MalformedSession,
#[fail(display = "TLS error: {}", _0)]
Tls(TLSError),
}
impl From<TLSError> for ConnectError {
    /// Wrap a rustls error so `?` can convert it in TLS-touching code paths.
    fn from(err: TLSError) -> Self {
        ConnectError::Tls(err)
    }
}
/// Expand a handshake-level secret (e.g. with the "client hs"/"server hs"
/// labels) to a SHA-256-sized output via QHKDF-Expand.
pub fn expanded_handshake_secret(prk: &SigningKey, label: &[u8]) -> Vec<u8> {
    let mut out = vec![0u8; digest::SHA256.output_len];
    qhkdf_expand(prk, label, &mut out);
    out
}
/// QHKDF-Expand: HKDF-Expand where the info block is the output length (u16),
/// the length of the prefixed label (u8), and the label itself prefixed with
/// "QUIC ".
pub fn qhkdf_expand(key: &SigningKey, label: &[u8], out: &mut [u8]) {
    // info = len(out) || len("QUIC " + label) || "QUIC " || label.
    // Fix: reserve by the label length — the old code reserved by `out.len()`,
    // which mis-sizes the buffer (2 + 1 + 5 + label.len() bytes are written).
    let mut info = Vec::with_capacity(2 + 1 + 5 + label.len());
    info.put_u16_be(out.len() as u16);
    info.put_u8(5 + (label.len() as u8));
    info.extend_from_slice(b"QUIC ");
    info.extend_from_slice(label);
    hkdf::expand(key, &info, out);
}
/// HKDF-Extract the connection-level handshake secret from the initial
/// connection ID, keyed with the fixed `HANDSHAKE_SALT`.
fn handshake_secret(conn_id: &ConnectionId) -> SigningKey {
    let key = SigningKey::new(&digest::SHA256, &HANDSHAKE_SALT);
    let mut buf = Vec::with_capacity(8);
    buf.put_slice(conn_id);
    hkdf::extract(&key, &buf)
}
const ALPN_PROTOCOL: &str = "hq-11";
#[cfg(test)]
mod test {
use super::*;
use packet::PacketNumber;
use rand;
use MAX_CID_SIZE;
#[test]
fn packet_number() {
for prev in 0..1024 {
for x in 0..256 {
let found = PacketNumber::U8(x as u8).expand(prev);
assert!(found as i64 - (prev + 1) as i64 <= 128 || prev < 128);
}
}
// Order of operations regression test
assert_eq!(PacketNumber::U32(0xa0bd197c).expand(0xa0bd197a), 0xa0bd197c);
}
#[test]
fn handshake_crypto_roundtrip() {
let conn = ConnectionId::random(&mut rand::thread_rng(), MAX_CID_SIZE as u8);
let client = Crypto::new_handshake(&conn, Side::Client);
let server = Crypto::new_handshake(&conn, Side::Server);
let mut buf = b"headerpayload".to_vec();
client.encrypt(0, &mut buf, 6);
let mut header = BytesMut::from(buf);
let mut payload = header.split_off(6);
server.decrypt(0, &header, &mut payload).unwrap();
assert_eq!(&*payload, b"payload");
}
#[test]
fn key_derivation() {
let id = ConnectionId(
[0x83, 0x94, 0xc8, 0xf0, 0x3e, 0x51, 0x57, 0x08]
.iter()
.cloned()
.collect(),
);
let digest = &digest::SHA256;
let cipher = &aead::AES_128_GCM;
let hs_secret = handshake_secret(&id);
let client_secret = expanded_handshake_secret(&hs_secret, b"client hs");
assert_eq!(
&client_secret[..],
[
0x83, 0x55, 0xf2, 0x1a, 0x3d, 0x8f, 0x83, 0xec, 0xb3, 0xd0, 0xf9, 0x71, 0x08, 0xd3,
0xf9, 0x5e, 0x0f, 0x65, 0xb4, 0xd8, 0xae, 0x88, 0xa0, 0x61, 0x1e, 0xe4, 0x9d, 0xb0,
0xb5, 0x23, 0x59, 0x1d
]
);
let client_state = CryptoState::new(digest, cipher, client_secret);
assert_eq!(
&client_state.key[..],
[
0x3a, 0xd0, 0x54, 0x2c, 0x4a, 0x85, 0x84, 0x74, 0x00, 0x63, 0x04, 0x9e, 0x3b, 0x3c,
0xaa, 0xb2
]
);
assert_eq!(
&client_state.iv[..],
[0xd1, 0xfd, 0x26, 0x05, 0x42, 0x75, 0x3a, 0xba, 0x38, 0x58, 0x9b, 0xad]
);
let server_secret = expanded_handshake_secret(&hs_secret, b"server hs");
assert_eq!(
&server_secret[..],
[
0xf8, 0x0e, 0x57, 0x71, 0x48, 0x4b, 0x21, 0xcd, 0xeb, 0xb5, 0xaf, 0xe0, 0xa2, 0x56,
0xa3, 0x17, 0x41, 0xef, 0xe2, 0xb5, 0xc6, 0xb6, 0x17, 0xba, 0xe1, 0xb2, 0xf1, 0x5a,
0x83, 0x04, 0x83, 0xd6
]
);
let server_state = CryptoState::new(digest, cipher, server_secret);
assert_eq!(
&server_state.key[..],
[
0xbe, 0xe4, 0xc2, 0x4d, 0x2a, 0xf1, 0x33, 0x80, 0xa9, 0xfa, 0x24, 0xa5, 0xe2, 0xba,
0x2c, 0xff
]
);
assert_eq!(
&server_state.iv[..],
[0x25, 0xb5, 0x8e, 0x24, 0x6d, 0x9e, 0x7d, 0x5f, 0xfe, 0x43, 0x23, 0xfe]
);
}
}
//pub type SessionTicketBuffer = Arc<Mutex<Vec<Result<SslSession, ()>>>>;
| 30.714286 | 99 | 0.571114 |
2f5d89f149f9b537767558320106a7182ff874b8
| 901 |
extern crate serde_json;
use serde_json::Value as JsonValue;
/// Game-server metadata parsed from a server-info JSON payload.
///
/// Fields borrow from the `serde_json::Value` they were parsed out of, so a
/// `Server` cannot outlive its source document. `&str` is used instead of the
/// original `&String`, which could not be produced from `Value::as_str()` at
/// all (the file previously failed to compile).
pub struct Server<'a> {
    hostname: &'a str,
    clients: &'a str,
    max_clients: &'a str,
    map_name: &'a str,
    game_type: &'a str
}

// Implementing `Display` (rather than `ToString` directly) provides
// `to_string()` for free via the blanket impl, and additionally lets `Server`
// be used with `format!`/`println!`.
impl<'a> std::fmt::Display for Server<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "<hostname: {}, client: {}>", self.hostname, self.clients)
    }
}

/// Extract the server fields from a JSON object.
///
/// # Panics
/// Panics if any of the expected keys is missing or is not a JSON string.
pub fn parse_server_data<'a>(data: &'a JsonValue) -> Server<'a> {
    Server {
        hostname: data["hostname"].as_str().unwrap(),
        clients: data["clients"].as_str().unwrap(),
        max_clients: data["sv_maxclients"].as_str().unwrap(),
        map_name: data["mapname"].as_str().unwrap(),
        game_type: data["gametype"].as_str().unwrap()
    }
}
| 28.15625 | 81 | 0.619312 |
26dedca08032a51f13ae9f99e6a8a60a6d8c0741
| 2,236 |
use std::{convert::TryInto, error::Error, path::PathBuf};
use decodeme::{read_file_header, PageTag, FILE_HEADER_SIZE, FILE_MAGIC_TOP_LEVEL};
use structopt::StructOpt;
#[derive(StructOpt, Debug)]
struct TruncateOpt {
file: PathBuf,
}
#[derive(StructOpt, Debug)]
enum Opt {
/// Truncate to a single page per tag
#[structopt(name = "truncate")]
Truncate(TruncateOpt),
}
/// Copy `file_contents`, keeping the file header, only the *first* event page,
/// and every string-table page, producing a small but still-loadable file.
fn truncate(file_contents: &[u8]) -> Result<Vec<u8>, Box<dyn Error + Send + Sync>> {
    let file_version = read_file_header(file_contents, FILE_MAGIC_TOP_LEVEL, None, "top-level")?;
    // Only the paged layout of file format versions 7 and 8 is handled below.
    if !(7..=8).contains(&file_version) {
        // Fixed user-facing message ("is not support" -> "is not supported").
        return Err(format!("File version {} is not supported", file_version).into());
    }
    let paged_data = &file_contents[FILE_HEADER_SIZE..];
    // Start the output with a verbatim copy of the file header.
    let mut truncated = file_contents[..FILE_HEADER_SIZE].to_vec();
    let mut event_page_emitted = false;
    let mut pos = 0;
    while pos < paged_data.len() {
        let page_start = pos;
        // Page layout: 1-byte tag, 4-byte little-endian payload size, payload.
        let tag = TryInto::try_into(paged_data[pos]).unwrap();
        let page_size =
            u32::from_le_bytes(paged_data[pos + 1..pos + 5].try_into().unwrap()) as usize;
        assert!(page_size > 0);
        let page_end = page_start + 5 + page_size;
        let page_bytes = &paged_data[page_start..page_end];
        match tag {
            PageTag::Events => {
                // Copy only the first event page
                if !event_page_emitted {
                    truncated.extend_from_slice(page_bytes);
                    event_page_emitted = true;
                }
            }
            PageTag::StringData | PageTag::StringIndex => {
                // Copy all string table pages
                truncated.extend_from_slice(page_bytes);
            }
        }
        pos = page_end;
    }
    Ok(truncated)
}
fn main() -> Result<(), Box<dyn Error + Send + Sync>> {
    let opt = Opt::from_args();
    match opt {
        Opt::Truncate(opt) => {
            // Read the whole profile into memory, truncate it, and write the
            // result next to the input with a `.truncated.mm_profdata` extension.
            let file_contents = std::fs::read(&opt.file)?;
            let truncated = truncate(&file_contents)?;
            let output_file_name = opt.file.with_extension("truncated.mm_profdata");
            std::fs::write(output_file_name, truncated)?;
        }
    }
    Ok(())
}
| 29.038961 | 98 | 0.591682 |
9172e62c227b0b88c605cd393228323f2350c69a
| 2,053 |
//! CKB VM cost model.
//!
//! The cost model assign cycles to instructions.
//! copied from https://github.com/nervosnetwork/ckb/blob/develop/script/src/cost_model.rs
use ckb_vm::{
instructions::{extract_opcode, insts},
Instruction,
};
/// How many bytes can transfer when VM costs one cycle.
// 0.25 cycles per byte
pub const BYTES_PER_CYCLE: u64 = 4;
/// Calculates how many cycles spent to load the specified number of bytes.
///
/// Rounds up, so a partial trailing chunk still costs a full cycle.
pub fn transferred_byte_cycles(bytes: u64) -> u64 {
    // Ceiling division written as quotient-plus-remainder so it cannot
    // overflow for `bytes` near `u64::MAX` (the previous
    // `(bytes + BYTES_PER_CYCLE - 1) / BYTES_PER_CYCLE` form would).
    // The compiler still lowers the division/modulo by 4 to shifts/masks.
    bytes / BYTES_PER_CYCLE + u64::from(bytes % BYTES_PER_CYCLE != 0)
}
/// Returns the spent cycles to execute the secific instruction.
/// Returns the cycles charged for executing the specific instruction.
pub fn instruction_cycles(i: Instruction) -> u64 {
    // Identical cost table to before, grouped by cost class via or-patterns.
    match extract_opcode(i) {
        // Environment calls trap out of the VM and dominate everything else.
        insts::OP_EBREAK | insts::OP_ECALL => 500,
        // Division and remainder are the most expensive ALU operations.
        insts::OP_DIV
        | insts::OP_DIVW
        | insts::OP_DIVU
        | insts::OP_DIVUW
        | insts::OP_REM
        | insts::OP_REMW
        | insts::OP_REMU
        | insts::OP_REMUW => 32,
        // Multiplication family.
        insts::OP_MUL
        | insts::OP_MULW
        | insts::OP_MULH
        | insts::OP_MULHU
        | insts::OP_MULHSU => 5,
        // Doubleword loads/stores.
        insts::OP_LD | insts::OP_SD => 2,
        // Narrower loads and stores.
        insts::OP_LW
        | insts::OP_LH
        | insts::OP_LB
        | insts::OP_LWU
        | insts::OP_LHU
        | insts::OP_LBU
        | insts::OP_SB
        | insts::OP_SH
        | insts::OP_SW => 3,
        // Jumps and conditional branches.
        insts::OP_JALR
        | insts::OP_JAL
        | insts::OP_BEQ
        | insts::OP_BGE
        | insts::OP_BGEU
        | insts::OP_BLT
        | insts::OP_BLTU
        | insts::OP_BNE => 3,
        // Everything else (simple ALU ops, and the commented-out MOP/wide
        // opcodes from the original table) costs a single cycle.
        _ => 1,
    }
}
| 29.753623 | 90 | 0.54603 |
bfefff276fbf9a3a9a0fdb2e6892a8f29ae3f245
| 31,443 |
//! Server implementation and builder.
mod conn;
mod incoming;
mod recover_error;
#[cfg(feature = "tls")]
#[cfg_attr(docsrs, doc(cfg(feature = "tls")))]
mod tls;
pub use conn::{Connected, TcpConnectInfo};
#[cfg(feature = "tls")]
pub use tls::ServerTlsConfig;
#[cfg(feature = "tls")]
pub use conn::TlsConnectInfo;
#[cfg(feature = "tls")]
use super::service::TlsAcceptor;
#[cfg(unix)]
pub use conn::UdsConnectInfo;
use incoming::TcpIncoming;
#[cfg(feature = "tls")]
pub(crate) use tokio_rustls::server::TlsStream;
#[cfg(feature = "tls")]
use crate::transport::Error;
use self::recover_error::RecoverError;
use super::service::{GrpcTimeout, Or, Routes, ServerIo};
use crate::body::BoxBody;
use bytes::Bytes;
use futures_core::Stream;
use futures_util::{
future::{self, MapErr},
ready, TryFutureExt,
};
use http::{Request, Response};
use http_body::Body as _;
use hyper::{server::accept, Body};
use pin_project::pin_project;
use std::{
fmt,
future::Future,
marker::PhantomData,
net::SocketAddr,
pin::Pin,
sync::Arc,
task::{Context, Poll},
time::Duration,
};
use tokio::io::{AsyncRead, AsyncWrite};
use tower::{
layer::util::Identity, layer::Layer, limit::concurrency::ConcurrencyLimitLayer, util::Either,
Service, ServiceBuilder,
};
type BoxHttpBody = http_body::combinators::UnsyncBoxBody<Bytes, crate::Error>;
type BoxService = tower::util::BoxService<Request<Body>, Response<BoxHttpBody>, crate::Error>;
type TraceInterceptor = Arc<dyn Fn(&http::Request<()>) -> tracing::Span + Send + Sync + 'static>;
const DEFAULT_HTTP2_KEEPALIVE_TIMEOUT_SECS: u64 = 20;
/// A default batteries included `transport` server.
///
/// This is a wrapper around [`hyper::Server`] and provides an easy builder
/// pattern style builder [`Server`]. This builder exposes easy configuration parameters
/// for providing a fully featured http2 based gRPC server. This should provide
/// a very good out of the box http2 server for use with tonic but is also a
/// reference implementation that should be a good starting point for anyone
/// wanting to create a more complex and/or specific implementation.
#[derive(Default, Clone)]
pub struct Server<L = Identity> {
    /// Optional hook deriving a `tracing::Span` from each request's headers.
    trace_interceptor: Option<TraceInterceptor>,
    /// Per-connection cap on concurrently processed requests.
    concurrency_limit: Option<usize>,
    /// Timeout applied to every request handler.
    timeout: Option<Duration>,
    /// TLS acceptor, when the `tls` feature is enabled and configured.
    #[cfg(feature = "tls")]
    tls: Option<TlsAcceptor>,
    /// HTTP/2 SETTINGS_INITIAL_WINDOW_SIZE (stream-level flow control).
    init_stream_window_size: Option<u32>,
    /// HTTP/2 connection-level flow-control window.
    init_connection_window_size: Option<u32>,
    /// HTTP/2 SETTINGS_MAX_CONCURRENT_STREAMS.
    max_concurrent_streams: Option<u32>,
    /// TCP keepalive idle duration; `None` disables TCP keepalive.
    tcp_keepalive: Option<Duration>,
    /// Whether TCP_NODELAY is set on accepted connections.
    tcp_nodelay: bool,
    /// Interval between HTTP/2 keepalive pings; `None` disables them.
    http2_keepalive_interval: Option<Duration>,
    /// Timeout waiting for a keepalive ping ack (see DEFAULT_HTTP2_KEEPALIVE_TIMEOUT_SECS).
    http2_keepalive_timeout: Option<Duration>,
    /// Maximum HTTP/2 frame size.
    max_frame_size: Option<u32>,
    /// Whether plain HTTP/1 requests are accepted (for grpc-web setups).
    accept_http1: bool,
    /// User-supplied tower layer applied to the service stack.
    layer: L,
}
/// A stack based `Service` router.
#[derive(Debug)]
pub struct Router<A, B, L = Identity> {
server: Server<L>,
routes: Routes<A, B, Request<Body>>,
}
/// A service that is produced from a Tonic `Router`.
///
/// This service implementation will route between multiple Tonic
/// gRPC endpoints and can be consumed with the rest of the `tower`
/// ecosystem.
#[derive(Debug, Clone)]
pub struct RouterService<S> {
inner: S,
}
impl<S> Service<Request<Body>> for RouterService<S>
where
    S: Service<Request<Body>, Response = Response<BoxBody>> + Clone + Send + 'static,
    S::Future: Send + 'static,
    S::Error: Into<crate::Error> + Send,
{
    type Response = Response<BoxBody>;
    type Error = crate::Error;
    #[allow(clippy::type_complexity)]
    type Future = MapErr<S::Future, fn(S::Error) -> crate::Error>;
    #[inline]
    fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Always reports readiness; the inner service's readiness is
        // deliberately not polled here.
        Poll::Ready(Ok(()))
    }
    fn call(&mut self, req: Request<Body>) -> Self::Future {
        // Forward the request, converting the inner error into the crate's
        // boxed error type.
        self.inner.call(req).map_err(Into::into)
    }
}
/// A trait to provide a static reference to the service's
/// name. This is used for routing service's within the router.
pub trait NamedService {
/// The `Service-Name` as described [here].
///
/// [here]: https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests
const NAME: &'static str;
}
impl<S: NamedService, T> NamedService for Either<S, T> {
const NAME: &'static str = S::NAME;
}
impl Server {
    /// Create a new server builder that can configure a [`Server`].
    pub fn builder() -> Self {
        // Builder defaults: TCP_NODELAY enabled, plain HTTP/1 explicitly
        // rejected; everything else comes from `Default`.
        Server {
            tcp_nodelay: true,
            accept_http1: false,
            ..Default::default()
        }
    }
}
impl<L> Server<L> {
/// Configure TLS for this server.
#[cfg(feature = "tls")]
#[cfg_attr(docsrs, doc(cfg(feature = "tls")))]
pub fn tls_config(self, tls_config: ServerTlsConfig) -> Result<Self, Error> {
Ok(Server {
tls: Some(tls_config.tls_acceptor().map_err(Error::from_source)?),
..self
})
}
/// Set the concurrency limit applied to on requests inbound per connection.
///
/// # Example
///
/// ```
/// # use tonic::transport::Server;
/// # use tower_service::Service;
/// # let builder = Server::builder();
/// builder.concurrency_limit_per_connection(32);
/// ```
pub fn concurrency_limit_per_connection(self, limit: usize) -> Self {
Server {
concurrency_limit: Some(limit),
..self
}
}
/// Set a timeout on for all request handlers.
///
/// # Example
///
/// ```
/// # use tonic::transport::Server;
/// # use tower_service::Service;
/// # use std::time::Duration;
/// # let mut builder = Server::builder();
/// builder.timeout(Duration::from_secs(30));
/// ```
pub fn timeout(&mut self, timeout: Duration) -> &mut Self {
self.timeout = Some(timeout);
self
}
/// Sets the [`SETTINGS_INITIAL_WINDOW_SIZE`][spec] option for HTTP2
/// stream-level flow control.
///
/// Default is 65,535
///
/// [spec]: https://http2.github.io/http2-spec/#SETTINGS_INITIAL_WINDOW_SIZE
pub fn initial_stream_window_size(self, sz: impl Into<Option<u32>>) -> Self {
Server {
init_stream_window_size: sz.into(),
..self
}
}
/// Sets the max connection-level flow control for HTTP2
///
/// Default is 65,535
pub fn initial_connection_window_size(self, sz: impl Into<Option<u32>>) -> Self {
Server {
init_connection_window_size: sz.into(),
..self
}
}
/// Sets the [`SETTINGS_MAX_CONCURRENT_STREAMS`][spec] option for HTTP2
/// connections.
///
/// Default is no limit (`None`).
///
/// [spec]: https://http2.github.io/http2-spec/#SETTINGS_MAX_CONCURRENT_STREAMS
pub fn max_concurrent_streams(self, max: impl Into<Option<u32>>) -> Self {
Server {
max_concurrent_streams: max.into(),
..self
}
}
/// Set whether HTTP2 Ping frames are enabled on accepted connections.
///
/// If `None` is specified, HTTP2 keepalive is disabled, otherwise the duration
/// specified will be the time interval between HTTP2 Ping frames.
/// The timeout for receiving an acknowledgement of the keepalive ping
/// can be set with [`Server::http2_keepalive_timeout`].
///
/// Default is no HTTP2 keepalive (`None`)
///
pub fn http2_keepalive_interval(self, http2_keepalive_interval: Option<Duration>) -> Self {
Server {
http2_keepalive_interval,
..self
}
}
/// Sets a timeout for receiving an acknowledgement of the keepalive ping.
///
/// If the ping is not acknowledged within the timeout, the connection will be closed.
/// Does nothing if http2_keep_alive_interval is disabled.
///
/// Default is 20 seconds.
///
pub fn http2_keepalive_timeout(self, http2_keepalive_timeout: Option<Duration>) -> Self {
Server {
http2_keepalive_timeout,
..self
}
}
/// Set whether TCP keepalive messages are enabled on accepted connections.
///
/// If `None` is specified, keepalive is disabled, otherwise the duration
/// specified will be the time to remain idle before sending TCP keepalive
/// probes.
///
/// Default is no keepalive (`None`)
///
pub fn tcp_keepalive(self, tcp_keepalive: Option<Duration>) -> Self {
Server {
tcp_keepalive,
..self
}
}
/// Set the value of `TCP_NODELAY` option for accepted connections. Enabled by default.
pub fn tcp_nodelay(self, enabled: bool) -> Self {
Server {
tcp_nodelay: enabled,
..self
}
}
/// Sets the maximum frame size to use for HTTP2.
///
/// Passing `None` will do nothing.
///
/// If not set, will default from underlying transport.
pub fn max_frame_size(self, frame_size: impl Into<Option<u32>>) -> Self {
Server {
max_frame_size: frame_size.into(),
..self
}
}
/// Allow this server to accept http1 requests.
///
/// Accepting http1 requests is only useful when developing `grpc-web`
/// enabled services. If this setting is set to `true` but services are
/// not correctly configured to handle grpc-web requests, your server may
/// return confusing (but correct) protocol errors.
///
/// Default is `false`.
pub fn accept_http1(self, accept_http1: bool) -> Self {
Server {
accept_http1,
..self
}
}
/// Intercept inbound headers and add a [`tracing::Span`] to each response future.
pub fn trace_fn<F>(self, f: F) -> Self
where
F: Fn(&http::Request<()>) -> tracing::Span + Send + Sync + 'static,
{
Server {
trace_interceptor: Some(Arc::new(f)),
..self
}
}
    /// Create a router with the `S` typed service as the first service.
    ///
    /// This will clone the `Server` builder and create a router that will
    /// route around different services.
    pub fn add_service<S>(&mut self, svc: S) -> Router<S, Unimplemented, L>
    where
        S: Service<Request<Body>, Response = Response<BoxBody>>
            + NamedService
            + Clone
            + Send
            + 'static,
        S::Future: Send + 'static,
        S::Error: Into<crate::Error> + Send,
        L: Clone,
    {
        // The builder is cloned (not consumed) so more services can be added
        // to it later.
        Router::new(self.clone(), svc)
    }
/// Create a router with the optional `S` typed service as the first service.
///
/// This will clone the `Server` builder and create a router that will
/// route around different services.
///
/// # Note
/// Even when the argument given is `None` this will capture *all* requests to this service name.
/// As a result, one cannot use this to toggle between two identically named implementations.
pub fn add_optional_service<S>(
&mut self,
svc: Option<S>,
) -> Router<Either<S, Unimplemented>, Unimplemented, L>
where
S: Service<Request<Body>, Response = Response<BoxBody>>
+ NamedService
+ Clone
+ Send
+ 'static,
S::Future: Send + 'static,
S::Error: Into<crate::Error> + Send,
L: Clone,
{
let svc = match svc {
Some(some) => Either::A(some),
None => Either::B(Unimplemented::default()),
};
Router::new(self.clone(), svc)
}
    /// Set the [Tower] [`Layer`] all services will be wrapped in.
    ///
    /// This enables using middleware from the [Tower ecosystem][eco].
    ///
    /// # Example
    ///
    /// ```
    /// # use tonic::transport::Server;
    /// # use tower_service::Service;
    /// use tower::timeout::TimeoutLayer;
    /// use std::time::Duration;
    ///
    /// # let mut builder = Server::builder();
    /// builder.layer(TimeoutLayer::new(Duration::from_secs(30)));
    /// ```
    ///
    /// Note that timeouts should be set using [`Server::timeout`]. `TimeoutLayer` is only used
    /// here as an example.
    ///
    /// You can build more complex layers using [`ServiceBuilder`]. Those layers can include
    /// [interceptors]:
    ///
    /// ```
    /// # use tonic::transport::Server;
    /// # use tower_service::Service;
    /// use tower::ServiceBuilder;
    /// use std::time::Duration;
    /// use tonic::{Request, Status, service::interceptor};
    ///
    /// fn auth_interceptor(request: Request<()>) -> Result<Request<()>, Status> {
    ///     if valid_credentials(&request) {
    ///         Ok(request)
    ///     } else {
    ///         Err(Status::unauthenticated("invalid credentials"))
    ///     }
    /// }
    ///
    /// fn valid_credentials(request: &Request<()>) -> bool {
    ///     // ...
    /// # true
    /// }
    ///
    /// fn some_other_interceptor(request: Request<()>) -> Result<Request<()>, Status> {
    ///     Ok(request)
    /// }
    ///
    /// let layer = ServiceBuilder::new()
    ///     .load_shed()
    ///     .timeout(Duration::from_secs(30))
    ///     .layer(interceptor(auth_interceptor))
    ///     .layer(interceptor(some_other_interceptor))
    ///     .into_inner();
    ///
    /// Server::builder().layer(layer);
    /// ```
    ///
    /// [Tower]: https://github.com/tower-rs/tower
    /// [`Layer`]: tower::layer::Layer
    /// [eco]: https://github.com/tower-rs
    /// [`ServiceBuilder`]: tower::ServiceBuilder
    /// [interceptors]: crate::service::Interceptor
    pub fn layer<NewLayer>(self, new_layer: NewLayer) -> Server<NewLayer> {
        // `Server` is generic over the layer type, so struct-update syntax
        // (`..self`) cannot be used: every other configuration field must be
        // carried over to the new `Server<NewLayer>` explicitly.
        Server {
            layer: new_layer,
            trace_interceptor: self.trace_interceptor,
            concurrency_limit: self.concurrency_limit,
            timeout: self.timeout,
            #[cfg(feature = "tls")]
            tls: self.tls,
            init_stream_window_size: self.init_stream_window_size,
            init_connection_window_size: self.init_connection_window_size,
            max_concurrent_streams: self.max_concurrent_streams,
            tcp_keepalive: self.tcp_keepalive,
            tcp_nodelay: self.tcp_nodelay,
            http2_keepalive_interval: self.http2_keepalive_interval,
            http2_keepalive_timeout: self.http2_keepalive_timeout,
            max_frame_size: self.max_frame_size,
            accept_http1: self.accept_http1,
        }
    }
    /// Core accept/serve loop shared by the public `serve*` entry points.
    ///
    /// Wraps `svc` with the configured layer and per-connection middleware,
    /// accepts connections from `incoming`, and drives a hyper server until
    /// completion or until `signal` (if provided) resolves, triggering a
    /// graceful shutdown.
    pub(crate) async fn serve_with_shutdown<S, I, F, IO, IE, ResBody>(
        self,
        svc: S,
        incoming: I,
        signal: Option<F>,
    ) -> Result<(), super::Error>
    where
        L: Layer<S>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<S>>::Service as Service<Request<Body>>>::Future: Send + 'static,
        <<L as Layer<S>>::Service as Service<Request<Body>>>::Error: Into<crate::Error> + Send,
        I: Stream<Item = Result<IO, IE>>,
        IO: AsyncRead + AsyncWrite + Connected + Unpin + Send + 'static,
        IO::ConnectInfo: Clone + Send + Sync + 'static,
        IE: Into<crate::Error>,
        F: Future<Output = ()>,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        // Copy configuration out of `self` up front; `self` is consumed by
        // `tcp_incoming` below.
        let trace_interceptor = self.trace_interceptor.clone();
        let concurrency_limit = self.concurrency_limit;
        let init_connection_window_size = self.init_connection_window_size;
        let init_stream_window_size = self.init_stream_window_size;
        let max_concurrent_streams = self.max_concurrent_streams;
        let timeout = self.timeout;
        let max_frame_size = self.max_frame_size;
        let http2_only = !self.accept_http1;
        let http2_keepalive_interval = self.http2_keepalive_interval;
        // hyper requires a concrete timeout value, so apply the default here.
        let http2_keepalive_timeout = self
            .http2_keepalive_timeout
            .unwrap_or_else(|| Duration::new(DEFAULT_HTTP2_KEEPALIVE_TIMEOUT_SECS, 0));
        // Apply the user-configured tower layer around the service.
        let svc = self.layer.layer(svc);
        // The remaining builder state is consumed by the incoming adapter.
        let tcp = incoming::tcp_incoming(incoming, self);
        let incoming = accept::from_stream::<_, _, crate::Error>(tcp);
        // One `Svc` stack (with limits/timeout/tracing) is built per connection.
        let svc = MakeSvc {
            inner: svc,
            concurrency_limit,
            timeout,
            trace_interceptor,
            _io: PhantomData,
        };
        let server = hyper::Server::builder(incoming)
            .http2_only(http2_only)
            .http2_initial_connection_window_size(init_connection_window_size)
            .http2_initial_stream_window_size(init_stream_window_size)
            .http2_max_concurrent_streams(max_concurrent_streams)
            .http2_keep_alive_interval(http2_keepalive_interval)
            .http2_keep_alive_timeout(http2_keepalive_timeout)
            .http2_max_frame_size(max_frame_size);
        if let Some(signal) = signal {
            server
                .serve(svc)
                .with_graceful_shutdown(signal)
                .await
                .map_err(super::Error::from_source)?
        } else {
            server.serve(svc).await.map_err(super::Error::from_source)?;
        }
        Ok(())
    }
}
impl<S, L> Router<S, Unimplemented, L> {
    /// Build a router whose only route is `svc`, with `Unimplemented` as the
    /// fallback for everything else.
    pub(crate) fn new(server: Server<L>, svc: S) -> Self
    where
        S: Service<Request<Body>, Response = Response<BoxBody>>
            + NamedService
            + Clone
            + Send
            + 'static,
        S::Future: Send + 'static,
        S::Error: Into<crate::Error> + Send,
    {
        let svc_name = <S as NamedService>::NAME;
        let svc_route = format!("/{}", svc_name);
        // Route any request whose URI path starts with `/<service-name>` to
        // `svc`; all other paths fall through to `Unimplemented`.
        let pred = move |req: &Request<Body>| {
            let path = req.uri().path();
            path.starts_with(&svc_route)
        };
        Self {
            server,
            routes: Routes::new(pred, svc, Unimplemented::default()),
        }
    }
}
impl<A, B, L> Router<A, B, L>
where
    A: Service<Request<Body>, Response = Response<BoxBody>> + Clone + Send + 'static,
    A::Future: Send + 'static,
    A::Error: Into<crate::Error> + Send,
    B: Service<Request<Body>, Response = Response<BoxBody>> + Clone + Send + 'static,
    B::Future: Send + 'static,
    B::Error: Into<crate::Error> + Send,
{
    /// Add a new service to this router.
    pub fn add_service<S>(self, svc: S) -> Router<S, Or<A, B, Request<Body>>, L>
    where
        S: Service<Request<Body>, Response = Response<BoxBody>>
            + NamedService
            + Clone
            + Send
            + 'static,
        S::Future: Send + 'static,
        S::Error: Into<crate::Error> + Send,
    {
        let Self { routes, server } = self;
        let svc_name = <S as NamedService>::NAME;
        let svc_route = format!("/{}", svc_name);
        // Route requests whose path starts with `/<service-name>` to `svc`;
        // anything else falls through to the previously registered routes.
        let pred = move |req: &Request<Body>| {
            let path = req.uri().path();
            path.starts_with(&svc_route)
        };
        let routes = routes.push(pred, svc);
        Router { server, routes }
    }
    /// Add a new optional service to this router.
    ///
    /// # Note
    /// Even when the argument given is `None` this will capture *all* requests to this service name.
    /// As a result, one cannot use this to toggle between two identically named implementations.
    #[allow(clippy::type_complexity)]
    pub fn add_optional_service<S>(
        self,
        svc: Option<S>,
    ) -> Router<Either<S, Unimplemented>, Or<A, B, Request<Body>>, L>
    where
        S: Service<Request<Body>, Response = Response<BoxBody>>
            + NamedService
            + Clone
            + Send
            + 'static,
        S::Future: Send + 'static,
        S::Error: Into<crate::Error> + Send,
    {
        let Self { routes, server } = self;
        let svc_name = <S as NamedService>::NAME;
        let svc_route = format!("/{}", svc_name);
        let pred = move |req: &Request<Body>| {
            let path = req.uri().path();
            path.starts_with(&svc_route)
        };
        // `None` still claims the route, answered by `Unimplemented`.
        let svc = match svc {
            Some(some) => Either::A(some),
            None => Either::B(Unimplemented::default()),
        };
        let routes = routes.push(pred, svc);
        Router { server, routes }
    }
    /// Consume this [`Server`] creating a future that will execute the server
    /// on [tokio]'s default executor.
    ///
    /// [`Server`]: struct.Server.html
    /// [tokio]: https://docs.rs/tokio
    pub async fn serve<ResBody>(self, addr: SocketAddr) -> Result<(), super::Error>
    where
        L: Layer<Routes<A, B, Request<Body>>>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Future:
            Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Error:
            Into<crate::Error> + Send,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        // Bind a TCP listener using the builder's TCP socket options.
        let incoming = TcpIncoming::new(addr, self.server.tcp_nodelay, self.server.tcp_keepalive)
            .map_err(super::Error::from_source)?;
        self.server
            .serve_with_shutdown::<_, _, future::Ready<()>, _, _, ResBody>(
                self.routes,
                incoming,
                None,
            )
            .await
    }
    /// Consume this [`Server`] creating a future that will execute the server
    /// on [tokio]'s default executor. And shutdown when the provided signal
    /// is received.
    ///
    /// [`Server`]: struct.Server.html
    /// [tokio]: https://docs.rs/tokio
    pub async fn serve_with_shutdown<F: Future<Output = ()>, ResBody>(
        self,
        addr: SocketAddr,
        signal: F,
    ) -> Result<(), super::Error>
    where
        L: Layer<Routes<A, B, Request<Body>>>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Future:
            Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Error:
            Into<crate::Error> + Send,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        let incoming = TcpIncoming::new(addr, self.server.tcp_nodelay, self.server.tcp_keepalive)
            .map_err(super::Error::from_source)?;
        self.server
            .serve_with_shutdown(self.routes, incoming, Some(signal))
            .await
    }
    /// Consume this [`Server`] creating a future that will execute the server on
    /// the provided incoming stream of `AsyncRead + AsyncWrite`.
    ///
    /// [`Server`]: struct.Server.html
    pub async fn serve_with_incoming<I, IO, IE, ResBody>(
        self,
        incoming: I,
    ) -> Result<(), super::Error>
    where
        I: Stream<Item = Result<IO, IE>>,
        IO: AsyncRead + AsyncWrite + Connected + Unpin + Send + 'static,
        IO::ConnectInfo: Clone + Send + Sync + 'static,
        IE: Into<crate::Error>,
        L: Layer<Routes<A, B, Request<Body>>>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Future:
            Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Error:
            Into<crate::Error> + Send,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        self.server
            .serve_with_shutdown::<_, _, future::Ready<()>, _, _, ResBody>(
                self.routes,
                incoming,
                None,
            )
            .await
    }
    /// Consume this [`Server`] creating a future that will execute the server on
    /// the provided incoming stream of `AsyncRead + AsyncWrite`. Similar to
    /// `serve_with_shutdown` this method will also take a signal future to
    /// gracefully shutdown the server.
    ///
    /// [`Server`]: struct.Server.html
    pub async fn serve_with_incoming_shutdown<I, IO, IE, F, ResBody>(
        self,
        incoming: I,
        signal: F,
    ) -> Result<(), super::Error>
    where
        I: Stream<Item = Result<IO, IE>>,
        IO: AsyncRead + AsyncWrite + Connected + Unpin + Send + 'static,
        IO::ConnectInfo: Clone + Send + Sync + 'static,
        IE: Into<crate::Error>,
        F: Future<Output = ()>,
        L: Layer<Routes<A, B, Request<Body>>>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Future:
            Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Error:
            Into<crate::Error> + Send,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        self.server
            .serve_with_shutdown(self.routes, incoming, Some(signal))
            .await
    }
    /// Create a tower service out of a router.
    pub fn into_service<ResBody>(self) -> RouterService<L::Service>
    where
        L: Layer<Routes<A, B, Request<Body>>>,
        L::Service: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Future:
            Send + 'static,
        <<L as Layer<Routes<A, B, Request<Body>>>>::Service as Service<Request<Body>>>::Error:
            Into<crate::Error> + Send,
        ResBody: http_body::Body<Data = Bytes> + Send + 'static,
        ResBody::Error: Into<crate::Error>,
    {
        // Apply the builder's tower layer around the assembled routes.
        let inner = self.server.layer.layer(self.routes);
        RouterService { inner }
    }
}
impl<L> fmt::Debug for Server<L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Configuration fields are intentionally omitted; a field-less
        // `debug_struct("Builder").finish()` prints exactly the name.
        f.write_str("Builder")
    }
}
/// Per-request service wrapper: delegates to the user's service and, when a
/// trace interceptor is configured, attaches a tracing span to each response
/// future.
struct Svc<S> {
    inner: S,
    trace_interceptor: Option<TraceInterceptor>,
}
impl<S, ResBody> Service<Request<Body>> for Svc<S>
where
    S: Service<Request<Body>, Response = Response<ResBody>>,
    S::Error: Into<crate::Error>,
    ResBody: http_body::Body<Data = Bytes> + Send + 'static,
    ResBody::Error: Into<crate::Error>,
{
    type Response = Response<BoxHttpBody>;
    type Error = crate::Error;
    type Future = SvcFuture<S::Future>;
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Readiness is entirely determined by the wrapped service.
        self.inner.poll_ready(cx).map_err(Into::into)
    }
    fn call(&mut self, mut req: Request<Body>) -> Self::Future {
        let span = if let Some(trace_interceptor) = &self.trace_interceptor {
            // The interceptor only sees the request *headers*: the body is
            // temporarily detached, the interceptor is handed a
            // `Request<()>`, and the parts are reassembled afterwards.
            let (parts, body) = req.into_parts();
            let bodyless_request = Request::from_parts(parts, ());
            let span = trace_interceptor(&bodyless_request);
            let (parts, _) = bodyless_request.into_parts();
            req = Request::from_parts(parts, body);
            span
        } else {
            // No interceptor configured: use a no-op span.
            tracing::Span::none()
        };
        SvcFuture {
            inner: self.inner.call(req),
            span,
        }
    }
}
/// Future returned by [`Svc`]: resolves the inner response future while the
/// request's tracing span is entered, then boxes the response body.
#[pin_project]
struct SvcFuture<F> {
    #[pin]
    inner: F,
    span: tracing::Span,
}
impl<F, E, ResBody> Future for SvcFuture<F>
where
    F: Future<Output = Result<Response<ResBody>, E>>,
    E: Into<crate::Error>,
    ResBody: http_body::Body<Data = Bytes> + Send + 'static,
    ResBody::Error: Into<crate::Error>,
{
    type Output = Result<Response<BoxHttpBody>, crate::Error>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.project();
        // Keep the span entered for the duration of every poll so events
        // emitted by the inner future are attributed to this request.
        let _guard = this.span.enter();
        let response: Response<ResBody> = ready!(this.inner.poll(cx)).map_err(Into::into)?;
        // Erase the concrete body type so all responses share one type.
        let response = response.map(|body| body.map_err(Into::into).boxed_unsync());
        Poll::Ready(Ok(response))
    }
}
impl<S> fmt::Debug for Svc<S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The inner service is not required to be `Debug`; a field-less
        // `debug_struct("Svc").finish()` prints exactly the name.
        f.write_str("Svc")
    }
}
/// Connection-level service factory: produces one middleware-wrapped [`Svc`]
/// stack per accepted connection.
struct MakeSvc<S, IO> {
    concurrency_limit: Option<usize>,
    timeout: Option<Duration>,
    inner: S,
    trace_interceptor: Option<TraceInterceptor>,
    // Ties the `IO` type parameter to the struct without storing a value.
    _io: PhantomData<fn() -> IO>,
}
impl<S, ResBody, IO> Service<&ServerIo<IO>> for MakeSvc<S, IO>
where
    IO: Connected,
    S: Service<Request<Body>, Response = Response<ResBody>> + Clone + Send + 'static,
    S::Future: Send + 'static,
    S::Error: Into<crate::Error> + Send,
    ResBody: http_body::Body<Data = Bytes> + Send + 'static,
    ResBody::Error: Into<crate::Error>,
{
    type Response = BoxService;
    type Error = crate::Error;
    type Future = future::Ready<Result<Self::Response, Self::Error>>;
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Building a service is synchronous; always ready.
        Ok(()).into()
    }
    fn call(&mut self, io: &ServerIo<IO>) -> Self::Future {
        // Capture this connection's info so it can be injected into every
        // request's extensions below.
        let conn_info = io.connect_info();
        let svc = self.inner.clone();
        let concurrency_limit = self.concurrency_limit;
        let timeout = self.timeout;
        let trace_interceptor = self.trace_interceptor.clone();
        // Inner stack: error recovery, optional concurrency limit, and the
        // gRPC timeout wrapper around the user's service.
        let svc = ServiceBuilder::new()
            .layer_fn(RecoverError::new)
            .option_layer(concurrency_limit.map(ConcurrencyLimitLayer::new))
            .layer_fn(|s| GrpcTimeout::new(s, timeout))
            .service(svc);
        // Outer stack: type-erase the service and stash the connection info
        // in each request's extensions before handing it to `Svc`.
        let svc = ServiceBuilder::new()
            .layer(BoxService::layer())
            .map_request(move |mut request: Request<Body>| {
                match &conn_info {
                    tower::util::Either::A(inner) => {
                        request.extensions_mut().insert(inner.clone());
                    }
                    tower::util::Either::B(inner) => {
                        #[cfg(feature = "tls")]
                        {
                            // Insert both the TLS wrapper and the wrapped
                            // transport's connect info.
                            request.extensions_mut().insert(inner.clone());
                            request.extensions_mut().insert(inner.get_ref().clone());
                        }
                        #[cfg(not(feature = "tls"))]
                        {
                            // just a type check to make sure we didn't forget to
                            // insert this into the extensions
                            let _: &() = inner;
                        }
                    }
                }
                request
            })
            .service(Svc {
                inner: svc,
                trace_interceptor,
            });
        future::ready(Ok(svc))
    }
}
/// Catch-all service used as the routing fallback; its `Service` impl below
/// always answers with `grpc-status: 12` (UNIMPLEMENTED).
#[derive(Default, Clone, Debug)]
#[doc(hidden)]
pub struct Unimplemented {
    _p: (),
}
impl Service<Request<Body>> for Unimplemented {
    type Response = Response<BoxBody>;
    type Error = crate::Error;
    type Future = future::Ready<Result<Self::Response, Self::Error>>;
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Producing the canned response needs no resources; always ready.
        Ok(()).into()
    }
    fn call(&mut self, _req: Request<Body>) -> Self::Future {
        // gRPC errors travel as HTTP 200 + a `grpc-status` header;
        // status 12 is UNIMPLEMENTED. The body is intentionally empty.
        future::ok(
            http::Response::builder()
                .status(200)
                .header("grpc-status", "12")
                .header("content-type", "application/grpc")
                .body(crate::body::empty_body())
                .unwrap(),
        )
    }
}
| 33.700965 | 101 | 0.583055 |
e2e65eecf254bfb1445e6d6b7076b5eb916f75ed
| 310 |
/*!
 * The general error type shared across the entire compiler chain.
*/
use alloc::string::String;
/**
 * A description of an error somewhere along the compiler chain
 */
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// Error reported by the lexer; payload is the error message.
    Lexer(String),
    /// Error reported by the parser.
    Parser(String),
    /// Error reported by the transformer.
    Transformer(String),
    /// Error reported by the generator.
    Generator(String),
}
| 19.375 | 63 | 0.680645 |
bb2a890098a5fd4bb5364ccfacfb165b934aa8df
| 908 |
extern crate piston_window;
use piston_window::*;
/// Bundles a Piston window with its glyph cache and the dimensions it was
/// created with.
pub struct Viewer {
    // Window dimensions in pixels, as passed to `WindowSettings`.
    pub(crate) window_width: u32,
    pub(crate) window_height: u32,
    pub(crate) window: PistonWindow,
    pub(crate) glyphs: Glyphs,
}
impl Viewer {
    /// Create a `window_width` x `window_height` Piston window titled
    /// "Gym-rs" (Esc closes it) and load the `anon.ttf` font for text
    /// rendering.
    ///
    /// Panics if the window cannot be built, if no `font` folder is found,
    /// or if the font fails to load.
    pub fn new(window_width: u32, window_height: u32) -> Self {
        let mut window: PistonWindow = WindowSettings::new("Gym-rs", (window_width, window_height))
            .exit_on_esc(true)
            .build()
            .unwrap_or_else(|e| { panic!("Failed to build PistonWindow: {}", e) });
        // Search up to 3 levels of parent and child directories for the
        // `font` folder shipped with the crate.
        let font = find_folder::Search::ParentsThenKids(3, 3)
            .for_folder("font").unwrap();
        let glyphs = window.load_font(font.join("anon.ttf")).unwrap();
        Self {
            window_width,
            window_height,
            window,
            glyphs,
        }
    }
}
impl Default for Viewer {
    /// Default viewer: a 600x400 window.
    fn default() -> Self {
        Self::new(600, 400)
    }
}
| 24.540541 | 99 | 0.584802 |
e6fa85131b3e61961acec75811c5fcfd9afdbb3a
| 765 |
use std::fs;
fn main() {
    let contents = fs::read_to_string("input.txt").expect("Error");
    let map: Vec<&str> = contents.lines().collect();
    // Part 1: single slope of right 3, down 1.
    let result = slop(&map, 3, 1);
    println!("Result: {}", result);
    // Part 2: product of the tree counts over all listed slopes.
    let slops = [[1, 1], [3, 1], [5, 1], [7, 1], [1, 2]];
    let result2: usize = slops.iter().map(|s| slop(&map, s[0], s[1])).product();
    println!("Result2: {}", result2);
}
/// Count the trees (`#` cells) hit while descending the map on a fixed
/// slope of `right` columns per `down` rows, starting at the top-left cell.
///
/// The map wraps horizontally: the column index is reduced modulo the
/// current row's width after each step (matching the original behavior of
/// wrapping per-row). Returns 0 for an empty map.
///
/// Takes `&[&str]` instead of `&Vec<&str>`; existing `&Vec` call sites still
/// work via deref coercion.
fn slop(map: &[&str], right: usize, down: usize) -> usize {
    let mut x = 0;
    let mut trees = 0;
    // Visit rows 0, down, 2*down, ... — `step_by` replaces the manual
    // `while y < map.len()` loop over the row index.
    for row in map.iter().step_by(down) {
        let cells: Vec<char> = row.chars().collect();
        if cells[x] == '#' {
            trees += 1;
        }
        x = (x + right) % cells.len();
    }
    trees
}
| 21.25 | 67 | 0.466667 |
f52b2220cd499e12a2f3a99c98874412460416f3
| 1,810 |
use jstime_core as jstime;
mod common;
#[cfg(test)]
mod api {
    use super::*;
    // Scripts run in the same `JSTime` instance share one global scope:
    // bindings from earlier scripts stay visible to later ones.
    #[test]
    fn run_script() {
        let _setup_guard = common::setup();
        let options = jstime::Options::default();
        let mut jstime = jstime::JSTime::new(options);
        let result = jstime.run_script("\"Hello, World!\"", "jstime");
        assert_eq!(result.unwrap(), "Hello, World!");
        let result = jstime.run_script("1 + 1", "jstime");
        assert_eq!(result.unwrap(), "2");
        let result = jstime.run_script("const a = 123; const b = 456; a + b;", "jstime");
        assert_eq!(result.unwrap(), "579");
        // `a` was declared by the previous script and is still in scope.
        let result = jstime.run_script("a", "jstime");
        assert_eq!(result.unwrap(), "123");
    }
    // Runtime and syntax errors surface as `Err` with the V8-style message.
    #[test]
    fn run_script_error() {
        let _setup_guard = common::setup();
        let options = jstime::Options::default();
        let mut jstime = jstime::JSTime::new(options);
        let err = match jstime.run_script("a", "jstime") {
            Ok(_result) => panic!(),
            Err(e) => e,
        };
        assert_eq!(
            err.to_string(),
            "ReferenceError: a is not defined\n at jstime:1:1"
        );
        let err = match jstime.run_script("}", "jstime") {
            Ok(_result) => panic!(),
            Err(e) => e,
        };
        assert_eq!(err.to_string(), "SyntaxError: Unexpected token \'}\'");
    }
    // Importing a module runs it; its global side effects are then
    // observable from subsequently run scripts.
    #[test]
    fn import() {
        let _setup_guard = common::setup();
        let options = jstime::Options::default();
        let mut jstime = jstime::JSTime::new(options);
        let hello_path = "./tests/fixtures/hello-world.js";
        let _result = jstime.import(&hello_path);
        let result = jstime.run_script("globalThis.hello", "jstime");
        assert_eq!(result.unwrap(), "hello world");
    }
}
| 34.807692 | 89 | 0.545856 |
ef2f7457376ee0cbec5377acb23f7d18f3e656d8
| 29,919 |
#![cfg(any(feature = "bpf_c", feature = "bpf_rust"))]
#[macro_use]
extern crate solana_bpf_loader_program;
use solana_bpf_loader_program::{
create_vm,
serialization::{deserialize_parameters, serialize_parameters},
};
use solana_rbpf::vm::{EbpfVm, InstructionMeter};
use solana_runtime::{
bank::Bank,
bank_client::BankClient,
genesis_utils::{create_genesis_config, GenesisConfigInfo},
loader_utils::load_program,
process_instruction::{
ComputeBudget, ComputeMeter, Executor, InvokeContext, Logger, ProcessInstruction,
},
};
use solana_sdk::{
account::{Account, KeyedAccount},
bpf_loader, bpf_loader_deprecated,
client::SyncClient,
clock::{DEFAULT_SLOTS_PER_EPOCH, MAX_PROCESSING_AGE},
entrypoint::{MAX_PERMITTED_DATA_INCREASE, SUCCESS},
instruction::{AccountMeta, CompiledInstruction, Instruction, InstructionError},
message::Message,
pubkey::Pubkey,
signature::{Keypair, Signer},
sysvar::{clock, fees, rent, slot_hashes, stake_history},
transaction::{Transaction, TransactionError},
};
use std::{cell::RefCell, env, fs::File, io::Read, path::PathBuf, rc::Rc, sync::Arc};
/// BPF program file extension
const PLATFORM_FILE_EXTENSION_BPF: &str = "so";
/// Build the path `<dir>/bpf/<name>.so`, where `<dir>` is two levels above
/// the currently running test executable.
fn create_bpf_path(name: &str) -> PathBuf {
    let current_exe = env::current_exe().unwrap();
    let deploy_dir = current_exe
        .parent()
        .and_then(|dir| dir.parent())
        .unwrap()
        .to_path_buf();
    deploy_dir
        .join("bpf")
        .join(name)
        .with_extension(PLATFORM_FILE_EXTENSION_BPF)
}
/// Read the named BPF program's ELF from the test build directory and deploy
/// it via `loader_id`, returning the new program's pubkey.
fn load_bpf_program(
    bank_client: &BankClient,
    loader_id: &Pubkey,
    payer_keypair: &Keypair,
    name: &str,
) -> Pubkey {
    let path = create_bpf_path(name);
    // `fs::read` replaces the manual open + read_to_end dance and sizes the
    // buffer from the file's metadata up front.
    let elf = std::fs::read(path).unwrap();
    load_program(bank_client, payer_keypair, loader_id, elf)
}
/// Execute the named BPF program directly in an rBPF VM (no bank), asserting
/// that it returns SUCCESS, and return the total executed instruction count.
fn run_program(
    name: &str,
    program_id: &Pubkey,
    parameter_accounts: &[KeyedAccount],
    instruction_data: &[u8],
) -> Result<u64, InstructionError> {
    // Load the program ELF from disk.
    let path = create_bpf_path(name);
    let mut file = File::open(path).unwrap();
    let mut data = vec![];
    file.read_to_end(&mut data).unwrap();
    let loader_id = bpf_loader::id();
    let mut invoke_context = MockInvokeContext::default();
    let executable = EbpfVm::create_executable_from_elf(&data, None).unwrap();
    let (mut vm, heap_region) = create_vm(
        &loader_id,
        executable.as_ref(),
        parameter_accounts,
        &mut invoke_context,
    )
    .unwrap();
    // Serialize accounts + instruction data into the program's input buffer.
    let mut parameter_bytes = serialize_parameters(
        &bpf_loader::id(),
        program_id,
        parameter_accounts,
        &instruction_data,
    )
    .unwrap();
    assert_eq!(
        SUCCESS,
        vm.execute_program(parameter_bytes.as_mut_slice(), &[], &[heap_region.clone()])
            .unwrap()
    );
    // Write any account mutations the program made back into the accounts.
    deserialize_parameters(&bpf_loader::id(), parameter_accounts, &parameter_bytes).unwrap();
    Ok(vm.get_total_instruction_count())
}
/// Execute `tx` on `bank` with CPI (inner instruction) recording enabled and
/// return the transaction result together with the recorded inner
/// instructions.
fn process_transaction_and_record_inner(
    bank: &Bank,
    tx: Transaction,
) -> (Result<(), TransactionError>, Vec<Vec<CompiledInstruction>>) {
    let signature = tx.signatures.get(0).unwrap().clone();
    let txs = vec![tx];
    let tx_batch = bank.prepare_batch(&txs, None);
    // `true` enables inner-instruction (CPI) recording.
    let (mut results, _, mut inner) =
        bank.load_execute_and_commit_transactions(&tx_batch, MAX_PROCESSING_AGE, false, true);
    let inner_instructions = inner.swap_remove(0);
    // Prefer the committed signature status over the fee-collection result.
    let result = results
        .fee_collection_results
        .swap_remove(0)
        .and_then(|_| bank.get_signature_status(&signature).unwrap());
    (
        result,
        inner_instructions.expect("cpi recording should be enabled"),
    )
}
// Smoke-test each sample BPF program: deploy it, invoke it once with a fixed
// set of accounts/sysvars, and check whether the invocation is expected to
// succeed (second tuple element).
#[test]
#[cfg(any(feature = "bpf_c", feature = "bpf_rust"))]
fn test_program_bpf_sanity() {
    solana_logger::setup();
    // (program name, expected to succeed?) pairs, gated per toolchain.
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[
            ("alloc", true),
            ("bpf_to_bpf", true),
            ("multiple_static", true),
            ("noop", true),
            ("noop++", true),
            ("panic", false),
            ("relative_call", true),
            ("sanity", true),
            ("sanity++", true),
            ("sha256", true),
            ("struct_pass", true),
            ("struct_ret", true),
        ]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[
            ("solana_bpf_rust_128bit", true),
            ("solana_bpf_rust_alloc", true),
            ("solana_bpf_rust_dep_crate", true),
            ("solana_bpf_rust_external_spend", false),
            ("solana_bpf_rust_iter", true),
            ("solana_bpf_rust_many_args", true),
            ("solana_bpf_rust_noop", true),
            ("solana_bpf_rust_panic", false),
            ("solana_bpf_rust_param_passing", true),
            ("solana_bpf_rust_rand", true),
            ("solana_bpf_rust_sanity", true),
            ("solana_bpf_rust_sha256", true),
            ("solana_bpf_rust_sysval", true),
        ]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program.0);
        let GenesisConfigInfo {
            genesis_config,
            mint_keypair,
            ..
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_program!();
        bank.add_builtin_loader(&name, id, entrypoint);
        let bank = Arc::new(bank);
        // Create bank with a specific slot, used by solana_bpf_rust_sysval test
        let bank = Bank::new_from_parent(&bank, &Pubkey::default(), DEFAULT_SLOTS_PER_EPOCH + 1);
        let bank_client = BankClient::new(bank);
        // Call user program
        let program_id =
            load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.0);
        let account_metas = vec![
            AccountMeta::new(mint_keypair.pubkey(), true),
            AccountMeta::new(Keypair::new().pubkey(), false),
            AccountMeta::new(clock::id(), false),
            AccountMeta::new(fees::id(), false),
            AccountMeta::new(slot_hashes::id(), false),
            AccountMeta::new(stake_history::id(), false),
            AccountMeta::new(rent::id(), false),
        ];
        let instruction = Instruction::new(program_id, &1u8, account_metas);
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        if program.1 {
            assert!(result.is_ok());
        } else {
            assert!(result.is_err());
        }
    }
}
// Verify that programs targeting the deprecated BPF loader still deploy and
// execute successfully through `bpf_loader_deprecated`.
#[test]
#[cfg(any(feature = "bpf_c", feature = "bpf_rust"))]
fn test_program_bpf_loader_deprecated() {
    solana_logger::setup();
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[("deprecated_loader")]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[("solana_bpf_rust_deprecated_loader")]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program);
        let GenesisConfigInfo {
            genesis_config,
            mint_keypair,
            ..
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_deprecated_program!();
        bank.add_builtin_loader(&name, id, entrypoint);
        let bank_client = BankClient::new(bank);
        let program_id = load_bpf_program(
            &bank_client,
            &bpf_loader_deprecated::id(),
            &mint_keypair,
            program,
        );
        let account_metas = vec![AccountMeta::new(mint_keypair.pubkey(), true)];
        let instruction = Instruction::new(program_id, &1u8, account_metas);
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert!(result.is_ok());
    }
}
// Exercise a program that receives the same account twice in one
// instruction: each instruction code (1-6) mutates the duplicated account's
// data or lamports, and the assertions pin the observable outcome.
#[test]
fn test_program_bpf_duplicate_accounts() {
    solana_logger::setup();
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[("dup_accounts")]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[("solana_bpf_rust_dup_accounts")]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program);
        let GenesisConfigInfo {
            genesis_config,
            mint_keypair,
            ..
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_program!();
        bank.add_builtin_loader(&name, id, entrypoint);
        let bank = Arc::new(bank);
        let bank_client = BankClient::new_shared(&bank);
        let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program);
        let payee_account = Account::new(10, 1, &program_id);
        let payee_pubkey = Pubkey::new_rand();
        bank.store_account(&payee_pubkey, &payee_account);
        let account = Account::new(10, 1, &program_id);
        let pubkey = Pubkey::new_rand();
        // `pubkey` appears twice, so the program sees it as a duplicate.
        let account_metas = vec![
            AccountMeta::new(mint_keypair.pubkey(), true),
            AccountMeta::new(payee_pubkey, false),
            AccountMeta::new(pubkey, false),
            AccountMeta::new(pubkey, false),
        ];
        // Instructions 1-3: the account is reset before each call, and the
        // program is expected to leave data[0] equal to the instruction code.
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &1u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let data = bank_client.get_account_data(&pubkey).unwrap().unwrap();
        assert!(result.is_ok());
        assert_eq!(data[0], 1);
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &2u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let data = bank_client.get_account_data(&pubkey).unwrap().unwrap();
        assert!(result.is_ok());
        assert_eq!(data[0], 2);
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &3u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let data = bank_client.get_account_data(&pubkey).unwrap().unwrap();
        assert!(result.is_ok());
        assert_eq!(data[0], 3);
        // Instructions 4-6: the observable effect is on the account's
        // lamport balance instead (10 at reset, plus 1/2/3 respectively).
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &4u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let lamports = bank_client.get_balance(&pubkey).unwrap();
        assert!(result.is_ok());
        assert_eq!(lamports, 11);
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &5u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let lamports = bank_client.get_balance(&pubkey).unwrap();
        assert!(result.is_ok());
        assert_eq!(lamports, 12);
        bank.store_account(&pubkey, &account);
        let instruction = Instruction::new(program_id, &6u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let lamports = bank_client.get_balance(&pubkey).unwrap();
        assert!(result.is_ok());
        assert_eq!(lamports, 13);
    }
}
#[test]
fn test_program_bpf_error_handling() {
    solana_logger::setup();
    // One error-handling test program per enabled BPF toolchain; both
    // variants implement the same numbered scenarios exercised below.
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[("error_handling")]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[("solana_bpf_rust_error_handling")]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program);
        // Fresh genesis/bank per program so no state leaks between runs.
        let GenesisConfigInfo {
            genesis_config,
            mint_keypair,
            ..
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_program!();
        bank.add_builtin_loader(&name, id, entrypoint);
        let bank_client = BankClient::new(bank);
        let program_id = load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program);
        let account_metas = vec![AccountMeta::new(mint_keypair.pubkey(), true)];
        // The single instruction-data byte selects the scenario inside the
        // on-chain program. Scenario 1: plain success.
        let instruction = Instruction::new(program_id, &1u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert!(result.is_ok());
        // Scenario 2: program returns a builtin InstructionError.
        let instruction = Instruction::new(program_id, &2u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::InvalidAccountData)
        );
        // Scenarios 3-4: custom program error codes are surfaced verbatim.
        let instruction = Instruction::new(program_id, &3u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::Custom(0))
        );
        let instruction = Instruction::new(program_id, &4u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::Custom(42))
        );
        // Scenarios 5-7: depending on how the program encodes the error, the
        // runtime may report either InvalidInstructionData or the specific
        // error, hence the two-step check instead of a single assert_eq.
        let instruction = Instruction::new(program_id, &5u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let result = result.unwrap_err().unwrap();
        if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result
        {
            assert_eq!(
                result,
                TransactionError::InstructionError(0, InstructionError::InvalidError)
            );
        }
        let instruction = Instruction::new(program_id, &6u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let result = result.unwrap_err().unwrap();
        if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result
        {
            assert_eq!(
                result,
                TransactionError::InstructionError(0, InstructionError::InvalidError)
            );
        }
        let instruction = Instruction::new(program_id, &7u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        let result = result.unwrap_err().unwrap();
        if TransactionError::InstructionError(0, InstructionError::InvalidInstructionData) != result
        {
            assert_eq!(
                result,
                TransactionError::InstructionError(0, InstructionError::AccountBorrowFailed)
            );
        }
        // Scenarios 8-9: deterministic runtime-detected failures.
        let instruction = Instruction::new(program_id, &8u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::InvalidInstructionData)
        );
        let instruction = Instruction::new(program_id, &9u8, account_metas.clone());
        let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
        assert_eq!(
            result.unwrap_err().unwrap(),
            TransactionError::InstructionError(0, InstructionError::MaxSeedLengthExceeded)
        );
    }
}
#[test]
fn test_program_bpf_invoke() {
    solana_logger::setup();
    // First instruction-data byte selects the scenario inside the on-chain
    // invoke program; the next three bytes carry the PDA bump nonces.
    const TEST_SUCCESS: u8 = 1;
    const TEST_PRIVILEGE_ESCALATION_SIGNER: u8 = 2;
    const TEST_PRIVILEGE_ESCALATION_WRITABLE: u8 = 3;
    // Renamed from the misspelled TEST_PPROGRAM_NOT_EXECUTABLE (local only).
    const TEST_PROGRAM_NOT_EXECUTABLE: u8 = 4;
    // (caller program, callee program) pairs per enabled BPF toolchain.
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[("invoke", "invoked")]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[("solana_bpf_rust_invoke", "solana_bpf_rust_invoked")]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program);
        let GenesisConfigInfo {
            genesis_config,
            mint_keypair,
            ..
        } = create_genesis_config(50);
        let mut bank = Bank::new(&genesis_config);
        let (name, id, entrypoint) = solana_bpf_loader_program!();
        bank.add_builtin_loader(&name, id, entrypoint);
        // Shared bank so accounts can be seeded directly below.
        let bank = Arc::new(bank);
        let bank_client = BankClient::new_shared(&bank);
        let invoke_program_id =
            load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.0);
        let invoked_program_id =
            load_bpf_program(&bank_client, &bpf_loader::id(), &mint_keypair, program.1);
        // Seed the accounts the caller/callee programs operate on.
        let argument_keypair = Keypair::new();
        let account = Account::new(42, 100, &invoke_program_id);
        bank.store_account(&argument_keypair.pubkey(), &account);
        let invoked_argument_keypair = Keypair::new();
        let account = Account::new(10, 10, &invoked_program_id);
        bank.store_account(&invoked_argument_keypair.pubkey(), &account);
        let from_keypair = Keypair::new();
        let account = Account::new(84, 0, &solana_sdk::system_program::id());
        bank.store_account(&from_keypair.pubkey(), &account);
        // Program-derived addresses whose bump nonces are forwarded in the
        // instruction data so the program can re-derive and sign for them.
        let (derived_key1, nonce1) =
            Pubkey::find_program_address(&[b"You pass butter"], &invoke_program_id);
        let (derived_key2, nonce2) =
            Pubkey::find_program_address(&[b"Lil'", b"Bits"], &invoke_program_id);
        let (derived_key3, nonce3) =
            Pubkey::find_program_address(&[derived_key2.as_ref()], &invoke_program_id);
        let mint_pubkey = mint_keypair.pubkey();
        let account_metas = vec![
            AccountMeta::new(mint_pubkey, true),
            AccountMeta::new(argument_keypair.pubkey(), true),
            AccountMeta::new_readonly(invoked_program_id, false),
            AccountMeta::new(invoked_argument_keypair.pubkey(), true),
            AccountMeta::new_readonly(invoked_program_id, false),
            AccountMeta::new(argument_keypair.pubkey(), true),
            AccountMeta::new(derived_key1, false),
            AccountMeta::new(derived_key2, false),
            AccountMeta::new_readonly(derived_key3, false),
            AccountMeta::new_readonly(solana_sdk::system_program::id(), false),
            AccountMeta::new(from_keypair.pubkey(), true),
        ];
        // success cases
        let instruction = Instruction::new(
            invoke_program_id,
            &[TEST_SUCCESS, nonce1, nonce2, nonce3],
            account_metas.clone(),
        );
        let message = Message::new(&[instruction], Some(&mint_pubkey));
        let tx = Transaction::new(
            &[
                &mint_keypair,
                &argument_keypair,
                &invoked_argument_keypair,
                &from_keypair,
            ],
            message.clone(),
            bank.last_blockhash(),
        );
        let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx);
        assert!(result.is_ok());
        // Verify the exact sequence of cross-program invocations performed.
        let invoked_programs: Vec<Pubkey> = inner_instructions[0]
            .iter()
            .map(|ix| message.account_keys[ix.program_id_index as usize].clone())
            .collect();
        assert_eq!(
            invoked_programs,
            vec![
                solana_sdk::system_program::id(),
                solana_sdk::system_program::id(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
                invoked_program_id.clone(),
            ]
        );
        // failure cases
        // Attempting to escalate a non-signer to a signer must be rejected.
        let instruction = Instruction::new(
            invoke_program_id,
            &[TEST_PRIVILEGE_ESCALATION_SIGNER, nonce1, nonce2, nonce3],
            account_metas.clone(),
        );
        let message = Message::new(&[instruction], Some(&mint_pubkey));
        let tx = Transaction::new(
            &[
                &mint_keypair,
                &argument_keypair,
                &invoked_argument_keypair,
                &from_keypair,
            ],
            message.clone(),
            bank.last_blockhash(),
        );
        let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx);
        let invoked_programs: Vec<Pubkey> = inner_instructions[0]
            .iter()
            .map(|ix| message.account_keys[ix.program_id_index as usize].clone())
            .collect();
        assert_eq!(invoked_programs, vec![invoked_program_id.clone()]);
        assert_eq!(
            result.unwrap_err(),
            TransactionError::InstructionError(0, InstructionError::Custom(194969602))
        );
        // Attempting to escalate read-only to writable must be rejected.
        let instruction = Instruction::new(
            invoke_program_id,
            &[TEST_PRIVILEGE_ESCALATION_WRITABLE, nonce1, nonce2, nonce3],
            account_metas.clone(),
        );
        let message = Message::new(&[instruction], Some(&mint_pubkey));
        let tx = Transaction::new(
            &[
                &mint_keypair,
                &argument_keypair,
                &invoked_argument_keypair,
                &from_keypair,
            ],
            message.clone(),
            bank.last_blockhash(),
        );
        let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx);
        let invoked_programs: Vec<Pubkey> = inner_instructions[0]
            .iter()
            .map(|ix| message.account_keys[ix.program_id_index as usize].clone())
            .collect();
        assert_eq!(invoked_programs, vec![invoked_program_id.clone()]);
        assert_eq!(
            result.unwrap_err(),
            TransactionError::InstructionError(0, InstructionError::Custom(194969602))
        );
        // Invoking a non-executable account as a program must fail.
        let instruction = Instruction::new(
            invoke_program_id,
            &[TEST_PROGRAM_NOT_EXECUTABLE, nonce1, nonce2, nonce3],
            account_metas.clone(),
        );
        let message = Message::new(&[instruction], Some(&mint_pubkey));
        let tx = Transaction::new(
            &[
                &mint_keypair,
                &argument_keypair,
                &invoked_argument_keypair,
                &from_keypair,
            ],
            message.clone(),
            bank.last_blockhash(),
        );
        let (result, inner_instructions) = process_transaction_and_record_inner(&bank, tx);
        let invoked_programs: Vec<Pubkey> = inner_instructions[0]
            .iter()
            .map(|ix| message.account_keys[ix.program_id_index as usize].clone())
            .collect();
        assert_eq!(invoked_programs, vec![argument_keypair.pubkey().clone()]);
        assert_eq!(
            result.unwrap_err(),
            TransactionError::InstructionError(0, InstructionError::AccountNotExecutable)
        );
        // Check final state
        // The success case transferred a lamport into derived_key1, assigned
        // it to the invoke program, grew its data, and wrote a pattern.
        assert_eq!(43, bank.get_balance(&derived_key1));
        let account = bank.get_account(&derived_key1).unwrap();
        assert_eq!(invoke_program_id, account.owner);
        assert_eq!(
            MAX_PERMITTED_DATA_INCREASE,
            bank.get_account(&derived_key1).unwrap().data.len()
        );
        for i in 0..20 {
            assert_eq!(i as u8, account.data[i]);
        }
    }
}
#[test]
fn assert_instruction_count() {
    solana_logger::setup();
    // (program name, maximum allowed executed-instruction count). The counts
    // serve as a regression budget: programs may get cheaper, never costlier.
    let mut programs = Vec::new();
    #[cfg(feature = "bpf_c")]
    {
        programs.extend_from_slice(&[
            ("bpf_to_bpf", 13),
            ("multiple_static", 8),
            ("noop", 57),
            ("relative_call", 10),
            ("sanity", 1140),
            ("sanity++", 1140),
            ("struct_pass", 8),
            ("struct_ret", 22),
        ]);
    }
    #[cfg(feature = "bpf_rust")]
    {
        programs.extend_from_slice(&[
            ("solana_bpf_rust_128bit", 543),
            ("solana_bpf_rust_alloc", 19082),
            ("solana_bpf_rust_dep_crate", 2),
            ("solana_bpf_rust_external_spend", 485),
            ("solana_bpf_rust_iter", 723),
            ("solana_bpf_rust_many_args", 231),
            ("solana_bpf_rust_noop", 459),
            ("solana_bpf_rust_param_passing", 54),
            ("solana_bpf_rust_sanity", 2223),
        ]);
    }
    for program in programs.iter() {
        println!("Test program: {:?}", program.0);
        // Each program runs against a single throwaway account.
        let program_id = Pubkey::new_rand();
        let key = Pubkey::new_rand();
        let mut account = RefCell::new(Account::default());
        let parameter_accounts = vec![KeyedAccount::new(&key, false, &mut account)];
        // run_program returns the number of BPF instructions executed.
        let count = run_program(program.0, &program_id, &parameter_accounts[..], &[]).unwrap();
        println!(" {} : {:?} ({:?})", program.0, count, program.1,);
        assert!(count <= program.1);
    }
}
// Minimal InvokeContext used to drive programs in assert_instruction_count
// without constructing a full bank/runtime.
#[derive(Debug, Default)]
struct MockInvokeContext {
    pub key: Pubkey,                     // reported as the caller by get_caller
    pub logger: MockLogger,              // cloned out by get_logger
    pub compute_budget: ComputeBudget,   // borrowed by get_compute_budget
    pub compute_meter: MockComputeMeter, // cloned out by get_compute_meter
}
impl InvokeContext for MockInvokeContext {
    // Call-stack push/pop are no-ops: the mock tracks no invocation depth.
    fn push(&mut self, _key: &Pubkey) -> Result<(), InstructionError> {
        Ok(())
    }
    fn pop(&mut self) {}
    // Account verification always succeeds; the mock performs no checks.
    fn verify_and_update(
        &mut self,
        _message: &Message,
        _instruction: &CompiledInstruction,
        _accounts: &[Rc<RefCell<Account>>],
    ) -> Result<(), InstructionError> {
        Ok(())
    }
    fn get_caller(&self) -> Result<&Pubkey, InstructionError> {
        Ok(&self.key)
    }
    // No builtin programs are registered for cross-program invocation.
    fn get_programs(&self) -> &[(Pubkey, ProcessInstruction)] {
        &[]
    }
    // NOTE(review): returns a fresh clone on each call, so callers each get
    // an independent logger rather than a shared one.
    fn get_logger(&self) -> Rc<RefCell<dyn Logger>> {
        Rc::new(RefCell::new(self.logger.clone()))
    }
    fn get_compute_budget(&self) -> &ComputeBudget {
        &self.compute_budget
    }
    // Same cloning caveat as get_logger.
    fn get_compute_meter(&self) -> Rc<RefCell<dyn ComputeMeter>> {
        Rc::new(RefCell::new(self.compute_meter.clone()))
    }
    // Executor caching is disabled in the mock.
    fn add_executor(&mut self, _pubkey: &Pubkey, _executor: Arc<dyn Executor>) {}
    fn get_executor(&mut self, _pubkey: &Pubkey) -> Option<Arc<dyn Executor>> {
        None
    }
    fn record_instruction(&self, _instruction: &Instruction) {}
    // Every feature is reported as active.
    fn is_feature_active(&self, _feature_id: &Pubkey) -> bool {
        true
    }
}
// Compute meter that never charges and never runs out of budget.
#[derive(Debug, Default, Clone)]
struct MockComputeMeter {}
impl ComputeMeter for MockComputeMeter {
    fn consume(&mut self, _amount: u64) -> Result<(), InstructionError> {
        Ok(())
    }
    fn get_remaining(&self) -> u64 {
        u64::MAX
    }
}
// Logger that reports logging enabled but discards all messages (uncomment
// the println! below for local debugging).
#[derive(Debug, Default, Clone)]
struct MockLogger {}
impl Logger for MockLogger {
    fn log_enabled(&self) -> bool {
        true
    }
    fn log(&mut self, _message: &str) {
        // println!("{}", message);
    }
}
// Instruction meter with an unlimited budget, used when executing test
// programs in the BPF VM.
struct TestInstructionMeter {}
impl InstructionMeter for TestInstructionMeter {
    fn consume(&mut self, _amount: u64) {}
    fn get_remaining(&self) -> u64 {
        u64::MAX
    }
}
#[cfg(any(feature = "bpf_rust"))]
#[test]
fn test_program_bpf_instruction_introspection() {
    solana_logger::setup();
    let GenesisConfigInfo {
        genesis_config,
        mint_keypair,
        ..
    } = create_genesis_config(50_000);
    let mut bank = Bank::new(&genesis_config);
    let (name, id, entrypoint) = solana_bpf_loader_program!();
    bank.add_builtin_loader(&name, id, entrypoint);
    let bank = Arc::new(bank);
    let bank_client = BankClient::new_shared(&bank);
    let program_id = load_bpf_program(
        &bank_client,
        &bpf_loader::id(),
        &mint_keypair,
        "solana_bpf_rust_instruction_introspection",
    );
    // Passing transaction
    // Each instruction carries [scenario, own index] so the program can check
    // the instructions sysvar content against its own position.
    let account_metas = vec![AccountMeta::new_readonly(
        solana_sdk::sysvar::instructions::id(),
        false,
    )];
    let instruction0 = Instruction::new(program_id, &[0u8, 0u8], account_metas.clone());
    let instruction1 = Instruction::new(program_id, &[0u8, 1u8], account_metas.clone());
    let instruction2 = Instruction::new(program_id, &[0u8, 2u8], account_metas);
    let message = Message::new(
        &[instruction0, instruction1, instruction2],
        Some(&mint_keypair.pubkey()),
    );
    let result = bank_client.send_and_confirm_message(&[&mint_keypair], message);
    println!("result: {:?}", result);
    assert!(result.is_ok());
    // writable special instructions11111 key, should not be allowed
    let account_metas = vec![AccountMeta::new(
        solana_sdk::sysvar::instructions::id(),
        false,
    )];
    let instruction = Instruction::new(program_id, &0u8, account_metas);
    let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
    assert_eq!(
        result.unwrap_err().unwrap(),
        TransactionError::InvalidAccountIndex
    );
    // No accounts, should error
    let instruction = Instruction::new(program_id, &0u8, vec![]);
    let result = bank_client.send_and_confirm_instruction(&mint_keypair, instruction);
    assert!(result.is_err());
    assert_eq!(
        result.unwrap_err().unwrap(),
        TransactionError::InstructionError(
            0,
            solana_sdk::instruction::InstructionError::NotEnoughAccountKeys
        )
    );
    // The instructions sysvar account must not persist after the transaction.
    assert!(bank
        .get_account(&solana_sdk::sysvar::instructions::id())
        .is_none());
}
| 36.046988 | 100 | 0.612387 |
bfaba9a4a4e0c14b402cd2d475d23f60973ea610
| 1,181 |
extern crate rustc_expand;
extern crate rustc_parse as parse;
extern crate rustc_span;
extern crate syntax;
use rustc_span::source_map::FilePathMapping;
use rustc_span::FileName;
use syntax::ast;
use syntax::ptr::P;
use syntax::sess::ParseSess;
use std::panic;
/// Parse `input` into a libsyntax expression.
///
/// Returns `None` when parsing fails (the diagnostic is emitted) or when the
/// parser panics; a panic is additionally reported via `errorf!`.
pub fn libsyntax_expr(input: &str) -> Option<P<ast::Expr>> {
    let parse_attempt = panic::catch_unwind(|| {
        let sess = ParseSess::new(FilePathMapping::empty());
        let parsed = parse::new_parser_from_source_str(
            &sess,
            FileName::Custom("test_precedence".to_string()),
            input.to_string(),
        )
        .parse_expr();
        // Emit any diagnostic, then collapse the error case to None.
        parsed.map_err(|mut diagnostic| diagnostic.emit()).ok()
    });
    match parse_attempt {
        Ok(maybe_expr) => maybe_expr,
        Err(_) => {
            errorf!("libsyntax panicked\n");
            None
        }
    }
}
/// Parse `input` into a `syn::Expr`, reporting a parse failure via `errorf!`
/// and returning `None` in that case.
pub fn syn_expr(input: &str) -> Option<syn::Expr> {
    syn::parse_str(input)
        .map_err(|msg| errorf!("syn failed to parse\n{:?}\n", msg))
        .ok()
}
| 24.102041 | 60 | 0.524132 |
3a0919c3f050c419d4f57e8a0102c34504330c18
| 9,084 |
// Copyright 2019 Google LLC. All Rights Reserved.
// Copyright 2019-2020 Guillaume Becquin
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use protobuf::parse_from_bytes;
use std::fs::File;
use std::io::Read;
use hashbrown::HashMap as BrownHashMap;
use itertools::Itertools;
use crate::Vocab;
use std::collections::HashMap;
use crate::preprocessing::vocab::base_vocab::swap_key_values;
use std::process;
use crate::preprocessing::tokenizer::base_tokenizer::{TokenRef, OffsetSize};
use crate::preprocessing::vocab::sentencepiece_proto::sentencepiece_model::ModelProto;
/// One candidate piece in the decoding lattice built by
/// `SentencePieceModel::decode_forward_token_ref`.
#[derive(Debug, Clone, Copy)]
pub struct Node<'a> {
    pub text: &'a str, // matched piece, sliced from the source token text
    pub score: f32,    // cumulative best-path score up to `end`
    pub index: i64,    // vocabulary id of the piece
    pub start: usize,  // start position, in characters
    pub end: usize,    // end position (exclusive), in characters
    pub reference_offsets: &'a [OffsetSize], // offsets into the original input
}
/// Node of the character trie over the SentencePiece vocabulary.
#[derive(Debug, Clone)]
pub struct TrieNode {
    pub text: String, // prefix spelled by the path from the root to this node
    pub len: usize,   // character count of `text` (precomputed)
    pub score: f32,   // piece score; meaningful only when `end` is true
    pub index: i64,   // vocabulary id; meaningful only when `end` is true
    pub end: bool,    // true if `text` is a complete vocabulary piece
    pub children: BrownHashMap<char, TrieNode>,
}
impl TrieNode {
    /// Build a node for the prefix `text` with its character length
    /// precomputed, no children, and no piece metadata yet (`end` is false;
    /// `score`/`index` are only meaningful once the node is marked terminal).
    pub fn new(text: String) -> TrieNode {
        TrieNode {
            len: text.chars().count(),
            text,
            score: 0.0,
            index: 0,
            end: false,
            children: BrownHashMap::new(),
        }
    }
}
/// Character trie over a SentencePiece vocabulary, rooted at the empty
/// string; used for maximum-score subword decoding.
pub struct SentencePieceModel {
    pub root: TrieNode,
}
impl SentencePieceModel {
    /// Build the trie from a serialized SentencePiece `ModelProto` file; each
    /// piece's position in the proto becomes its vocabulary id.
    /// NOTE(review): unwraps on I/O and deserialization errors — panics on a
    /// missing or malformed model file.
    pub fn from_file(path: &str) -> SentencePieceModel {
        let mut f = File::open(path).unwrap();
        let mut contents = Vec::new();
        f.read_to_end(&mut contents).unwrap();
        let proto = parse_from_bytes::<ModelProto>(contents.as_slice()).unwrap();
        let root = TrieNode::new("".to_string());
        let mut vocab = SentencePieceModel { root };
        for (idx, piece) in proto.get_pieces().iter().enumerate() {
            vocab.insert(piece.get_piece(), piece.get_score(), idx as i64);
        }
        vocab
    }
    /// Same as `from_file`, but for an already-parsed `ModelProto`.
    pub fn from_proto(proto: &ModelProto) -> SentencePieceModel {
        let root = TrieNode::new("".to_string());
        let mut vocab = SentencePieceModel { root };
        for (idx, piece) in proto.get_pieces().iter().enumerate() {
            vocab.insert(piece.get_piece(), piece.get_score(), idx as i64);
        }
        vocab
    }
    /// Insert `word` into the trie character by character; the node for the
    /// final character is marked as a piece boundary carrying `score` and
    /// `index`.
    fn insert(&mut self, word: &str, score: f32, index: i64) {
        let char_count = word.chars().count();
        let mut node = &mut self.root;
        for (idx, character) in word.chars().enumerate() {
            if !node.children.contains_key(&character) {
                // Child text is the parent's prefix plus this character.
                let mut text = node.text.clone();
                text.push(character);
                let new_node = TrieNode::new(text);
                node.children.insert(character, new_node);
            }
            node = node.children.get_mut(&character).unwrap();
            if idx == char_count - 1 {
                node.end = true;
                node.score = score;
                node.index = index;
            }
        }
    }
    /// Return the trie nodes of every vocabulary piece that is a prefix of
    /// `text`, shortest first.
    /// NOTE(review): `characters.next().unwrap()` panics on empty `text` —
    /// callers appear to guarantee non-empty input; confirm.
    pub fn common_prefix_search<'a>(&'a self, text: &'a str) -> Vec<&TrieNode> {
        let mut results = vec!();
        let mut characters = text.chars();
        let mut node = self.root.children.get(&characters.next().unwrap());
        if node.is_some() {
            if node.unwrap().end {
                results.push(node.unwrap());
            }
        } else {
            return vec!();
        }
        // Walk down the trie while characters keep matching, collecting every
        // node that terminates a complete piece.
        while let Some(character) = characters.next() {
            node = node.unwrap().children.get(&character);
            if node.is_some() {
                if node.unwrap().end {
                    results.push(node.unwrap());
                }
            } else {
                break;
            }
        }
        results
    }
    /// Forward pass of a Viterbi-style search over `token.text`: for every
    /// character boundary, keep the highest-scoring segmentation ending
    /// there. Boundaries unreachable through known pieces receive a
    /// single-character fallback node (score `std::f32::MIN`, index 0) so the
    /// backward pass can always complete.
    pub fn decode_forward_token_ref<'a>(&'a self, token: TokenRef<'a>) -> Vec<Option<Node<'a>>> {
        // Byte offset of each character, plus a past-the-end sentinel so the
        // text can be sliced by character positions.
        let mut char_positions = token.text
            .char_indices()
            .map(|(pos, _)| pos)
            .collect_vec();
        char_positions.push(token.text.len());
        let mut results = vec!(None; char_positions.len());
        let mut scores = vec!(std::f32::NEG_INFINITY; char_positions.len());
        scores[0] = 0f32;
        for char_start in 0..char_positions.len() - 1 {
            let matches = self.common_prefix_search(&token.text[char_positions[char_start]..]);
            for node in matches {
                // Relax the boundary at the end of this piece if the path
                // through char_start improves on the best score seen so far.
                let local_score = scores[char_start] + node.score;
                let char_end = char_start + node.len;
                if local_score > scores[char_end] {
                    results[char_end] = Some(Node {
                        text: &token.text[char_positions[char_start]..char_positions[char_end]],
                        score: local_score,
                        index: node.index,
                        start: char_start,
                        end: char_end,
                        reference_offsets: &token.reference_offsets[char_start..char_end],
                    });
                    scores[char_end] = local_score;
                }
            }
            // Unknown-character fallback: guarantee the next boundary is
            // reachable even when no vocabulary piece covers it.
            if scores[char_start + 1] <= std::f32::MIN {
                results[char_start + 1] = Some(Node {
                    text: &token.text[char_positions[char_start]..char_positions[char_start + 1]],
                    score: std::f32::MIN,
                    index: 0,
                    start: char_start,
                    end: char_start + 1,
                    reference_offsets: &token.reference_offsets[char_start..char_start + 1],
                });
                scores[char_start + 1] = 0f32;
            }
        }
        results
    }
    /// Backward pass: starting from the final boundary, follow each node's
    /// `start` link to recover the best segmentation in left-to-right order.
    pub fn decode_backward<'a>(&'a self, nodes: &'a Vec<Option<Node<'a>>>) -> Vec<&'a Node> {
        let mut next_node = nodes.last().unwrap();
        let mut best_sequence = vec!();
        while next_node.is_some() {
            let node_value = next_node.as_ref().unwrap();
            best_sequence.push(node_value);
            next_node = &nodes[node_value.start];
        };
        best_sequence.reverse();
        best_sequence
    }
}
/// SentencePiece vocabulary: token<->id maps plus the subset of tokens
/// registered as special values (currently only the unknown token).
pub struct SentencePieceVocab {
    pub values: HashMap<String, i64>,  // token -> id
    pub indices: HashMap<i64, String>, // id -> token (inverse of `values`)
    pub unknown_value: &'static str,   // fallback token text
    pub special_values: HashMap<String, i64>,
    pub special_indices: HashMap<i64, String>,
}
impl SentencePieceVocab {
    /// Padding token.
    pub fn pad_value() -> &'static str { "<pad>" }
    /// Separator token.
    pub fn sep_value() -> &'static str { "<sep>" }
    /// Classification token.
    pub fn cls_value() -> &'static str { "<cls>" }
    /// Mask token.
    pub fn mask_value() -> &'static str { "<mask>" }
    /// Beginning-of-sequence token.
    pub fn bos_value() -> &'static str { "<s>" }
    /// End-of-sequence token.
    pub fn eos_value() -> &'static str { "</s>" }
}
impl Vocab for SentencePieceVocab {
    fn unknown_value() -> &'static str { "<unk>" }
    fn get_unknown_value(&self) -> &'static str { "<unk>" }
    fn values(&self) -> &HashMap<String, i64> {
        &self.values
    }
    fn indices(&self) -> &HashMap<i64, String> { &self.indices }
    fn special_values(&self) -> &HashMap<String, i64> { &self.special_values }
    fn special_indices(&self) -> &HashMap<i64, String> { &self.special_indices }
    /// Load the vocabulary from a serialized SentencePiece model file. Ids
    /// follow piece order in the proto; `<unk>` is registered as special.
    /// NOTE(review): unwraps on I/O/deserialization errors, like
    /// `SentencePieceModel::from_file`.
    fn from_file(path: &str) -> SentencePieceVocab {
        let mut f = File::open(path).unwrap();
        let mut contents = Vec::new();
        f.read_to_end(&mut contents).unwrap();
        let proto = parse_from_bytes::<ModelProto>(contents.as_slice()).unwrap();
        let mut values = HashMap::new();
        for (idx, piece) in proto.get_pieces().iter().enumerate() {
            values.insert(piece.get_piece().to_owned(), idx as i64);
        }
        let mut special_values = HashMap::new();
        let unknown_value = SentencePieceVocab::unknown_value();
        SentencePieceVocab::_register_as_special_value(unknown_value, &values, &mut special_values);
        let indices = swap_key_values(&values);
        let special_indices = swap_key_values(&special_values);
        SentencePieceVocab { values, indices, unknown_value, special_values, special_indices }
    }
    /// Convert a token to its id (unknown tokens fall back to `<unk>`).
    /// NOTE(review): on an internal lookup error this prints and exits the
    /// whole process rather than propagating — consider returning Result.
    fn token_to_id(&self, token: &str) -> i64 {
        match self._token_to_id(token, &self.values, &self.special_values, &self.unknown_value) {
            Ok(index) => index,
            Err(err) => {
                println!("{}", err);
                process::exit(1);
            }
        }
    }
    /// Convert an id back to its token text; same exit-on-error caveat as
    /// `token_to_id`.
    fn id_to_token(&self, id: &i64) -> String {
        match self._id_to_token(&id, &self.indices, &self.special_indices, &self.unknown_value) {
            Ok(token) => token,
            Err(err) => {
                println!("{}", err);
                process::exit(1);
            }
        }
    }
}
| 34.150376 | 100 | 0.56528 |
f87567df85f750269f9daabb3f8432cc4a7ee943
| 7,864 |
//! QUIC transport protocol support for Tokio
//!
//! [QUIC](https://en.wikipedia.org/wiki/QUIC) is a modern transport protocol addressing
//! shortcomings of TCP, such as head-of-line blocking, poor security, slow handshakes, and
//! inefficient congestion control. This crate provides a portable userspace implementation. It
//! builds on top of quinn-proto, which implements protocol logic independent of any particular
//! runtime.
//!
//! The entry point of this crate is the [`Endpoint`](generic/struct.Endpoint.html).
//!
#![cfg_attr(
feature = "rustls",
doc = "```no_run
# use futures_util::TryFutureExt;
let mut builder = quinn::Endpoint::builder();
// ... configure builder ...
// Ensure you're inside a tokio runtime context
let (endpoint, _) = builder.bind(&\"[::]:0\".parse().unwrap()).unwrap();
// ... use endpoint ...
```"
)]
//! # About QUIC
//!
//! A QUIC connection is an association between two endpoints. The endpoint which initiates the
//! connection is termed the client, and the endpoint which accepts it is termed the server. A
//! single endpoint may function as both client and server for different connections, for example
//! in a peer-to-peer application. To communicate application data, each endpoint may open streams
//! up to a limit dictated by its peer. Typically, that limit is increased as old streams are
//! finished.
//!
//! Streams may be unidirectional or bidirectional, and are cheap to create and disposable. For
//! example, a traditionally datagram-oriented application could use a new stream for every
//! message it wants to send, no longer needing to worry about MTUs. Bidirectional streams behave
//! much like a traditional TCP connection, and are useful for sending messages that have an
//! immediate response, such as an HTTP request. Stream data is delivered reliably, and there is no
//! ordering enforced between data on different streams.
//!
//! By avoiding head-of-line blocking and providing unified congestion control across all streams
//! of a connection, QUIC is able to provide higher throughput and lower latency than one or
//! multiple TCP connections between the same two hosts, while providing more useful behavior than
//! raw UDP sockets.
//!
//! Quinn also exposes unreliable datagrams, which are a low-level primitive preferred when
//! automatic fragmentation and retransmission of certain data is not desired.
//!
//! QUIC uses encryption and identity verification built directly on TLS 1.3. Just as with a TLS
//! server, it is useful for a QUIC server to be identified by a certificate signed by a trusted
//! authority. If this is infeasible--for example, if servers are short-lived or not associated
//! with a domain name--then as with TLS, self-signed certificates can be used to provide
//! encryption alone.
#![warn(missing_docs)]
use std::time::Duration;
mod broadcast;
mod builders;
mod connection;
mod endpoint;
mod mutex;
mod platform;
mod recv_stream;
mod send_stream;
mod work_limiter;
pub use proto::{
crypto, ApplicationClose, Certificate, CertificateChain, Chunk, ConfigError, ConnectError,
ConnectionClose, ConnectionError, ParseError, PrivateKey, StreamId, Transmit, TransportConfig,
VarInt,
};
pub use crate::builders::EndpointError;
pub use crate::connection::{SendDatagramError, ZeroRttAccepted};
pub use crate::recv_stream::{ReadError, ReadExactError, ReadToEndError};
pub use crate::send_stream::{StoppedError, WriteError};
/// Types that are generic over the crypto protocol implementation
pub mod generic {
    //! Re-exports of the crypto-generic API surface; the crate root also
    //! provides rustls-specialized aliases of these when the `rustls`
    //! feature is enabled.
    // Endpoint and configuration builders.
    pub use crate::builders::{ClientConfigBuilder, EndpointBuilder, ServerConfigBuilder};
    // Connection lifecycle plus per-connection stream/datagram handles.
    pub use crate::connection::{
        Connecting, Connection, Datagrams, IncomingBiStreams, IncomingUniStreams, NewConnection,
        OpenBi, OpenUni,
    };
    pub use crate::endpoint::{Endpoint, Incoming};
    // Stream read/write futures and handles.
    pub use crate::recv_stream::{Read, ReadChunk, ReadChunks, ReadExact, ReadToEnd, RecvStream};
    pub use crate::send_stream::SendStream;
    pub use proto::generic::{ClientConfig, ServerConfig};
}
#[cfg(feature = "rustls")]
mod rustls_impls {
    //! Aliases of every `generic` type specialized to the rustls
    //! `TlsSession`; re-exported at the crate root behind the `rustls`
    //! feature so most users never name the generic forms.
    use crate::generic;
    use proto::crypto::rustls::TlsSession;
    /// A `ClientConfig` using rustls for the cryptography protocol
    pub type ClientConfig = generic::ClientConfig<TlsSession>;
    /// A `ServerConfig` using rustls for the cryptography protocol
    pub type ServerConfig = generic::ServerConfig<TlsSession>;
    /// A `ClientConfigBuilder` using rustls for the cryptography protocol
    pub type ClientConfigBuilder = generic::ClientConfigBuilder<TlsSession>;
    /// An `EndpointBuilder` using rustls for the cryptography protocol
    pub type EndpointBuilder = generic::EndpointBuilder<TlsSession>;
    /// A `ServerConfigBuilder` using rustls for the cryptography protocol
    pub type ServerConfigBuilder = generic::ServerConfigBuilder<TlsSession>;
    /// A `Connecting` using rustls for the cryptography protocol
    pub type Connecting = generic::Connecting<TlsSession>;
    /// A `Connection` using rustls for the cryptography protocol
    pub type Connection = generic::Connection<TlsSession>;
    /// A `Datagrams` using rustls for the cryptography protocol
    pub type Datagrams = generic::Datagrams<TlsSession>;
    /// An `IncomingBiStreams` using rustls for the cryptography protocol
    pub type IncomingBiStreams = generic::IncomingBiStreams<TlsSession>;
    /// An `IncomingUniStreams` using rustls for the cryptography protocol
    pub type IncomingUniStreams = generic::IncomingUniStreams<TlsSession>;
    /// A `NewConnection` using rustls for the cryptography protocol
    pub type NewConnection = generic::NewConnection<TlsSession>;
    /// An `OpenBi` using rustls for the cryptography protocol
    pub type OpenBi = generic::OpenBi<TlsSession>;
    /// An `OpenUni` using rustls for the cryptography protocol
    pub type OpenUni = generic::OpenUni<TlsSession>;
    /// An `Endpoint` using rustls for the cryptography protocol
    pub type Endpoint = generic::Endpoint<TlsSession>;
    /// An `Incoming` using rustls for the cryptography protocol
    pub type Incoming = generic::Incoming<TlsSession>;
    /// A `Read` using rustls for the cryptography protocol
    pub type Read<'a> = generic::Read<'a, TlsSession>;
    /// A `ReadExact` using rustls for the cryptography protocol
    pub type ReadExact<'a> = generic::ReadExact<'a, TlsSession>;
    /// A `ReadToEnd` using rustls for the cryptography protocol
    pub type ReadToEnd = generic::ReadToEnd<TlsSession>;
    /// A `RecvStream` using rustls for the cryptography protocol
    pub type RecvStream = generic::RecvStream<TlsSession>;
    /// A `SendStream` using rustls for the cryptography protocol
    pub type SendStream = generic::SendStream<TlsSession>;
}
#[cfg(feature = "rustls")]
pub use rustls_impls::*;
#[cfg(test)]
mod tests;
/// Event routed to a connection: either an application-requested close (with
/// the code/reason to convey) or a protocol-level event from `quinn-proto`.
#[derive(Debug)]
enum ConnectionEvent {
    Close {
        error_code: VarInt,
        reason: bytes::Bytes,
    },
    Proto(proto::ConnectionEvent),
}
/// Event carrying either a `quinn-proto` endpoint event or an outgoing
/// packet to transmit. NOTE(review): presumably flows from connections back
/// to the endpoint driver — confirm against the endpoint/connection modules.
#[derive(Debug)]
enum EndpointEvent {
    Proto(proto::EndpointEvent),
    Transmit(proto::Transmit),
}
/// Maximum number of datagrams processed in send/recv calls to make before moving on to other processing
///
/// This helps ensure we don't starve anything when the CPU is slower than the link.
/// Value is selected by picking a low number which didn't degrade throughput in benchmarks.
const IO_LOOP_BOUND: usize = 160;
/// The maximum amount of time that should be spent in `recvmsg()` calls per endpoint iteration
///
/// 50us are chosen so that an endpoint iteration with a 50us sendmsg limit blocks
/// the runtime for a maximum of about 100us.
/// Going much lower does not yield any noticeable difference, since a single `recvmmsg`
/// batch of size 32 was observed to take 30us on some systems.
const RECV_TIME_BOUND: Duration = Duration::from_micros(50);
| 45.195402 | 105 | 0.742116 |
d5d6919e290b599d23659323789393472ab3495a
| 1,601 |
#![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// Auto-generated web-sys bindings (wasm-bindgen WebIDL output); the unusual
// attribute spacing is produced by the generator and is intentional.
#[wasm_bindgen]
extern "C" {
    # [ wasm_bindgen ( extends = HtmlElement , extends = Element , extends = Node , extends = EventTarget , extends = :: js_sys :: Object , js_name = HTMLTimeElement , typescript_type = "HTMLTimeElement" ) ]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `HtmlTimeElement` class."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/HTMLTimeElement)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `HtmlTimeElement`*"]
    pub type HtmlTimeElement;
    # [ wasm_bindgen ( structural , method , getter , js_class = "HTMLTimeElement" , js_name = dateTime ) ]
    #[doc = "Getter for the `dateTime` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/HTMLTimeElement/dateTime)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `HtmlTimeElement`*"]
    pub fn date_time(this: &HtmlTimeElement) -> String;
    # [ wasm_bindgen ( structural , method , setter , js_class = "HTMLTimeElement" , js_name = dateTime ) ]
    #[doc = "Setter for the `dateTime` field of this object."]
    #[doc = ""]
    #[doc = "[MDN Documentation](https://developer.mozilla.org/en-US/docs/Web/API/HTMLTimeElement/dateTime)"]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `HtmlTimeElement`*"]
    pub fn set_date_time(this: &HtmlTimeElement, value: &str);
}
38dc785bd75c47f0351aa965e2ab0a7572f256cc
| 2,618 |
use crate::prelude::*;
use std::collections::{HashMap, HashSet};
/// Map of node name -> entities reported by that node, shared behind an async `RwLock`.
#[derive(Debug, Clone, Default, Deref)]
pub struct ClusterNodes(SharedRwLock<HashMap<String, NodeEntities>>);
/// Map of entity id -> per-entity data, for a single node.
#[derive(Debug, Clone, Default, Deref)]
pub struct NodeEntities(SharedRwLock<HashMap<String, EntityDataContainer>>);
/// Shared, lock-protected handle to a single entity's [`EntityData`].
#[derive(Debug, Clone, Default, Deref)]
pub struct EntityDataContainer(SharedRwLock<EntityData>);
/// Data tracked per entity.
#[derive(Debug, Clone, Default)]
pub struct EntityData {
    // History of status strings; RollingVec presumably keeps a bounded window — TODO confirm.
    pub stat: RollingVec<String>,
    // Latest attribute document reported for the entity (replaced wholesale by `update_attr`).
    pub attr: Document,
}
/// Looks up `$name` in the lock-protected map `$item`, inserting a default-constructed `$type`
/// if absent, and returns a clone of the stored value.
///
/// The fast path takes only the read lock. The slow path re-checks under the write lock so a
/// concurrent inserter's entry is never overwritten: the original code unconditionally inserted
/// a fresh default after re-acquiring the lock, which could silently discard an entry inserted
/// by another task between the two lock acquisitions.
macro_rules! get_or_insert {
    ($item:ident, $name:ident, $type:ty) => {{
        let existing = {
            let lck = $item.read().await;
            (&*lck).get($name).cloned()
        };
        match existing {
            Some(found) => found,
            None => {
                let mut lck = $item.write().await;
                // `entry` re-checks occupancy under the write lock, closing the race window.
                lck.entry($name.into())
                    .or_insert_with(<$type as Default>::default)
                    .clone()
            }
        }
    }};
}
impl ClusterNodes {
    /// Appends `stat` to the rolling status history of `entity` on `node`, creating the
    /// node/entity entries on first use.
    pub async fn update_stat(&self, node: &str, entity: &str, stat: String) {
        let entities = get_or_insert!(self, node, NodeEntities);
        let container = get_or_insert!(entities, entity, EntityDataContainer);
        // `RollingVec::add` takes `&self`, so a non-mut guard suffices here.
        let guard = container.write().await;
        guard.stat.add(stat).await;
    }
    /// Replaces the attribute document of `entity` on `node`, creating the node/entity entries
    /// on first use.
    pub async fn update_attr(&self, node: &str, entity: &str, attr: Document) {
        let entities = get_or_insert!(self, node, NodeEntities);
        let container = get_or_insert!(entities, entity, EntityDataContainer);
        let mut guard = container.write().await;
        guard.attr = attr;
    }
    /// Returns the names of all known nodes.
    pub async fn get_nodes(&self) -> Vec<String> {
        let guard = self.read().await;
        guard.keys().cloned().collect()
    }
    /// Returns the union of entity ids across all nodes.
    pub async fn get_entities(&self) -> HashSet<String> {
        let guard = self.read().await;
        let mut result: HashSet<String> = HashSet::new();
        for entities in guard.values() {
            let entities = entities.read().await;
            // `extend` replaces the original per-element insert loop and its redundant
            // `.into_iter()` call; the resulting set is identical.
            result.extend(entities.keys().cloned());
        }
        result
    }
    /// Returns `(node, entity_id, data)` triples for every entity whose id starts with `dev_id`.
    pub async fn get_dev_id_prefix(&self, dev_id: &str) -> Vec<(String, String, EntityData)> {
        let guard = self.read().await;
        let mut result: Vec<(String, String, EntityData)> = Vec::new();
        for (node, entities) in guard.iter() {
            let entities = entities.read().await;
            for (entity_id, data) in entities.iter() {
                if entity_id.starts_with(dev_id) {
                    let data = data.read().await;
                    // Front insertion kept deliberately: callers may rely on the original
                    // (reversed-discovery) ordering of the result.
                    result.insert(0, (node.clone(), entity_id.clone(), data.clone()));
                }
            }
        }
        result
    }
}
| 31.542169 | 94 | 0.553858 |
915999e07d279f40142a9c95f5ca03b0f5c8170d
| 32,185 |
use bytes::BytesMut;
use fnv::FnvBuildHasher;
use futures::stream::Fuse;
use futures::sync::mpsc::{Sender, UnboundedReceiver};
use futures::sync::oneshot;
use futures::{Async, AsyncSink, Future, Poll, Sink, Stream};
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::mem;
use std::time::{Duration, Instant};
use tokio_timer::Delay;
use crate::header::map::HeaderMapExtension;
use crate::header::types::CSeq;
use crate::protocol::codec::decoder::request::DecodeError as RequestDecodeError;
use crate::protocol::codec::decoder::response::DecodeError as ResponseDecodeError;
use crate::protocol::codec::{CodecEvent, DecodeError, Message, ProtocolError};
use crate::protocol::connection::pending::{PendingRequestResponse, PendingRequestUpdate};
use crate::protocol::connection::sender::SenderHandle;
use crate::request::Request;
use crate::response::{
Response, BAD_REQUEST_RESPONSE, FORBIDDEN_RESPONSE, REQUEST_MESSAGE_BODY_TOO_LARGE_RESPONSE,
REQUEST_URI_TOO_LONG_RESPONSE, VERSION_NOT_SUPPORTED_RESPONSE,
};
use crate::status::StatusCode;
/// Receiver responsible for processing incoming messages, including forwarding requests to the
/// request handler and matching responses to pending requests.
///
/// The sub-receivers, the sender handle, and the stream are each wrapped in [`Option`] so the
/// corresponding capability can be shut down (dropped) independently of the others.
#[must_use = "futures do nothing unless polled"]
pub struct Receiver<TStream>
where
    TStream: Stream<Item = Message, Error = ProtocolError> + Send + 'static,
{
    /// How long should we wait before decoding is timed out and the connection is dropped.
    decode_timeout_duration: Duration,
    /// The current decoding timer tracking the time between the start of decoding and the end.
    decoding_timer: Option<Delay>,
    /// The forwarding receiver responsible for forwarding requests to the request handler in the
    /// order of their `"CSeq"`s.
    forwarding_receiver: Option<ForwardingReceiver>,
    /// Are requests allowed to be accepted.
    requests_allowed: bool,
    /// The response receiver responsible for matching incoming responses to pending requests.
    response_receiver: Option<ResponseReceiver>,
    /// A stream of codec events used to reset the decoding timer.
    rx_codec_event: Fuse<UnboundedReceiver<CodecEvent>>,
    /// A handle to the sender through which responses are sent.
    sender_handle: Option<SenderHandle>,
    /// The underlying connection stream from which messages are read and decoded from. This stream
    /// must represent an ordered, reliable protocol (e.g. TCP).
    stream: Option<TStream>,
}
impl<TStream> Receiver<TStream>
where
TStream: Stream<Item = Message, Error = ProtocolError> + Send + 'static,
{
/// Processes the given codec event.
///
/// Encoding events are ignored, but decoding events are used to create decoding timers such
/// that unlively connections are not kept open.
fn handle_codec_event(&mut self, event: CodecEvent) {
use self::CodecEvent::*;
match event {
DecodingStarted => {
let expire_time = Instant::now() + self.decode_timeout_duration;
self.decoding_timer = Some(Delay::new(expire_time));
}
DecodingEnded => {
self.decoding_timer = None;
}
_ => {}
}
}
/// Processes the given message.
///
/// If it is a request, it will be buffered internally until it is ready to be forwarded to the
/// request handler.
///
/// If it is a response, it will be matched against a pending request with the same `"CSeq"` (if
/// it exists).
fn handle_message(&mut self, message: Message) -> Result<(), RequestReceiverError> {
match message {
Message::Request(request) => {
if self.requests_allowed {
self.forwarding_receiver
.as_mut()
.expect("presence of request receiver should imply forwarding")
.handle_request(request)?;
}
}
Message::Response(response) => {
if let Some(response_receiver) = self.response_receiver.as_mut() {
response_receiver.handle_response(response);
}
}
}
Ok(())
}
    /// Handles the given protocol error that occurred while trying to poll the internal stream by
    /// sending a client error response (i.e. status code between 400-500).
    ///
    /// This mainly is to provide better error messages to the agent if we can pinpoint what the
    /// exact problem is (e.g. an unsupported RTSP version vs invalid syntax).
    fn handle_protocol_error(&self, error: &ProtocolError) {
        // If the sender handle has already been dropped, no response can be written.
        if let Some(sender_handle) = self.sender_handle.as_ref() {
            match error {
                ProtocolError::DecodeError(DecodeError::Request(
                    RequestDecodeError::UnsupportedVersion,
                ))
                | ProtocolError::DecodeError(DecodeError::Response(
                    ResponseDecodeError::UnsupportedVersion,
                )) => {
                    let message = Message::Response(VERSION_NOT_SUPPORTED_RESPONSE.clone());
                    send_message(message, sender_handle);
                }
                ProtocolError::DecodeError(DecodeError::Request(
                    RequestDecodeError::BodyTooLong,
                )) => {
                    let message =
                        Message::Response(REQUEST_MESSAGE_BODY_TOO_LARGE_RESPONSE.clone());
                    send_message(message, sender_handle);
                }
                ProtocolError::DecodeError(DecodeError::Request(
                    RequestDecodeError::URITooLong,
                )) => {
                    let message = Message::Response(REQUEST_URI_TOO_LONG_RESPONSE.clone());
                    send_message(message, sender_handle);
                }
                // Any other decode error gets a generic Bad Request (400).
                ProtocolError::DecodeError(_) => {
                    let message = Message::Response(BAD_REQUEST_RESPONSE.clone());
                    send_message(message, sender_handle);
                }
                // Non-decode errors (e.g. I/O failures) get no response.
                _ => {}
            }
        }
    }
/// Handles an error while trying to process a request.
///
/// This should not be called if the request receiver is shutdown.
fn handle_request_receiver_error(&self, error: RequestReceiverError) {
let sender_handle = self
.sender_handle
.as_ref()
.expect("request receiver error should imply message sending is active");
match error {
RequestReceiverError::BadRequest => {
let message = Message::Response(BAD_REQUEST_RESPONSE.clone());
send_message(message, sender_handle);
}
RequestReceiverError::CSeqDifferenceTooLarge => {
let message = Message::Response(FORBIDDEN_RESPONSE.clone());
send_message(message, sender_handle);
}
}
}
    /// Returns whether request forwarding to the handler is shutdown.
    ///
    /// If true, this implies that request receiving is shutdown. There may still be requests that
    /// are currently queued in the handler, so responses may still be written.
    pub fn is_forwarding_shutdown(&self) -> bool {
        self.forwarding_receiver.is_none()
    }
    /// Returns whether all receiving is shutdown (i.e. no requests or responses can be received).
    pub fn is_receiving_shutdown(&self) -> bool {
        self.is_request_receiver_shutdown() && self.is_response_receiver_shutdown()
    }
    /// Returns whether request receiving is shutdown.
    ///
    /// The forwarding receiver may still be running due to the backpressure of the handler queue.
    pub fn is_request_receiver_shutdown(&self) -> bool {
        !self.requests_allowed
    }
    /// Returns whether response receiving is shutdown.
    pub fn is_response_receiver_shutdown(&self) -> bool {
        self.response_receiver.is_none()
    }
    /// Returns whether all receiving and forwarding is shutdown, i.e. the whole receiver is done.
    fn is_shutdown(&self) -> bool {
        self.is_receiving_shutdown() && self.is_forwarding_shutdown()
    }
    /// Constructs a new receiver.
    ///
    /// - `stream` is the ordered, reliable message stream of the underlying connection.
    /// - `rx_pending_request` delivers pending request updates used for response matching.
    /// - `rx_codec_event` delivers codec events that drive the decode timeout timer.
    /// - `tx_incoming_request` is the ordered channel through which requests reach the handler.
    /// - `sender_handle` is used to send responses for errors detected while receiving.
    /// - `decode_timeout_duration` bounds how long a single message may take to decode.
    /// - `request_buffer_size` sizes the `"CSeq"` reordering buffer of the forwarding receiver.
    pub fn new(
        stream: TStream,
        rx_pending_request: UnboundedReceiver<PendingRequestUpdate>,
        rx_codec_event: UnboundedReceiver<CodecEvent>,
        tx_incoming_request: Sender<(CSeq, Request<BytesMut>)>,
        sender_handle: SenderHandle,
        decode_timeout_duration: Duration,
        request_buffer_size: usize,
    ) -> Self {
        Receiver {
            decode_timeout_duration,
            decoding_timer: None,
            forwarding_receiver: Some(ForwardingReceiver::new(
                tx_incoming_request,
                request_buffer_size,
            )),
            requests_allowed: true,
            response_receiver: Some(ResponseReceiver::new(rx_pending_request)),
            rx_codec_event: rx_codec_event.fuse(),
            sender_handle: Some(sender_handle),
            stream: Some(stream),
        }
    }
    /// Checks for new codec events.
    ///
    /// `Ok(Async::Ready(()))` is never returned.
    ///
    /// If `Ok(Async::NotReady)` is returned, then there are no more codec events to be processed.
    ///
    /// `Err(())` is never returned.
    pub fn poll_codec_events(&mut self) -> Poll<(), ()> {
        loop {
            match self
                .rx_codec_event
                .poll()
                .expect("`Receiver.rx_codec_event` should not error")
            {
                Async::Ready(Some(event)) => self.handle_codec_event(event),
                Async::NotReady => return Ok(Async::NotReady),
                // The sender side lives as long as the codec, so the stream never terminates.
                Async::Ready(None) => panic!("`Receiver.rx_codec_event` should not end"),
            }
        }
    }
    /// Checks to see if the decoding timer has expired.
    ///
    /// If `Ok(Async::Ready(()))` is returned, then the timer has expired.
    ///
    /// If `Ok(Async::NotReady)` is returned, then there is either no timer or it has not expired.
    ///
    /// If `Err(())` is returned, then there was a timer error due to there being too many timers.
    fn poll_decoding_timer(&mut self) -> Poll<(), ()> {
        if let Some(decoding_timer) = self.decoding_timer.as_mut() {
            match decoding_timer.poll() {
                Ok(Async::Ready(_)) => return Ok(Async::Ready(())),
                Ok(Async::NotReady) => return Ok(Async::NotReady),
                // `tokio_timer` errors with "at capacity" when too many timers exist.
                Err(ref error) if error.is_at_capacity() => return Err(()),
                // Any other timer error means the timer runtime was shut down underneath us.
                _ => panic!("decoding timer should not be shutdown"),
            }
        }
        Ok(Async::NotReady)
    }
    /// Drives the request and response receivers.
    ///
    /// If `Ok(Async::Ready(()))` is returned, then all of receiving is shutdown.
    ///
    /// If `Ok(Async::NotReady)` is returned, then receiving is still running, at least partially.
    /// Specifically, either the request or response receivers are still running.
    ///
    /// `Err(())` is never returned: error conditions (underlying IO errors, too many timers) are
    /// folded into `Ok(Async::Ready(()))`, i.e. they simply end receiving.
    fn poll_receiving(&mut self) -> Poll<(), ()> {
        if let Some(response_receiver) = self.response_receiver.as_mut() {
            match response_receiver.poll() {
                Ok(Async::Ready(_)) | Err(_) => {
                    // From our side of the connection, this implies that we are no longer sending
                    // anymore requests and so we do not care to process responses any longer.
                    self.shutdown_response_receiver();
                }
                _ => (),
            }
        }
        match self.poll_stream() {
            Ok(Async::Ready(_)) | Err(_) => {
                // There are no more messages to be received.
                return Ok(Async::Ready(()));
            }
            _ => (),
        }
        // The response here is effectively constant, so ignore it.
        let _ = self.poll_codec_events();
        match self.poll_decoding_timer() {
            Ok(Async::Ready(_)) | Err(_) => {
                // Either the decoding timer expired or there were too many timers.
                //
                // In the former, the other agent took too long to send anymore data, so we close
                // receiving in order to avoid locking up resources for no reason.
                //
                // In the latter, we no longer have a decoder timer. Shutting down receiving serves
                // two purposes here. One, it helps to prevent DoS attacks and two, it helps to shed
                // load.
                return Ok(Async::Ready(()));
            }
            _ => (),
        }
        Ok(Async::NotReady)
    }
    /// Checks if there are any messages to be processed from the internal connection stream.
    ///
    /// If `Ok(Async::Ready(()))` is returned, then the stream has been closed and no more messages
    /// will be received.
    ///
    /// If `Ok(Async::NotReady)` is returned, then either there are no more messages to be processed
    /// from the stream currently, or no messages can currently be accepted.
    ///
    /// If `Err(`[`ProtocolError`]`)` is returned, then there was a protocol error while trying to
    /// poll the stream.
    pub fn poll_stream(&mut self) -> Poll<(), ProtocolError> {
        // The stream is moved out of `self` and put back before returning `NotReady`, so it can be
        // polled while other fields of `self` are mutated. It is NOT put back on end-of-stream or
        // error, which is what marks the stream as finished for subsequent calls.
        match self.stream.take() {
            Some(mut stream) => loop {
                if let Some(forwarding_receiver) = self.forwarding_receiver.as_ref() {
                    // If the forwarding receiver is full, then any incoming requests cannot be
                    // handled. This also blocks any incoming responses, since we have to process
                    // messages as they come.
                    if forwarding_receiver.is_full() {
                        self.stream = Some(stream);
                        return Ok(Async::NotReady);
                    }
                }
                match stream.poll() {
                    Ok(Async::Ready(Some(message))) => {
                        if let Err(error) = self.handle_message(message) {
                            self.handle_request_receiver_error(error);
                        }
                    }
                    Ok(Async::NotReady) => {
                        self.stream = Some(stream);
                        return Ok(Async::NotReady);
                    }
                    Ok(Async::Ready(None)) => return Ok(Async::Ready(())),
                    Err(error) => {
                        self.handle_protocol_error(&error);
                        return Err(error);
                    }
                }
            },
            None => Ok(Async::Ready(())),
        }
    }
    /// Shuts down the forwarding receiver.
    ///
    /// Since the request receiver cannot be running without a forwarding receiver, this also shuts
    /// down the request receiver.
    ///
    /// Returns whether all receiving and forwarding is shutdown.
    pub fn shutdown_forwarding_receiver(&mut self) -> bool {
        self.forwarding_receiver = None;
        self.shutdown_request_receiver()
    }
    /// Shuts down all of receiving.
    ///
    /// The forwarding receiver will be shutdown only if there are no buffered requests awaiting
    /// forwarding.
    ///
    /// Returns whether all receiving and forwarding is shutdown.
    pub fn shutdown_receiving(&mut self) -> bool {
        self.shutdown_request_receiver();
        self.shutdown_response_receiver()
    }
    /// Shuts down the request receiver.
    ///
    /// The forwarding receiver will be shutdown only if there are no buffered requests awaiting
    /// forwarding.
    ///
    /// Returns whether all receiving and forwarding is shutdown.
    pub fn shutdown_request_receiver(&mut self) -> bool {
        self.requests_allowed = false;
        // Once both receiving directions are down, the sender handle and stream are released.
        if self.is_response_receiver_shutdown() {
            self.sender_handle = None;
            self.stream = None;
        }
        // Drop the forwarding receiver only if nothing is left to forward.
        if let Some(forwarding_receiver) = self.forwarding_receiver.as_mut() {
            if !forwarding_receiver.has_requests_ready() {
                self.forwarding_receiver = None;
            }
        }
        self.is_shutdown()
    }
    /// Shuts down the response receiver.
    ///
    /// Returns whether all receiving and forwarding is shutdown.
    pub fn shutdown_response_receiver(&mut self) -> bool {
        self.response_receiver = None;
        // Once both receiving directions are down, the sender handle and stream are released.
        if self.is_request_receiver_shutdown() {
            self.sender_handle = None;
            self.stream = None;
        }
        self.is_shutdown()
    }
}
impl<TStream> Future for Receiver<TStream>
where
    TStream: Stream<Item = Message, Error = ProtocolError> + Send + 'static,
{
    type Item = ();
    type Error = ();
    /// Processes any incoming messages, forwards ready requests to the request handler, and matches
    /// incoming responses to pending requests.
    ///
    /// If `Ok(Async::Ready(()))` is returned, then the receiver (including forwarding) is shutdown.
    ///
    /// If `Ok(Async::NotReady)` is returned, then there is no more progress that can be made
    /// currently.
    ///
    /// `Err(())` will never be returned.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        match self.poll_receiving() {
            Ok(Async::Ready(_)) | Err(_) => {
                self.shutdown_receiving();
            }
            _ => (),
        }
        // Forwarding keeps running after receiving stops until its buffered requests drain.
        let is_shutdown = match self.forwarding_receiver.as_mut() {
            Some(forwarding_receiver) => match forwarding_receiver.poll() {
                Err(_) => self.shutdown_forwarding_receiver(),
                Ok(Async::Ready(_)) if self.is_request_receiver_shutdown() => {
                    self.shutdown_forwarding_receiver()
                }
                _ => false,
            },
            None => self.is_shutdown(),
        };
        if is_shutdown {
            Ok(Async::Ready(()))
        } else {
            Ok(Async::NotReady)
        }
    }
}
/// Receiver responsible for forwarding incoming requests to the request handler in the order of
/// their `"CSeq"`s.
///
/// Even with a reliable transport protocol like TCP, there is no guarantee on the order requests
/// arrive in such as when multiplexing is involved (e.g. proxies). As a result, we need to
/// temporarily buffer requests internally until it is their turn to be forwarded.
#[must_use = "futures do nothing unless polled"]
struct ForwardingReceiver {
    /// A map from request `"CSeq"`s to the corresponding request.
    buffered_requests: HashMap<CSeq, Request<BytesMut>, FnvBuildHasher>,
    /// The expected sequence number for the next incoming request. This will be [`Option::None`] in
    /// the case where we have yet to receive a request, since it is the client that determines the
    /// initial `"CSeq"`.
    incoming_sequence_number: Option<CSeq>,
    /// The capacity of the buffer map.
    request_buffer_size: usize,
    /// The channel that connects to the request handler. Requests sent through this channel should
    /// be ordered by their `"CSeq"`s.
    tx_incoming_request: Sender<(CSeq, Request<BytesMut>)>,
}
impl ForwardingReceiver {
    /// Handles the given request by buffering it internally until it is time for it to be
    /// forwarded.
    ///
    /// There are some error conditions handled here:
    /// - If the request has no `"CSeq"` header, [`RequestReceiverError::BadRequest`] is returned.
    /// - If the difference between the current incoming sequence number and the request's `"CSeq"`
    ///   is larger than the internal buffer, [`RequestReceiverError::CSeqDifferenceTooLarge`] is
    ///   returned. NOTE(review): an earlier comment claimed a Not Enough Bandwidth (453) response
    ///   for this case, but `Receiver::handle_request_receiver_error` actually answers it with a
    ///   Forbidden (403) response — confirm which is intended.
    /// - If the `"CSeq"` of the request is associated to an already buffered request,
    ///   [`RequestReceiverError::BadRequest`] is returned.
    pub fn handle_request(
        &mut self,
        request: Request<BytesMut>,
    ) -> Result<(), RequestReceiverError> {
        match request.headers().typed_get::<CSeq>() {
            Some(cseq) => {
                let incoming_sequence_number = self.incoming_sequence_number_or_default(cseq);
                // NOTE(review): assumes `cseq - incoming_sequence_number` is well-defined when the
                // request's `"CSeq"` is smaller than the expected one (wrapping?) — confirm the
                // `Sub` semantics of `CSeq`.
                if *(cseq - incoming_sequence_number) > self.request_buffer_size as u32 {
                    Err(RequestReceiverError::CSeqDifferenceTooLarge)
                } else {
                    debug_assert!(self.buffered_requests.len() < self.request_buffer_size);
                    match self.buffered_requests.entry(cseq) {
                        Entry::Occupied(_) => Err(RequestReceiverError::BadRequest),
                        Entry::Vacant(entry) => {
                            entry.insert(request);
                            Ok(())
                        }
                    }
                }
            }
            None => Err(RequestReceiverError::BadRequest),
        }
    }
/// Returns whether the forwarding receiver has any requests that are ready to be forwarded.
/// Even if there are requests ready, it may not be possible to forward any at the current time
/// due to the handler queue being full.
pub fn has_requests_ready(&self) -> bool {
match self.incoming_sequence_number {
Some(incoming_sequence_number) => self
.buffered_requests
.contains_key(&incoming_sequence_number),
None => false,
}
}
/// Returns the current `"CSeq"` used for incoming requests or defaults to the given one.
///
/// Before the first request has been received on a connection, we do not know what `"CSeq"` we
/// should be looking at, so we default to whatever the first request has if we do not yet know.
/// In this case, the internal `"CSeq"` will be set to this default.
pub fn incoming_sequence_number_or_default(&mut self, cseq: CSeq) -> CSeq {
match self.incoming_sequence_number {
Some(cseq) => cseq,
None => {
self.incoming_sequence_number = Some(cseq);
cseq
}
}
}
/// Returns whether the internal request buffer is full.
pub fn is_full(&self) -> bool {
self.buffered_requests.len() >= self.request_buffer_size
}
    /// Constructs a new forwarding receiver that forwards requests, in order, through the given
    /// bounded channel of the given size.
    ///
    /// The size is also used internally in the forwarding receiver as the size of the buffer used
    /// to reorder the requests based on their `"CSeq"`.
    pub fn new(
        tx_incoming_request: Sender<(CSeq, Request<BytesMut>)>,
        request_buffer_size: usize,
    ) -> Self {
        ForwardingReceiver {
            buffered_requests: HashMap::with_capacity_and_hasher(
                request_buffer_size,
                FnvBuildHasher::default(),
            ),
            incoming_sequence_number: None,
            request_buffer_size,
            tx_incoming_request,
        }
    }
}
impl Future for ForwardingReceiver {
    type Item = ();
    type Error = ();
    /// Tries to forward any ready requests to the request handler.
    ///
    /// If `Ok(Async::Ready(()))` is returned, then all requests that could have been forwarded have
    /// been forwarded.
    ///
    /// If `Ok(Async::NotReady)` is returned, then the channel between the forwarding receiver and
    /// the request handler is full, and forwarding will have to be tried again later.
    ///
    /// If `Err(())` is returned, then the request handler's receiver has been dropped meaning the
    /// forwarding receiver can be shutdown.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        if let Some(mut incoming_sequence_number) = self.incoming_sequence_number {
            // Drain consecutive `"CSeq"`s starting at the expected one; a gap stops the loop.
            while let Some(request) = self.buffered_requests.remove(&incoming_sequence_number) {
                match self
                    .tx_incoming_request
                    .start_send((incoming_sequence_number, request))
                    .map_err(|_| ())?
                {
                    AsyncSink::Ready => {
                        // Advance to the next expected `"CSeq"` (wrapping on overflow).
                        incoming_sequence_number = incoming_sequence_number.wrapping_increment()
                    }
                    AsyncSink::NotReady((_, request)) => {
                        // The handler channel is full: put the request back into the buffer and
                        // remember where forwarding stopped before yielding.
                        self.buffered_requests
                            .insert(incoming_sequence_number, request);
                        self.incoming_sequence_number = Some(incoming_sequence_number);
                        return Ok(Async::NotReady);
                    }
                }
            }
            self.incoming_sequence_number = Some(incoming_sequence_number);
        }
        Ok(Async::Ready(()))
    }
}
/// The type responsible for keeping track of pending requests, matching incoming responses to those
/// requests, and notifying request owners of the match.
#[must_use = "futures do nothing unless polled"]
struct ResponseReceiver {
    /// A map of pending requests sent by this agent that are awaiting responses from the end-agent,
    /// keyed by the `"CSeq"` of the outgoing request.
    pending_requests: HashMap<CSeq, oneshot::Sender<PendingRequestResponse>, FnvBuildHasher>,
    /// The stream of pending request updates that is used to add or remove pending requests.
    rx_pending_request: Fuse<UnboundedReceiver<PendingRequestUpdate>>,
}
impl ResponseReceiver {
    /// Applies a single pending request update.
    ///
    /// An addition stores the response channel under the request's `"CSeq"` so a later response
    /// can be matched to it. A removal discards the stored channel, so even if a response for that
    /// `"CSeq"` arrives later, it will be ignored.
    fn handle_pending_request_update(&mut self, update: PendingRequestUpdate) {
        match update {
            PendingRequestUpdate::AddPendingRequest((cseq, tx_pending_request)) => {
                debug_assert!(!self.pending_requests.contains_key(&cseq));
                self.pending_requests.insert(cseq, tx_pending_request);
            }
            PendingRequestUpdate::RemovePendingRequest(cseq) => {
                debug_assert!(self.pending_requests.contains_key(&cseq));
                self.pending_requests.remove(&cseq);
            }
        }
    }
    /// Handles the given response by finding the matching pending request and sending it back to
    /// the future owning the pending request.
    ///
    /// Continue (100) responses are treated differently in that the pending request is not removed,
    /// but instead updated to let the owner know that the request can still be considered alive.
    pub fn handle_response(&mut self, response: Response<BytesMut>) {
        debug_assert!(!self.should_shutdown());
        // Ignore any responses without a `"CSeq"` header or without a corresponding pending
        // request.
        //
        // It is possible the oneshot receiver has been dropped before we can send a
        // response. If the pending request future was being polled on a separate thread
        // than the response receiver, the pending request future may have closed the
        // receiver and sent a new pending request update to cancel the request.
        if let Some(cseq) = response.headers().typed_get::<CSeq>() {
            if response.status_code() == StatusCode::Continue {
                if let Some(pending_request) = self.pending_requests.get_mut(&cseq) {
                    // Swap in a fresh oneshot so the owner can keep waiting for the final
                    // response; if the owner already hung up, drop the pending request entirely.
                    let (tx_pending_request, rx_pending_request) = oneshot::channel();
                    if mem::replace(pending_request, tx_pending_request)
                        .send(PendingRequestResponse::Continue(rx_pending_request))
                        .is_err()
                    {
                        self.pending_requests.remove(&cseq);
                    }
                }
            } else if let Some(pending_request) = self.pending_requests.remove(&cseq) {
                let _ = pending_request.send(PendingRequestResponse::Response(response));
            }
        }
    }
/// Constructs a new response receiver using the given pending request update stream.
pub fn new(rx_pending_request: UnboundedReceiver<PendingRequestUpdate>) -> Self {
ResponseReceiver {
pending_requests: HashMap::with_hasher(FnvBuildHasher::default()),
rx_pending_request: rx_pending_request.fuse(),
}
}
/// Removes all pending requests from the response receiver.
///
/// All pending request receivers will receive a message indicating that no request will be
/// matched, effectively resulting in a cancelled request.
fn remove_pending_requests(&mut self) {
self.pending_requests
.drain()
.for_each(|(_, tx_pending_request)| {
let _ = tx_pending_request.send(PendingRequestResponse::None);
});
}
/// Whether whether the response receiver is capable of receiving anymore responses.
pub fn should_shutdown(&self) -> bool {
self.rx_pending_request.is_done()
}
}
impl Drop for ResponseReceiver {
    /// Cancels all pending requests when the receiver is dropped.
    ///
    /// Remaining pending request updates are processed first so that requests added or removed
    /// just before the drop are accounted for, then every still-pending request is notified that
    /// no response is coming.
    fn drop(&mut self) {
        // `poll` is documented to never return `Err`, but `unwrap`ping here (as the original did)
        // risks a panic inside `drop`; panicking while already unwinding aborts the process.
        // Ignoring the result is equivalent under the documented contract and keeps drop
        // panic-free.
        let _ = self.poll();
        self.remove_pending_requests();
    }
}
impl Future for ResponseReceiver {
    type Item = ();
    type Error = ();
    /// Handles incoming pending request updates.
    ///
    /// A pending request update is either the addition of a pending request or the removal of a
    /// pending request (probably due to a timeout).
    ///
    /// If `Ok(Async::Ready(()))` is returned, then the pending request update stream has ended and
    /// the response receiver is shutdown.
    ///
    /// If `Ok(Async::NotReady)` is returned, then there are no pending request updates to be
    /// processed currently.
    ///
    /// The error `Err(())` will never be returned.
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        loop {
            match self
                .rx_pending_request
                .poll()
                .expect("`ResponseReceiver.rx_pending_request` should not error")
            {
                Async::Ready(Some(update)) => self.handle_pending_request_update(update),
                Async::NotReady => return Ok(Async::NotReady),
                Async::Ready(None) => {
                    // If the pending request stream has ended, this means there should be no
                    // pending requests. If there were pending requests, they could never expire
                    // because the stream used to remove them has ended. So, we assume that it
                    // cannot happen.
                    debug_assert!(self.pending_requests.is_empty());
                    return Ok(Async::Ready(()));
                }
            }
        }
    }
}
/// Error that may be returned when processing incoming requests.
///
/// `Receiver::handle_request_receiver_error` maps these variants to the responses sent back to
/// the agent.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
#[non_exhaustive]
enum RequestReceiverError {
    /// Some part of the request is invalid and cannot be processed (answered with 400).
    BadRequest,
    /// The difference in the next expected `"CSeq"` and the request's `"CSeq"` was too large to
    /// internally buffer (answered with a Forbidden response).
    CSeqDifferenceTooLarge,
}
impl Display for RequestReceiverError {
fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {
use self::RequestReceiverError::*;
match self {
BadRequest => write!(formatter, "bad request"),
CSeqDifferenceTooLarge => write!(formatter, "CSeq difference too large"),
}
}
}
impl Error for RequestReceiverError {}
/// Sends the given message through the given sender handler.
///
/// It is assumed the sender handle is always alive, so sending a message must not fail; a failure
/// here indicates a broken internal invariant rather than a recoverable condition.
fn send_message(message: Message, sender_handle: &SenderHandle) {
    sender_handle
        .try_send_message(message)
        .expect("`Receiver.sender_handle` should not have been dropped")
}
| 40.484277 | 100 | 0.605997 |
8f4fe0b11af6b83da103e61b9b5685eacc4940a4
| 7,953 |
//! Spawning shell in PTY via `portable-pty` crate.
// FIXME: Prompt incorrectly read from PTY in some cases (#24)
use portable_pty::{native_pty_system, Child, CommandBuilder, MasterPty, PtyPair, PtySize};
use std::{
collections::HashMap,
error::Error as StdError,
ffi::{OsStr, OsString},
io,
path::{Path, PathBuf},
};
use crate::{
traits::{ConfigureCommand, ShellProcess, SpawnShell, SpawnedShell},
utils::is_recoverable_kill_error,
};
/// Converts a boxed error into an [`io::Error`].
///
/// If the boxed error already is an `io::Error`, it is unboxed and returned as-is; otherwise it is
/// wrapped into a new `io::Error` with [`io::ErrorKind::Other`].
fn into_io_error(err: Box<dyn StdError + Send + Sync>) -> io::Error {
    match err.downcast::<io::Error>() {
        Ok(io_err) => *io_err,
        Err(other) => io::Error::new(io::ErrorKind::Other, other),
    }
}
/// Command to spawn in a pseudo-terminal (PTY).
///
/// # Examples
///
/// Since shell spawning is performed [in a generic way](crate::traits::SpawnShell),
/// [`PtyCommand`] can be used as a drop-in replacement for [`Command`](std::process::Command):
///
/// ```
/// # use term_transcript::{PtyCommand, ShellOptions, UserInput, Transcript};
/// # fn main() -> anyhow::Result<()> {
/// let transcript = Transcript::from_inputs(
///     &mut ShellOptions::new(PtyCommand::default()),
///     vec![UserInput::command(r#"echo "Hello world!""#)],
/// )?;
/// // do something with `transcript`...
/// # Ok(())
/// # }
/// ```
// Unfortunately, the `portable-pty` is structured in a way that makes reusing `Command`
// from the standard library impossible.
#[cfg_attr(docsrs, doc(cfg(feature = "portable-pty")))]
#[derive(Debug, Clone)]
pub struct PtyCommand {
    // Argument vector; the first element is the executable (see `new` / `arg`).
    args: Vec<OsString>,
    // Extra environment variables applied to the spawned command.
    env: HashMap<OsString, OsString>,
    // Working directory override, if any.
    current_dir: Option<PathBuf>,
    // Dimensions of the PTY the command will be attached to.
    pty_size: PtySize,
}
#[cfg(unix)]
impl Default for PtyCommand {
    /// Defaults to the POSIX shell (`sh`) on Unix-like systems.
    fn default() -> Self {
        Self::new("sh")
    }
}
#[cfg(windows)]
impl Default for PtyCommand {
    /// Defaults to `cmd` on Windows.
    ///
    /// `/Q /K` suppress command echoing and keep the shell running after executing the startup
    /// command, which disables echo and presumably switches the codepage to UTF-8 (`chcp 65001`)
    /// — TODO confirm against `cmd` documentation.
    fn default() -> Self {
        let mut cmd = Self::new("cmd");
        cmd.arg("/Q").arg("/K").arg("echo off && chcp 65001");
        cmd
    }
}
impl PtyCommand {
    /// Creates a new command based on the executable.
    ///
    /// This uses a reasonable default for the PTY size (19 character rows, 80 columns).
    pub fn new(command: impl Into<OsString>) -> Self {
        Self {
            args: vec![command.into()],
            env: HashMap::new(),
            current_dir: None,
            pty_size: PtySize {
                rows: 19,
                cols: 80,
                pixel_width: 0,
                pixel_height: 0,
            },
        }
    }
    /// Sets the size of the PTY in characters.
    pub fn with_size(&mut self, rows: u16, cols: u16) -> &mut Self {
        self.pty_size.rows = rows;
        self.pty_size.cols = cols;
        self
    }
    /// Adds a command argument.
    pub fn arg(&mut self, arg: impl Into<OsString>) -> &mut Self {
        self.args.push(arg.into());
        self
    }
    // Builds the `portable-pty` command description from the accumulated args, env vars, and
    // working directory.
    fn to_command_builder(&self) -> CommandBuilder {
        let mut builder = CommandBuilder::from_argv(self.args.clone());
        for (name, value) in &self.env {
            builder.env(name, value);
        }
        if let Some(current_dir) = &self.current_dir {
            builder.cwd(current_dir);
        }
        builder
    }
}
impl ConfigureCommand for PtyCommand {
fn current_dir(&mut self, dir: &Path) {
self.current_dir = Some(dir.to_owned());
}
fn env(&mut self, name: &str, value: &OsStr) {
self.env
.insert(OsStr::new(name).to_owned(), value.to_owned());
}
}
impl SpawnShell for PtyCommand {
    type ShellProcess = PtyShell;
    type Reader = Box<dyn io::Read + Send>;
    type Writer = Box<dyn MasterPty + Send>;
    /// Opens a PTY pair, spawns the command on the slave end, and returns the master end's
    /// reader/writer together with a handle to the child process.
    fn spawn_shell(&mut self) -> io::Result<SpawnedShell<Self>> {
        let pty_system = native_pty_system();
        // Allocate the PTY pair with the configured dimensions.
        let PtyPair { master, slave } = pty_system
            .openpty(self.pty_size)
            .map_err(|err| into_io_error(err.into()))?;
        // The child runs attached to the slave end of the PTY.
        let child = slave
            .spawn_command(self.to_command_builder())
            .map_err(|err| into_io_error(err.into()))?;
        // Reading happens from the master side; the master itself doubles as the writer.
        let reader = master
            .try_clone_reader()
            .map_err(|err| into_io_error(err.into()))?;
        Ok(SpawnedShell {
            shell: PtyShell { child },
            reader,
            writer: master,
        })
    }
}
/// Spawned shell process connected to pseudo-terminal (PTY).
#[cfg_attr(docsrs, doc(cfg(feature = "portable-pty")))]
#[derive(Debug)]
pub struct PtyShell {
    // Handle to the child process spawned on the slave end of the PTY.
    child: Box<dyn Child + Send + Sync>,
}
impl ShellProcess for PtyShell {
    /// PTY-based shells echo the input back, unlike pipe-based ones.
    fn is_echoing(&self) -> bool {
        true
    }

    /// Returns an error if the child process has already exited.
    fn check_is_alive(&mut self) -> io::Result<()> {
        match self.child.try_wait()? {
            None => Ok(()),
            Some(exit_status) => {
                let kind = if exit_status.success() {
                    "zero"
                } else {
                    "non-zero"
                };
                let message =
                    format!("Shell process has prematurely exited with {} exit status", kind);
                Err(io::Error::new(io::ErrorKind::BrokenPipe, message))
            }
        }
    }

    /// Kills the child process unless it has already finished on its own.
    fn terminate(mut self) -> io::Result<()> {
        if self.child.try_wait()?.is_none() {
            match self.child.kill() {
                Err(err) if !is_recoverable_kill_error(&err) => return Err(err),
                // The shell has already exited; we don't consider this an error.
                _ => {}
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{ShellOptions, Transcript, UserInput};
    use std::{
        io::{Read, Write},
        thread,
        time::Duration,
    };
    // Smoke test for the `SpawnShell` / `ShellProcess` implementations:
    // drives the default shell manually through the PTY master end.
    #[test]
    fn pty_trait_implementation() -> anyhow::Result<()> {
        let mut pty_command = PtyCommand::default();
        let mut spawned = pty_command.spawn_shell()?;
        // Give the shell a moment to start before probing it.
        thread::sleep(Duration::from_millis(100));
        spawned.shell.check_is_alive()?;
        writeln!(spawned.writer, "echo Hello")?;
        thread::sleep(Duration::from_millis(100));
        spawned.shell.check_is_alive()?;
        drop(spawned.writer); // should be enough to terminate the shell
        thread::sleep(Duration::from_millis(100));
        spawned.shell.terminate()?;
        let mut buffer = String::new();
        spawned.reader.read_to_string(&mut buffer)?;
        // The PTY echoes input, so the output must contain the echoed text.
        assert!(buffer.contains("Hello"), "Unexpected buffer: {:?}", buffer);
        Ok(())
    }
    // Captures a two-command transcript via the PTY, including a command
    // that writes to both stdout and stderr.
    #[test]
    fn creating_transcript_with_pty() -> anyhow::Result<()> {
        let mut options = ShellOptions::new(PtyCommand::default());
        let inputs = vec![
            UserInput::command("echo hello"),
            UserInput::command("echo foo && echo bar >&2"),
        ];
        let transcript = Transcript::from_inputs(&mut options, inputs)?;
        assert_eq!(transcript.interactions().len(), 2);
        {
            let interaction = &transcript.interactions()[0];
            assert_eq!(interaction.input().text, "echo hello");
            let output = interaction.output().as_ref();
            assert_eq!(output.trim(), "hello");
        }
        let interaction = &transcript.interactions()[1];
        assert_eq!(interaction.input().text, "echo foo && echo bar >&2");
        let output = interaction.output().as_ref();
        // In a PTY, stdout and stderr are interleaved into a single stream.
        assert_eq!(
            output.split_whitespace().collect::<Vec<_>>(),
            ["foo", "bar"]
        );
        Ok(())
    }
    // A trailing backslash continues the command onto the next line; the
    // transcript should still treat it as a single interaction.
    #[cfg(unix)]
    #[test]
    fn pty_transcript_with_multiline_input() -> anyhow::Result<()> {
        let mut options = ShellOptions::new(PtyCommand::default());
        let inputs = vec![UserInput::command("echo \\\nhello")];
        let transcript = Transcript::from_inputs(&mut options, inputs)?;
        assert_eq!(transcript.interactions().len(), 1);
        let interaction = &transcript.interactions()[0];
        let output = interaction.output().as_ref();
        assert_eq!(output.trim(), "hello");
        Ok(())
    }
}
| 29.455556 | 95 | 0.562806 |
69c496038feac585cc1fd06cf92ea206ee60cd8a
| 836 |
use crate::sockets;
use std::{ffi::CStr, io};
use tokio::io::{AsyncReadExt, AsyncWriteExt, PollEvented};
/// Async wrapper around a raw socket bound to a specific network interface.
pub struct AsyncBoundSocket {
    // Non-blocking bound socket registered with the tokio reactor.
    sock: PollEvented<sockets::BoundSocket>,
}
impl AsyncBoundSocket {
    /// Creates a non-blocking socket bound to the given interface and
    /// registers it with the tokio reactor.
    pub fn from_interface(iface: impl AsRef<CStr>) -> io::Result<Self> {
        let mut raw = sockets::Socket::new()?;
        raw.set_nonblocking(true)?;
        let bound = raw.bind(iface)?;
        let sock = PollEvented::new(bound)?;
        Ok(Self { sock })
    }
    /// Toggles promiscuous mode on the underlying socket.
    pub fn set_promiscuous(&mut self, p: bool) -> io::Result<()> {
        self.sock.get_mut().set_promiscuous(p)
    }
    /// Writes a frame; returns the number of bytes written.
    pub async fn send(&mut self, frame: &[u8]) -> io::Result<usize> {
        self.sock.write(frame).await
    }
    /// Reads a frame into `frame`; returns the number of bytes read.
    pub async fn recv(&mut self, frame: &mut [u8]) -> io::Result<usize> {
        self.sock.read(frame).await
    }
}
| 26.967742 | 73 | 0.599282 |
214bdb729f9818187849102a55240a02fe1086eb
| 22,671 |
// Import from `core` instead of from `std` since we are in no-std mode
use core::result::Result;
// Import heap related library from `alloc`
// https://doc.rust-lang.org/alloc/index.html
use alloc::{collections::BTreeMap, vec, vec::Vec};
// Import CKB syscalls and structures
// https://nervosnetwork.github.io/ckb-std/riscv64imac-unknown-none-elf/doc/ckb_std/index.html
use crate::{
cells::{
build_l2_sudt_script, collect_custodian_locks, collect_deposition_locks,
collect_withdrawal_locks, find_challenge_cell, find_one_stake_cell,
},
ckb_std::{ckb_constants::Source, debug},
types::{CellValue, DepositionRequestCell, WithdrawalCell},
};
use validator_utils::gw_common;
use validator_utils::gw_types;
use super::check_status;
use crate::types::BlockContext;
use validator_utils::error::Error;
use gw_common::{
builtins::CKB_SUDT_ACCOUNT_ID,
error::Error as StateError,
h256_ext::H256Ext,
merkle_utils::{calculate_compacted_account_root, calculate_merkle_root},
smt::{Blake2bHasher, CompiledMerkleProof},
state::State,
CKB_SUDT_SCRIPT_ARGS, H256,
};
use gw_types::{
bytes::Bytes,
core::{ScriptHashType, Status},
packed::{
AccountMerkleState, Byte32, GlobalState, L2Block, RawL2Block, RollupConfig,
WithdrawalRequest,
},
prelude::*,
};
/// Aggregates the assets held by a set of cells into a map keyed by SUDT
/// script hash, with CKB capacity tracked under `CKB_SUDT_SCRIPT_ARGS`.
/// Fails with `Error::AmountOverflow` if any sum exceeds `u128`.
fn build_assets_map_from_cells<'a, I: Iterator<Item = &'a CellValue>>(
    cells: I,
) -> Result<BTreeMap<H256, u128>, Error> {
    let mut assets: BTreeMap<H256, u128> = BTreeMap::new();
    for cell in cells {
        // Accumulate the SUDT amount under the cell's SUDT script hash.
        let sudt_entry = assets.entry(cell.sudt_script_hash).or_default();
        *sudt_entry = sudt_entry
            .checked_add(cell.amount)
            .ok_or(Error::AmountOverflow)?;
        // CKB capacity is tracked under the reserved CKB SUDT key.
        let ckb_entry = assets.entry(CKB_SUDT_SCRIPT_ARGS.into()).or_default();
        *ckb_entry = ckb_entry
            .checked_add(cell.capacity.into())
            .ok_or(Error::AmountOverflow)?;
    }
    Ok(assets)
}
fn check_withdrawal_cells(
context: &BlockContext,
mut withdrawal_requests: Vec<WithdrawalRequest>,
withdrawal_cells: &[WithdrawalCell],
) -> Result<(), Error> {
// iter outputs withdrawal cells, check each cell has a corresponded withdrawal request
for cell in withdrawal_cells {
// check withdrawal cell block info
let withdrawal_block_hash: H256 = cell.args.withdrawal_block_hash().unpack();
if withdrawal_block_hash != context.block_hash
|| cell.args.withdrawal_block_number().unpack() != context.number
{
return Err(Error::InvalidWithdrawalCell);
}
let cell_account_script_hash: H256 = cell.args.account_script_hash().unpack();
// check that there is a corresponded withdrawal request
match withdrawal_requests.iter().position(|request| {
let raw = request.raw();
let account_script_hash: H256 = raw.account_script_hash().unpack();
let sudt_script_hash: H256 = raw.sudt_script_hash().unpack();
let amount: u128 = raw.amount().unpack();
let capacity: u64 = raw.capacity().unpack();
account_script_hash == cell_account_script_hash
&& sudt_script_hash == cell.value.sudt_script_hash
&& amount == cell.value.amount
&& capacity == cell.value.capacity
}) {
Some(index) => {
withdrawal_requests.remove(index);
}
None => {
return Err(Error::InvalidWithdrawalCell);
}
}
}
// Some withdrawal requests hasn't has a corresponded withdrawal cell
if !withdrawal_requests.is_empty() {
return Err(Error::InvalidWithdrawalCell);
}
Ok(())
}
/// Validates the custodian cells consumed by this transaction and returns the
/// finalized custodian assets left after covering all withdrawals.
///
/// Checks performed:
/// 1. every *unfinalized* input custodian cell matches a reverted deposition
///    cell in the outputs, one-to-one (leftovers on either side are errors);
/// 2. the *finalized* input custodian assets are sufficient to cover the
///    output withdrawal cells, per SUDT type and CKB capacity.
///
/// The returned map is `finalized input assets - withdrawal assets`; the
/// caller reconciles it against the output custodian cells.
fn check_input_custodian_cells(
    config: &RollupConfig,
    context: &BlockContext,
    output_withdrawal_cells: Vec<WithdrawalCell>,
) -> Result<BTreeMap<H256, u128>, Error> {
    // collect input custodian cells
    // Split by deposition block number: cells at or before the finalized
    // block number are spendable (finalized), the rest are unfinalized.
    let (finalized_custodian_cells, unfinalized_custodian_cells): (Vec<_>, Vec<_>) =
        collect_custodian_locks(&context.rollup_type_hash, config, Source::Input)?
            .into_iter()
            .partition(|cell| {
                let number: u64 = cell.args.deposition_block_number().unpack();
                number <= context.finalized_number
            });
    // check unfinalized custodian cells == reverted deposition requests
    let mut reverted_deposit_cells =
        collect_deposition_locks(&context.rollup_type_hash, config, Source::Output)?;
    for custodian_cell in unfinalized_custodian_cells {
        // Each unfinalized custodian must map to exactly one reverted
        // deposition cell with the same lock args and value.
        let index = reverted_deposit_cells
            .iter()
            .position(|cell| {
                custodian_cell.args.deposition_lock_args() == cell.args
                    && custodian_cell.value == cell.value
            })
            .ok_or(Error::InvalidWithdrawalCell)?;
        reverted_deposit_cells.remove(index);
    }
    if !reverted_deposit_cells.is_empty() {
        return Err(Error::InvalidWithdrawalCell);
    }
    // check input finalized custodian cells >= withdrawal cells
    let withdrawal_assets =
        build_assets_map_from_cells(output_withdrawal_cells.iter().map(|c| &c.value))?;
    let mut input_finalized_assets =
        build_assets_map_from_cells(finalized_custodian_cells.iter().map(|c| &c.value))?;
    // calculate input finalized custodian assets - withdrawal assets
    // `checked_sub` failing means the withdrawals exceed the finalized funds.
    for (k, v) in withdrawal_assets {
        let balance = input_finalized_assets.entry(k).or_insert(0);
        *balance = balance
            .checked_sub(v)
            .ok_or(Error::InsufficientInputFinalizedAssets)?;
    }
    Ok(input_finalized_assets)
}
/// Validates the custodian cells produced by this transaction.
///
/// Checks performed:
/// 1. every *unfinalized* output custodian cell matches an input deposition
///    cell, one-to-one (leftovers on either side are errors);
/// 2. the *finalized* output custodian assets cover both the reverted
///    withdrawal cells being re-absorbed and the remaining input finalized
///    assets (`input_finalized_assets`, already net of withdrawals), with
///    nothing left over — i.e. finalized assets are fully conserved.
fn check_output_custodian_cells(
    config: &RollupConfig,
    context: &BlockContext,
    mut deposit_cells: Vec<DepositionRequestCell>,
    input_finalized_assets: BTreeMap<H256, u128>,
) -> Result<(), Error> {
    // collect output custodian cells
    // Split by deposition block number, same finalization rule as on inputs.
    let (finalized_custodian_cells, unfinalized_custodian_cells): (Vec<_>, Vec<_>) =
        collect_custodian_locks(&context.rollup_type_hash, config, Source::Output)?
            .into_iter()
            .partition(|cell| {
                let number: u64 = cell.args.deposition_block_number().unpack();
                number <= context.finalized_number
            });
    // check depositions request cells == unfinalized custodian cells
    for custodian_cell in unfinalized_custodian_cells {
        let index = deposit_cells
            .iter()
            .position(|cell| {
                custodian_cell.args.deposition_lock_args() == cell.args
                    && custodian_cell.value == cell.value
            })
            .ok_or(Error::InvalidCustodianCell)?;
        deposit_cells.remove(index);
    }
    if !deposit_cells.is_empty() {
        return Err(Error::InvalidDepositCell);
    }
    // check reverted withdrawals <= finalized custodian cells
    {
        let reverted_withdrawals =
            collect_withdrawal_locks(&context.rollup_type_hash, config, Source::Input)?;
        let reverted_withdrawal_assets =
            build_assets_map_from_cells(reverted_withdrawals.iter().map(|c| &c.value))?;
        let mut output_finalized_assets =
            build_assets_map_from_cells(finalized_custodian_cells.iter().map(|c| &c.value))?;
        // calculate output finalized assets - reverted withdrawal assets
        for (k, v) in reverted_withdrawal_assets {
            let balance = output_finalized_assets.entry(k).or_insert(0);
            *balance = balance
                .checked_sub(v)
                .ok_or(Error::InsufficientOutputFinalizedAssets)?;
        }
        // check the remain inputs finalized assets == outputs finalized assets
        // 1. output finalized assets - input finalized assets
        for (k, v) in input_finalized_assets {
            let balance = output_finalized_assets.entry(k).or_insert(0);
            *balance = balance
                .checked_sub(v)
                .ok_or(Error::InsufficientOutputFinalizedAssets)?;
        }
        // 2. check output finalized assets is empty
        // Any positive remainder means outputs hold more finalized assets
        // than can be accounted for by the inputs.
        let output_assets_is_empty = output_finalized_assets.iter().all(|(_k, v)| v == &0);
        if !output_assets_is_empty {
            return Err(Error::InsufficientInputFinalizedAssets);
        }
    }
    Ok(())
}
/// Credits layer-2 balances for every deposition consumed by this block.
///
/// For each deposit cell:
/// * validates that the depositor's account script is referenced by type hash
///   and its code hash is whitelisted in `config.allowed_eoa_type_hashes()`;
/// * finds (or creates) the depositor's account and mints the cell's CKB
///   capacity into the built-in CKB SUDT account;
/// * for non-CKB assets, finds (or creates) the layer-2 SUDT account derived
///   from the layer-1 SUDT script hash and mints the deposited amount.
///
/// Fails with `Error::InvalidDepositCell` on a nonzero amount attached to the
/// CKB pseudo-SUDT, or if the derived SUDT account collides with the built-in
/// CKB account.
fn mint_layer2_sudt(
    rollup_type_hash: &H256,
    config: &RollupConfig,
    context: &mut BlockContext,
    deposit_cells: &[DepositionRequestCell],
) -> Result<(), Error> {
    for request in deposit_cells {
        // check that account's script is a valid EOA script
        if request.account_script.hash_type() != ScriptHashType::Type.into() {
            return Err(Error::UnknownEOAScript);
        }
        // Idiomatic membership test (`any`) instead of `find(..).is_none()`.
        let is_allowed_eoa = config
            .allowed_eoa_type_hashes()
            .into_iter()
            .any(|code_hash| code_hash == request.account_script.code_hash());
        if !is_allowed_eoa {
            return Err(Error::UnknownEOAScript);
        }
        // find or create EOA
        let id = match context.get_account_id_by_script_hash(&request.account_script_hash)? {
            Some(id) => id,
            None => context.create_account(request.account_script_hash)?,
        };
        // mint CKB
        context.mint_sudt(CKB_SUDT_ACCOUNT_ID, id, request.value.capacity.into())?;
        if request.value.sudt_script_hash.as_slice() == CKB_SUDT_SCRIPT_ARGS {
            if request.value.amount != 0 {
                // SUDT amount must equals to zero if sudt script hash is equals to CKB_SUDT_SCRIPT_ARGS
                return Err(Error::InvalidDepositCell);
            }
            continue;
        }
        // find or create Simple UDT account
        let l2_sudt_script =
            build_l2_sudt_script(rollup_type_hash, config, &request.value.sudt_script_hash);
        let l2_sudt_script_hash: [u8; 32] = l2_sudt_script.hash();
        let sudt_id = match context.get_account_id_by_script_hash(&l2_sudt_script_hash.into())? {
            Some(id) => id,
            None => context.create_account(l2_sudt_script_hash.into())?,
        };
        // prevent fake CKB SUDT, the caller should filter these invalid depositions
        if sudt_id == CKB_SUDT_ACCOUNT_ID {
            return Err(Error::InvalidDepositCell);
        }
        // mint SUDT
        context.mint_sudt(sudt_id, id, request.value.amount)?;
    }
    Ok(())
}
/// Debits layer-2 balances for every withdrawal request in the block.
///
/// For each request this burns the withdrawn CKB capacity from the built-in
/// CKB SUDT account and the withdrawn amount from the corresponding layer-2
/// SUDT account, then checks that the request's nonce matches the account's
/// current nonce and bumps it. Both the EOA and the derived SUDT account must
/// already exist (`StateError::MissingKey` otherwise).
fn burn_layer2_sudt(
    rollup_type_hash: &H256,
    config: &RollupConfig,
    context: &mut BlockContext,
    block: &L2Block,
) -> Result<(), Error> {
    for request in block.withdrawals() {
        let raw = request.raw();
        // Derive the layer-2 SUDT script hash from the layer-1 asset hash.
        let l2_sudt_script_hash: [u8; 32] =
            build_l2_sudt_script(rollup_type_hash, config, &raw.sudt_script_hash().unpack()).hash();
        // find EOA
        let id = context
            .get_account_id_by_script_hash(&raw.account_script_hash().unpack())?
            .ok_or(StateError::MissingKey)?;
        // burn CKB
        context.burn_sudt(CKB_SUDT_ACCOUNT_ID, id, raw.capacity().unpack() as u128)?;
        // find Simple UDT account
        let sudt_id = context
            .get_account_id_by_script_hash(&l2_sudt_script_hash.into())?
            .ok_or(StateError::MissingKey)?;
        // burn sudt
        context.burn_sudt(sudt_id, id, raw.amount().unpack())?;
        // update nonce
        // The request is only valid against the account's current nonce;
        // on success the nonce is advanced by one.
        let nonce = context.get_nonce(id)?;
        let withdrawal_nonce: u32 = raw.nonce().unpack();
        if nonce != withdrawal_nonce {
            return Err(Error::InvalidWithdrawalRequest);
        }
        context.set_nonce(id, nonce.saturating_add(1))?;
    }
    Ok(())
}
/// Verifies the block-level merkle proofs against the previous and post
/// global states and assembles the `BlockContext` used for the remaining
/// state-transition checks.
///
/// Checks performed:
/// * the block number continues `prev_global_state` and the parent block
///   hash links to the previous tip;
/// * the block SMT proof shows this block's slot empty under the previous
///   block root and occupied by this block's hash under the post root;
/// * the provided kv pairs verify against the previous account root (unless
///   both the proof and the pairs are empty);
/// * the raw block's `prev_account` / `post_account` states equal the
///   corresponding global state fields.
fn load_l2block_context(
    rollup_type_hash: H256,
    config: &RollupConfig,
    l2block: &L2Block,
    prev_global_state: &GlobalState,
    post_global_state: &GlobalState,
) -> Result<BlockContext, Error> {
    let raw_block = l2block.raw();
    // Check pre block merkle proof
    let number: u64 = raw_block.number().unpack();
    if number != prev_global_state.block().count().unpack() {
        return Err(Error::InvalidBlock);
    }
    // verify parent block hash
    if raw_block.parent_block_hash() != prev_global_state.tip_block_hash() {
        return Err(Error::InvalidBlock);
    }
    let block_smt_key = RawL2Block::compute_smt_key(number);
    let block_proof: Bytes = l2block.block_proof().unpack();
    let block_merkle_proof = CompiledMerkleProof(block_proof.to_vec());
    let prev_block_root: [u8; 32] = prev_global_state.block().merkle_root().unpack();
    // The slot for this block must be empty (zero) before submission.
    if !block_merkle_proof
        .verify::<Blake2bHasher>(
            &prev_block_root.into(),
            vec![(block_smt_key.into(), H256::zero())],
        )
        .map_err(|_| Error::MerkleProof)?
    {
        return Err(Error::MerkleProof);
    }
    // Check post block merkle proof
    if number + 1 != post_global_state.block().count().unpack() {
        return Err(Error::InvalidBlock);
    }
    let post_block_root: [u8; 32] = post_global_state.block().merkle_root().unpack();
    let block_hash: H256 = raw_block.hash().into();
    // The same proof must show the slot holding this block's hash afterwards.
    if !block_merkle_proof
        .verify::<Blake2bHasher>(
            &post_block_root.into(),
            vec![(block_smt_key.into(), block_hash)],
        )
        .map_err(|_| Error::MerkleProof)?
    {
        return Err(Error::MerkleProof);
    }
    // Check pre account merkle proof
    let kv_state_proof: Bytes = l2block.kv_state_proof().unpack();
    let kv_merkle_proof = CompiledMerkleProof(kv_state_proof.to_vec());
    let kv_pairs: BTreeMap<_, _> = l2block
        .kv_state()
        .into_iter()
        .map(|kv| {
            let k: [u8; 32] = kv.k().unpack();
            let v: [u8; 32] = kv.v().unpack();
            (k.into(), v.into())
        })
        .collect();
    let prev_account_root: [u8; 32] = prev_global_state.account().merkle_root().unpack();
    // A block touching no state ships an empty proof and no kv pairs;
    // that combination is accepted without verification.
    let is_blank_kv = kv_merkle_proof.0.is_empty() && kv_pairs.is_empty();
    if !is_blank_kv
        && !kv_merkle_proof
            .verify::<Blake2bHasher>(
                &prev_account_root.into(),
                kv_pairs.iter().map(|(k, v)| (*k, *v)).collect(),
            )
            .map_err(|_| Error::MerkleProof)?
    {
        return Err(Error::MerkleProof);
    }
    // Check prev account state
    if raw_block.prev_account().as_slice() != prev_global_state.account().as_slice() {
        return Err(Error::InvalidBlock);
    }
    // Check post account state
    // Note: Because of the optimistic mechanism, we do not need to verify post account merkle root
    if raw_block.post_account().as_slice() != post_global_state.account().as_slice() {
        return Err(Error::InvalidPostGlobalState);
    }
    // Generate context
    let account_count: u32 = prev_global_state.account().count().unpack();
    let prev_account_root = prev_global_state.account().merkle_root().unpack();
    let finalized_number = number.saturating_sub(config.finality_blocks().unpack());
    let context = BlockContext {
        number,
        finalized_number,
        kv_pairs,
        kv_merkle_proof,
        account_count,
        rollup_type_hash,
        block_hash,
        prev_account_root,
    };
    Ok(context)
}
/// Checks the block producer's stake: exactly one sufficiently funded input
/// stake cell owned by the lock hash recorded in the block, re-locked in the
/// outputs with the stake block number updated to this block.
fn verify_block_producer(
    config: &RollupConfig,
    context: &BlockContext,
    block: &L2Block,
) -> Result<(), Error> {
    let raw_block = block.raw();
    let owner_lock_hash = raw_block.stake_cell_owner_lock_hash();
    let input_stake_cell = find_one_stake_cell(
        &context.rollup_type_hash,
        config,
        Source::Input,
        &owner_lock_hash,
    )?;
    // The stake must meet the minimum capacity required by the config.
    let required_capacity: u64 = config.required_staking_capacity().unpack();
    if input_stake_cell.value.capacity < required_capacity {
        debug!("stake cell's capacity is insufficient");
        return Err(Error::InvalidStakeCell);
    }
    // The output stake cell must carry the same value and the same args,
    // except that the stake block number is bumped to the current block.
    let output_stake_cell = find_one_stake_cell(
        &context.rollup_type_hash,
        config,
        Source::Output,
        &owner_lock_hash,
    )?;
    let expected_args = input_stake_cell
        .args
        .as_builder()
        .stake_block_number(raw_block.number())
        .build();
    let args_match = output_stake_cell.args == expected_args;
    let value_match = output_stake_cell.value == input_stake_cell.value;
    if !(args_match && value_match) {
        debug!("the output stake cell isn't corresponded to the input one");
        return Err(Error::InvalidStakeCell);
    }
    Ok(())
}
/// Validates the block's transaction commitments.
///
/// * `tx_witness_root` must equal the merkle root of all transaction witness
///   hashes, and `tx_count` must match both the compacted-post-root list and
///   the actual number of transactions;
/// * the compacted previous account root must match the context's current
///   state;
/// * the last compacted post root (or the compacted prev root when there are
///   no transactions) must match the compacted root of the raw block's
///   `post_account` state.
fn check_block_transactions(context: &BlockContext, block: &L2Block) -> Result<(), Error> {
    // check tx_witness_root
    let raw_block = block.raw();
    let submit_transactions = raw_block.submit_transactions();
    let tx_witness_root: [u8; 32] = submit_transactions.tx_witness_root().unpack();
    let tx_count: u32 = submit_transactions.tx_count().unpack();
    let compacted_post_root_list = submit_transactions.compacted_post_root_list();
    if tx_count != compacted_post_root_list.item_count() as u32
        || tx_count != block.transactions().len() as u32
    {
        return Err(Error::InvalidTxsState);
    }
    let leaves = block
        .transactions()
        .into_iter()
        .map(|tx| tx.witness_hash())
        .collect();
    let merkle_root: [u8; 32] = calculate_merkle_root(leaves)?;
    if tx_witness_root != merkle_root {
        return Err(Error::MerkleProof);
    }
    // check current account tree state
    let compacted_prev_root_hash: H256 = submit_transactions.compacted_prev_root_hash().unpack();
    if context.calculate_compacted_account_root()? != compacted_prev_root_hash {
        return Err(Error::InvalidTxsState);
    }
    // check post account tree state
    // With no transactions, the post state equals the prev state.
    let post_compacted_account_root = submit_transactions
        .compacted_post_root_list()
        .into_iter()
        .last()
        .unwrap_or_else(|| submit_transactions.compacted_prev_root_hash())；
    let block_post_compacted_account_root: Byte32 = {
        let account = raw_block.post_account();
        calculate_compacted_account_root(&account.merkle_root().unpack(), account.count().unpack())
            .pack()
    };
    if post_compacted_account_root != block_post_compacted_account_root {
        return Err(Error::InvalidTxsState);
    }
    Ok(())
}
fn check_block_withdrawals_root(block: &L2Block) -> Result<(), Error> {
// check withdrawal_witness_root
let submit_withdrawals = block.raw().submit_withdrawals();
let withdrawal_witness_root: [u8; 32] = submit_withdrawals.withdrawal_witness_root().unpack();
let withdrawal_count: u32 = submit_withdrawals.withdrawal_count().unpack();
if withdrawal_count != block.withdrawals().len() as u32 {
return Err(Error::InvalidBlock);
}
let leaves = block
.withdrawals()
.into_iter()
.map(|withdrawal| withdrawal.witness_hash())
.collect();
let merkle_root: [u8; 32] = calculate_merkle_root(leaves)?;
if withdrawal_witness_root != merkle_root {
return Err(Error::MerkleProof);
}
Ok(())
}
/// Verify Deposition & Withdrawal
///
/// Top-level check for a block-submission transaction while the rollup is in
/// `Running` status: verifies block/account merkle proofs, the producer's
/// stake, the deposition/withdrawal/custodian cells, applies the implied
/// burns and mints to the state context, and finally reconstructs the post
/// global state and compares it against the one committed on-chain.
pub fn verify(
    rollup_type_hash: H256,
    config: &RollupConfig,
    block: &L2Block,
    prev_global_state: &GlobalState,
    post_global_state: &GlobalState,
) -> Result<(), Error> {
    check_status(&prev_global_state, Status::Running)?;
    // Check withdrawals root
    check_block_withdrawals_root(block)?;
    let mut context = load_l2block_context(
        rollup_type_hash,
        config,
        block,
        prev_global_state,
        post_global_state,
    )?;
    // Verify block producer
    verify_block_producer(config, &context, block)?;
    // collect withdrawal cells
    let withdrawal_cells: Vec<_> =
        collect_withdrawal_locks(&context.rollup_type_hash, config, Source::Output)?;
    // collect deposit cells
    let deposit_cells = collect_deposition_locks(&context.rollup_type_hash, config, Source::Input)?;
    // Check new cells and reverted cells: deposition / withdrawal / custodian
    let withdrawal_requests = block.withdrawals().into_iter().collect();
    check_withdrawal_cells(&context, withdrawal_requests, &withdrawal_cells)?;
    let input_finalized_assets = check_input_custodian_cells(config, &context, withdrawal_cells)?;
    check_output_custodian_cells(
        config,
        &context,
        deposit_cells.clone(),
        input_finalized_assets,
    )?;
    // Ensure no challenge cells in submitting block transaction
    if find_challenge_cell(&rollup_type_hash, config, Source::Input)?.is_some()
        || find_challenge_cell(&rollup_type_hash, config, Source::Output)?.is_some()
    {
        return Err(Error::InvalidChallengeCell);
    }
    // Withdrawal token: Layer2 SUDT -> withdrawals
    burn_layer2_sudt(&rollup_type_hash, config, &mut context, block)?;
    // Mint token: deposition requests -> layer2 SUDT
    mint_layer2_sudt(&rollup_type_hash, config, &mut context, &deposit_cells)?;
    // Check transactions
    check_block_transactions(&context, block)?;
    // Verify Post state
    // Rebuild the expected post global state from the prev state plus the
    // verified transition, and require byte-equality with the committed one.
    let actual_post_global_state = {
        let root = context.calculate_root()?;
        let count = context.get_account_count()?;
        // calculate new account merkle state from block_context
        let account_merkle_state = AccountMerkleState::new_builder()
            .merkle_root(root.pack())
            .count(count.pack())
            .build();
        // we have verified the post block merkle state
        let block_merkle_state = post_global_state.block();
        // last finalized block number
        let last_finalized_block_number = context.finalized_number;
        prev_global_state
            .clone()
            .as_builder()
            .account(account_merkle_state)
            .block(block_merkle_state)
            .tip_block_hash(context.block_hash.pack())
            .last_finalized_block_number(last_finalized_block_number.pack())
            .build()
    };
    if &actual_post_global_state != post_global_state {
        return Err(Error::InvalidPostGlobalState);
    }
    Ok(())
}
// Verify reverted_block_root
pub fn verify_reverted_block_hashes(
reverted_block_hashes: Vec<H256>,
reverted_block_proof: Bytes,
prev_global_state: &GlobalState,
) -> Result<(), Error> {
let reverted_block_root = prev_global_state.reverted_block_root().unpack();
let merkle_proof = CompiledMerkleProof(reverted_block_proof.into());
let leaves: Vec<_> = reverted_block_hashes
.into_iter()
.map(|k| (k, H256::one()))
.collect();
if leaves.is_empty() && merkle_proof.0.is_empty() {
return Ok(());
}
let valid = merkle_proof.verify::<Blake2bHasher>(&reverted_block_root, leaves)?;
if !valid {
return Err(Error::MerkleProof);
}
Ok(())
}
| 37.044118 | 104 | 0.646818 |
4ba16239fdfd4718276e2d1e4fa3507345bffbc2
| 25,685 |
use crate::err::{self, Result};
use crate::evtx_parser::ReadSeek;
use encoding::EncodingRef;
use snafu::{OptionExt, ResultExt};
pub use byteorder::{LittleEndian, ReadBytesExt};
use crate::binxml::deserializer::BinXmlDeserializer;
use winstructs::guid::Guid;
use crate::model::deserialized::BinXMLDeserializedTokens;
use crate::utils::{
read_ansi_encoded_string, read_len_prefixed_utf16_string, read_null_terminated_utf16_string,
read_systemtime, read_utf16_by_size,
};
use chrono::{DateTime, Utc};
use log::trace;
use serde_json::{json, Value};
use std::borrow::Cow;
use std::io::{Cursor, Read, Seek, SeekFrom};
use std::string::ToString;
use winstructs::security::Sid;
use crate::evtx_chunk::EvtxChunk;
use std::fmt::Write;
/// A decoded BinXML value, either scalar or array-typed.
///
/// Lifetime `'a` allows variants such as `BinaryType` to borrow directly
/// from the underlying chunk data, and `Cow` string variants to avoid
/// copying when the data can be borrowed.
#[derive(Debug, PartialOrd, PartialEq, Clone)]
pub enum BinXmlValue<'a> {
    NullType,
    // String may originate in substitution.
    StringType(Cow<'a, str>),
    AnsiStringType(Cow<'a, str>),
    Int8Type(i8),
    UInt8Type(u8),
    Int16Type(i16),
    UInt16Type(u16),
    Int32Type(i32),
    UInt32Type(u32),
    Int64Type(i64),
    UInt64Type(u64),
    Real32Type(f32),
    Real64Type(f64),
    BoolType(bool),
    // Borrows the raw bytes from the chunk without copying.
    BinaryType(&'a [u8]),
    GuidType(Guid),
    SizeTType(usize),
    FileTimeType(DateTime<Utc>),
    SysTimeType(DateTime<Utc>),
    SidType(Sid),
    // Hex integers are kept as their textual representation.
    HexInt32Type(Cow<'a, str>),
    HexInt64Type(Cow<'a, str>),
    EvtHandle,
    // Because of the recursive type, we instantiate this enum via a method of the Deserializer
    BinXmlType(Vec<BinXMLDeserializedTokens<'a>>),
    EvtXml,
    // Array counterparts of the scalar variants above.
    StringArrayType(Vec<Cow<'a, str>>),
    AnsiStringArrayType,
    Int8ArrayType(Vec<i8>),
    UInt8ArrayType(Vec<u8>),
    Int16ArrayType(Vec<i16>),
    UInt16ArrayType(Vec<u16>),
    Int32ArrayType(Vec<i32>),
    UInt32ArrayType(Vec<u32>),
    Int64ArrayType(Vec<i64>),
    UInt64ArrayType(Vec<u64>),
    Real32ArrayType(Vec<f32>),
    Real64ArrayType(Vec<f64>),
    BoolArrayType(Vec<bool>),
    BinaryArrayType,
    GuidArrayType(Vec<Guid>),
    SizeTArrayType,
    FileTimeArrayType(Vec<DateTime<Utc>>),
    SysTimeArrayType(Vec<DateTime<Utc>>),
    SidArrayType(Vec<Sid>),
    HexInt32ArrayType(Vec<Cow<'a, str>>),
    HexInt64ArrayType(Vec<Cow<'a, str>>),
    EvtArrayHandle,
    BinXmlArrayType,
    EvtXmlArrayType,
}
/// Payload-free type tags mirroring the variants of [`BinXmlValue`].
///
/// Parsed from a single descriptor byte via [`BinXmlValueType::from_u8`].
#[derive(Debug, PartialOrd, PartialEq, Clone)]
pub enum BinXmlValueType {
    NullType,
    StringType,
    AnsiStringType,
    Int8Type,
    UInt8Type,
    Int16Type,
    UInt16Type,
    Int32Type,
    UInt32Type,
    Int64Type,
    UInt64Type,
    Real32Type,
    Real64Type,
    BoolType,
    BinaryType,
    GuidType,
    SizeTType,
    FileTimeType,
    SysTimeType,
    SidType,
    HexInt32Type,
    HexInt64Type,
    EvtHandle,
    BinXmlType,
    EvtXmlType,
    StringArrayType,
    AnsiStringArrayType,
    Int8ArrayType,
    UInt8ArrayType,
    Int16ArrayType,
    UInt16ArrayType,
    Int32ArrayType,
    UInt32ArrayType,
    Int64ArrayType,
    UInt64ArrayType,
    Real32ArrayType,
    Real64ArrayType,
    BoolArrayType,
    BinaryArrayType,
    GuidArrayType,
    SizeTArrayType,
    FileTimeArrayType,
    SysTimeArrayType,
    SidArrayType,
    HexInt32ArrayType,
    HexInt64ArrayType,
    EvtHandleArray,
    BinXmlArrayType,
    EvtXmlArrayType,
}
impl BinXmlValueType {
    /// Maps a raw BinXML value-type descriptor byte to its `BinXmlValueType`.
    ///
    /// Scalar types occupy `0x00..=0x15` (plus the `0x20`/`0x21`/`0x23`
    /// handle/xml variants); array types carry the high bit set
    /// (`0x81..=0x95`). Returns `None` for unrecognized bytes.
    pub fn from_u8(byte: u8) -> Option<BinXmlValueType> {
        match byte {
            0x00 => Some(BinXmlValueType::NullType),
            0x01 => Some(BinXmlValueType::StringType),
            0x02 => Some(BinXmlValueType::AnsiStringType),
            0x03 => Some(BinXmlValueType::Int8Type),
            0x04 => Some(BinXmlValueType::UInt8Type),
            0x05 => Some(BinXmlValueType::Int16Type),
            0x06 => Some(BinXmlValueType::UInt16Type),
            0x07 => Some(BinXmlValueType::Int32Type),
            0x08 => Some(BinXmlValueType::UInt32Type),
            0x09 => Some(BinXmlValueType::Int64Type),
            0x0a => Some(BinXmlValueType::UInt64Type),
            0x0b => Some(BinXmlValueType::Real32Type),
            0x0c => Some(BinXmlValueType::Real64Type),
            0x0d => Some(BinXmlValueType::BoolType),
            0x0e => Some(BinXmlValueType::BinaryType),
            0x0f => Some(BinXmlValueType::GuidType),
            0x10 => Some(BinXmlValueType::SizeTType),
            0x11 => Some(BinXmlValueType::FileTimeType),
            0x12 => Some(BinXmlValueType::SysTimeType),
            0x13 => Some(BinXmlValueType::SidType),
            0x14 => Some(BinXmlValueType::HexInt32Type),
            0x15 => Some(BinXmlValueType::HexInt64Type),
            0x20 => Some(BinXmlValueType::EvtHandle),
            0x21 => Some(BinXmlValueType::BinXmlType),
            0x23 => Some(BinXmlValueType::EvtXmlType),
            0x81 => Some(BinXmlValueType::StringArrayType),
            0x82 => Some(BinXmlValueType::AnsiStringArrayType),
            0x83 => Some(BinXmlValueType::Int8ArrayType),
            0x84 => Some(BinXmlValueType::UInt8ArrayType),
            0x85 => Some(BinXmlValueType::Int16ArrayType),
            0x86 => Some(BinXmlValueType::UInt16ArrayType),
            0x87 => Some(BinXmlValueType::Int32ArrayType),
            0x88 => Some(BinXmlValueType::UInt32ArrayType),
            0x89 => Some(BinXmlValueType::Int64ArrayType),
            0x8a => Some(BinXmlValueType::UInt64ArrayType),
            0x8b => Some(BinXmlValueType::Real32ArrayType),
            0x8c => Some(BinXmlValueType::Real64ArrayType),
            0x8d => Some(BinXmlValueType::BoolArrayType),
            0x8e => Some(BinXmlValueType::BinaryArrayType),
            0x8f => Some(BinXmlValueType::GuidArrayType),
            0x90 => Some(BinXmlValueType::SizeTArrayType),
            0x91 => Some(BinXmlValueType::FileTimeArrayType),
            0x92 => Some(BinXmlValueType::SysTimeArrayType),
            0x93 => Some(BinXmlValueType::SidArrayType),
            0x94 => Some(BinXmlValueType::HexInt32ArrayType),
            0x95 => Some(BinXmlValueType::HexInt64ArrayType),
            _ => None,
        }
    }
}
impl<'a> BinXmlValue<'a> {
    /// Reads a single value from the BinXML stream: a one-byte type
    /// descriptor followed by the type-specific payload.
    ///
    /// # Errors
    ///
    /// Fails if the descriptor byte does not map to a known value type
    /// (reported with the stream offset) or the payload itself cannot be
    /// deserialized.
    pub fn from_binxml_stream(
        cursor: &mut Cursor<&'a [u8]>,
        chunk: Option<&'a EvtxChunk<'a>>,
        size: Option<u16>,
        ansi_codec: EncodingRef,
    ) -> Result<BinXmlValue<'a>> {
        let value_type_token = try_read!(cursor, u8);
        let value_type =
            BinXmlValueType::from_u8(value_type_token).context(err::InvalidValueVariant {
                value: value_type_token,
                offset: cursor.position(),
            })?;
        let data = Self::deserialize_value_type(&value_type, cursor, chunk, size, ansi_codec)?;
        Ok(data)
    }
pub fn deserialize_value_type(
value_type: &BinXmlValueType,
cursor: &mut Cursor<&'a [u8]>,
chunk: Option<&'a EvtxChunk<'a>>,
size: Option<u16>,
ansi_codec: EncodingRef,
) -> Result<BinXmlValue<'a>> {
trace!("deserialize_value_type: {:?}, {:?}", value_type, size);
let value = match (value_type, size) {
(BinXmlValueType::NullType, _) => BinXmlValue::NullType,
(BinXmlValueType::StringType, Some(sz)) => BinXmlValue::StringType(Cow::Owned(
read_utf16_by_size(cursor, u64::from(sz))
.context(err::FailedToDecodeUTF16String {
offset: cursor.position(),
})?
.unwrap_or_else(|| "".to_owned()),
)),
(BinXmlValueType::StringType, None) => {
BinXmlValue::StringType(try_read!(cursor, utf_16_str))
}
(BinXmlValueType::AnsiStringType, Some(sz)) => BinXmlValue::AnsiStringType(Cow::Owned(
read_ansi_encoded_string(cursor, u64::from(sz), ansi_codec)?
.unwrap_or_else(|| "".to_owned()),
)),
// AnsiString are always sized according to docs
(BinXmlValueType::AnsiStringType, None) => err::UnimplementedValueVariant {
name: "AnsiString",
size: None,
offset: cursor.position(),
}
.fail()?,
(BinXmlValueType::Int8Type, _) => BinXmlValue::Int8Type(try_read!(cursor, i8)),
(BinXmlValueType::UInt8Type, _) => BinXmlValue::UInt8Type(try_read!(cursor, u8)),
(BinXmlValueType::Int16Type, _) => BinXmlValue::Int16Type(try_read!(cursor, i16)),
(BinXmlValueType::UInt16Type, _) => BinXmlValue::UInt16Type(try_read!(cursor, u16)),
(BinXmlValueType::Int32Type, _) => BinXmlValue::Int32Type(try_read!(cursor, i32)),
(BinXmlValueType::UInt32Type, _) => BinXmlValue::UInt32Type(try_read!(cursor, u32)),
(BinXmlValueType::Int64Type, _) => BinXmlValue::Int64Type(try_read!(cursor, i64)),
(BinXmlValueType::UInt64Type, _) => BinXmlValue::UInt64Type(try_read!(cursor, u64)),
(BinXmlValueType::Real32Type, _) => BinXmlValue::Real32Type(try_read!(cursor, f32)),
(BinXmlValueType::Real64Type, _) => BinXmlValue::Real64Type(try_read!(cursor, f64)),
(BinXmlValueType::BoolType, _) => BinXmlValue::BoolType(try_read!(cursor, bool)),
(BinXmlValueType::GuidType, _) => BinXmlValue::GuidType(try_read!(cursor, guid)),
// TODO: find a sample with this token.
(BinXmlValueType::SizeTType, _) => err::UnimplementedValueVariant {
name: "SizeT",
size,
offset: cursor.position(),
}
.fail()?,
(BinXmlValueType::FileTimeType, _) => {
BinXmlValue::FileTimeType(try_read!(cursor, filetime))
}
(BinXmlValueType::SysTimeType, _) => {
BinXmlValue::SysTimeType(try_read!(cursor, systime))
}
(BinXmlValueType::SidType, _) => BinXmlValue::SidType(try_read!(cursor, sid)),
(BinXmlValueType::HexInt32Type, _) => {
BinXmlValue::HexInt32Type(try_read!(cursor, hex32))
}
(BinXmlValueType::HexInt64Type, _) => {
BinXmlValue::HexInt64Type(try_read!(cursor, hex64))
}
(BinXmlValueType::BinXmlType, None) => {
let tokens = BinXmlDeserializer::read_binxml_fragment(
cursor, chunk, None, true, ansi_codec,
)?;
BinXmlValue::BinXmlType(tokens)
}
(BinXmlValueType::BinXmlType, Some(sz)) => {
let tokens = BinXmlDeserializer::read_binxml_fragment(
cursor,
chunk,
Some(u32::from(sz)),
true,
ansi_codec,
)?;
BinXmlValue::BinXmlType(tokens)
}
(BinXmlValueType::BinaryType, Some(sz)) => {
// Borrow the underlying data from the cursor, and return a ref to it.
let data = *cursor.get_ref();
let bytes =
&data[cursor.position() as usize..(cursor.position() + u64::from(sz)) as usize];
cursor.seek(SeekFrom::Current(i64::from(sz)))?;
BinXmlValue::BinaryType(bytes)
}
// The array types are always sized.
(BinXmlValueType::StringArrayType, Some(sz)) => BinXmlValue::StringArrayType(
try_read_sized_array!(cursor, null_terminated_utf_16_str, sz),
),
(BinXmlValueType::Int8ArrayType, Some(sz)) => {
BinXmlValue::Int8ArrayType(try_read_sized_array!(cursor, i8, sz))
}
(BinXmlValueType::UInt8ArrayType, Some(sz)) => {
let mut data = vec![0; sz as usize];
cursor.read_exact(&mut data)?;
BinXmlValue::UInt8ArrayType(data)
}
(BinXmlValueType::Int16ArrayType, Some(sz)) => {
BinXmlValue::Int16ArrayType(try_read_sized_array!(cursor, i16, sz))
}
(BinXmlValueType::UInt16ArrayType, Some(sz)) => {
BinXmlValue::UInt16ArrayType(try_read_sized_array!(cursor, u16, sz))
}
(BinXmlValueType::Int32ArrayType, Some(sz)) => {
BinXmlValue::Int32ArrayType(try_read_sized_array!(cursor, i32, sz))
}
(BinXmlValueType::UInt32ArrayType, Some(sz)) => {
BinXmlValue::UInt32ArrayType(try_read_sized_array!(cursor, u32, sz))
}
(BinXmlValueType::Int64ArrayType, Some(sz)) => {
BinXmlValue::Int64ArrayType(try_read_sized_array!(cursor, i64, sz))
}
(BinXmlValueType::UInt64ArrayType, Some(sz)) => {
BinXmlValue::UInt64ArrayType(try_read_sized_array!(cursor, u64, sz))
}
(BinXmlValueType::Real32ArrayType, Some(sz)) => {
BinXmlValue::Real32ArrayType(try_read_sized_array!(cursor, f32, sz))
}
(BinXmlValueType::Real64ArrayType, Some(sz)) => {
BinXmlValue::Real64ArrayType(try_read_sized_array!(cursor, f64, sz))
}
(BinXmlValueType::BoolArrayType, Some(sz)) => {
BinXmlValue::BoolArrayType(try_read_sized_array!(cursor, bool, sz))
}
(BinXmlValueType::GuidArrayType, Some(sz)) => {
BinXmlValue::GuidArrayType(try_read_sized_array!(cursor, guid, sz))
}
(BinXmlValueType::FileTimeArrayType, Some(sz)) => {
BinXmlValue::FileTimeArrayType(try_read_sized_array!(cursor, filetime, sz))
}
(BinXmlValueType::SysTimeArrayType, Some(sz)) => {
BinXmlValue::SysTimeArrayType(try_read_sized_array!(cursor, systime, sz))
}
(BinXmlValueType::SidArrayType, Some(sz)) => {
BinXmlValue::SidArrayType(try_read_sized_array!(cursor, sid, sz))
}
(BinXmlValueType::HexInt32ArrayType, Some(sz)) => {
BinXmlValue::HexInt32ArrayType(try_read_sized_array!(cursor, hex32, sz))
}
(BinXmlValueType::HexInt64ArrayType, Some(sz)) => {
BinXmlValue::HexInt64ArrayType(try_read_sized_array!(cursor, hex64, sz))
}
_ => err::UnimplementedValueVariant {
name: format!("{:?}", value_type),
size,
offset: cursor.position(),
}
.fail()?,
};
Ok(value)
}
}
/// Joins the string representations of a sequence with commas,
/// e.g. `[1, 2, 3]` becomes `"1,2,3"` (empty input yields `""`).
fn to_delimited_list<N: ToString>(ns: impl AsRef<Vec<N>>) -> String {
    let rendered: Vec<String> = ns.as_ref().iter().map(|n| n.to_string()).collect();
    rendered.join(",")
}
impl<'c> Into<serde_json::Value> for BinXmlValue<'c> {
fn into(self) -> Value {
match self {
BinXmlValue::NullType => Value::Null,
BinXmlValue::StringType(s) => json!(s.into_owned()),
BinXmlValue::AnsiStringType(s) => json!(s.into_owned()),
BinXmlValue::Int8Type(num) => json!(num),
BinXmlValue::UInt8Type(num) => json!(num),
BinXmlValue::Int16Type(num) => json!(num),
BinXmlValue::UInt16Type(num) => json!(num),
BinXmlValue::Int32Type(num) => json!(num),
BinXmlValue::UInt32Type(num) => json!(num),
BinXmlValue::Int64Type(num) => json!(num),
BinXmlValue::UInt64Type(num) => json!(num),
BinXmlValue::Real32Type(num) => json!(num),
BinXmlValue::Real64Type(num) => json!(num),
BinXmlValue::BoolType(num) => json!(num),
BinXmlValue::BinaryType(bytes) => {
// Bytes will be formatted as const length of 2 with '0' padding.
let repr: String = bytes.iter().map(|b| format!("{:02X}", b)).collect();
json!(repr)
}
BinXmlValue::GuidType(guid) => json!(guid.to_string()),
// BinXmlValue::SizeTType(sz) => json!(sz.to_string()),
BinXmlValue::FileTimeType(tm) => json!(tm),
BinXmlValue::SysTimeType(tm) => json!(tm),
BinXmlValue::SidType(sid) => json!(sid.to_string()),
BinXmlValue::HexInt32Type(hex_string) => json!(hex_string),
BinXmlValue::HexInt64Type(hex_string) => json!(hex_string),
BinXmlValue::StringArrayType(s) => json!(s),
BinXmlValue::Int8ArrayType(numbers) => json!(numbers),
BinXmlValue::UInt8ArrayType(numbers) => json!(numbers),
BinXmlValue::Int16ArrayType(numbers) => json!(numbers),
BinXmlValue::UInt16ArrayType(numbers) => json!(numbers),
BinXmlValue::Int32ArrayType(numbers) => json!(numbers),
BinXmlValue::UInt32ArrayType(numbers) => json!(numbers),
BinXmlValue::Int64ArrayType(numbers) => json!(numbers),
BinXmlValue::UInt64ArrayType(numbers) => json!(numbers),
BinXmlValue::Real32ArrayType(numbers) => json!(numbers),
BinXmlValue::Real64ArrayType(numbers) => json!(numbers),
BinXmlValue::BoolArrayType(bools) => json!(bools),
BinXmlValue::GuidArrayType(guids) => {
json!(guids.iter().map(Guid::to_string).collect::<Vec<String>>())
}
BinXmlValue::FileTimeArrayType(filetimes) => json!(filetimes),
BinXmlValue::SysTimeArrayType(systimes) => json!(systimes),
BinXmlValue::SidArrayType(sids) => {
json!(sids.iter().map(Sid::to_string).collect::<Vec<String>>())
}
BinXmlValue::HexInt32ArrayType(hex_strings) => json!(hex_strings),
BinXmlValue::HexInt64ArrayType(hex_strings) => json!(hex_strings),
BinXmlValue::EvtHandle => {
panic!("Unsupported conversion, call `expand_templates` first")
}
BinXmlValue::BinXmlType(_) => {
panic!("Unsupported conversion, call `expand_templates` first")
}
BinXmlValue::EvtXml => panic!("Unsupported conversion, call `expand_templates` first"),
_ => unimplemented!("{:?}", self),
}
}
}
impl<'c> Into<serde_json::Value> for &'c BinXmlValue<'c> {
    /// Serializes a borrowed `BinXmlValue` into a `serde_json::Value`
    /// without consuming the underlying data.
    ///
    /// # Panics
    /// Panics on `EvtHandle`, `BinXmlType` and `EvtXml`, which only become
    /// serializable after template expansion (`expand_templates`).
    fn into(self) -> Value {
        match self {
            BinXmlValue::NullType => Value::Null,
            // String variants are serialized straight from the borrowed Cow.
            BinXmlValue::StringType(text) => json!(text.as_ref()),
            BinXmlValue::AnsiStringType(text) => json!(text.as_ref()),
            // Scalar numerics and booleans map 1:1 onto JSON numbers/bools.
            BinXmlValue::Int8Type(n) => json!(n),
            BinXmlValue::UInt8Type(n) => json!(n),
            BinXmlValue::Int16Type(n) => json!(n),
            BinXmlValue::UInt16Type(n) => json!(n),
            BinXmlValue::Int32Type(n) => json!(n),
            BinXmlValue::UInt32Type(n) => json!(n),
            BinXmlValue::Int64Type(n) => json!(n),
            BinXmlValue::UInt64Type(n) => json!(n),
            BinXmlValue::Real32Type(n) => json!(n),
            BinXmlValue::Real64Type(n) => json!(n),
            BinXmlValue::BoolType(b) => json!(b),
            BinXmlValue::BinaryType(raw) => {
                // Each byte rendered as exactly two uppercase hex digits,
                // zero-padded.
                let hex: String = raw.iter().map(|byte| format!("{:02X}", byte)).collect();
                json!(hex)
            }
            BinXmlValue::GuidType(guid) => json!(guid.to_string()),
            BinXmlValue::FileTimeType(ts) => json!(ts),
            BinXmlValue::SysTimeType(ts) => json!(ts),
            BinXmlValue::SidType(sid) => json!(sid.to_string()),
            BinXmlValue::HexInt32Type(hex) => json!(hex),
            BinXmlValue::HexInt64Type(hex) => json!(hex),
            // Array variants serialize as JSON arrays of the element type.
            BinXmlValue::StringArrayType(strings) => json!(strings),
            BinXmlValue::Int8ArrayType(ns) => json!(ns),
            BinXmlValue::UInt8ArrayType(ns) => json!(ns),
            BinXmlValue::Int16ArrayType(ns) => json!(ns),
            BinXmlValue::UInt16ArrayType(ns) => json!(ns),
            BinXmlValue::Int32ArrayType(ns) => json!(ns),
            BinXmlValue::UInt32ArrayType(ns) => json!(ns),
            BinXmlValue::Int64ArrayType(ns) => json!(ns),
            BinXmlValue::UInt64ArrayType(ns) => json!(ns),
            BinXmlValue::Real32ArrayType(ns) => json!(ns),
            BinXmlValue::Real64ArrayType(ns) => json!(ns),
            BinXmlValue::BoolArrayType(bs) => json!(bs),
            // Guid/Sid arrays are stringified element-wise, matching the
            // scalar arms above.
            BinXmlValue::GuidArrayType(guids) => {
                json!(guids.iter().map(Guid::to_string).collect::<Vec<String>>())
            }
            BinXmlValue::FileTimeArrayType(stamps) => json!(stamps),
            BinXmlValue::SysTimeArrayType(stamps) => json!(stamps),
            BinXmlValue::SidArrayType(sids) => {
                json!(sids.iter().map(Sid::to_string).collect::<Vec<String>>())
            }
            BinXmlValue::HexInt32ArrayType(hexes) => json!(hexes),
            BinXmlValue::HexInt64ArrayType(hexes) => json!(hexes),
            // These variants require template expansion before serialization.
            BinXmlValue::EvtHandle
            | BinXmlValue::BinXmlType(_)
            | BinXmlValue::EvtXml => {
                panic!("Unsupported conversion, call `expand_templates` first")
            }
            _ => unimplemented!("{:?}", self),
        }
    }
}
impl<'a> BinXmlValue<'a> {
    /// Renders the value as text (used for XML output).
    ///
    /// Borrows where possible (`NullType` and the string variants) and
    /// allocates a fresh `String` otherwise; array variants are joined
    /// with `,`.
    ///
    /// # Panics
    /// Panics on `EvtHandle`, `BinXmlType` and `EvtXml`, which must first be
    /// expanded via `expand_templates`.
    pub fn as_cow_str(&self) -> Cow<str> {
        match self {
            BinXmlValue::NullType => Cow::Borrowed(""),
            BinXmlValue::StringType(text) => Cow::Borrowed(text.as_ref()),
            BinXmlValue::AnsiStringType(text) => Cow::Borrowed(text.as_ref()),
            // Scalars defer to their `Display` impls.
            BinXmlValue::Int8Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::UInt8Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::Int16Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::UInt16Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::Int32Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::UInt32Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::Int64Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::UInt64Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::Real32Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::Real64Type(n) => Cow::Owned(n.to_string()),
            BinXmlValue::BoolType(b) => Cow::Owned(b.to_string()),
            BinXmlValue::BinaryType(raw) => {
                // Two uppercase hex digits per byte, zero-padded; the buffer
                // is pre-sized so no reallocation happens while formatting.
                let mut hex = String::with_capacity(raw.len() * 2);
                for byte in raw.iter() {
                    write!(hex, "{:02X}", byte).expect("Writing to a String cannot fail");
                }
                Cow::Owned(hex)
            }
            BinXmlValue::GuidType(guid) => Cow::Owned(guid.to_string()),
            BinXmlValue::SizeTType(sz) => Cow::Owned(sz.to_string()),
            BinXmlValue::FileTimeType(ts) => Cow::Owned(ts.to_string()),
            BinXmlValue::SysTimeType(ts) => Cow::Owned(ts.to_string()),
            BinXmlValue::SidType(sid) => Cow::Owned(sid.to_string()),
            // Hex values are already stored as rendered strings; clone the Cow.
            BinXmlValue::HexInt32Type(hex) => hex.clone(),
            BinXmlValue::HexInt64Type(hex) => hex.clone(),
            BinXmlValue::StringArrayType(strings) => Cow::Owned(strings.join(",")),
            BinXmlValue::Int8ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::UInt8ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::Int16ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::UInt16ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::Int32ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::UInt32ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::Int64ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::UInt64ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::Real32ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::Real64ArrayType(ns) => Cow::Owned(to_delimited_list(ns)),
            BinXmlValue::BoolArrayType(bs) => Cow::Owned(to_delimited_list(bs)),
            BinXmlValue::GuidArrayType(guids) => Cow::Owned(to_delimited_list(guids)),
            BinXmlValue::FileTimeArrayType(stamps) => Cow::Owned(to_delimited_list(stamps)),
            BinXmlValue::SysTimeArrayType(stamps) => Cow::Owned(to_delimited_list(stamps)),
            BinXmlValue::SidArrayType(sids) => Cow::Owned(to_delimited_list(sids)),
            BinXmlValue::HexInt32ArrayType(hexes) => Cow::Owned(hexes.join(",")),
            BinXmlValue::HexInt64ArrayType(hexes) => Cow::Owned(hexes.join(",")),
            // These variants require template expansion before rendering.
            BinXmlValue::EvtHandle
            | BinXmlValue::BinXmlType(_)
            | BinXmlValue::EvtXml => {
                panic!("Unsupported conversion, call `expand_templates` first")
            }
            _ => unimplemented!("{:?}", self),
        }
    }
}
| 44.982487 | 100 | 0.589644 |
71b32897f342170e85b2ba5701aed86ce28b2b6f
| 60 |
// Build script: generates the cxx FFI bridge declared in `hdf.rs`.
fn main() {
    // NOTE(review): `cxx_build::bridge` returns a preconfigured `cc::Build`,
    // but `.compile("<libname>")` is never invoked on it here, so this script
    // does not compile the generated C++ shim -- confirm the shim is built
    // elsewhere (or that only code generation is intended).
    let _build = cxx_build::bridge("hdf.rs");
}
| 15 | 45 | 0.583333 |
917eab4eeb9918e547fc2e42f105b85c83c2d516
| 82,874 |
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Rustdoc's HTML Rendering module
//!
//! This modules contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
//! rendering process is largely driven by the `format!` syntax extension to
//! perform all I/O into files and streams.
//!
//! The rendering process is largely driven by the `Context` and `Cache`
//! structures. The cache is pre-populated by crawling the crate in question,
//! and then it is shared among the various rendering tasks. The cache is meant
//! to be a fairly large structure not implementing `Clone` (because it's shared
//! among tasks). The context, however, should be a lightweight structure. This
//! is cloned per-task and contains information about what is currently being
//! rendered.
//!
//! In order to speed up rendering (mostly because of markdown rendering), the
//! rendering process has been parallelized. This parallelization is only
//! exposed through the `crate` method on the context, and then also from the
//! fact that the shared cache is stored in TLS (and must be accessed as such).
//!
//! In addition to rendering the crate itself, this module is also responsible
//! for creating the corresponding search index and source file renderings.
//! These tasks are not parallelized (they haven't been a bottleneck yet), and
//! both occur before the crate is rendered.
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::io::{fs, File, BufferedWriter, MemWriter, BufferedReader};
use std::io;
use std::str;
use std::string::String;
use std::sync::Arc;
use externalfiles::ExternalHtml;
use serialize::json::ToJson;
use syntax::ast;
use syntax::ast_util;
use rustc::util::nodemap::NodeSet;
use clean;
use doctree;
use fold::DocFolder;
use html::format::{VisSpace, Method, FnStyleSpace, MutableSpace, Stability};
use html::format::{ConciseStability};
use html::highlight;
use html::item_type::{ItemType, shortty};
use html::item_type;
use html::layout;
use html::markdown::Markdown;
use html::markdown;
/// Major driving force in all rustdoc rendering. This contains information
/// about where in the tree-like hierarchy rendering is occurring and controls
/// how the current page is being rendered.
///
/// It is intended that this context is a lightweight object which can be fairly
/// easily cloned because it is cloned per work-job (about once per item in the
/// rustdoc tree). The heavyweight shared state lives in `Cache` instead.
#[deriving(Clone)]
pub struct Context {
    /// Current hierarchy of components leading down to what's currently being
    /// rendered
    pub current: Vec<String>,
    /// String representation of how to get back to the root path of the 'doc/'
    /// folder in terms of a relative URL.
    pub root_path: String,
    /// The current destination folder of where HTML artifacts should be placed.
    /// This changes as the context descends into the module hierarchy.
    pub dst: Path,
    /// This describes the layout of each page, and is not modified after
    /// creation of the context (contains info like the favicon and added html).
    pub layout: layout::Layout,
    /// This map is a list of what should be displayed on the sidebar of the
    /// current page. The key is the section header (traits, modules,
    /// functions), and the value is the list of containers belonging to this
    /// header. This map will change depending on the surrounding context of the
    /// page.
    pub sidebar: HashMap<String, Vec<String>>,
    /// This flag indicates whether [src] links should be generated or not. If
    /// the source files are present in the html rendering, then this will be
    /// `true`.
    pub include_sources: bool,
    /// A flag, which when turned off, will render pages which redirect to the
    /// real location of an item. This is used to allow external links to
    /// publicly reused items to redirect to the right location.
    pub render_redirect_pages: bool,
}
/// Indicates where an external crate can be found.
///
/// Computed once per crate in `krate.externs` by `extern_location` and stored
/// in `Cache::extern_locations`.
pub enum ExternalLocation {
    /// Remote URL root of the external crate
    Remote(String),
    /// This external crate can be found in the local doc/ folder
    Local,
    /// The external crate could not be found.
    Unknown,
}
/// Metadata about an implementor of a trait.
pub struct Implementor {
    /// Def-id of the impl; used to skip impls that live in the same crate as
    /// the trait they implement (see `write_shared`).
    def_id: ast::DefId,
    /// Generic parameters of the impl, rendered verbatim into the per-trait
    /// `implementors/*.js` file.
    generics: clean::Generics,
    /// The trait being implemented.
    trait_: clean::Type,
    /// The type the trait is implemented for.
    for_: clean::Type,
    /// Stability marker, rendered via `ConciseStability`.
    stability: Option<clean::Stability>,
}
/// Metadata about implementations for a type.
#[deriving(Clone)]
pub struct Impl {
    /// The impl itself.
    impl_: clean::Impl,
    /// Documentation attached to the impl, if any.
    dox: Option<String>,
    /// Stability marker of the impl.
    stability: Option<clean::Stability>,
}
/// This cache is used to store information about the `clean::Crate` being
/// rendered in order to provide more useful documentation. This contains
/// information like all implementors of a trait, all traits a type implements,
/// documentation for all known traits, etc.
///
/// This structure purposefully does not implement `Clone` because it's intended
/// to be a fairly large and expensive structure to clone. Instead this adheres
/// to `Send` so it may be stored in a `Arc` instance and shared among the various
/// rendering tasks.
pub struct Cache {
    /// Mapping of typaram ids to the name of the type parameter. This is used
    /// when pretty-printing a type (so pretty printing doesn't have to
    /// painfully maintain a context like this)
    pub typarams: HashMap<ast::DefId, String>,
    /// Maps a type id to all known implementations for that type. This is only
    /// recognized for intra-crate `ResolvedPath` types, and is used to print
    /// out extra documentation on the page of an enum/struct.
    ///
    /// The values of the map are a list of implementations and documentation
    /// found on that implementation.
    pub impls: HashMap<ast::DefId, Vec<Impl>>,
    /// Maintains a mapping of local crate node ids to the fully qualified name
    /// and "short type description" of that node. This is used when generating
    /// URLs when a type is being linked to. External paths are not located in
    /// this map because the `External` type itself has all the information
    /// necessary.
    pub paths: HashMap<ast::DefId, (Vec<String>, ItemType)>,
    /// Similar to `paths`, but only holds external paths. This is only used for
    /// generating explicit hyperlinks to other crates.
    pub external_paths: HashMap<ast::DefId, Vec<String>>,
    /// This map contains information about all known traits of this crate.
    /// Implementations of a crate should inherit the documentation of the
    /// parent trait if no extra documentation is specified, and default methods
    /// should show up in documentation about trait implementations.
    pub traits: HashMap<ast::DefId, clean::Trait>,
    /// When rendering traits, it's often useful to be able to list all
    /// implementors of the trait, and this mapping is exactly, that: a mapping
    /// of trait ids to the list of known implementors of the trait
    pub implementors: HashMap<ast::DefId, Vec<Implementor>>,
    /// Cache of where external crate documentation can be found.
    pub extern_locations: HashMap<ast::CrateNum, ExternalLocation>,
    /// Cache of where documentation for primitives can be found.
    pub primitive_locations: HashMap<clean::Primitive, ast::CrateNum>,
    /// Set of definitions which have been inlined from external crates.
    pub inlined: HashSet<ast::DefId>,
    // Private fields only used when initially crawling a crate to build a cache
    // Module path currently being crawled; seeded with the crate's name by
    // `run` before folding starts.
    stack: Vec<String>,
    // Ids of the items enclosing the current crawl position (presumably used
    // to attribute methods to their parent type -- the folding code is not
    // visible in this chunk; confirm there).
    parent_stack: Vec<ast::DefId>,
    // Search entries accumulated during the crawl; serialized to JS by
    // `build_index`.
    search_index: Vec<IndexItem>,
    // NOTE(review): presumably true while crawling inside a private module so
    // its children can be skipped -- confirm in the `DocFolder` impl.
    privmod: bool,
    // Node ids of publicly reachable items, copied from the analysis pass in
    // `run`.
    public_items: NodeSet,
    // In rare case where a structure is defined in one module but implemented
    // in another, if the implementing module is parsed before defining module,
    // then the fully qualified name of the structure isn't presented in `paths`
    // yet when its implementation methods are being indexed. Caches such methods
    // and their parent id here and indexes them at the end of crate parsing.
    orphan_methods: Vec<(ast::NodeId, clean::Item)>,
}
/// Helper struct to render all source code to HTML pages
struct SourceCollector<'a> {
    /// Shared rendering context; its `include_sources` flag is cleared if a
    /// source file cannot be read (see the `DocFolder` impl below).
    cx: &'a mut Context,
    /// Processed source-file paths
    seen: HashSet<String>,
    /// Root destination to place all HTML output into
    dst: Path,
}
/// Wrapper struct to render the source code of a file. This will do things like
/// adding line numbers to the left-hand side.
struct Source<'a>(&'a str);
// Helper structs for rendering items/sidebars and carrying along contextual
// information
struct Item<'a> { cx: &'a Context, item: &'a clean::Item, }
struct Sidebar<'a> { cx: &'a Context, item: &'a clean::Item, }
/// Struct representing one entry in the JS search index. These are all emitted
/// by hand to a large JS file at the end of cache-creation.
struct IndexItem {
    /// Kind of the item; serialized as a small integer (`{:u}`) by
    /// `build_index`.
    ty: ItemType,
    /// Name of the item.
    name: String,
    /// Fully qualified parent path, joined with `::` (omitted in the output
    /// when identical to the previous entry's path).
    path: String,
    /// Shortened doc text (see `shorter`), emitted as a JSON string.
    desc: String,
    /// `DefId` of the enclosing type for e.g. methods; remapped to a compact
    /// sequential path id by `build_index`.
    parent: Option<ast::DefId>,
}
// TLS keys used to carry information around during rendering.
// `cache_key` is filled in by `run` once the cache is frozen into an `Arc`.
local_data_key!(pub cache_key: Arc<Cache>)
local_data_key!(pub current_location_key: Vec<String> )
/// Generates the documentation for `crate` into the directory `dst`
///
/// Top-level rendering entry point. In order: builds the `Context`, honours
/// crate-level `doc` attributes (favicon / logo / playground URL / no-source),
/// crawls the crate into a `Cache`, builds the search index, freezes the
/// cache into TLS, writes the shared output files, renders source listings,
/// and finally renders the crate's item pages.
pub fn run(mut krate: clean::Crate, external_html: &ExternalHtml, dst: Path) -> io::IoResult<()> {
    let mut cx = Context {
        dst: dst,
        current: Vec::new(),
        root_path: String::new(),
        sidebar: HashMap::new(),
        // Layout defaults; favicon/logo/playground may be overridden by the
        // crate attributes crawled just below.
        layout: layout::Layout {
            logo: "".to_string(),
            favicon: "".to_string(),
            external_html: external_html.clone(),
            krate: krate.name.clone(),
            playground_url: "".to_string(),
        },
        include_sources: true,
        render_redirect_pages: false,
    };
    try!(mkdir(&cx.dst));
    // Crawl the crate attributes looking for attributes which control how we're
    // going to emit HTML
    match krate.module.as_ref().map(|m| m.doc_list().unwrap_or(&[])) {
        Some(attrs) => {
            for attr in attrs.iter() {
                match *attr {
                    clean::NameValue(ref x, ref s)
                            if "html_favicon_url" == x.as_slice() => {
                        cx.layout.favicon = s.to_string();
                    }
                    clean::NameValue(ref x, ref s)
                            if "html_logo_url" == x.as_slice() => {
                        cx.layout.logo = s.to_string();
                    }
                    clean::NameValue(ref x, ref s)
                            if "html_playground_url" == x.as_slice() => {
                        cx.layout.playground_url = s.to_string();
                        // Only the first crate to set a playground URL wins.
                        let name = krate.name.clone();
                        if markdown::playground_krate.get().is_none() {
                            markdown::playground_krate.replace(Some(Some(name)));
                        }
                    }
                    clean::Word(ref x)
                            if "html_no_source" == x.as_slice() => {
                        cx.include_sources = false;
                    }
                    _ => {}
                }
            }
        }
        None => {}
    }
    // Crawl the crate to build various caches used for the output
    let analysis = ::analysiskey.get();
    let public_items = analysis.as_ref().map(|a| a.public_items.clone());
    let public_items = public_items.unwrap_or(NodeSet::new());
    // Translate the typechecker's item classification into rustdoc's own
    // `ItemType` for every external path the analysis pass recorded.
    let paths: HashMap<ast::DefId, (Vec<String>, ItemType)> =
      analysis.as_ref().map(|a| {
        let paths = a.external_paths.borrow_mut().take_unwrap();
        paths.move_iter().map(|(k, (v, t))| {
            (k, (v, match t {
                clean::TypeStruct => item_type::Struct,
                clean::TypeEnum => item_type::Enum,
                clean::TypeFunction => item_type::Function,
                clean::TypeTrait => item_type::Trait,
                clean::TypeModule => item_type::Module,
                clean::TypeStatic => item_type::Static,
                clean::TypeVariant => item_type::Variant,
            }))
        }).collect()
    }).unwrap_or(HashMap::new());
    let mut cache = Cache {
        impls: HashMap::new(),
        external_paths: paths.iter().map(|(&k, &(ref v, _))| (k, v.clone()))
                             .collect(),
        paths: paths,
        implementors: HashMap::new(),
        stack: Vec::new(),
        parent_stack: Vec::new(),
        search_index: Vec::new(),
        extern_locations: HashMap::new(),
        primitive_locations: HashMap::new(),
        privmod: false,
        public_items: public_items,
        orphan_methods: Vec::new(),
        traits: analysis.as_ref().map(|a| {
            a.external_traits.borrow_mut().take_unwrap()
        }).unwrap_or(HashMap::new()),
        typarams: analysis.as_ref().map(|a| {
            a.external_typarams.borrow_mut().take_unwrap()
        }).unwrap_or(HashMap::new()),
        inlined: analysis.as_ref().map(|a| {
            a.inlined.borrow_mut().take_unwrap()
        }).unwrap_or(HashSet::new()),
    };
    // Seed the module path with the crate name, then crawl everything.
    cache.stack.push(krate.name.clone());
    krate = cache.fold_crate(krate);
    // Cache where all our extern crates are located
    for &(n, ref e) in krate.externs.iter() {
        cache.extern_locations.insert(n, extern_location(e, &cx.dst));
        let did = ast::DefId { krate: n, node: ast::CRATE_NODE_ID };
        cache.paths.insert(did, (vec![e.name.to_string()], item_type::Module));
    }
    // Cache where all known primitives have their documentation located.
    //
    // Favor linking to as local extern as possible, so iterate all crates in
    // reverse topological order.
    for &(n, ref e) in krate.externs.iter().rev() {
        for &prim in e.primitives.iter() {
            cache.primitive_locations.insert(prim, n);
        }
    }
    for &prim in krate.primitives.iter() {
        cache.primitive_locations.insert(prim, ast::LOCAL_CRATE);
    }
    // Build our search index
    let index = try!(build_index(&krate, &mut cache));
    // Freeze the cache now that the index has been built. Put an Arc into TLS
    // for future parallelization opportunities
    let cache = Arc::new(cache);
    cache_key.replace(Some(cache.clone()));
    current_location_key.replace(Some(Vec::new()));
    try!(write_shared(&cx, &krate, &*cache, index));
    let krate = try!(render_sources(&mut cx, krate));
    // And finally render the whole crate's documentation
    cx.krate(krate)
}
/// Builds the JavaScript `searchIndex['<crate>'] = {"items":[...],"paths":[...]};`
/// blob for this crate and returns it as a `String`.
///
/// `write_shared` later merges the result with the blobs of previously
/// documented crates inside `search-index.js`.
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::IoResult<String> {
    // Build the search index from the collected metadata
    let mut nodeid_to_pathid = HashMap::new();
    let mut pathid_to_nodeid = Vec::new();
    {
        let Cache { ref mut search_index,
                    ref orphan_methods,
                    ref mut paths, .. } = *cache;
        // Attach all orphan methods to the type's definition if the type
        // has since been learned.
        for &(pid, ref item) in orphan_methods.iter() {
            let did = ast_util::local_def(pid);
            match paths.find(&did) {
                Some(&(ref fqp, _)) => {
                    search_index.push(IndexItem {
                        ty: shortty(item),
                        name: item.name.clone().unwrap(),
                        path: fqp.slice_to(fqp.len() - 1).connect("::"),
                        desc: shorter(item.doc_value()).to_string(),
                        parent: Some(did),
                    });
                },
                None => {}
            }
        };
        // Reduce `NodeId` in paths into smaller sequential numbers,
        // and prune the paths that do not appear in the index.
        for item in search_index.iter() {
            match item.parent {
                Some(nodeid) => {
                    if !nodeid_to_pathid.contains_key(&nodeid) {
                        let pathid = pathid_to_nodeid.len();
                        nodeid_to_pathid.insert(nodeid, pathid);
                        pathid_to_nodeid.push(nodeid);
                    }
                }
                None => {}
            }
        }
        assert_eq!(nodeid_to_pathid.len(), pathid_to_nodeid.len());
    }
    // Collect the index into a string
    let mut w = MemWriter::new();
    try!(write!(&mut w, r#"searchIndex['{}'] = {{"items":["#, krate.name));
    let mut lastpath = "".to_string();
    for (i, item) in cache.search_index.iter().enumerate() {
        // Omit the path if it is same to that of the prior item.
        let path;
        if lastpath.as_slice() == item.path.as_slice() {
            path = "";
        } else {
            lastpath = item.path.to_string();
            path = item.path.as_slice();
        };
        if i > 0 {
            try!(write!(&mut w, ","));
        }
        try!(write!(&mut w, r#"[{:u},"{}","{}",{}"#,
                    item.ty, item.name, path,
                    item.desc.to_json().to_str()));
        // A fifth array element -- the compact parent path id -- is only
        // emitted for items that have a parent.
        match item.parent {
            Some(nodeid) => {
                let pathid = *nodeid_to_pathid.find(&nodeid).unwrap();
                try!(write!(&mut w, ",{}", pathid));
            }
            None => {}
        }
        try!(write!(&mut w, "]"));
    }
    try!(write!(&mut w, r#"],"paths":["#));
    for (i, &did) in pathid_to_nodeid.iter().enumerate() {
        let &(ref fqp, short) = cache.paths.find(&did).unwrap();
        if i > 0 {
            try!(write!(&mut w, ","));
        }
        try!(write!(&mut w, r#"[{:u},"{}"]"#,
                    short, *fqp.last().unwrap()));
    }
    try!(write!(&mut w, "]}};"));
    // The buffer only ever received `write!` output, so it is valid UTF-8.
    Ok(str::from_utf8(w.unwrap().as_slice()).unwrap().to_string())
}
/// Writes the output artifacts shared by every crate documented into `dst`:
/// static assets (JS/CSS/fonts), the merged `search-index.js`, and the
/// per-trait `implementors/*.js` files.
fn write_shared(cx: &Context,
                krate: &clean::Crate,
                cache: &Cache,
                search_index: String) -> io::IoResult<()> {
    // Write out the shared files. Note that these are shared among all rustdoc
    // docs placed in the output directory, so this needs to be a synchronized
    // operation with respect to all other rustdocs running around.
    try!(mkdir(&cx.dst));
    // Lock file guards concurrent rustdoc invocations over the same directory.
    let _lock = ::flock::Lock::new(&cx.dst.join(".lock"));
    // Add all the static files. These may already exist, but we just
    // overwrite them anyway to make sure that they're fresh and up-to-date.
    try!(write(cx.dst.join("jquery.js"),
               include_bin!("static/jquery-2.1.0.min.js")));
    try!(write(cx.dst.join("main.js"), include_bin!("static/main.js")));
    try!(write(cx.dst.join("playpen.js"), include_bin!("static/playpen.js")));
    try!(write(cx.dst.join("main.css"), include_bin!("static/main.css")));
    try!(write(cx.dst.join("normalize.css"),
               include_bin!("static/normalize.css")));
    try!(write(cx.dst.join("FiraSans-Regular.woff"),
               include_bin!("static/FiraSans-Regular.woff")));
    try!(write(cx.dst.join("FiraSans-Medium.woff"),
               include_bin!("static/FiraSans-Medium.woff")));
    try!(write(cx.dst.join("Heuristica-Regular.woff"),
               include_bin!("static/Heuristica-Regular.woff")));
    try!(write(cx.dst.join("Heuristica-Italic.woff"),
               include_bin!("static/Heuristica-Italic.woff")));
    try!(write(cx.dst.join("Heuristica-Bold.woff"),
               include_bin!("static/Heuristica-Bold.woff")));
    // Collects the `key`-prefixed lines of an existing JS file, skipping the
    // line belonging to the current crate (it is about to be re-emitted).
    fn collect(path: &Path, krate: &str,
               key: &str) -> io::IoResult<Vec<String>> {
        let mut ret = Vec::new();
        if path.exists() {
            for line in BufferedReader::new(File::open(path)).lines() {
                let line = try!(line);
                if !line.as_slice().starts_with(key) {
                    continue
                }
                if line.as_slice().starts_with(
                        format!("{}['{}']", key, krate).as_slice()) {
                    continue
                }
                ret.push(line.to_string());
            }
        }
        return Ok(ret);
    }
    // Update the search index
    let dst = cx.dst.join("search-index.js");
    let all_indexes = try!(collect(&dst, krate.name.as_slice(),
                                   "searchIndex"));
    let mut w = try!(File::create(&dst));
    try!(writeln!(&mut w, "var searchIndex = {{}};"));
    try!(writeln!(&mut w, "{}", search_index));
    for index in all_indexes.iter() {
        try!(writeln!(&mut w, "{}", *index));
    }
    try!(writeln!(&mut w, "initSearch(searchIndex);"));
    // Update the list of all implementors for traits
    let dst = cx.dst.join("implementors");
    try!(mkdir(&dst));
    for (&did, imps) in cache.implementors.iter() {
        // Private modules can leak through to this phase of rustdoc, which
        // could contain implementations for otherwise private types. In some
        // rare cases we could find an implementation for an item which wasn't
        // indexed, so we just skip this step in that case.
        //
        // FIXME: this is a vague explanation for why this can't be a `get`, in
        // theory it should be...
        let &(ref remote_path, remote_item_type) = match cache.paths.find(&did) {
            Some(p) => p,
            None => continue,
        };
        // Mirror the trait's module path under `implementors/`.
        let mut mydst = dst.clone();
        for part in remote_path.slice_to(remote_path.len() - 1).iter() {
            mydst.push(part.as_slice());
            try!(mkdir(&mydst));
        }
        mydst.push(format!("{}.{}.js",
                           remote_item_type.to_static_str(),
                           *remote_path.get(remote_path.len() - 1)));
        let all_implementors = try!(collect(&mydst, krate.name.as_slice(),
                                            "implementors"));
        try!(mkdir(&mydst.dir_path()));
        let mut f = BufferedWriter::new(try!(File::create(&mydst)));
        try!(writeln!(&mut f, "(function() {{var implementors = {{}};"));
        for implementor in all_implementors.iter() {
            try!(write!(&mut f, "{}", *implementor));
        }
        try!(write!(&mut f, r"implementors['{}'] = [", krate.name));
        for imp in imps.iter() {
            // If the trait and implementation are in the same crate, then
            // there's no need to emit information about it (there's inlining
            // going on). If they're in different crates then the crate defining
            // the trait will be interested in our implementation.
            if imp.def_id.krate == did.krate { continue }
            try!(write!(&mut f, r#""{}impl{} {} for {}","#,
                        ConciseStability(&imp.stability),
                        imp.generics, imp.trait_, imp.for_));
        }
        try!(writeln!(&mut f, r"];"));
        try!(writeln!(&mut f, "{}", r"
            if (window.register_implementors) {
                window.register_implementors(implementors);
            } else {
                window.pending_implementors = implementors;
            }
        "));
        try!(writeln!(&mut f, r"}})()"));
    }
    Ok(())
}
/// Renders every source file of the crate into `dst/src/<crate-name>/` by
/// folding the crate through a `SourceCollector`.
fn render_sources(cx: &mut Context,
                  krate: clean::Crate) -> io::IoResult<clean::Crate> {
    info!("emitting source files");
    let dst = cx.dst.join("src");
    try!(mkdir(&dst));
    let dst = dst.join(krate.name.as_slice());
    try!(mkdir(&dst));
    let mut folder = SourceCollector {
        dst: dst,
        seen: HashSet::new(),
        cx: cx,
    };
    // skip all invalid spans (items with an empty filename)
    folder.seen.insert("".to_string());
    Ok(folder.fold_crate(krate))
}
/// Writes the entire contents of a string to a destination, not attempting to
/// catch any errors.
fn write(dst: Path, contents: &[u8]) -> io::IoResult<()> {
    // Truncates/creates `dst` and writes `contents` in one shot.
    File::create(&dst).write(contents)
}
/// Makes a directory on the filesystem, failing the task if an error occurs and
/// skipping if the directory already exists.
///
/// Note: the exists-then-create sequence is racy; callers in this file also
/// hold a `.lock` file when sharing an output directory (see `write_shared`).
fn mkdir(path: &Path) -> io::IoResult<()> {
    if !path.exists() {
        fs::mkdir(path, io::UserRWX)
    } else {
        Ok(())
    }
}
/// Takes a path to a source file and cleans the path to it. This canonicalizes
/// things like ".." to components which preserve the "top down" hierarchy of a
/// static HTML tree.
///
/// Each cleaned component is passed to the callback `f`; a ".." component is
/// replaced with the literal directory name "up". A bare "." path produces no
/// components at all.
// FIXME (#9639): The closure should deal with &[u8] instead of &str
fn clean_srcpath(src: &[u8], f: |&str|) {
    let p = Path::new(src);
    if p.as_vec() != b"." {
        for c in p.str_components().map(|x|x.unwrap()) {
            if ".." == c {
                f("up");
            } else {
                f(c.as_slice())
            }
        }
    }
}
/// Attempts to find where an external crate is located, given that we're
/// rendering in to the specified source destination.
///
/// Resolution order: a locally-generated doc directory wins, then an
/// explicit `#![doc(html_root_url = "...")]` attribute on the external
/// crate, otherwise the location is `Unknown`.
fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation {
    // See if there's documentation generated into the local directory
    let local_location = dst.join(e.name.as_slice());
    if local_location.is_dir() {
        return Local;
    }
    // Failing that, see if there's an attribute specifying where to find this
    // external crate
    for attr in e.attrs.iter() {
        match *attr {
            // Look inside `#[doc(...)]` attribute lists only.
            clean::List(ref x, ref list) if "doc" == x.as_slice() => {
                for attr in list.iter() {
                    match *attr {
                        clean::NameValue(ref x, ref s)
                                if "html_root_url" == x.as_slice() => {
                            // Normalize to a trailing slash so callers can
                            // append path segments directly.
                            if s.as_slice().ends_with("/") {
                                return Remote(s.to_string());
                            }
                            return Remote(format!("{}/", s));
                        }
                        _ => {}
                    }
                }
            }
            _ => {}
        }
    }
    // Well, at least we tried.
    return Unknown;
}
impl<'a> DocFolder for SourceCollector<'a> {
    /// Folds over every item, emitting an HTML page for each source file the
    /// first time that file is encountered.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        // If we're including source files, and we haven't seen this file yet,
        // then we need to render it out to the filesystem
        if self.cx.include_sources && !self.seen.contains(&item.source.filename) {
            // If it turns out that we couldn't read this file, then we probably
            // can't read any of the files (generating html output from json or
            // something like that), so just don't include sources for the
            // entire crate. The other option is maintaining this mapping on a
            // per-file basis, but that's probably not worth it...
            self.cx
                .include_sources = match self.emit_source(item.source
                                                              .filename
                                                              .as_slice()) {
                Ok(()) => true,
                Err(e) => {
                    println!("warning: source code was requested to be rendered, \
                              but processing `{}` had an error: {}",
                             item.source.filename, e);
                    println!("         skipping rendering of source code");
                    false
                }
            };
            // Record the file regardless of success so it is only tried once.
            self.seen.insert(item.source.filename.clone());
        }
        self.fold_item_recur(item)
    }
}
impl<'a> SourceCollector<'a> {
    /// Renders the given filename into its corresponding HTML source file.
    ///
    /// Mirrors the source file's directory structure under the destination,
    /// appending `.html` to the file name.
    fn emit_source(&mut self, filename: &str) -> io::IoResult<()> {
        let p = Path::new(filename);
        // If we couldn't open this file, then just returns because it
        // probably means that it's some standard library macro thing and we
        // can't have the source to it anyway.
        let contents = match File::open(&p).read_to_end() {
            Ok(r) => r,
            // macros from other libraries get special filenames which we can
            // safely ignore
            Err(..) if filename.starts_with("<") &&
                       filename.ends_with("macros>") => return Ok(()),
            Err(e) => return Err(e)
        };
        let contents = str::from_utf8(contents.as_slice()).unwrap();
        // Remove the utf-8 BOM if any
        // (U+FEFF encodes to 3 bytes in UTF-8, hence the slice offset).
        let contents = if contents.starts_with("\ufeff") {
            contents.as_slice().slice_from(3)
        } else {
            contents.as_slice()
        };
        // Create the intermediate directories
        // (root_path grows one "../" per directory component so relative
        // links from the generated page still reach the doc root).
        let mut cur = self.dst.clone();
        let mut root_path = String::from_str("../../");
        clean_srcpath(p.dirname(), |component| {
            cur.push(component);
            mkdir(&cur).unwrap();
            root_path.push_str("../");
        });
        cur.push(Vec::from_slice(p.filename().expect("source has no filename"))
                 .append(b".html"));
        let mut w = BufferedWriter::new(try!(File::create(&cur)));
        let title = format!("{} -- source", cur.filename_display());
        let page = layout::Page {
            title: title.as_slice(),
            ty: "source",
            root_path: root_path.as_slice(),
        };
        try!(layout::render(&mut w as &mut Writer, &self.cx.layout,
                            &page, &(""), &Source(contents)));
        try!(w.flush());
        return Ok(());
    }
}
impl DocFolder for Cache {
    /// Single pass over every item in the crate: records type-parameter
    /// names, trait definitions, trait implementors, search-index entries,
    /// and fully-qualified paths, then recurses into child items. Impl items
    /// are hoarded into `self.impls` and removed from the returned tree.
    fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
        // If this is a private module, we don't want it in the search index.
        let orig_privmod = match item.inner {
            clean::ModuleItem(..) => {
                let prev = self.privmod;
                self.privmod = prev || item.visibility != Some(ast::Public);
                prev
            }
            _ => self.privmod,
        };
        // Register any generics to their corresponding string. This is used
        // when pretty-printing types
        match item.inner {
            clean::StructItem(ref s)          => self.generics(&s.generics),
            clean::EnumItem(ref e)            => self.generics(&e.generics),
            clean::FunctionItem(ref f)        => self.generics(&f.generics),
            clean::TypedefItem(ref t)         => self.generics(&t.generics),
            clean::TraitItem(ref t)           => self.generics(&t.generics),
            clean::ImplItem(ref i)            => self.generics(&i.generics),
            clean::TyMethodItem(ref i)        => self.generics(&i.generics),
            clean::MethodItem(ref i)          => self.generics(&i.generics),
            clean::ForeignFunctionItem(ref f) => self.generics(&f.generics),
            _ => {}
        }
        // Propagate a trait methods' documentation to all implementors of the
        // trait
        match item.inner {
            clean::TraitItem(ref t) => {
                self.traits.insert(item.def_id, t.clone());
            }
            _ => {}
        }
        // Collect all the implementors of traits.
        match item.inner {
            clean::ImplItem(ref i) => {
                match i.trait_ {
                    Some(clean::ResolvedPath{ did, .. }) => {
                        let v = self.implementors.find_or_insert_with(did, |_| {
                            Vec::new()
                        });
                        v.push(Implementor {
                            def_id: item.def_id,
                            generics: i.generics.clone(),
                            trait_: i.trait_.get_ref().clone(),
                            for_: i.for_.clone(),
                            stability: item.stability.clone(),
                        });
                    }
                    Some(..) | None => {}
                }
            }
            _ => {}
        }
        // Index this method for searching later on
        match item.name {
            Some(ref s) => {
                // Determine the "parent" (enclosing type/trait) and the module
                // path under which this item should be indexed.
                let parent = match item.inner {
                    clean::TyMethodItem(..) |
                    clean::StructFieldItem(..) |
                    clean::VariantItem(..) => {
                        (Some(*self.parent_stack.last().unwrap()),
                         Some(self.stack.slice_to(self.stack.len() - 1)))
                    }
                    clean::MethodItem(..) => {
                        if self.parent_stack.len() == 0 {
                            (None, None)
                        } else {
                            let last = self.parent_stack.last().unwrap();
                            let did = *last;
                            let path = match self.paths.find(&did) {
                                Some(&(_, item_type::Trait)) =>
                                    Some(self.stack.slice_to(self.stack.len() - 1)),
                                // The current stack not necessarily has correlation for
                                // where the type was defined. On the other hand,
                                // `paths` always has the right information if present.
                                Some(&(ref fqp, item_type::Struct)) |
                                Some(&(ref fqp, item_type::Enum)) =>
                                    Some(fqp.slice_to(fqp.len() - 1)),
                                Some(..) => Some(self.stack.as_slice()),
                                None => None
                            };
                            (Some(*last), path)
                        }
                    }
                    _ => (None, Some(self.stack.as_slice()))
                };
                // A resolvable path can be indexed immediately; a known parent
                // with no path yet is deferred via `orphan_methods`.
                match parent {
                    (parent, Some(path)) if !self.privmod => {
                        self.search_index.push(IndexItem {
                            ty: shortty(&item),
                            name: s.to_string(),
                            path: path.connect("::").to_string(),
                            desc: shorter(item.doc_value()).to_string(),
                            parent: parent,
                        });
                    }
                    (Some(parent), None) if !self.privmod => {
                        if ast_util::is_local(parent) {
                            // We have a parent, but we don't know where they're
                            // defined yet. Wait for later to index this item.
                            self.orphan_methods.push((parent.node, item.clone()))
                        }
                    }
                    _ => {}
                }
            }
            None => {}
        }
        // Keep track of the fully qualified path for this item.
        let pushed = if item.name.is_some() {
            let n = item.name.get_ref();
            if n.len() > 0 {
                self.stack.push(n.to_string());
                true
            } else { false }
        } else { false };
        match item.inner {
            clean::StructItem(..) | clean::EnumItem(..) |
            clean::TypedefItem(..) | clean::TraitItem(..) |
            clean::FunctionItem(..) | clean::ModuleItem(..) |
            clean::ForeignFunctionItem(..) if !self.privmod => {
                // Reexported items mean that the same id can show up twice
                // in the rustdoc ast that we're looking at. We know,
                // however, that a reexported item doesn't show up in the
                // `public_items` map, so we can skip inserting into the
                // paths map if there was already an entry present and we're
                // not a public item.
                let id = item.def_id.node;
                if !self.paths.contains_key(&item.def_id) ||
                   !ast_util::is_local(item.def_id) ||
                   self.public_items.contains(&id) {
                    self.paths.insert(item.def_id,
                                      (self.stack.clone(), shortty(&item)));
                }
            }
            // link variants to their parent enum because pages aren't emitted
            // for each variant
            clean::VariantItem(..) if !self.privmod => {
                let mut stack = self.stack.clone();
                stack.pop();
                self.paths.insert(item.def_id, (stack, item_type::Enum));
            }
            clean::PrimitiveItem(..) if item.visibility.is_some() => {
                self.paths.insert(item.def_id, (self.stack.clone(),
                                                shortty(&item)));
            }
            _ => {}
        }
        // Maintain the parent stack
        let parent_pushed = match item.inner {
            clean::TraitItem(..) | clean::EnumItem(..) | clean::StructItem(..) => {
                self.parent_stack.push(item.def_id);
                true
            }
            clean::ImplItem(ref i) => {
                match i.for_ {
                    clean::ResolvedPath{ did, .. } => {
                        self.parent_stack.push(did);
                        true
                    }
                    _ => false
                }
            }
            _ => false
        };
        // Once we've recursively found all the generics, then hoard off all the
        // implementations elsewhere
        let ret = match self.fold_item_recur(item) {
            Some(item) => {
                match item {
                    clean::Item{ attrs, inner: clean::ImplItem(i), .. } => {
                        use clean::{Primitive, Vector, ResolvedPath, BorrowedRef};
                        use clean::{FixedVector, Slice, Tuple, PrimitiveTuple};
                        // extract relevant documentation for this impl
                        let dox = match attrs.move_iter().find(|a| {
                            match *a {
                                clean::NameValue(ref x, _)
                                        if "doc" == x.as_slice() => {
                                    true
                                }
                                _ => false
                            }
                        }) {
                            Some(clean::NameValue(_, dox)) => Some(dox),
                            Some(..) | None => None,
                        };
                        // Figure out the id of this impl. This may map to a
                        // primitive rather than always to a struct/enum.
                        let did = match i.for_ {
                            ResolvedPath { did, .. } => Some(did),
                            // References to primitives are picked up as well to
                            // recognize implementations for &str, this may not
                            // be necessary in a DST world.
                            Primitive(p) |
                                BorrowedRef { type_: box Primitive(p), ..} =>
                            {
                                Some(ast_util::local_def(p.to_node_id()))
                            }
                            // In a DST world, we may only need
                            // Vector/FixedVector, but for now we also pick up
                            // borrowed references
                            Vector(..) | FixedVector(..) |
                                BorrowedRef{ type_: box Vector(..), ..  } |
                                BorrowedRef{ type_: box FixedVector(..), .. } =>
                            {
                                Some(ast_util::local_def(Slice.to_node_id()))
                            }
                            Tuple(..) => {
                                let id = PrimitiveTuple.to_node_id();
                                Some(ast_util::local_def(id))
                            }
                            _ => None,
                        };
                        match did {
                            Some(did) => {
                                let v = self.impls.find_or_insert_with(did, |_| {
                                    Vec::new()
                                });
                                v.push(Impl {
                                    impl_: i,
                                    dox: dox,
                                    stability: item.stability.clone(),
                                });
                            }
                            None => {}
                        }
                        // Impl items are consumed into `self.impls`, so drop
                        // them from the folded tree.
                        None
                    }
                    i => Some(i),
                }
            }
            i => i,
        };
        // Undo the per-item stack bookkeeping done above before returning.
        if pushed { self.stack.pop().unwrap(); }
        if parent_pushed { self.parent_stack.pop().unwrap(); }
        self.privmod = orig_privmod;
        return ret;
    }
}
impl<'a> Cache {
    /// Records the name of every type parameter in `generics`, keyed by its
    /// def id, so later pretty-printing can resolve parameter names.
    fn generics(&mut self, generics: &clean::Generics) {
        for param in generics.type_params.iter() {
            self.typarams.insert(param.did, param.name.clone());
        }
    }
}
impl Context {
    /// Recurse in the directory structure and change the "root path" to make
    /// sure it always points to the top (relatively)
    ///
    /// `s` is the directory to descend into; `f` runs with `self` adjusted to
    /// the child directory, and all state is restored afterwards.
    fn recurse<T>(&mut self, s: String, f: |&mut Context| -> T) -> T {
        if s.len() == 0 {
            fail!("what {:?}", self);
        }
        let prev = self.dst.clone();
        self.dst.push(s.as_slice());
        self.root_path.push_str("../");
        self.current.push(s);
        info!("Recursing into {}", self.dst.display());
        mkdir(&self.dst).unwrap();
        let ret = f(self);
        info!("Recursed; leaving {}", self.dst.display());
        // Go back to where we were at
        self.dst = prev;
        let len = self.root_path.len();
        // Drop the "../" appended above.
        self.root_path.truncate(len - 3);
        self.current.pop().unwrap();
        return ret;
    }
    /// Main method for rendering a crate.
    ///
    /// This currently isn't parallelized, but it'd be pretty easy to add
    /// parallelization to this function.
    fn krate(self, mut krate: clean::Crate) -> io::IoResult<()> {
        let mut item = match krate.module.take() {
            Some(i) => i,
            None => return Ok(())
        };
        item.name = Some(krate.name);
        // Work queue of (context, item) pairs; `item` pushes children back
        // onto the queue until it drains.
        let mut work = vec!((self, item));
        loop {
            match work.pop() {
                Some((mut cx, item)) => try!(cx.item(item, |cx, item| {
                    work.push((cx.clone(), item));
                })),
                None => break,
            }
        }
        Ok(())
    }
    /// Non-parellelized version of rendering an item. This will take the input
    /// item, render its contents, and then invoke the specified closure with
    /// all sub-items which need to be rendered.
    ///
    /// The rendering driver uses this closure to queue up more work.
    fn item(&mut self, item: clean::Item,
            f: |&mut Context, clean::Item|) -> io::IoResult<()> {
        // Renders a single item into the given file. `pushname` controls
        // whether the item's own name is appended to the page title.
        fn render(w: io::File, cx: &Context, it: &clean::Item,
                  pushname: bool) -> io::IoResult<()> {
            info!("Rendering an item to {}", w.path().display());
            // A little unfortunate that this is done like this, but it sure
            // does make formatting *a lot* nicer.
            current_location_key.replace(Some(cx.current.clone()));
            let mut title = cx.current.connect("::");
            if pushname {
                if title.len() > 0 {
                    title.push_str("::");
                }
                title.push_str(it.name.get_ref().as_slice());
            }
            title.push_str(" - Rust");
            let page = layout::Page {
                ty: shortty(it).to_static_str(),
                root_path: cx.root_path.as_slice(),
                title: title.as_slice(),
            };
            markdown::reset_headers();
            // We have a huge number of calls to write, so try to alleviate some
            // of the pain by using a buffered writer instead of invoking the
            // write sycall all the time.
            let mut writer = BufferedWriter::new(w);
            if !cx.render_redirect_pages {
                try!(layout::render(&mut writer, &cx.layout, &page,
                                    &Sidebar{ cx: cx, item: it },
                                    &Item{ cx: cx, item: it }));
            } else {
                // Redirect pages point at the canonical location of a
                // publicly-reexported item instead of rendering it twice.
                let mut url = "../".repeat(cx.current.len());
                match cache_key.get().unwrap().paths.find(&it.def_id) {
                    Some(&(ref names, _)) => {
                        for name in names.slice_to(names.len() - 1).iter() {
                            url.push_str(name.as_slice());
                            url.push_str("/");
                        }
                        url.push_str(item_path(it).as_slice());
                        try!(layout::redirect(&mut writer, url.as_slice()));
                    }
                    None => {}
                }
            }
            writer.flush()
        }
        // Private modules may survive the strip-private pass if they
        // contain impls for public types. These modules can also
        // contain items such as publicly reexported structures.
        //
        // External crates will provide links to these structures, so
        // these modules are recursed into, but not rendered normally (a
        // flag on the context).
        if !self.render_redirect_pages {
            self.render_redirect_pages = ignore_private_item(&item);
        }
        match item.inner {
            // modules are special because they add a namespace. We also need to
            // recurse into the items of the module as well.
            clean::ModuleItem(..) => {
                let name = item.name.get_ref().to_string();
                let mut item = Some(item);
                self.recurse(name, |this| {
                    let item = item.take_unwrap();
                    let dst = this.dst.join("index.html");
                    let dst = try!(File::create(&dst));
                    try!(render(dst, this, &item, false));
                    let m = match item.inner {
                        clean::ModuleItem(m) => m,
                        _ => unreachable!()
                    };
                    this.sidebar = build_sidebar(&m);
                    for item in m.items.move_iter() {
                        f(this,item);
                    }
                    Ok(())
                })
            }
            // Things which don't have names (like impls) don't get special
            // pages dedicated to them.
            _ if item.name.is_some() => {
                let dst = self.dst.join(item_path(&item));
                let dst = try!(File::create(&dst));
                render(dst, self, &item, true)
            }
            _ => Ok(())
        }
    }
}
impl<'a> Item<'a> {
    /// Returns true if the wrapped item is a module.
    fn ismodule(&self) -> bool {
        match self.item.inner {
            clean::ModuleItem(..) => true, _ => false
        }
    }
    /// Generate a url appropriate for an `href` attribute back to the source of
    /// this item.
    ///
    /// The url generated, when clicked, will redirect the browser back to the
    /// original source code.
    ///
    /// If `None` is returned, then a source link couldn't be generated. This
    /// may happen, for example, with externally inlined items where the source
    /// of their crate documentation isn't known.
    fn href(&self) -> Option<String> {
        // If this item is part of the local crate, then we're guaranteed to
        // know the span, so we plow forward and generate a proper url. The url
        // has anchors for the line numbers that we're linking to.
        if ast_util::is_local(self.item.def_id) {
            let mut path = Vec::new();
            clean_srcpath(self.item.source.filename.as_bytes(), |component| {
                path.push(component.to_string());
            });
            // Anchor is a single line number, or "lo-hi" for a span covering
            // multiple lines.
            let href = if self.item.source.loline == self.item.source.hiline {
                format!("{}", self.item.source.loline)
            } else {
                format!("{}-{}",
                        self.item.source.loline,
                        self.item.source.hiline)
            };
            Some(format!("{root}src/{krate}/{path}.html#{href}",
                         root = self.cx.root_path,
                         krate = self.cx.layout.krate,
                         path = path.connect("/"),
                         href = href))
        // If this item is not part of the local crate, then things get a little
        // trickier. We don't actually know the span of the external item, but
        // we know that the documentation on the other end knows the span!
        //
        // In this case, we generate a link to the *documentation* for this type
        // in the original crate. There's an extra URL parameter which says that
        // we want to go somewhere else, and the JS on the destination page will
        // pick it up and instantly redirect the browser to the source code.
        //
        // If we don't know where the external documentation for this crate is
        // located, then we return `None`.
        } else {
            let cache = cache_key.get().unwrap();
            let path = cache.external_paths.get(&self.item.def_id);
            let root = match *cache.extern_locations.get(&self.item.def_id.krate) {
                Remote(ref s) => s.to_string(),
                Local => self.cx.root_path.clone(),
                Unknown => return None,
            };
            Some(format!("{root}{path}/{file}?gotosrc={goto}",
                         root = root,
                         path = path.slice_to(path.len() - 1).connect("/"),
                         file = item_path(self.item),
                         goto = self.item.def_id.node))
        }
    }
}
impl<'a> fmt::Show for Item<'a> {
    /// Renders a full item page: breadcrumb header, stability marker,
    /// optional `[src]` link, then dispatches to the kind-specific renderer.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        // Write the breadcrumb trail header for the top
        try!(write!(fmt, "\n<h1 class='fqn'>"));
        match self.item.inner {
            clean::ModuleItem(ref m) => if m.is_crate {
                    try!(write!(fmt, "Crate "));
                } else {
                    try!(write!(fmt, "Module "));
                },
            clean::FunctionItem(..) => try!(write!(fmt, "Function ")),
            clean::TraitItem(..) => try!(write!(fmt, "Trait ")),
            clean::StructItem(..) => try!(write!(fmt, "Struct ")),
            clean::EnumItem(..) => try!(write!(fmt, "Enum ")),
            clean::PrimitiveItem(..) => try!(write!(fmt, "Primitive Type ")),
            _ => {}
        }
        let is_primitive = match self.item.inner {
            clean::PrimitiveItem(..) => true,
            _ => false,
        };
        // Primitives don't get a module breadcrumb: they aren't nested in a
        // real module path.
        if !is_primitive {
            let cur = self.cx.current.as_slice();
            let amt = if self.ismodule() { cur.len() - 1 } else { cur.len() };
            for (i, component) in cur.iter().enumerate().take(amt) {
                try!(write!(fmt, "<a href='{}index.html'>{}</a>::",
                            "../".repeat(cur.len() - i - 1),
                            component.as_slice()));
            }
        }
        try!(write!(fmt, "<a class='{}' href=''>{}</a>",
                    shortty(self.item), self.item.name.get_ref().as_slice()));
        // Write stability level
        try!(write!(fmt, "{}", Stability(&self.item.stability)));
        // Write `src` tag
        //
        // When this item is part of a `pub use` in a downstream crate, the
        // [src] link in the downstream documentation will actually come back to
        // this page, and this link will be auto-clicked. The `id` attribute is
        // used to find the link to auto-click.
        if self.cx.include_sources && !is_primitive {
            match self.href() {
                Some(l) => {
                    try!(write!(fmt,
                                "<a class='source' id='src-{}' \
                                    href='{}'>[src]</a>",
                                self.item.def_id.node, l));
                }
                None => {}
            }
        }
        try!(write!(fmt, "</h1>\n"));
        // Delegate the page body to the renderer for this item kind.
        match self.item.inner {
            clean::ModuleItem(ref m) => {
                item_module(fmt, self.cx, self.item, m.items.as_slice())
            }
            clean::FunctionItem(ref f) | clean::ForeignFunctionItem(ref f) =>
                item_function(fmt, self.item, f),
            clean::TraitItem(ref t) => item_trait(fmt, self.cx, self.item, t),
            clean::StructItem(ref s) => item_struct(fmt, self.item, s),
            clean::EnumItem(ref e) => item_enum(fmt, self.item, e),
            clean::TypedefItem(ref t) => item_typedef(fmt, self.item, t),
            clean::MacroItem(ref m) => item_macro(fmt, self.item, m),
            clean::PrimitiveItem(ref p) => item_primitive(fmt, self.item, p),
            _ => Ok(())
        }
    }
}
/// Returns the relative file name of the page generated for `item`: modules
/// get a `name/index.html` directory page, everything else a flat
/// `kind.name.html` file.
fn item_path(item: &clean::Item) -> String {
    let name = item.name.get_ref();
    match item.inner {
        clean::ModuleItem(..) => format!("{}/index.html", *name),
        _ => format!("{}.{}.html", shortty(item).to_static_str(), *name),
    }
}
/// Builds the fully qualified `a::b::item` path for `item` within the
/// module currently being rendered by `cx`.
fn full_path(cx: &Context, item: &clean::Item) -> String {
    let mut path = cx.current.connect("::");
    path.push_str("::");
    path.push_str(item.name.get_ref().as_slice());
    path
}
/// Unwraps an optional string, substituting the empty string for `None`.
fn blank<'a>(s: Option<&'a str>) -> &'a str {
    s.unwrap_or("")
}
/// Truncates an optional doc string to its first paragraph (everything up
/// to the first blank line); `None` becomes the empty string.
fn shorter<'a>(s: Option<&'a str>) -> &'a str {
    match s {
        None => "",
        Some(text) => match text.find_str("\n\n") {
            Some(pos) => text.slice_to(pos),
            None => text,
        },
    }
}
/// Emits the item's doc comment (if any) as a rendered markdown block.
fn document(w: &mut fmt::Formatter, item: &clean::Item) -> fmt::Result {
    match item.doc_value() {
        None => {}
        Some(docs) => {
            try!(write!(w, "<div class='docblock'>{}</div>", Markdown(docs)));
        }
    }
    Ok(())
}
/// Renders a module's index page: the module docs followed by one table
/// per item kind, with items grouped and sorted into a fixed kind order.
fn item_module(w: &mut fmt::Formatter, cx: &Context,
               item: &clean::Item, items: &[clean::Item]) -> fmt::Result {
    try!(document(w, item));
    // Sort indices rather than the items themselves; private/hidden items
    // are filtered out up front.
    let mut indices = range(0, items.len()).filter(|i| {
        !ignore_private_item(&items[*i])
    }).collect::<Vec<uint>>();
    // Ordering: same-kind items sort by name; across kinds, the match below
    // fixes the section order (reexports, primitives, modules, macros,
    // structs, enums, statics, ...), with source order breaking ties.
    fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: uint, idx2: uint) -> Ordering {
        if shortty(i1) == shortty(i2) {
            return i1.name.cmp(&i2.name);
        }
        match (&i1.inner, &i2.inner) {
            (&clean::ViewItemItem(ref a), &clean::ViewItemItem(ref b)) => {
                match (&a.inner, &b.inner) {
                    (&clean::ExternCrate(..), _) => Less,
                    (_, &clean::ExternCrate(..)) => Greater,
                    _ => idx1.cmp(&idx2),
                }
            }
            (&clean::ViewItemItem(..), _) => Less,
            (_, &clean::ViewItemItem(..)) => Greater,
            (&clean::PrimitiveItem(..), _) => Less,
            (_, &clean::PrimitiveItem(..)) => Greater,
            (&clean::ModuleItem(..), _) => Less,
            (_, &clean::ModuleItem(..)) => Greater,
            (&clean::MacroItem(..), _) => Less,
            (_, &clean::MacroItem(..)) => Greater,
            (&clean::StructItem(..), _) => Less,
            (_, &clean::StructItem(..)) => Greater,
            (&clean::EnumItem(..), _) => Less,
            (_, &clean::EnumItem(..)) => Greater,
            (&clean::StaticItem(..), _) => Less,
            (_, &clean::StaticItem(..)) => Greater,
            (&clean::ForeignFunctionItem(..), _) => Less,
            (_, &clean::ForeignFunctionItem(..)) => Greater,
            (&clean::ForeignStaticItem(..), _) => Less,
            (_, &clean::ForeignStaticItem(..)) => Greater,
            (&clean::TraitItem(..), _) => Less,
            (_, &clean::TraitItem(..)) => Greater,
            (&clean::FunctionItem(..), _) => Less,
            (_, &clean::FunctionItem(..)) => Greater,
            (&clean::TypedefItem(..), _) => Less,
            (_, &clean::TypedefItem(..)) => Greater,
            _ => idx1.cmp(&idx2),
        }
    }
    indices.sort_by(|&i1, &i2| cmp(&items[i1], &items[i2], i1, i2));
    debug!("{:?}", indices);
    // `curty` tracks the current section's item kind; a kind change closes
    // the previous table and opens a new section header.
    let mut curty = None;
    for &idx in indices.iter() {
        let myitem = &items[idx];
        let myty = Some(shortty(myitem));
        if myty != curty {
            if curty.is_some() {
                try!(write!(w, "</table>"));
            }
            curty = myty;
            let (short, name) = match myitem.inner {
                clean::ModuleItem(..)          => ("modules", "Modules"),
                clean::StructItem(..)          => ("structs", "Structs"),
                clean::EnumItem(..)            => ("enums", "Enums"),
                clean::FunctionItem(..)        => ("functions", "Functions"),
                clean::TypedefItem(..)         => ("types", "Type Definitions"),
                clean::StaticItem(..)          => ("statics", "Statics"),
                clean::TraitItem(..)           => ("traits", "Traits"),
                clean::ImplItem(..)            => ("impls", "Implementations"),
                clean::ViewItemItem(..)        => ("reexports", "Reexports"),
                clean::TyMethodItem(..)        => ("tymethods", "Type Methods"),
                clean::MethodItem(..)          => ("methods", "Methods"),
                clean::StructFieldItem(..)     => ("fields", "Struct Fields"),
                clean::VariantItem(..)         => ("variants", "Variants"),
                clean::ForeignFunctionItem(..) => ("ffi-fns", "Foreign Functions"),
                clean::ForeignStaticItem(..)   => ("ffi-statics", "Foreign Statics"),
                clean::MacroItem(..)           => ("macros", "Macros"),
                clean::PrimitiveItem(..)       => ("primitives", "Primitive Types"),
            };
            try!(write!(w,
                        "<h2 id='{id}' class='section-header'>\
                        <a href=\"#{id}\">{name}</a></h2>\n<table>",
                        id = short, name = name));
        }
        match myitem.inner {
            clean::StaticItem(ref s) | clean::ForeignStaticItem(ref s) => {
                // Statics show their initializer inline when it fits on one
                // line, otherwise a link to the definition.
                struct Initializer<'a>(&'a str, Item<'a>);
                impl<'a> fmt::Show for Initializer<'a> {
                    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                        let Initializer(s, item) = *self;
                        if s.len() == 0 { return Ok(()); }
                        try!(write!(f, "<code> = </code>"));
                        if s.contains("\n") {
                            match item.href() {
                                Some(url) => {
                                    write!(f, "<a href='{}'>[definition]</a>",
                                           url)
                                }
                                None => Ok(()),
                            }
                        } else {
                            write!(f, "<code>{}</code>", s.as_slice())
                        }
                    }
                }
                try!(write!(w, "
                    <tr>
                        <td>{}<code>{}static {}{}: {}</code>{}</td>
                        <td class='docblock'>{}&nbsp;</td>
                    </tr>
                ",
                ConciseStability(&myitem.stability),
                VisSpace(myitem.visibility),
                MutableSpace(s.mutability),
                *myitem.name.get_ref(),
                s.type_,
                Initializer(s.expr.as_slice(), Item { cx: cx, item: myitem }),
                Markdown(blank(myitem.doc_value()))));
            }
            clean::ViewItemItem(ref item) => {
                match item.inner {
                    clean::ExternCrate(ref name, ref src, _) => {
                        try!(write!(w, "<tr><td><code>extern crate {}",
                                      name.as_slice()));
                        match *src {
                            Some(ref src) => try!(write!(w, " = \"{}\"",
                                                           src.as_slice())),
                            None => {}
                        }
                        try!(write!(w, ";</code></td></tr>"));
                    }
                    clean::Import(ref import) => {
                        try!(write!(w, "<tr><td><code>{}{}</code></td></tr>",
                                      VisSpace(myitem.visibility),
                                      *import));
                    }
                }
            }
            _ => {
                if myitem.name.is_none() { continue }
                try!(write!(w, "
                    <tr>
                        <td>{stab}<a class='{class}' href='{href}'
                               title='{title}'>{}</a></td>
                        <td class='docblock short'>{}</td>
                    </tr>
                ",
                *myitem.name.get_ref(),
                Markdown(shorter(myitem.doc_value())),
                class = shortty(myitem),
                href = item_path(myitem),
                title = full_path(cx, myitem),
                stab = ConciseStability(&myitem.stability)));
            }
        }
    }
    write!(w, "</table>")
}
/// Renders a free (or foreign) function page: the signature inside a
/// `<pre>` block, followed by the function's documentation.
fn item_function(w: &mut fmt::Formatter, it: &clean::Item,
                 f: &clean::Function) -> fmt::Result {
    let name = it.name.get_ref().as_slice();
    try!(write!(w, "<pre class='rust fn'>{vis}{fn_style}fn \
                    {name}{generics}{decl}</pre>",
           fn_style = FnStyleSpace(f.fn_style),
           vis = VisSpace(it.visibility),
           generics = f.generics,
           decl = f.decl,
           name = name));
    document(w, it)
}
/// Renders a trait page: the trait definition, its documentation, the
/// required and provided methods, and the implementor list (completed at
/// load time by a per-trait implementors JS file).
fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
              t: &clean::Trait) -> fmt::Result {
    // Supertrait list, e.g. ": Clone + Eq".
    let mut parents = String::new();
    if t.parents.len() > 0 {
        parents.push_str(": ");
        for (i, p) in t.parents.iter().enumerate() {
            if i > 0 { parents.push_str(" + "); }
            parents.push_str(format!("{}", *p).as_slice());
        }
    }
    // Output the trait definition
    try!(write!(w, "<pre class='rust trait'>{}trait {}{}{} ",
                  VisSpace(it.visibility),
                  it.name.get_ref().as_slice(),
                  t.generics,
                  parents));
    // Required methods have no default body; provided methods do.
    let required = t.methods.iter().filter(|m| m.is_req()).collect::<Vec<&clean::TraitMethod>>();
    let provided = t.methods.iter().filter(|m| !m.is_req()).collect::<Vec<&clean::TraitMethod>>();
    if t.methods.len() == 0 {
        try!(write!(w, "{{ }}"));
    } else {
        try!(write!(w, "{{\n"));
        for m in required.iter() {
            try!(write!(w, "    "));
            try!(render_method(w, m.item()));
            try!(write!(w, ";\n"));
        }
        if required.len() > 0 && provided.len() > 0 {
            try!(w.write("\n".as_bytes()));
        }
        for m in provided.iter() {
            try!(write!(w, "    "));
            try!(render_method(w, m.item()));
            try!(write!(w, " {{ ... }}\n"));
        }
        try!(write!(w, "}}"));
    }
    try!(write!(w, "</pre>"));
    // Trait documentation
    try!(document(w, it));
    // Renders one method: a linkable header plus its own documentation.
    fn meth(w: &mut fmt::Formatter, m: &clean::TraitMethod) -> fmt::Result {
        try!(write!(w, "<h3 id='{}.{}' class='method'>{}<code>",
                      shortty(m.item()),
                      *m.item().name.get_ref(),
                      ConciseStability(&m.item().stability)));
        try!(render_method(w, m.item()));
        try!(write!(w, "</code></h3>"));
        try!(document(w, m.item()));
        Ok(())
    }
    // Output the documentation for each function individually
    if required.len() > 0 {
        try!(write!(w, "
            <h2 id='required-methods'>Required Methods</h2>
            <div class='methods'>
        "));
        for m in required.iter() {
            try!(meth(w, *m));
        }
        try!(write!(w, "</div>"));
    }
    if provided.len() > 0 {
        try!(write!(w, "
            <h2 id='provided-methods'>Provided Methods</h2>
            <div class='methods'>
        "));
        for m in provided.iter() {
            try!(meth(w, *m));
        }
        try!(write!(w, "</div>"));
    }
    let cache = cache_key.get().unwrap();
    try!(write!(w, "
        <h2 id='implementors'>Implementors</h2>
        <ul class='item-list' id='implementors-list'>
    "));
    match cache.implementors.find(&it.def_id) {
        Some(implementors) => {
            for i in implementors.iter() {
                try!(writeln!(w, "<li>{}<code>impl{} {} for {}</code></li>",
                              ConciseStability(&i.stability),
                              i.generics, i.trait_, i.for_));
            }
        }
        None => {}
    }
    try!(write!(w, "</ul>"));
    // The async script appends implementors from downstream crates.
    try!(write!(w, r#"<script type="text/javascript" async
                              src="{root_path}/implementors/{path}/{ty}.{name}.js">
                      </script>"#,
                root_path = Vec::from_elem(cx.current.len(), "..").connect("/"),
                path = if ast_util::is_local(it.def_id) {
                    cx.current.connect("/")
                } else {
                    let path = cache.external_paths.get(&it.def_id);
                    path.slice_to(path.len() - 1).connect("/")
                },
                ty = shortty(it).to_static_str(),
                name = *it.name.get_ref()));
    Ok(())
}
/// Renders a method signature (for both required trait methods and
/// methods with bodies) as a self-linking `fn` declaration.
fn render_method(w: &mut fmt::Formatter, meth: &clean::Item) -> fmt::Result {
    // Shared formatter for both method variants below.
    fn fun(w: &mut fmt::Formatter, it: &clean::Item, fn_style: ast::FnStyle,
           g: &clean::Generics, selfty: &clean::SelfTy,
           d: &clean::FnDecl) -> fmt::Result {
        write!(w, "{}fn <a href='#{ty}.{name}' class='fnname'>{name}</a>\
                   {generics}{decl}",
               match fn_style {
                   ast::UnsafeFn => "unsafe ",
                   _ => "",
               },
               ty = shortty(it),
               name = it.name.get_ref().as_slice(),
               generics = *g,
               decl = Method(selfty, d))
    }
    match meth.inner {
        clean::TyMethodItem(ref m) => {
            fun(w, meth, m.fn_style, &m.generics, &m.self_, &m.decl)
        }
        clean::MethodItem(ref m) => {
            fun(w, meth, m.fn_style, &m.generics, &m.self_, &m.decl)
        }
        // Only method-like items should ever reach this renderer.
        _ => unreachable!()
    }
}
/// Renders a struct page: the definition, its documentation, a field table
/// (for plain structs with visible fields), and all methods/impls.
fn item_struct(w: &mut fmt::Formatter, it: &clean::Item,
               s: &clean::Struct) -> fmt::Result {
    try!(write!(w, "<pre class='rust struct'>"));
    try!(render_struct(w,
                       it,
                       Some(&s.generics),
                       s.struct_type,
                       s.fields.as_slice(),
                       "",
                       true));
    try!(write!(w, "</pre>"));
    try!(document(w, it));
    // Stripped (hidden) fields are omitted from the field table.
    let mut fields = s.fields.iter().filter(|f| {
        match f.inner {
            clean::StructFieldItem(clean::HiddenStructField) => false,
            clean::StructFieldItem(clean::TypedStructField(..)) => true,
            _ => false,
        }
    }).peekable();
    match s.struct_type {
        doctree::Plain if fields.peek().is_some() => {
            try!(write!(w, "<h2 class='fields'>Fields</h2>\n<table>"));
            for field in fields {
                try!(write!(w, "<tr><td id='structfield.{name}'>\
                                  {stab}<code>{name}</code></td><td>",
                              stab = ConciseStability(&field.stability),
                              name = field.name.get_ref().as_slice()));
                try!(document(w, field));
                try!(write!(w, "</td></tr>"));
            }
            try!(write!(w, "</table>"));
        }
        _ => {}
    }
    render_methods(w, it)
}
/// Renders an enum page: the definition with all variants, the enum's
/// documentation, a per-variant documentation table (including struct
/// variant fields), and all methods/impls.
fn item_enum(w: &mut fmt::Formatter, it: &clean::Item,
             e: &clean::Enum) -> fmt::Result {
    try!(write!(w, "<pre class='rust enum'>{}enum {}{}",
                  VisSpace(it.visibility),
                  it.name.get_ref().as_slice(),
                  e.generics));
    if e.variants.len() == 0 && !e.variants_stripped {
        try!(write!(w, " {{}}"));
    } else {
        try!(write!(w, " {{\n"));
        for v in e.variants.iter() {
            try!(write!(w, "    "));
            let name = v.name.get_ref().as_slice();
            match v.inner {
                clean::VariantItem(ref var) => {
                    // Three variant shapes: C-like, tuple, and struct.
                    match var.kind {
                        clean::CLikeVariant => try!(write!(w, "{}", name)),
                        clean::TupleVariant(ref tys) => {
                            try!(write!(w, "{}(", name));
                            for (i, ty) in tys.iter().enumerate() {
                                if i > 0 {
                                    try!(write!(w, ", "))
                                }
                                try!(write!(w, "{}", *ty));
                            }
                            try!(write!(w, ")"));
                        }
                        clean::StructVariant(ref s) => {
                            try!(render_struct(w,
                                               v,
                                               None,
                                               s.struct_type,
                                               s.fields.as_slice(),
                                               "    ",
                                               false));
                        }
                    }
                }
                _ => unreachable!()
            }
            try!(write!(w, ",\n"));
        }
        if e.variants_stripped {
            try!(write!(w, "    // some variants omitted\n"));
        }
        try!(write!(w, "}}"));
    }
    try!(write!(w, "</pre>"));
    try!(document(w, it));
    if e.variants.len() > 0 {
        try!(write!(w, "<h2 class='variants'>Variants</h2>\n<table>"));
        for variant in e.variants.iter() {
            try!(write!(w, "<tr><td id='variant.{name}'>{stab}<code>{name}</code></td><td>",
                          stab = ConciseStability(&variant.stability),
                          name = variant.name.get_ref().as_slice()));
            try!(document(w, variant));
            match variant.inner {
                clean::VariantItem(ref var) => {
                    match var.kind {
                        clean::StructVariant(ref s) => {
                            // Struct variants additionally list their visible
                            // (non-stripped) fields in a nested table.
                            let mut fields = s.fields.iter().filter(|f| {
                                match f.inner {
                                    clean::StructFieldItem(ref t) => match *t {
                                        clean::HiddenStructField => false,
                                        clean::TypedStructField(..) => true,
                                    },
                                    _ => false,
                                }
                            });
                            try!(write!(w, "<h3 class='fields'>Fields</h3>\n
                                              <table>"));
                            for field in fields {
                                try!(write!(w, "<tr><td \
                                                  id='variant.{v}.field.{f}'>\
                                                  <code>{f}</code></td><td>",
                                              v = variant.name.get_ref().as_slice(),
                                              f = field.name.get_ref().as_slice()));
                                try!(document(w, field));
                                try!(write!(w, "</td></tr>"));
                            }
                            try!(write!(w, "</table>"));
                        }
                        _ => ()
                    }
                }
                _ => ()
            }
            try!(write!(w, "</td></tr>"));
        }
        try!(write!(w, "</table>"));
    }
    try!(render_methods(w, it));
    Ok(())
}
/// Renders a struct definition (also reused for struct enum variants).
///
/// `g` carries the generics when present, `tab` is the indentation prefix
/// for nested rendering, and `structhead` controls whether the literal
/// `struct ` keyword is emitted (false when rendering an enum variant).
fn render_struct(w: &mut fmt::Formatter, it: &clean::Item,
                 g: Option<&clean::Generics>,
                 ty: doctree::StructType,
                 fields: &[clean::Item],
                 tab: &str,
                 structhead: bool) -> fmt::Result {
    try!(write!(w, "{}{}{}",
                  VisSpace(it.visibility),
                  if structhead {"struct "} else {""},
                  it.name.get_ref().as_slice()));
    match g {
        Some(g) => try!(write!(w, "{}", *g)),
        None => {}
    }
    match ty {
        doctree::Plain => {
            // Named-field struct: one field per line, hidden fields noted
            // with an "omitted" comment.
            try!(write!(w, " {{\n{}", tab));
            let mut fields_stripped = false;
            for field in fields.iter() {
                match field.inner {
                    clean::StructFieldItem(clean::HiddenStructField) => {
                        fields_stripped = true;
                    }
                    clean::StructFieldItem(clean::TypedStructField(ref ty)) => {
                        try!(write!(w, "    {}{}: {},\n{}",
                                      VisSpace(field.visibility),
                                      field.name.get_ref().as_slice(),
                                      *ty,
                                      tab));
                    }
                    _ => unreachable!(),
                };
            }
            if fields_stripped {
                try!(write!(w, "    // some fields omitted\n{}", tab));
            }
            try!(write!(w, "}}"));
        }
        doctree::Tuple | doctree::Newtype => {
            // Tuple struct: positional fields; hidden fields render as `_`.
            try!(write!(w, "("));
            for (i, field) in fields.iter().enumerate() {
                if i > 0 {
                    try!(write!(w, ", "));
                }
                match field.inner {
                    clean::StructFieldItem(clean::HiddenStructField) => {
                        try!(write!(w, "_"))
                    }
                    clean::StructFieldItem(clean::TypedStructField(ref ty)) => {
                        try!(write!(w, "{}{}", VisSpace(field.visibility), *ty))
                    }
                    _ => unreachable!()
                }
            }
            try!(write!(w, ");"));
        }
        doctree::Unit => {
            try!(write!(w, ";"));
        }
    }
    Ok(())
}
/// Renders every impl block recorded for this item in the global cache:
/// first inherent methods, then manual trait impls, then derived trait impls
/// under their own heading.
fn render_methods(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result {
    // `cache_key` is task-local state populated earlier in rendering.
    match cache_key.get().unwrap().impls.find(&it.def_id) {
        Some(v) => {
            // Split inherent impls (no trait) from trait impls.
            let (non_trait, traits) = v.partitioned(|i| i.impl_.trait_.is_none());
            if non_trait.len() > 0 {
                try!(write!(w, "<h2 id='methods'>Methods</h2>"));
                for i in non_trait.iter() {
                    try!(render_impl(w, i));
                }
            }
            if traits.len() > 0 {
                try!(write!(w, "<h2 id='implementations'>Trait \
                                Implementations</h2>"));
                // `#[deriving]`-generated impls go in a separate subsection.
                let (derived, manual) = traits.partition(|i| i.impl_.derived);
                for i in manual.iter() {
                    try!(render_impl(w, i));
                }
                if derived.len() > 0 {
                    try!(write!(w, "<h3 id='derived_implementations'>Derived Implementations \
                                    </h3>"));
                    for i in derived.iter() {
                        try!(render_impl(w, i));
                    }
                }
            }
        }
        None => {}
    }
    Ok(())
}
/// Renders a single impl block: its `impl ... for ...` header, the impl's
/// own docs, each method, and — for trait impls — the trait's default
/// methods that were not overridden.
fn render_impl(w: &mut fmt::Formatter, i: &Impl) -> fmt::Result {
    try!(write!(w, "<h3 class='impl'>{}<code>impl{} ",
                ConciseStability(&i.stability),
                i.impl_.generics));
    match i.impl_.trait_ {
        Some(ref ty) => try!(write!(w, "{} for ", *ty)),
        None => {}
    }
    try!(write!(w, "{}</code></h3>", i.impl_.for_))
    match i.dox {
        Some(ref dox) => {
            try!(write!(w, "<div class='docblock'>{}</div>",
                        Markdown(dox.as_slice())));
        }
        None => {}
    }
    // Renders one method heading; `dox` controls whether the method's own
    // docs are emitted (suppressed for inherited default methods).
    fn docmeth(w: &mut fmt::Formatter, item: &clean::Item,
               dox: bool) -> fmt::Result {
        try!(write!(w, "<h4 id='method.{}' class='method'>{}<code>",
                    *item.name.get_ref(),
                    ConciseStability(&item.stability)));
        try!(render_method(w, item));
        try!(write!(w, "</code></h4>\n"));
        match item.doc_value() {
            Some(s) if dox => {
                try!(write!(w, "<div class='docblock'>{}</div>", Markdown(s)));
                Ok(())
            }
            Some(..) | None => Ok(())
        }
    }
    try!(write!(w, "<div class='impl-methods'>"));
    for meth in i.impl_.methods.iter() {
        try!(docmeth(w, meth, true));
    }
    // Emits trait default methods that this impl did not override.
    fn render_default_methods(w: &mut fmt::Formatter,
                              t: &clean::Trait,
                              i: &clean::Impl) -> fmt::Result {
        for method in t.methods.iter() {
            let n = method.item().name.clone();
            // Skip methods the impl provides itself.
            match i.methods.iter().find(|m| { m.name == n }) {
                Some(..) => continue,
                None => {}
            }
            try!(docmeth(w, method.item(), false));
        }
        Ok(())
    }
    // If we've implemented a trait, then also emit documentation for all
    // default methods which weren't overridden in the implementation block.
    match i.impl_.trait_ {
        Some(clean::ResolvedPath { did, .. }) => {
            try!({
                match cache_key.get().unwrap().traits.find(&did) {
                    Some(t) => try!(render_default_methods(w, t, &i.impl_)),
                    None => {}
                }
                Ok(())
            })
        }
        Some(..) | None => {}
    }
    try!(write!(w, "</div>"));
    Ok(())
}
/// Renders a `type` alias page: the alias signature in a code block
/// followed by the item's documentation.
fn item_typedef(w: &mut fmt::Formatter, it: &clean::Item,
                t: &clean::Typedef) -> fmt::Result {
    try!(write!(w, "<pre class='rust typedef'>type {}{} = {};</pre>",
                it.name.get_ref().as_slice(),
                t.generics,
                t.type_));
    document(w, it)
}
/// Renders the navigation sidebar: the breadcrumb of parent modules, then
/// one block per item kind (modules, structs, enums, ...) linking to
/// sibling items of that kind.
impl<'a> fmt::Show for Sidebar<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let cx = self.cx;
        let it = self.item;
        try!(write!(fmt, "<p class='location'>"));
        // For module pages the last path segment is the module itself,
        // which doesn't get a breadcrumb link.
        let len = cx.current.len() - if it.is_mod() {1} else {0};
        for (i, name) in cx.current.iter().take(len).enumerate() {
            if i > 0 {
                try!(write!(fmt, "​::"));
            }
            // Each ancestor links to its index.html via a relative prefix
            // of "../" segments taken from root_path.
            try!(write!(fmt, "<a href='{}index.html'>{}</a>",
                        cx.root_path
                          .as_slice()
                          .slice_to((cx.current.len() - i - 1) * 3),
                        *name));
        }
        try!(write!(fmt, "</p>"));
        // Renders one sidebar section for the item kind `short`
        // ("mod", "struct", ...), titled `longty`.
        fn block(w: &mut fmt::Formatter, short: &str, longty: &str,
                 cur: &clean::Item, cx: &Context) -> fmt::Result {
            let items = match cx.sidebar.find_equiv(&short) {
                Some(items) => items.as_slice(),
                None => return Ok(())
            };
            try!(write!(w, "<div class='block {}'><h2>{}</h2>", short, longty));
            for item in items.iter() {
                let curty = shortty(cur).to_static_str();
                // Highlight the entry for the page being rendered.
                let class = if cur.name.get_ref() == item &&
                               short == curty { "current" } else { "" };
                try!(write!(w, "<a class='{ty} {class}' href='{href}{path}'>\
                                {name}</a>",
                            ty = short,
                            class = class,
                            href = if curty == "mod" {"../"} else {""},
                            path = if short == "mod" {
                                format!("{}/index.html", item.as_slice())
                            } else {
                                format!("{}.{}.html", short, item.as_slice())
                            },
                            name = item.as_slice()));
            }
            try!(write!(w, "</div>"));
            Ok(())
        }
        try!(block(fmt, "mod", "Modules", it, cx));
        try!(block(fmt, "struct", "Structs", it, cx));
        try!(block(fmt, "enum", "Enums", it, cx));
        try!(block(fmt, "trait", "Traits", it, cx));
        try!(block(fmt, "fn", "Functions", it, cx));
        try!(block(fmt, "macro", "Macros", it, cx));
        Ok(())
    }
}
/// Builds the sidebar index for a module page: a map from item-kind
/// shorthand ("fn", "struct", ...) to the sorted names of the module's
/// items of that kind.
fn build_sidebar(m: &clean::Module) -> HashMap<String, Vec<String>> {
    let mut map = HashMap::new();
    for item in m.items.iter() {
        // Skip items that are never rendered.
        if ignore_private_item(item) { continue }
        let short = shortty(item).to_static_str();
        let myname = match item.name {
            None => continue,
            Some(ref s) => s.to_string(),
        };
        let v = map.find_or_insert_with(short.to_string(), |_| Vec::new());
        v.push(myname);
    }
    // Alphabetical order within each kind.
    for (_, items) in map.mut_iter() {
        items.as_mut_slice().sort();
    }
    return map;
}
/// Renders a source-code page: a gutter of line numbers (one span per
/// line, right-padded to the widest number) followed by the highlighted
/// source text.
impl<'a> fmt::Show for Source<'a> {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let Source(s) = *self;
        let lines = s.lines().count();
        // `cols` = number of decimal digits in the last line number.
        let mut cols = 0;
        let mut tmp = lines;
        while tmp > 0 {
            cols += 1;
            tmp /= 10;
        }
        try!(write!(fmt, "<pre class='line-numbers'>"));
        for i in range(1, lines + 1) {
            try!(write!(fmt, "<span id='{0:u}'>{0:1$u}</span>\n", i, cols));
        }
        try!(write!(fmt, "</pre>"));
        try!(write!(fmt, "{}", highlight::highlight(s.as_slice(), None, None)));
        Ok(())
    }
}
/// Renders a macro page: the highlighted macro source followed by its
/// documentation.
fn item_macro(w: &mut fmt::Formatter, it: &clean::Item,
              t: &clean::Macro) -> fmt::Result {
    try!(w.write(highlight::highlight(t.source.as_slice(), Some("macro"),
                                      None).as_bytes()));
    document(w, it)
}
/// Renders a primitive-type page: documentation first, then any methods
/// implemented for the primitive.
fn item_primitive(w: &mut fmt::Formatter,
                  it: &clean::Item,
                  _p: &clean::Primitive) -> fmt::Result {
    try!(document(w, it));
    render_methods(w, it)
}
/// Returns true when an item should be omitted from output: non-public
/// modules/primitives, and modules that are both empty and undocumented.
fn ignore_private_item(it: &clean::Item) -> bool {
    match it.inner {
        clean::ModuleItem(ref m) => {
            (m.items.len() == 0 && it.doc_value().is_none()) ||
                it.visibility != Some(ast::Public)
        }
        clean::PrimitiveItem(..) => it.visibility != Some(ast::Public),
        _ => false,
    }
}
| 39.595796 | 98 | 0.477556 |
1dcc1f081e444abd458861ce42d11c55d8de75d3
| 868 |
use std::collections::HashSet;
use std::iter::FromIterator;
impl Solution {
    /// Finds the single missing number in `nums`, which contains `n`
    /// distinct values drawn from the range `0..=n`.
    ///
    /// Uses the XOR-pairing trick: XOR-ing every index in `0..=n` together
    /// with every element of `nums` cancels all numbers that appear in both
    /// sets (`x ^ x == 0`, `x ^ 0 == x`), leaving exactly the missing one.
    ///
    /// O(n) time, O(1) extra space. Replaces the previous manual `while`
    /// loop and removes the large block of commented-out HashSet code.
    pub fn missing_number(nums: Vec<i32>) -> i32 {
        // Seed the accumulator with n itself — the one index that the
        // enumerate loop below never produces — then fold in each
        // (index, value) pair.
        nums.iter()
            .enumerate()
            .fold(nums.len() as i32, |acc, (i, &x)| acc ^ x ^ i as i32)
    }
}
| 24.111111 | 124 | 0.569124 |
f58df16dfd713bb97563f34cd64dad57d1c11a81
| 1,907 |
// Copyright 2019 The Starlark in Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Define the bool type for Starlark.
use crate::values::error::ValueError;
use crate::values::frozen::FrozenOnCreation;
use crate::values::*;
use std::cmp::Ordering;
use std::fmt;
use std::iter;
/// Allows a plain `bool` to be lifted into a Starlark `Value` via
/// `Value::from(b)` / `b.into()`.
impl From<bool> for Value {
    fn from(b: bool) -> Self {
        Value::new(b)
    }
}
/// Starlark `bool` type implementation.
///
/// Booleans print as `True` / `False`, convert to the integers 1 / 0,
/// hash to those same integers, and contain no child values.
impl TypedValue for bool {
    type Holder = Immutable<Self>;
    const TYPE: &'static str = "bool";
    const INLINE: bool = true;
    fn new_value(self) -> Value {
        Value(ValueInner::Bool(self))
    }
    fn to_repr_impl(&self, buf: &mut String) -> fmt::Result {
        // Starlark spells booleans with a leading capital letter.
        let text = if *self { "True" } else { "False" };
        write!(buf, "{}", text)
    }
    fn to_int(&self) -> Result<i64, ValueError> {
        // `true as i64 == 1`, `false as i64 == 0`.
        Ok(*self as i64)
    }
    fn to_bool(&self) -> bool {
        *self
    }
    fn get_hash(&self) -> Result<u64, ValueError> {
        // Hash is the integer conversion (1 for true, 0 for false).
        Ok(*self as u64)
    }
    fn values_for_descendant_check_and_freeze<'a>(
        &'a self,
    ) -> Box<dyn Iterator<Item = Value> + 'a> {
        // A boolean holds no nested values.
        Box::new(iter::empty())
    }
    fn equals(&self, other: &bool) -> Result<bool, ValueError> {
        Ok(*self == *other)
    }
    fn compare(&self, other: &bool) -> Result<Ordering, ValueError> {
        Ok((*self).cmp(other))
    }
}
// Booleans are immutable, so they count as frozen from the moment of creation.
impl FrozenOnCreation for bool {}
| 27.637681 | 75 | 0.628212 |
117b78c18adc652095d78c02194acae0937fe8fa
| 4,330 |
extern crate uuid;
use std::fmt;
use std::fs::File;
use std::io;
use std::io::Write;
use chrono::{DateTime, Utc};
use serde::de::{Error, Unexpected};
use serde::{Deserialize, Deserializer, Serialize};
use crate::category::Category;
use crate::episode_info::EpisodeInfo;
use self::uuid::Uuid;
/// One torrent entry as deserialized from the API response.
///
/// Several fields are only populated for particular response formats
/// (per the accessor docs on `impl Torrent`), hence the `Option`s.
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Torrent {
    title: Option<String>,
    filename: Option<String>,
    category: Category,
    // Magnet link.
    download: String,
    seeders: Option<u32>,
    leechers: Option<u32>,
    // Size in bytes.
    size: Option<u128>,
    pubdate: Option<DateTime<Utc>>,
    episode_info: Option<EpisodeInfo>,
    // The API encodes this flag as 0/1, hence the custom deserializer.
    #[serde(default, deserialize_with = "bool_from_int")]
    ranked: Option<bool>,
    info_page: Option<String>,
}
impl Torrent {
    /// Return the title.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn title(&self) -> Option<&String> {
        self.title.as_ref()
    }
    /// Return the filename
    ///
    /// Only available when `format` is set to `Format::Json`.
    pub fn filename(&self) -> Option<&String> {
        self.filename.as_ref()
    }
    /// Return the category that the torrent belongs to.
    pub fn category(&self) -> &Category {
        &self.category
    }
    /// Return a magnet link.
    pub fn download(&self) -> &str {
        self.download.as_str()
    }
    /// Return the number of seeders available.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn seeders(&self) -> Option<&u32> {
        self.seeders.as_ref()
    }
    /// Return the number of leechers.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn leechers(&self) -> Option<&u32> {
        self.leechers.as_ref()
    }
    /// Return the size in bytes.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn size(&self) -> Option<&u128> {
        self.size.as_ref()
    }
    /// Return the publication date.
    ///
    /// DateTime is always synchronize with UTC.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn pub_date(&self) -> Option<&DateTime<Utc>> {
        self.pubdate.as_ref()
    }
    /// Return the episode info.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn episode_info(&self) -> Option<&EpisodeInfo> {
        self.episode_info.as_ref()
    }
    /// Return true if it's a scene, rarbg or rartv releases, otherwise false.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn ranked(&self) -> Option<&bool> {
        self.ranked.as_ref()
    }
    /// Return an HTTP link that redirect to the torrent page.
    ///
    /// Only available when `format` is set to `Format::JsonExtended`.
    pub fn info_page(&self) -> Option<&String> {
        self.info_page.as_ref()
    }
    /// Export the torrent to a magnet file using its title, filename or UUID as filename.
    ///
    /// # Arguments
    ///
    /// * `path` - A string slice that holds a path to a **folder**
    ///
    /// # Errors
    ///
    /// Returns any I/O error raised while creating or writing the file.
    pub fn export(&self, path: &str) -> Result<String, io::Error> {
        // Prefer the title, fall back to the filename, and finally to a
        // fresh UUID so the export never fails for lack of a name.
        let filename = self
            .title()
            .or_else(|| self.filename())
            .cloned()
            .unwrap_or_else(|| Uuid::new_v4().to_string());
        let filepath = format!("{}/{}.magnet", path, filename);
        // `?` propagates both creation and write failures directly,
        // replacing the previous manual `if let Err` / `unwrap` sequence.
        File::create(&filepath)?.write_all(self.download.as_bytes())?;
        Ok(filepath)
    }
}
impl fmt::Display for Torrent {
    /// Formats the torrent by delegating to its `Debug` representation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
// https://github.com/serde-rs/serde/issues/1344#issuecomment-410309140
fn bool_from_int<'de, D>(deserializer: D) -> Result<Option<bool>, D::Error>
where
D: Deserializer<'de>,
{
match u8::deserialize(deserializer)? {
0 => Ok(Some(false)),
1 => Ok(Some(true)),
other => Err(Error::invalid_value(
Unexpected::Unsigned(other as u64),
&"zero or one",
)),
}
}
| 27.935484 | 90 | 0.582448 |
14641bea5d331517953dca4a10dcd2a682db6ee0
| 326 |
use crate::Parameters;
/// Specifies the constraints for computing a pairing in the MNT6-298 bilinear group.
///
/// This is the generic MNT6 pairing gadget from `ark-r1cs-std`, instantiated
/// with this crate's curve `Parameters`.
pub type PairingVar = ark_r1cs_std::pairing::mnt6::PairingVar<Parameters>;
/// Runs the shared constraint-system bilinearity test against this curve's
/// pairing gadget.
#[test]
fn test() {
    use crate::MNT6_298;
    ark_curve_constraint_tests::pairing::bilinearity_test::<MNT6_298, PairingVar>().unwrap()
}
| 29.636364 | 92 | 0.751534 |
185455a45f9b048484df23bd5d85a6a86f70cc2e
| 446 |
use alloc::string::String;
use ffxiv_parser::ExRow;
use crate::{NamedExRow, WrappedExRow};
/// Typed wrapper over one row of the `craftaction` Ex sheet.
pub struct CraftAction<'a> {
    // Borrowed raw row; accessors decode columns from it on demand.
    raw: ExRow<'a>,
}
impl<'a> NamedExRow<'a> for CraftAction<'a> {
    /// Decodes column 0 of the row as the craft action's name.
    fn name(&self) -> String {
        self.raw.string(0).decode()
    }
}
impl<'a> WrappedExRow<'a> for CraftAction<'a> {
    /// Wraps a raw row without copying it.
    fn new(raw: ExRow<'a>) -> Self {
        Self { raw }
    }
    /// Sheet name used to locate this Ex data file.
    fn ex_name() -> &'static str {
        "craftaction"
    }
}
| 17.153846 | 47 | 0.569507 |
484ea510470480622034346e6e4e6355dc6f1117
| 7,514 |
use disco_rs::{
error::{Error, ParamError},
params::*,
};
use disco_ecdh_example::key::soft::AsymKeyType;
use std::str::FromStr;
/// `Protocol` round-trips between `FromStr` and `Display`.
#[test]
fn test_protocol_string() {
    let parsed = "Noise".parse::<Protocol>().unwrap();
    assert_eq!(parsed.to_string(), "Noise");
}
/// Parsing an unsupported protocol name must panic via `unwrap`.
#[test]
#[should_panic]
fn test_protocol_failure() {
    "Disco".parse::<Protocol>().unwrap();
}
/// Each supported handshake pattern parses and formats back to itself.
#[test]
fn test_handshake_string() {
    for name in ["XX", "XK1", "KK1"].iter() {
        let handshake = name.parse::<Handshake>().unwrap();
        assert_eq!(handshake.to_string(), *name);
    }
}
/// Parsing an unsupported handshake pattern must panic via `unwrap`.
#[test]
#[should_panic]
fn test_handshake_failure() {
    "KN".parse::<Handshake>().unwrap();
}
/// Each supported key-exchange type parses and formats back to itself.
#[test]
fn test_key_exchange_string() {
    for name in ["25519", "P256"].iter() {
        let key_type = name.parse::<AsymKeyType>().unwrap();
        assert_eq!(key_type.to_string(), *name);
    }
}
/// Parsing an unsupported key-exchange name must panic via `unwrap`.
#[test]
#[should_panic]
fn test_key_exchange_failure() {
    "k256".parse::<AsymKeyType>().unwrap();
}
/// `StrobeVersion` round-trips between `FromStr` and `Display`.
#[test]
fn test_version_string() {
    let version = "STROBEv1.0.2".parse::<StrobeVersion>().unwrap();
    assert_eq!(version.to_string(), "STROBEv1.0.2");
}
/// Parsing an unrecognized version string must panic via `unwrap`.
#[test]
#[should_panic]
fn test_version_failure() {
    "FOOBAR".parse::<StrobeVersion>().unwrap();
}
/// Round-trips every supported handshake-pattern / key-exchange combination
/// through `FromStr` and `Display`.
///
/// Replaces ~90 copy-pasted parse/assert pairs with two nested loops over
/// the same pattern and key-type lists, so adding a new pattern or key type
/// is a one-line change.
#[test]
fn test_params_string() {
    // Handshake patterns, in the same order the original test covered them.
    let patterns = [
        "N", "K", "X", "NN", "KK", "XX", "IK", "NK", "NX", "XK1", "KK1",
        "NNpsk2",
    ];
    // Key-exchange algorithm names accepted by `AsymKeyType`.
    let key_types = ["25519", "P256", "K256"];
    for pattern in patterns.iter() {
        for key_type in key_types.iter() {
            let expected = format!("Noise_{}_{}_STROBEv1.0.2", pattern, key_type);
            let parsed: Params<AsymKeyType> = expected.parse().unwrap();
            assert_eq!(format!("{}", parsed), expected);
        }
    }
}
/// An unknown protocol prefix must produce `ParamError::InvalidProtocol`.
#[test]
fn test_params_failures() {
    match Params::<AsymKeyType>::from_str("Disco_XX_25519_STROBEv1.0.2") {
        Err(e) => assert_eq!(e, Error::Param(ParamError::InvalidProtocol)),
        Ok(_) => panic!("parsing an unknown protocol must fail"),
    }
}
| 44.72619 | 84 | 0.655709 |
f5cfde9c366af4ddcb759741379cafaa6a908b75
| 3,414 |
use core::fmt;
use core::pin::Pin;
use futures_core::future::Future;
use futures_core::ready;
use futures_core::stream::{FusedStream, Stream};
use futures_core::task::{Context, Poll};
#[cfg(feature = "sink")]
use futures_sink::Sink;
use pin_project_lite::pin_project;
// Pairs the user's accumulator state with the closure producing each step's
// future; stored as one unit so both are dropped together when the scan
// terminates.
struct StateFn<S, F> {
    state: S,
    f: F,
}
pin_project! {
    /// Stream for the [`scan`](super::StreamExt::scan) method.
    #[must_use = "streams do nothing unless polled"]
    pub struct Scan<St: Stream, S, Fut, F> {
        #[pin]
        stream: St,
        // State + closure; set to `None` once the scan has finished
        // ("done taking").
        state_f: Option<StateFn<S, F>>,
        // The in-flight future for the current step, if any.
        #[pin]
        future: Option<Fut>,
    }
}
impl<St, S, Fut, F> fmt::Debug for Scan<St, S, Fut, F>
where
    St: Stream + fmt::Debug,
    St::Item: fmt::Debug,
    S: fmt::Debug,
    Fut: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("Scan")
            .field("stream", &self.stream)
            // Only the state half of `state_f` is printed; the closure has
            // no Debug impl.
            .field("state", &self.state_f.as_ref().map(|s| &s.state))
            .field("future", &self.future)
            .field("done_taking", &self.is_done_taking())
            .finish()
    }
}
impl<St: Stream, S, Fut, F> Scan<St, S, Fut, F> {
    /// Checks if internal state is `None`.
    ///
    /// `poll_next` drops `state_f` when the scan closure's future yields
    /// `None`, permanently ending the stream.
    fn is_done_taking(&self) -> bool {
        self.state_f.is_none()
    }
}
impl<B, St, S, Fut, F> Scan<St, S, Fut, F>
where
    St: Stream,
    F: FnMut(&mut S, St::Item) -> Fut,
    Fut: Future<Output = Option<B>>,
{
    // Constructs the combinator with the caller's initial state; no future
    // is pending until the first item is pulled.
    pub(super) fn new(stream: St, initial_state: S, f: F) -> Self {
        Self { stream, state_f: Some(StateFn { state: initial_state, f }), future: None }
    }
    // Generates the standard get_ref/get_mut/get_pin_mut/into_inner
    // accessors delegating to the inner stream.
    delegate_access_inner!(stream, St, ());
}
impl<B, St, S, Fut, F> Stream for Scan<St, S, Fut, F>
where
    St: Stream,
    F: FnMut(&mut S, St::Item) -> Fut,
    Fut: Future<Output = Option<B>>,
{
    type Item = B;
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<B>> {
        // Once the state has been dropped the scan is over for good.
        if self.is_done_taking() {
            return Poll::Ready(None);
        }
        let mut this = self.project();
        Poll::Ready(loop {
            if let Some(fut) = this.future.as_mut().as_pin_mut() {
                // A step future is in flight: drive it to completion first.
                let item = ready!(fut.poll(cx));
                this.future.set(None);
                // `None` from the closure's future terminates the scan
                // permanently (drops state so `is_done_taking` is true).
                if item.is_none() {
                    *this.state_f = None;
                }
                break item;
            } else if let Some(item) = ready!(this.stream.as_mut().poll_next(cx)) {
                // No step pending: pull the next upstream item and start the
                // user closure's future for it.
                let state_f = this.state_f.as_mut().unwrap();
                this.future.set(Some((state_f.f)(&mut state_f.state, item)))
            } else {
                // Upstream exhausted.
                break None;
            }
        })
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        if self.is_done_taking() {
            (0, Some(0))
        } else {
            self.stream.size_hint() // can't know a lower bound, due to the predicate
        }
    }
}
impl<B, St, S, Fut, F> FusedStream for Scan<St, S, Fut, F>
where
    St: FusedStream,
    F: FnMut(&mut S, St::Item) -> Fut,
    Fut: Future<Output = Option<B>>,
{
    fn is_terminated(&self) -> bool {
        // Terminated when the scan ended itself, or when no step future is
        // pending and the underlying stream reports termination.
        self.is_done_taking() || self.future.is_none() && self.stream.is_terminated()
    }
}
// Forwarding impl of Sink from the underlying stream
// (lets a Scan over a Stream+Sink still be used as a Sink).
#[cfg(feature = "sink")]
impl<St, S, Fut, F, Item> Sink<Item> for Scan<St, S, Fut, F>
where
    St: Stream + Sink<Item>,
{
    type Error = St::Error;
    delegate_sink!(stream, Item);
}
| 26.465116 | 89 | 0.545987 |
2990114f22f2e5db2948e92a8b36ea945fc2c596
| 579 |
use NetMsg;
use td_rlua::{self, Lua, LuaPush};
use {NetResult};
/// Codec between Lua stack values and `NetMsg` network messages.
///
/// NOTE(review): method semantics below are inferred from the signatures
/// only — confirm against the concrete implementations (`ProtoTd`,
/// `ProtoJson`, `ProtoBin`, `ProtoText`).
pub trait EngineProtocol: Sized {
    // Builds a `NetMsg` from Lua stack values starting at `index`.
    fn pack_protocol(lua: *mut td_rlua::lua_State, index: i32) -> Option<NetMsg>;
    // Decodes the message back onto the Lua stack, returning an i32 result.
    fn unpack_protocol(lua: *mut td_rlua::lua_State, net_msg: &mut NetMsg) -> NetResult<i32>;
    // Renders the message as a string.
    fn convert_string(lua: *mut td_rlua::lua_State, net_msg: &mut NetMsg) -> NetResult<String>;
}
mod proto_td;
mod proto_json;
mod proto_bin;
mod proto_text;
pub use self::proto_td::ProtoTd;
pub use self::proto_json::ProtoJson;
pub use self::proto_bin::ProtoBin;
pub use self::proto_text::ProtoText;
| 27.571429 | 95 | 0.732297 |
2fb05436555a19e063e1b9455c95b2845d8d5191
| 7,549 |
// WARNING: This file was autogenerated by jni-bindgen. Any changes to this file may be lost!!!
#[cfg(any(feature = "all", feature = "android-hardware-camera2-CameraAccessException"))]
__jni_bindgen! {
    // NOTE(review): autogenerated JNI glue (see file header) — do not edit
    // by hand; regenerate with jni-bindgen instead.
    /// public class [CameraAccessException](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html)
    ///
    /// Required feature: android-hardware-camera2-CameraAccessException
    public class CameraAccessException ("android/hardware/camera2/CameraAccessException") extends crate::android::util::AndroidException {
        /// [CameraAccessException](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CameraAccessException(int))
        pub fn new_int<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::hardware::camera2::CameraAccessException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
            // class.path == "android/hardware/camera2/CameraAccessException", java.flags == PUBLIC, .name == "<init>", .descriptor == "(I)V"
            unsafe {
                let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0)];
                let (__jni_class, __jni_method) = __jni_env.require_class_method("android/hardware/camera2/CameraAccessException\0", "<init>\0", "(I)V\0");
                __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
            }
        }
        /// [CameraAccessException](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CameraAccessException(int,%20java.lang.String))
        ///
        /// Required features: "java-lang-String"
        #[cfg(any(feature = "all", all(feature = "java-lang-String")))]
        pub fn new_int_String<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::hardware::camera2::CameraAccessException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
            // class.path == "android/hardware/camera2/CameraAccessException", java.flags == PUBLIC, .name == "<init>", .descriptor == "(ILjava/lang/String;)V"
            unsafe {
                let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())];
                let (__jni_class, __jni_method) = __jni_env.require_class_method("android/hardware/camera2/CameraAccessException\0", "<init>\0", "(ILjava/lang/String;)V\0");
                __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
            }
        }
        /// [CameraAccessException](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CameraAccessException(int,%20java.lang.String,%20java.lang.Throwable))
        ///
        /// Required features: "java-lang-String", "java-lang-Throwable"
        #[cfg(any(feature = "all", all(feature = "java-lang-String", feature = "java-lang-Throwable")))]
        pub fn new_int_String_Throwable<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::String>>, arg2: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::Throwable>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::hardware::camera2::CameraAccessException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
            // class.path == "android/hardware/camera2/CameraAccessException", java.flags == PUBLIC, .name == "<init>", .descriptor == "(ILjava/lang/String;Ljava/lang/Throwable;)V"
            unsafe {
                let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0), __jni_bindgen::AsJValue::as_jvalue(&arg1.into()), __jni_bindgen::AsJValue::as_jvalue(&arg2.into())];
                let (__jni_class, __jni_method) = __jni_env.require_class_method("android/hardware/camera2/CameraAccessException\0", "<init>\0", "(ILjava/lang/String;Ljava/lang/Throwable;)V\0");
                __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
            }
        }
        /// [CameraAccessException](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CameraAccessException(int,%20java.lang.Throwable))
        ///
        /// Required features: "java-lang-Throwable"
        #[cfg(any(feature = "all", all(feature = "java-lang-Throwable")))]
        pub fn new_int_Throwable<'env>(__jni_env: &'env __jni_bindgen::Env, arg0: i32, arg1: impl __jni_bindgen::std::convert::Into<__jni_bindgen::std::option::Option<&'env crate::java::lang::Throwable>>) -> __jni_bindgen::std::result::Result<__jni_bindgen::Local<'env, crate::android::hardware::camera2::CameraAccessException>, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
            // class.path == "android/hardware/camera2/CameraAccessException", java.flags == PUBLIC, .name == "<init>", .descriptor == "(ILjava/lang/Throwable;)V"
            unsafe {
                let __jni_args = [__jni_bindgen::AsJValue::as_jvalue(&arg0), __jni_bindgen::AsJValue::as_jvalue(&arg1.into())];
                let (__jni_class, __jni_method) = __jni_env.require_class_method("android/hardware/camera2/CameraAccessException\0", "<init>\0", "(ILjava/lang/Throwable;)V\0");
                __jni_env.new_object_a(__jni_class, __jni_method, __jni_args.as_ptr())
            }
        }
        /// [getReason](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#getReason())
        pub fn getReason<'env>(&'env self) -> __jni_bindgen::std::result::Result<i32, __jni_bindgen::Local<'env, crate::java::lang::Throwable>> {
            // class.path == "android/hardware/camera2/CameraAccessException", java.flags == PUBLIC | FINAL, .name == "getReason", .descriptor == "()I"
            unsafe {
                let __jni_args = [];
                let __jni_env = __jni_bindgen::Env::from_ptr(self.0.env);
                let (__jni_class, __jni_method) = __jni_env.require_class_method("android/hardware/camera2/CameraAccessException\0", "getReason\0", "()I\0");
                __jni_env.call_int_method_a(self.0.object, __jni_method, __jni_args.as_ptr())
            }
        }
        // Mirrors of the Android SDK's CameraAccessException reason codes.
        /// public static final [CAMERA_DISABLED](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CAMERA_DISABLED)
        pub const CAMERA_DISABLED : i32 = 1;
        /// public static final [CAMERA_DISCONNECTED](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CAMERA_DISCONNECTED)
        pub const CAMERA_DISCONNECTED : i32 = 2;
        /// public static final [CAMERA_ERROR](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CAMERA_ERROR)
        pub const CAMERA_ERROR : i32 = 3;
        /// public static final [CAMERA_IN_USE](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#CAMERA_IN_USE)
        pub const CAMERA_IN_USE : i32 = 4;
        /// public static final [MAX_CAMERAS_IN_USE](https://developer.android.com/reference/android/hardware/camera2/CameraAccessException.html#MAX_CAMERAS_IN_USE)
        pub const MAX_CAMERAS_IN_USE : i32 = 5;
    }
}
| 86.770115 | 510 | 0.691615 |
bff139f857315e189de79604d1b09a86ded2aef7
| 1,511 |
#![feature(test)]
extern crate test;
use rand::{thread_rng, Rng};
use solana_runtime::{
accounts_db::AccountInfo,
accounts_index::{AccountSecondaryIndexes, AccountsIndex, BINS_DEFAULT},
};
use solana_sdk::pubkey::{self, Pubkey};
use test::Bencher;
#[bench]
fn bench_accounts_index(bencher: &mut Bencher) {
    // Number of distinct account keys used per slot ("fork").
    const NUM_PUBKEYS: usize = 10_000;
    let pubkeys: Vec<_> = (0..NUM_PUBKEYS).map(|_| pubkey::new_rand()).collect();
    const NUM_FORKS: u64 = 16;
    let mut reclaims = vec![];
    let index = AccountsIndex::<AccountInfo>::new(BINS_DEFAULT);
    // Warm the index: insert every key into each of the first NUM_FORKS
    // slots before any timing begins.
    for f in 0..NUM_FORKS {
        for pubkey in pubkeys.iter().take(NUM_PUBKEYS) {
            index.upsert(
                f,
                pubkey,
                &Pubkey::default(),
                &[],
                &AccountSecondaryIndexes::default(),
                AccountInfo::default(),
                &mut reclaims,
            );
        }
    }
    let mut fork = NUM_FORKS;
    let mut root = 0;
    // Timed body: upsert NUM_PUBKEYS randomly-chosen existing keys into a
    // fresh fork, then advance the root by one, per iteration.
    bencher.iter(|| {
        for _p in 0..NUM_PUBKEYS {
            let pubkey = thread_rng().gen_range(0, NUM_PUBKEYS);
            index.upsert(
                fork,
                &pubkeys[pubkey],
                &Pubkey::default(),
                &[],
                &AccountSecondaryIndexes::default(),
                AccountInfo::default(),
                &mut reclaims,
            );
            reclaims.clear();
        }
        index.add_root(root, false);
        root += 1;
        fork += 1;
    });
}
89e405c93670e1f6121a645db95541e27cf129fe
| 2,691 |
use crate::utils::{paths, span_lint};
use if_chain::if_chain;
use rustc::hir::*;
use rustc::lint::{LateLintPass, LintArray, LintPass};
use rustc::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// **What it does:** Checks for generics with `std::ops::Drop` as bounds.
///
/// **Why is this bad?** `Drop` bounds do not really accomplish anything.
/// A type may have compiler-generated drop glue without implementing the
/// `Drop` trait itself. The `Drop` trait also only has one method,
/// `Drop::drop`, and that function is by fiat not callable in user code.
/// So there is really no use case for using `Drop` in trait bounds.
///
/// The most likely use case of a drop bound is to distinguish between types
/// that have destructors and types that don't. Combined with specialization,
/// a naive coder would write an implementation that assumed a type could be
/// trivially dropped, then write a specialization for `T: Drop` that actually
/// calls the destructor. Except that doing so is not correct; String, for
/// example, doesn't actually implement Drop, but because String contains a
/// Vec, assuming it can be trivially dropped will leak memory.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// fn foo<T: Drop>() {}
/// ```
pub DROP_BOUNDS,
correctness,
"Bounds of the form `T: Drop` are useless"
}
/// Message attached to every `DROP_BOUNDS` emission; the trailing `\`
/// joins the two source lines into a single string without the newline.
const DROP_BOUNDS_SUMMARY: &str = "Bounds of the form `T: Drop` are useless. \
                                   Use `std::mem::needs_drop` to detect if a type has drop glue.";
declare_lint_pass!(DropBounds => [DROP_BOUNDS]);
impl<'a, 'tcx> LateLintPass<'a, 'tcx> for DropBounds {
    /// Lint `Drop` bounds written directly on a generic parameter,
    /// e.g. `fn foo<T: Drop>() {}`.
    fn check_generic_param(&mut self, cx: &rustc::lint::LateContext<'a, 'tcx>, p: &'tcx GenericParam) {
        p.bounds.iter().for_each(|bound| lint_bound(cx, bound));
    }
    /// Lint `Drop` bounds written in a `where` clause,
    /// e.g. `fn foo<T>() where T: Drop {}`.
    fn check_where_predicate(&mut self, cx: &rustc::lint::LateContext<'a, 'tcx>, p: &'tcx WherePredicate) {
        if let WherePredicate::BoundPredicate(WhereBoundPredicate { bounds, .. }) = p {
            bounds.iter().for_each(|bound| lint_bound(cx, bound));
        }
    }
}
fn lint_bound<'a, 'tcx>(cx: &rustc::lint::LateContext<'a, 'tcx>, bound: &'tcx GenericBound) {
if_chain! {
if let GenericBound::Trait(t, _) = bound;
if let Some(def_id) = t.trait_ref.path.res.opt_def_id();
if cx.match_def_path(def_id, &paths::DROP_TRAIT);
then {
span_lint(
cx,
DROP_BOUNDS,
t.span,
DROP_BOUNDS_SUMMARY
);
}
}
}
| 38.442857 | 107 | 0.609067 |
89a41bd5bf6baf5bf6f1af3d483f5f83ce97c326
| 2,383 |
//! A module that contains all the actions related to the styling of the terminal.
//! Like applying attributes to text and changing the foreground and background.
use std::io;
use super::*;
use crate::{Color, ITerminalColor};
use crossterm_utils::Result;
#[cfg(windows)]
use crossterm_utils::supports_ansi;
/// Allows you to style the terminal.
///
/// # Features:
///
/// - Foreground color (16 base colors)
/// - Background color (16 base colors)
/// - 256 color support (Windows 10 and UNIX only)
/// - RGB support (Windows 10 and UNIX only)
/// - Text Attributes like: bold, italic, underscore and crossed word ect (Windows 10 and UNIX only)
///
/// Check `/examples/` in the library for more specific examples.
pub struct TerminalColor {
    // Backend that performs the actual color changes. On Windows the
    // implementation is picked at runtime (ANSI when supported, WinAPI
    // otherwise), so it lives behind a trait object.
    #[cfg(windows)]
    color: Box<(dyn ITerminalColor + Sync + Send)>,
    // On UNIX ANSI escape sequences are always used, so the concrete
    // type is stored directly.
    #[cfg(unix)]
    color: AnsiColor,
}
impl TerminalColor {
    /// Create new instance whereon color related actions can be performed.
    pub fn new() -> TerminalColor {
        // On Windows the backend is chosen at runtime: ANSI when the console
        // supports it, classic WinAPI otherwise.
        #[cfg(windows)]
        let color = if supports_ansi() {
            Box::from(AnsiColor::new()) as Box<(dyn ITerminalColor + Sync + Send)>
        } else {
            WinApiColor::new() as Box<(dyn ITerminalColor + Sync + Send)>
        };

        #[cfg(unix)]
        let color = AnsiColor::new();

        TerminalColor { color }
    }

    /// Set the foreground color to the given color.
    pub fn set_fg(&self, color: Color) -> Result<()> {
        self.color.set_fg(color)
    }

    /// Set the background color to the given color.
    pub fn set_bg(&self, color: Color) -> Result<()> {
        self.color.set_bg(color)
    }

    /// Reset the terminal colors and attributes to default.
    pub fn reset(&self) -> Result<()> {
        self.color.reset()
    }

    /// Get available color count.
    /// (This does not always provide a good result.)
    pub fn get_available_color_count(&self) -> io::Result<u16> {
        use std::env;
        // Heuristic only: report 256 colors when $TERM mentions "256color",
        // 8 in every other case (including an unset or non-UTF-8 $TERM).
        let colors = match env::var_os("TERM") {
            Some(term) if term.to_str().unwrap_or("").contains("256color") => 256,
            _ => 8,
        };
        Ok(colors)
    }
}
/// Get a `TerminalColor` implementation whereon color related actions can be performed.
///
/// Convenience free function; equivalent to calling `TerminalColor::new()`.
pub fn color() -> TerminalColor {
    TerminalColor::new()
}
| 28.710843 | 100 | 0.595468 |
297ea96bb5649a5307c9cfff3e382b06e7086ef3
| 11,016 |
// Copyright 2020 The Grin Developers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Build a block to mine: gathers transactions from the pool, assembles
//! them into a block and returns it.
use crate::util::RwLock;
use chrono::prelude::{DateTime, NaiveDateTime, Utc};
use rand::{thread_rng, Rng};
use serde_json::{json, Value};
use std::sync::Arc;
use std::thread;
use std::time::Duration;
use crate::api;
use crate::chain;
use crate::common::types::Error;
use crate::core::core::verifier_cache::VerifierCache;
use crate::core::core::{Output, TxKernel};
use crate::core::libtx::secp_ser;
use crate::core::libtx::ProofBuilder;
use crate::core::{consensus, core, global};
use crate::keychain::{ExtKeychain, Identifier, Keychain};
use crate::pool;
/// Fees in block to use for coinbase amount calculation
/// (Duplicated from Grin wallet project)
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct BlockFees {
    /// Sum of the fees of all transactions included in the block.
    /// (De)serialized as either a string or a u64 for JSON compatibility.
    #[serde(with = "secp_ser::string_or_u64")]
    pub fees: u64,
    /// Height of the block being built.
    #[serde(with = "secp_ser::string_or_u64")]
    pub height: u64,
    /// Wallet key identifier for the coinbase output, once the wallet has
    /// chosen one; `None` asks the wallet to derive the next available key.
    pub key_id: Option<Identifier>,
}
impl BlockFees {
    /// Return a clone of the coinbase key id, if one has been set.
    pub fn key_id(&self) -> Option<Identifier> {
        self.key_id.clone()
    }
}
/// Response to build a coinbase output.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct CbData {
    /// Coinbase output built by the wallet.
    pub output: Output,
    /// Kernel accompanying the coinbase output.
    pub kernel: TxKernel,
    /// Key id the wallet used for the output (copied back into the
    /// `BlockFees` by the caller).
    pub key_id: Option<Identifier>,
}
// Ensure a block suitable for mining is built and returned
// If a wallet listener URL is not provided the reward will be "burnt"
// Warning: This call does not return until/unless a new block can be built
//
// Retry policy:
// - DuplicateCommitment: retry immediately with `key_id = None` so the
//   wallet derives a fresh key
// - WalletComm: sleep `wallet_retry_interval` seconds, then retry
// - any other error: log and retry after a short pause
pub fn get_block(
    chain: &Arc<chain::Chain>,
    tx_pool: &Arc<RwLock<pool::TransactionPool>>,
    verifier_cache: Arc<RwLock<dyn VerifierCache>>,
    key_id: Option<Identifier>,
    wallet_listener_url: Option<String>,
) -> (core::Block, BlockFees) {
    let wallet_retry_interval = 5;
    // get the latest chain state and build a block on top of it
    let mut result = build_block(
        chain,
        tx_pool,
        verifier_cache.clone(),
        key_id.clone(),
        wallet_listener_url.clone(),
    );
    while let Err(e) = result {
        // Start from the caller-supplied key; may be cleared below to force
        // a different coinbase commitment on the next attempt.
        let mut new_key_id = key_id.to_owned();
        match e {
            self::Error::Chain(c) => match c.kind() {
                chain::ErrorKind::DuplicateCommitment(_) => {
                    debug!(
                        "Duplicate commit for potential coinbase detected. Trying next derivation."
                    );
                    // use the next available key to generate a different coinbase commitment
                    new_key_id = None;
                }
                _ => {
                    error!("Chain Error: {}", c);
                }
            },
            self::Error::WalletComm(_) => {
                // WalletComm only originates from create_coinbase, which is
                // reached only when a listener URL was supplied — so the
                // unwrap below cannot panic.
                error!(
                    "Error building new block: Can't connect to wallet listener at {:?}; will retry",
                    wallet_listener_url.as_ref().unwrap()
                );
                thread::sleep(Duration::from_secs(wallet_retry_interval));
            }
            ae => {
                warn!("Error building new block: {:?}. Retrying.", ae);
            }
        }
        // only wait if we are still using the same key: a different coinbase commitment is unlikely
        // to have duplication
        if new_key_id.is_some() {
            thread::sleep(Duration::from_millis(100));
        }
        result = build_block(
            chain,
            tx_pool,
            verifier_cache.clone(),
            new_key_id,
            wallet_listener_url.clone(),
        );
    }
    // Loop invariant: the `while let Err` above only exits when `result`
    // is Ok, so this unwrap cannot panic.
    return result.unwrap();
}
/// Attempt to find a cuckoo proof-of-work solution for block `b` within a
/// fixed time budget.
///
/// Returns `true` (with `b.header.pow.proof` filled in) once a cycle whose
/// difficulty covers the block's target is found, or `false` when the
/// deadline expires without a valid solution. The nonce is incremented on
/// every failed attempt.
pub fn get_grin_solution(b: &mut core::Block, head: &core::BlockHeader) -> bool {
    // Time budget, in milliseconds.
    const ATTEMPT_TIME_MS: i64 = 10 * 1000;
    let deadline = Utc::now().timestamp_millis() + ATTEMPT_TIME_MS;
    debug!(
        "PoolCenter Mining Cuckoo{} for max {}s on {} @ {}.",
        global::min_edge_bits(),
        ATTEMPT_TIME_MS / 1000,
        b.header.total_difficulty(),
        b.header.height
    );
    let mut iter_count = 0;
    // Bug fix: the original loop compared `Utc::now().timestamp()` (seconds)
    // against a millisecond deadline, so the time limit was never effective
    // and the log messages claimed a 3s budget that didn't match the 10s
    // constant. Compare milliseconds to milliseconds.
    while Utc::now().timestamp_millis() < deadline {
        let mut ctx = global::create_pow_context::<u32>(
            head.height,
            global::min_edge_bits(),
            global::proofsize(),
            10,
        )
        .unwrap();
        ctx.set_header_nonce(b.header.pre_pow(), None, true)
            .unwrap();
        if let Ok(proofs) = ctx.find_cycles() {
            b.header.pow.proof = proofs[0].clone();
            let proof_diff = b.header.pow.to_difficulty(b.header.height);
            // The solution must cover the difficulty delta over the previous
            // head for the block to be valid.
            if proof_diff >= (b.header.total_difficulty() - head.total_difficulty()) {
                debug!(
                    "PoolCenter found solution for height = {} before deadline in {}, iter_count = {}",
                    b.header.height,
                    deadline - Utc::now().timestamp_millis(),
                    iter_count,
                );
                return true;
            }
        }
        b.header.pow.nonce += 1;
        iter_count += 1;
    }
    debug!("PoolCenter No solution found in {}s", ATTEMPT_TIME_MS / 1000);
    false
}
/// Builds a new block with the chain head as previous and eligible
/// transactions from the pool.
///
/// Steps, in order:
/// 1. pick a header timestamp strictly after the head's,
/// 2. compute the next difficulty, and `bits` (re-targeted once per
///    `DIFFICULTY_ADJUST_WINDOW` blocks, otherwise carried over),
/// 3. collect mineable transactions (any pool error falls back to an
///    empty block),
/// 4. obtain the coinbase — from the wallet listener, or burnt,
/// 5. assemble, validate, and set txhashset roots on the block.
///
/// `DuplicateCommitment` is surfaced distinctly so the caller can retry
/// with a different key id.
fn build_block(
    chain: &Arc<chain::Chain>,
    tx_pool: &Arc<RwLock<pool::TransactionPool>>,
    verifier_cache: Arc<RwLock<dyn VerifierCache>>,
    key_id: Option<Identifier>,
    wallet_listener_url: Option<String>,
) -> Result<(core::Block, BlockFees), Error> {
    let head = chain.head_header()?;

    // prepare the block header timestamp
    // Clamp forward so the new block's timestamp is strictly greater than
    // the head's even if the system clock lags.
    let mut now_sec = Utc::now().timestamp();
    let head_sec = head.timestamp.timestamp();
    if now_sec <= head_sec {
        now_sec = head_sec + 1;
    }

    // Determine the difficulty our block should be at.
    // Note: do not keep the difficulty_iter in scope (it has an active batch).
    let difficulty = consensus::next_difficulty(head.height + 1, chain.difficulty_iter()?);

    // `bits` is only re-targeted on window boundaries; between boundaries
    // the head's value is reused.
    let nbits = if (head.height + 1) % consensus::DIFFICULTY_ADJUST_WINDOW != 0 {
        head.bits
    } else {
        // First header of the adjustment window (saturating at genesis).
        let start_height = if head.height >= (consensus::DIFFICULTY_ADJUST_WINDOW - 1) {
            head.height - (consensus::DIFFICULTY_ADJUST_WINDOW - 1)
        } else {
            0
        };
        let first_head = chain.get_header_by_height(start_height)?;
        consensus::next_bit_difficulty(
            head.height,
            head.bits,
            head.timestamp.timestamp(),
            first_head.timestamp.timestamp(),
        )
    };

    // Extract current "mineable" transactions from the pool.
    // If this fails for *any* reason then fallback to an empty vec of txs.
    // This will allow us to mine an "empty" block if the txpool is in an
    // invalid (and unexpected) state.
    let txs = match tx_pool.read().prepare_mineable_transactions() {
        Ok(txs) => txs,
        Err(e) => {
            error!(
                "build_block: Failed to prepare mineable txs from txpool: {:?}",
                e
            );
            warn!("build_block: Falling back to mining empty block.");
            vec![]
        }
    };

    // build the coinbase and the block itself
    let fees = txs.iter().map(|tx| tx.fee()).sum();
    let height = head.height + 1;
    let block_fees = BlockFees {
        fees,
        key_id,
        height,
    };

    let (output, kernel, block_fees) = get_coinbase(wallet_listener_url, block_fees)?;
    let mut b = core::Block::from_reward(&head, txs, output, kernel, difficulty.difficulty)?;

    // making sure we're not spending time mining a useless block
    b.validate(&head.total_kernel_offset, verifier_cache)?;

    b.header.bits = nbits;
    b.header.pow.nonce = thread_rng().gen();
    b.header.pow.secondary_scaling = difficulty.secondary_scaling;
    b.header.timestamp = DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(now_sec, 0), Utc);

    debug!(
        "Built new block with {} inputs and {} outputs, block difficulty: {}, cumulative difficulty {}",
        b.inputs().len(),
        b.outputs().len(),
        difficulty.difficulty,
        b.header.total_difficulty().to_num(),
    );

    // Now set txhashset roots and sizes on the header of the block being built.
    match chain.set_txhashset_roots(&mut b) {
        Ok(_) => Ok((b, block_fees)),
        Err(e) => {
            match e.kind() {
                // If this is a duplicate commitment then likely trying to use
                // a key that has already been derived but not in the wallet
                // for some reason, allow caller to retry.
                chain::ErrorKind::DuplicateCommitment(e) => Err(Error::Chain(
                    chain::ErrorKind::DuplicateCommitment(e).into(),
                )),

                // Some other issue, possibly duplicate kernel
                _ => {
                    error!("Error setting txhashset root to build a block: {:?}", e);
                    Err(Error::Chain(
                        chain::ErrorKind::Other(format!("{:?}", e)).into(),
                    ))
                }
            }
        }
    }
}
/// Build a coinbase reward that nobody can spend ("burn" it).
///
/// Probably only want to do this when testing.
fn burn_reward(block_fees: BlockFees) -> Result<(core::Output, core::TxKernel, BlockFees), Error> {
    warn!("Burning block fees: {:?}", block_fees);
    // Fixed derivation path on a keychain seeded from randomness: the seed
    // is discarded, so the output is unspendable.
    let key_id = ExtKeychain::derive_key_id(1, 1, 0, 0, 0);
    let keychain = ExtKeychain::from_random_seed(global::is_floonet())?;
    let proof_builder = ProofBuilder::new(&keychain);
    let (output, kernel) = crate::core::libtx::reward::output(
        &keychain,
        &proof_builder,
        &key_id,
        block_fees.height,
        block_fees.fees,
        false,
    )?;
    Ok((output, kernel, block_fees))
}
// Connect to the wallet listener and get coinbase.
// Warning: If a wallet listener URL is not provided the reward will be "burnt"
fn get_coinbase(
    wallet_listener_url: Option<String>,
    block_fees: BlockFees,
) -> Result<(core::Output, core::TxKernel, BlockFees), Error> {
    match wallet_listener_url {
        // No listener configured: burn the reward (test/dev behavior).
        None => burn_reward(block_fees),
        Some(url) => {
            let res = create_coinbase(&url, &block_fees)?;
            // Carry the wallet-chosen key id forward so retries reuse it.
            let block_fees = BlockFees {
                key_id: res.key_id,
                ..block_fees
            };
            debug!("get_coinbase: {:?}", block_fees);
            Ok((res.output, res.kernel, block_fees))
        }
    }
}
/// Call the wallet API to create a coinbase output for the given block_fees.
/// Will retry based on default "retry forever with backoff" behavior.
///
/// Issues a JSON-RPC `build_coinbase` request to `{dest}/v2/foreign` and
/// deserializes the `result.Ok` payload into [`CbData`]. Every failure mode
/// (transport, malformed response, RPC-level error, bad payload) is reported
/// as `Error::WalletComm`.
fn create_coinbase(dest: &str, block_fees: &BlockFees) -> Result<CbData, Error> {
    let url = format!("{}/v2/foreign", dest);
    let req_body = json!({
        "jsonrpc": "2.0",
        "method": "build_coinbase",
        "id": 1,
        "params": {
            "block_fees": block_fees
        }
    });

    trace!("Sending build_coinbase request: {}", req_body);
    let req = api::client::create_post_request(url.as_str(), None, &req_body)?;
    let res: String = api::client::send_request(req).map_err(|e| {
        let report = format!(
            "Failed to get coinbase from {}. Is the wallet listening? {}",
            dest, e
        );
        error!("{}", report);
        Error::WalletComm(report)
    })?;

    // Bug fix: this was `serde_json::from_str(&res).unwrap()`, which panicked
    // the node on any malformed wallet response. Report it as a wallet
    // communication error like every other failure in this function.
    let res: Value = serde_json::from_str(&res).map_err(|e| {
        let report = format!(
            "Failed to parse build_coinbase response from {}: {}",
            dest, e
        );
        error!("{}", report);
        Error::WalletComm(report)
    })?;
    trace!("Response: {}", res);
    if res["error"] != json!(null) {
        let report = format!(
            "Failed to get coinbase from {}: Error: {}, Message: {}",
            dest, res["error"]["code"], res["error"]["message"]
        );
        error!("{}", report);
        return Err(Error::WalletComm(report));
    }

    let cb_data = res["result"]["Ok"].clone();
    trace!("cb_data: {}", cb_data);
    let ret_val = match serde_json::from_value::<CbData>(cb_data) {
        Ok(r) => r,
        Err(e) => {
            let report = format!("Couldn't deserialize CbData: {}", e);
            error!("{}", report);
            return Err(Error::WalletComm(report));
        }
    };

    Ok(ret_val)
}
| 29.612903 | 104 | 0.67529 |
91efd991a03dda5ff6a6a2cf9593aa16c4abcd6b
| 2,549 |
pub mod utils;
pub mod controllers;
pub mod models;
pub mod pages;
pub mod pagination;
pub mod readable;
use {
crate::controllers::{dashboard, edit, explore, item, search, story},
stry_backend::DataBackend,
warp::{
filters::BoxedFilter,
http::header::{HeaderMap, HeaderValue, CONTENT_SECURITY_POLICY, X_FRAME_OPTIONS},
reply::with,
Filter, Reply,
},
};
// const BOM: &str = include_str!("../bom.txt");
/// Build the complete warp filter tree for the site.
///
/// Initializes localization first (panics if the Fluent resources cannot be
/// loaded), then composes the route groups — dashboard, edit, story,
/// explore, search, item, static assets and finally the index — and
/// attaches security headers (CSP, Feature-Policy, X-Frame-Options) to
/// every reply.
pub fn route(backend: DataBackend) -> BoxedFilter<(impl Reply,)> {
    utils::init_fluent().expect("Unable to initialize Fluent");

    // Headers applied to every response at the end of the chain.
    let mut headers = HeaderMap::new();
    headers.insert(
        CONTENT_SECURITY_POLICY,
        HeaderValue::from_static("default-src 'self'"),
    );
    headers.insert("Feature-Policy", HeaderValue::from_static("accelerometer 'none'; ambient-light-sensor 'self'; battery 'none'; camera 'none'; gyroscope 'none'; geolocation 'none'; magnetometer 'none'; microphone 'none'; payment 'none'; web-share 'none'"));
    headers.insert(X_FRAME_OPTIONS, HeaderValue::from_static("DENY"));

    // /dashboard/* — the catch-all `dashboard::index` is tried last so the
    // more specific sub-pages match first.
    let dashboard: BoxedFilter<(_,)> = warp::path("dashboard")
        .and(
            dashboard::about(backend.clone())
                .or(dashboard::downloads(backend.clone()))
                .boxed()
                .or(dashboard::queue(backend.clone()))
                .boxed()
                .or(dashboard::updates(backend.clone()))
                .boxed()
                .or(dashboard::index(backend.clone()))
                .boxed(),
        )
        .boxed();

    // /edit/* — story editing plus chapter GET/POST handlers.
    let edit: BoxedFilter<(_,)> = warp::path("edit")
        .and(
            edit::story(backend.clone())
                .or(edit::chapter_get(backend.clone()))
                .or(edit::chapter_post(backend.clone()))
                .boxed(),
        )
        .boxed();

    // /story/* — chapter pages, then the story index as a fallback.
    let story: BoxedFilter<(_,)> = warp::path("story")
        .and(
            story::chapter(backend.clone())
                .or(story::index(backend.clone()))
                .boxed(),
        )
        .boxed();

    // Final composition: first match wins; `controllers::index` is the last
    // fallback and consumes the remaining `backend` clone.
    dashboard
        .or(edit)
        .boxed()
        .or(story)
        .boxed()
        .or(explore::explore(backend.clone()))
        .boxed()
        .or(search::index(backend.clone()))
        .boxed()
        .or(item::item(backend.clone()))
        .boxed()
        .or(controllers::assets::assets())
        .boxed()
        .or(akibisuto_stylus::route())
        .boxed()
        .or(controllers::index(backend))
        .boxed()
        .with(with::headers(headers))
        .boxed()
}
| 29.639535 | 259 | 0.54845 |
38b46d0a7f51f64f3637b9c5df3be19ac1dad6cc
| 824 |
#[macro_use]
extern crate criterion;
use criterion::Criterion;
use froop::Stream;
/// Benchmark pushing one value through a sink into a single `map` stream.
fn map_benchmark(c: &mut Criterion) {
    let source = Stream::sink();
    let doubled = source.stream().map(|x| x * 2);
    // Keep an active subscription so updates actually propagate.
    let _ = doubled.subscribe(|_| {});
    c.bench_function("map", move |b| b.iter(|| source.update(42)));
}
/// Benchmark a feedback cycle: a merged stream feeds an imitator whose
/// folded output is deduped and merged back in with the sink's values.
fn imitator_benchmark(c: &mut Criterion) {
    let imitator = Stream::imitator();
    // Accumulate incoming values below 10; `dedupe` stops the cycle from
    // re-emitting identical fold results.
    let accumulated = imitator
        .stream()
        .fold(1, |acc, v| if *v < 10 { acc + v } else { acc })
        .dedupe();
    let source = Stream::sink();
    let merged = Stream::merge(vec![accumulated, source.stream()]);
    imitator.imitate(&merged);
    let _ = merged.subscribe(|_| {});
    c.bench_function("imitator", move |b| b.iter(|| source.update(1)));
}
criterion_group!(benches, map_benchmark, imitator_benchmark);
criterion_main!(benches);
| 23.542857 | 69 | 0.609223 |
4bbad2ef93d22fcffbe36ee5a2137dc231e87d29
| 1,112 |
// Private submodules implementing the individual handler/manager types.
mod drag_icon_handler;
mod input_manager;
mod output_manager;
mod keyboard_handler;
mod pointer_handler;
mod touch_handler;
mod output_handler;
mod xdg_shell_v6_manager;
mod xdg_shell_v6_handler;
mod xdg_shell_manager;
mod xdg_shell_handler;
mod tablet_pad_handler;
mod tablet_tool_handler;

// Re-export the public handler traits and wrapper types so consumers can
// import everything from this module directly.
pub use self::drag_icon_handler::{DragIconHandler, DragIconListener};
pub use self::input_manager::{InputManager, InputManagerHandler};
pub use self::keyboard_handler::{KeyboardHandler, KeyboardWrapper};
pub use self::output_handler::{OutputHandler, UserOutput};
pub use self::output_manager::{OutputBuilder, OutputBuilderResult, OutputManager,
                               OutputManagerHandler};
pub use self::pointer_handler::{PointerHandler, PointerWrapper};
pub use self::tablet_pad_handler::{TabletPadHandler, TabletPadWrapper};
pub use self::tablet_tool_handler::{TabletToolHandler, TabletToolWrapper};
// The xdg-shell modules re-export everything wholesale.
pub use self::xdg_shell_v6_handler::*;
pub use self::xdg_shell_v6_manager::*;
pub use self::xdg_shell_handler::*;
pub use self::xdg_shell_manager::*;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.