hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5bc0dd90337d53b2990d78d3c38b9c8eb64afda3 | 1,143 | pub use self::{
asset_definition_loading_system::{AssetDefinitionLoader, AssetDefinitionLoadingSystem},
asset_discovery_system::{AssetDiscoverySystem, AssetDiscoverySystemData},
asset_id_mapping_system::{AssetIdMapper, AssetIdMappingSystem},
asset_part_loader::AssetPartLoader,
asset_part_loading_coordinator_system::{
AssetPartLoadingCoordinatorSystem, AssetPartLoadingCoordinatorSystemData,
},
asset_part_loading_system::AssetPartLoadingSystem,
asset_sequence_component_loading_system::{
AssetSequenceComponentLoader, AssetSequenceComponentLoadingSystem,
},
asset_sprites_definition_loading_system::{
AssetSpritesDefinitionLoader, AssetSpritesDefinitionLoadingSystem,
},
asset_texture_loading_system::{AssetTextureLoader, AssetTextureLoadingSystem},
};
mod asset_definition_loading_system;
mod asset_discovery_system;
mod asset_id_mapping_system;
mod asset_part_loader;
mod asset_part_loading_coordinator_system;
mod asset_part_loading_system;
mod asset_sequence_component_loading_system;
mod asset_sprites_definition_loading_system;
mod asset_texture_loading_system;
| 40.821429 | 91 | 0.84077 |
79872a197a199a89b75ff99eed40215bcf13d35e | 3,747 | use crate::consts::SELECTION_TOLERANCE;
use crate::document::DocumentMessageHandler;
use crate::frontend::utility_types::MouseCursorIcon;
use crate::input::keyboard::MouseMotion;
use crate::input::InputPreprocessorMessageHandler;
use crate::message_prelude::*;
use crate::misc::{HintData, HintGroup, HintInfo};
use crate::viewport_tools::tool::{DocumentToolData, Fsm, ToolActionHandlerData};
use graphene::intersection::Quad;
use graphene::Operation;
use glam::DVec2;
use serde::{Deserialize, Serialize};
#[derive(Default)]
pub struct Fill {
fsm_state: FillToolFsmState,
data: FillToolData,
}
#[remain::sorted]
#[impl_message(Message, ToolMessage, Fill)]
#[derive(PartialEq, Clone, Debug, Hash, Serialize, Deserialize)]
pub enum FillMessage {
Abort,
LeftMouseDown,
RightMouseDown,
}
impl<'a> MessageHandler<ToolMessage, ToolActionHandlerData<'a>> for Fill {
    /// Dispatch a tool message to the fill tool.
    ///
    /// `UpdateHints` and `UpdateCursor` are answered directly from the
    /// current FSM state; every other message is fed to the state machine,
    /// and the hint bar / cursor are refreshed only when the FSM actually
    /// changed state.
    fn process_action(&mut self, action: ToolMessage, data: ToolActionHandlerData<'a>, responses: &mut VecDeque<Message>) {
        if action == ToolMessage::UpdateHints {
            self.fsm_state.update_hints(responses);
            return;
        }
        if action == ToolMessage::UpdateCursor {
            self.fsm_state.update_cursor(responses);
            return;
        }
        // data is (document handler, document tool data, input preprocessor);
        // the tuple order mirrors the `Fsm::transition` parameter order.
        let new_state = self.fsm_state.transition(action, data.0, data.1, &mut self.data, data.2, responses);
        if self.fsm_state != new_state {
            self.fsm_state = new_state;
            self.fsm_state.update_hints(responses);
            self.fsm_state.update_cursor(responses);
        }
    }
    advertise_actions!(FillMessageDiscriminant; LeftMouseDown, RightMouseDown);
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum FillToolFsmState {
Ready,
}
impl Default for FillToolFsmState {
fn default() -> Self {
FillToolFsmState::Ready
}
}
#[derive(Clone, Debug, Default)]
struct FillToolData {}
impl Fsm for FillToolFsmState {
    type ToolData = FillToolData;

    /// Advance the state machine for one `ToolMessage`.
    ///
    /// On a left/right mouse press, hit-test the document under the cursor
    /// with a small tolerance quad and, if a layer is hit, fill it with the
    /// primary (LMB) or secondary (RMB) colour wrapped in a transaction.
    /// Any other message leaves the state unchanged.
    fn transition(
        self,
        event: ToolMessage,
        document: &DocumentMessageHandler,
        tool_data: &DocumentToolData,
        _data: &mut Self::ToolData,
        input: &InputPreprocessorMessageHandler,
        responses: &mut VecDeque<Message>,
    ) -> Self {
        use FillMessage::*;
        use FillToolFsmState::*;
        if let ToolMessage::Fill(event) = event {
            match (self, event) {
                (Ready, lmb_or_rmb) if lmb_or_rmb == LeftMouseDown || lmb_or_rmb == RightMouseDown => {
                    let mouse_pos = input.mouse.position;
                    // Expand the click point into a small axis-aligned quad so
                    // near-misses still select a layer.
                    let tolerance = DVec2::splat(SELECTION_TOLERANCE);
                    let quad = Quad::from_box([mouse_pos - tolerance, mouse_pos + tolerance]);
                    // `last()` takes the top-most layer intersecting the quad.
                    if let Some(path) = document.graphene_document.intersects_quad_root(quad).last() {
                        let color = match lmb_or_rmb {
                            LeftMouseDown => tool_data.primary_color,
                            RightMouseDown => tool_data.secondary_color,
                            // Excluded by the match guard above.
                            Abort => unreachable!(),
                        };
                        responses.push_back(DocumentMessage::StartTransaction.into());
                        responses.push_back(Operation::SetLayerFill { path: path.to_vec(), color }.into());
                        responses.push_back(DocumentMessage::CommitTransaction.into());
                    }
                    Ready
                }
                _ => self,
            }
        } else {
            self
        }
    }

    /// Publish the static hint-bar entries for the fill tool.
    fn update_hints(&self, responses: &mut VecDeque<Message>) {
        let hint_data = match self {
            FillToolFsmState::Ready => HintData(vec![HintGroup(vec![
                HintInfo {
                    key_groups: vec![],
                    mouse: Some(MouseMotion::Lmb),
                    label: String::from("Fill with Primary"),
                    plus: false,
                },
                HintInfo {
                    key_groups: vec![],
                    mouse: Some(MouseMotion::Rmb),
                    label: String::from("Fill with Secondary"),
                    plus: false,
                },
            ])]),
        };
        responses.push_back(FrontendMessage::UpdateInputHints { hint_data }.into());
    }

    /// The fill tool always uses the default mouse cursor.
    fn update_cursor(&self, responses: &mut VecDeque<Message>) {
        responses.push_back(FrontendMessage::UpdateMouseCursor { cursor: MouseCursorIcon::Default }.into());
    }
}
| 27.551471 | 120 | 0.708033 |
7a3256faed89db6839e3e2554bffd5fb42d92b0f | 7,266 | // this module adds some functionality based on the required implementations
// here like: `LinkedList::pop_back` or `Clone for LinkedList<T>`
// You are free to use anything in it, but it's mainly for the test framework.
mod pre_implemented;
#[derive(Debug)]
pub struct Node<T> {
pub value: T,
pub previous: *mut Node<T>,
pub next: *mut Node<T>,
}
impl<T> Node<T> {
    /// Create a detached node holding `value`; both neighbour pointers
    /// start out null (the C++ `nullptr` equivalent).
    fn new(value: T) -> Self {
        Node {
            value,
            previous: std::ptr::null_mut(),
            next: std::ptr::null_mut(),
        }
    }
}
#[derive(Debug)]
pub struct LinkedList<T> {
pub head: *mut Node<T>,
pub tail: *mut Node<T>,
size: usize,
}
pub struct Cursor<'a, T> {
list: &'a mut LinkedList<T>,
current: *mut Node<T>,
}
pub struct Iter<'a, T>(Option<&'a Node<T>>);
impl<T> LinkedList<T> {
    /// Create an empty list; `head` and `tail` stay null until the first
    /// insert through a cursor.
    pub fn new() -> Self {
        LinkedList {
            head: std::ptr::null_mut(),
            tail: std::ptr::null_mut(),
            size: 0,
        }
    }

    /// Number of elements currently in the list.
    pub fn len(&self) -> usize {
        self.size
    }

    /// True when the list holds no elements.
    pub fn is_empty(&self) -> bool {
        self.size == 0
    }

    /// Return a cursor positioned on the front element
    /// (on an empty list the cursor starts on a null position).
    pub fn cursor_front(&mut self) -> Cursor<'_, T> {
        // Copy the raw pointer first: `self` is mutably borrowed by the
        // cursor on the next line, so `self.head` can't be read afterwards.
        let head = self.head;
        Cursor {
            list: self,
            current: head,
        }
    }

    /// Return a cursor positioned on the back element
    pub fn cursor_back(&mut self) -> Cursor<'_, T> {
        let tail = self.tail;
        Cursor {
            list: self,
            current: tail,
        }
    }

    /// Return an iterator that moves from front to back
    pub fn iter(&self) -> Iter<'_, T> {
        if self.head.is_null() {
            return Iter(None);
        }
        // SAFETY: `head` is non-null here and points at a node owned by
        // this list, which outlives the returned iterator.
        unsafe { Iter(Some(&(*self.head))) }
    }
}
impl<T> Drop for LinkedList<T> {
    /// Free every node by repeatedly `take`-ing through a front cursor;
    /// each `take` re-boxes one raw node pointer so it is deallocated.
    fn drop(&mut self) {
        let mut ptr = self.cursor_front();
        while ptr.take().is_some() {}
    }
}
// the cursor is expected to act as if it is at the position of an element
// and it also has to work with and be able to insert into an empty list.
impl<T> Cursor<'_, T> {
    /// Take a mutable reference to the current element
    pub fn peek_mut(&mut self) -> Option<&mut T> {
        if self.current.is_null() {
            return None;
        }
        // SAFETY: `current` is non-null and points at a live node owned by
        // the list this cursor mutably borrows.
        unsafe { Some(&mut (*self.current).value) }
    }

    /// Move one position forward (towards the back) and
    /// return a reference to the new position.
    ///
    /// # Panics
    /// Panics if the cursor is already on a null position.
    pub fn next(&mut self) -> Option<&mut T> {
        if self.current.is_null() {
            panic!();
        }
        unsafe {
            self.current = (*self.current).next;
        }
        self.peek_mut()
    }

    /// Move one position backward (towards the front) and
    /// return a reference to the new position.
    ///
    /// # Panics
    /// Panics if the cursor is already on a null position.
    pub fn prev(&mut self) -> Option<&mut T> {
        if self.current.is_null() {
            panic!();
        }
        unsafe {
            self.current = (*self.current).previous;
        }
        self.peek_mut()
    }

    /// Remove and return the element at the current position and move the
    /// cursor to the neighboring element that's closest to the back (the
    /// successor when there is one, otherwise the predecessor).
    ///
    /// Bug fixes over the previous version: the middle-element branch never
    /// decremented `size`, and removing the head (or tail) left the new
    /// head's `previous` (or new tail's `next`) pointer dangling at freed
    /// memory, so a later `prev()`/`next()` dereferenced a freed node.
    /// The unlink below handles all positions uniformly and fixes both.
    pub fn take(&mut self) -> Option<T> {
        if self.current.is_null() || self.list.size == 0 {
            return None;
        }
        let removed = self.current;
        // SAFETY: `removed` is non-null and was created by `Box::into_raw`
        // in `insert_after`/`insert_before`; its neighbour pointers are
        // either null or point at live nodes of the same list.
        unsafe {
            let next_node = (*removed).next;
            let prev_node = (*removed).previous;
            if next_node.is_null() {
                // Removing the tail (or the only element).
                self.list.tail = prev_node;
            } else {
                (*next_node).previous = prev_node;
            }
            if prev_node.is_null() {
                // Removing the head (or the only element).
                self.list.head = next_node;
            } else {
                (*prev_node).next = next_node;
            }
            // Prefer moving towards the back; fall back to the predecessor
            // (null when the list just became empty).
            self.current = if next_node.is_null() { prev_node } else { next_node };
            self.list.size -= 1;
            // Re-box the raw pointer so the node is deallocated and its
            // value moved out.
            Some(Box::from_raw(removed).value)
        }
    }

    /// Insert `element` immediately after the cursor position.
    /// On an empty list the new node becomes head, tail and the cursor
    /// position; otherwise the cursor stays where it was.
    pub fn insert_after(&mut self, element: T) {
        let ptr = Box::into_raw(Box::new(Node::new(element)));
        if self.current.is_null() || self.list.size == 0 {
            // Empty LinkedList.
            // NOTE(review): a null cursor on a *non-empty* list would also
            // land here and overwrite head/tail, leaking the old nodes —
            // confirm cursors can never detach while the list is non-empty.
            self.list.head = ptr;
            self.list.tail = self.list.head;
            self.current = self.list.head;
        } else if self.list.size == 1 {
            // Single node: `current` is both head and tail; append behind it.
            unsafe {
                (*self.list.head).next = ptr;
                (*ptr).previous = self.list.head;
                self.list.tail = ptr
            }
        } else {
            if self.current == self.list.tail {
                self.list.tail = ptr; // new tail
            }
            unsafe {
                let next_node = (*self.current).next;
                // Link the new node between `current` and its old successor.
                (*ptr).previous = self.current;
                (*ptr).next = next_node;
                (*self.current).next = ptr;
                if !next_node.is_null() {
                    (*next_node).previous = ptr;
                }
            }
        }
        self.list.size += 1;
    }

    /// Insert `element` immediately before the cursor position.
    /// On an empty list the new node becomes head, tail and the cursor
    /// position; otherwise the cursor stays where it was.
    pub fn insert_before(&mut self, element: T) {
        let ptr = Box::into_raw(Box::new(Node::new(element)));
        if self.current.is_null() || self.list.size == 0 {
            // Empty LinkedList (same NOTE as in `insert_after` applies).
            self.list.head = ptr;
            self.list.tail = self.list.head;
            self.current = self.list.head;
        } else if self.list.size == 1 {
            // Single node: `current` is both head and tail; prepend before it.
            unsafe {
                (*self.list.head).previous = ptr;
                (*ptr).next = self.list.head;
            }
            self.list.head = ptr;
        } else {
            if self.current == self.list.head {
                self.list.head = ptr; // new head
            }
            unsafe {
                let prev_node = (*self.current).previous;
                // Link the new node between `current` and its old predecessor.
                (*ptr).previous = prev_node;
                (*ptr).next = self.current;
                (*self.current).previous = ptr;
                if !prev_node.is_null() {
                    (*prev_node).next = ptr;
                }
            }
        }
        self.list.size += 1;
    }
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    /// Yield a reference to the current node's value, then advance to the
    /// following node; iteration stops once `next` is null.
    fn next(&mut self) -> Option<&'a T> {
        let node = self.0?;
        // SAFETY: a non-null `next` pointer always refers to a live node
        // owned by the same list; `as_ref` maps null to `None`.
        self.0 = unsafe { node.next.as_ref() };
        Some(&node.value)
    }
}
impl<T> Default for LinkedList<T> {
fn default() -> Self {
Self::new()
}
}
| 28.054054 | 81 | 0.487201 |
9074d6cce7960a07c80f6b04c6d554f8f8cefc8e | 4,626 | //! Tests auto-converted from "sass-spec/spec/core_functions/color/fade_in.hrx"
mod error {
mod bounds {
#[test]
fn too_high() {
assert_eq!(
crate::rsass(
"a {b: fade-in(red, 1.001)}\
\n"
)
.unwrap_err(),
"Error: $amount: Expected 1.001 to be within 0 and 1.\
\n ,\
\n1 | a {b: fade-in(red, 1.001)}\
\n | ^^^^^^^^^^^^^^^^^^^\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
#[test]
fn too_low() {
assert_eq!(
crate::rsass(
"a {b: fade-in(red, -0.001)}\
\n"
)
.unwrap_err(),
"Error: $amount: Expected -0.001 to be within 0 and 1.\
\n ,\
\n1 | a {b: fade-in(red, -0.001)}\
\n | ^^^^^^^^^^^^^^^^^^^^\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
}
#[test]
fn too_few_args() {
assert_eq!(
crate::rsass(
"a {b: fade-in(red)}\
\n"
)
.unwrap_err(),
"Error: Missing argument $amount.\
\n ,--> input.scss\
\n1 | a {b: fade-in(red)}\
\n | ^^^^^^^^^^^^ invocation\
\n \'\
\n ,--> sass:color\
\n1 | @function fade-in($color, $amount) {\
\n | ======================== declaration\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
#[test]
fn too_many_args() {
assert_eq!(
crate::rsass(
"a {b: fade-in(red, 0.1, 2)}\
\n"
)
.unwrap_err(),
"Error: Only 2 arguments allowed, but 3 were passed.\
\n ,--> input.scss\
\n1 | a {b: fade-in(red, 0.1, 2)}\
\n | ^^^^^^^^^^^^^^^^^^^^ invocation\
\n \'\
\n ,--> sass:color\
\n1 | @function fade-in($color, $amount) {\
\n | ======================== declaration\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
mod test_type {
#[test]
fn alpha() {
assert_eq!(
crate::rsass(
"a {b: fade-in(red, blue)}\
\n"
)
.unwrap_err(),
"Error: $amount: blue is not a number.\
\n ,\
\n1 | a {b: fade-in(red, blue)}\
\n | ^^^^^^^^^^^^^^^^^^\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
#[test]
fn color() {
assert_eq!(
crate::rsass(
"a {b: fade-in(1, 0.1)}\
\n"
)
.unwrap_err(),
"Error: $color: 1 is not a color.\
\n ,\
\n1 | a {b: fade-in(1, 0.1)}\
\n | ^^^^^^^^^^^^^^^\
\n \'\
\n input.scss 1:7 root stylesheet",
);
}
}
}
#[test]
fn max() {
assert_eq!(
crate::rsass(
"a {b: fade-in(rgba(red, 0.5), 1)}\
\n"
)
.unwrap(),
"a {\
\n b: red;\
\n}\
\n"
);
}
#[test]
fn max_remaining() {
assert_eq!(
crate::rsass(
"a {b: fade-in(rgba(red, 0.5), 0.5)}\
\n"
)
.unwrap(),
"a {\
\n b: red;\
\n}\
\n"
);
}
#[test]
fn middle() {
assert_eq!(
crate::rsass(
"a {b: fade-in(rgba(red, 0.5), 0.14)}\
\n"
)
.unwrap(),
"a {\
\n b: rgba(255, 0, 0, 0.64);\
\n}\
\n"
);
}
#[test]
fn min() {
assert_eq!(
crate::rsass(
"a {b: fade-in(rgba(red, 0.5), 0)}\
\n"
)
.unwrap(),
"a {\
\n b: rgba(255, 0, 0, 0.5);\
\n}\
\n"
);
}
#[test]
fn named() {
assert_eq!(
crate::rsass(
"a {b: fade-in($color: rgba(red, 0.5), $amount: 0.14)}\
\n"
)
.unwrap(),
"a {\
\n b: rgba(255, 0, 0, 0.64);\
\n}\
\n"
);
}
#[test]
fn opacify() {
assert_eq!(
crate::rsass(
"a {b: opacify($color: rgba(red, 0.5), $amount: 0.14)}\
\n"
)
.unwrap(),
"a {\
\n b: rgba(255, 0, 0, 0.64);\
\n}\
\n"
);
}
| 23.482234 | 79 | 0.321012 |
8a3b0dc5673775ec54fee5707132ecb6ff56744f | 3,402 | use crate::convert::{Convert, Error, TryConvert};
use crate::sys;
use crate::types::{Ruby, Rust};
use crate::value::Value;
use crate::Artichoke;
impl Convert<bool> for Value {
    type From = Rust;
    type To = Ruby;

    /// Map a Rust `bool` onto the interpreter's `true`/`false` value.
    fn convert(interp: &Artichoke, value: bool) -> Self {
        let inner = if value {
            unsafe { sys::mrb_sys_true_value() }
        } else {
            unsafe { sys::mrb_sys_false_value() }
        };
        Self::new(interp, inner)
    }
}
impl TryConvert<Value> for bool {
    type From = Ruby;
    type To = Rust;

    /// Convert a Ruby value into a Rust `bool`.
    ///
    /// Succeeds only for Ruby `true`/`false`; any other Ruby type yields a
    /// conversion `Error` carrying the source type tag.
    unsafe fn try_convert(
        _interp: &Artichoke,
        value: Value,
    ) -> Result<Self, Error<Self::From, Self::To>> {
        match value.ruby_type() {
            Ruby::Bool => {
                let inner = value.inner();
                if sys::mrb_sys_value_is_true(inner) {
                    Ok(true)
                } else if sys::mrb_sys_value_is_false(inner) {
                    Ok(false)
                } else {
                    // This should be unreachable: a value tagged `Bool` is
                    // either the true singleton or the false singleton.
                    Err(Error {
                        from: Ruby::Bool,
                        to: Rust::Bool,
                    })
                }
            }
            type_tag => Err(Error {
                from: type_tag,
                to: Rust::Bool,
            }),
        }
    }
}
#[cfg(test)]
mod tests {
use quickcheck_macros::quickcheck;
use crate::convert::{Convert, Error, TryConvert};
use crate::eval::Eval;
use crate::sys;
use crate::types::{Ruby, Rust};
use crate::value::Value;
#[test]
fn fail_convert() {
let interp = crate::interpreter().expect("init");
// get a mrb_value that can't be converted to a primitive type.
let value = interp.eval("Object.new").expect("eval");
let expected = Error {
from: Ruby::Object,
to: Rust::Bool,
};
let result = unsafe { <bool>::try_convert(&interp, value) }.map(|_| ());
assert_eq!(result, Err(expected));
}
#[quickcheck]
fn convert_to_bool(b: bool) -> bool {
let interp = crate::interpreter().expect("init");
let value = Value::convert(&interp, b);
value.ruby_type() == Ruby::Bool
}
#[quickcheck]
fn bool_with_value(b: bool) -> bool {
let interp = crate::interpreter().expect("init");
let value = Value::convert(&interp, b);
let inner = value.inner();
let is_false = unsafe { sys::mrb_sys_value_is_false(inner) };
let is_true = unsafe { sys::mrb_sys_value_is_true(inner) };
let is_nil = unsafe { sys::mrb_sys_value_is_nil(inner) };
if b {
is_true && !is_nil
} else {
is_false && !is_nil
}
}
#[quickcheck]
fn roundtrip(b: bool) -> bool {
let interp = crate::interpreter().expect("init");
let value = Value::convert(&interp, b);
let value = unsafe { bool::try_convert(&interp, value) }.expect("convert");
value == b
}
#[quickcheck]
fn roundtrip_err(i: i64) -> bool {
let interp = crate::interpreter().expect("init");
let value = Value::convert(&interp, i);
let value = unsafe { bool::try_convert(&interp, value) };
let expected = Err(Error {
from: Ruby::Fixnum,
to: Rust::Bool,
});
value == expected
}
}
| 29.327586 | 83 | 0.518519 |
c1f4bbf38920a6c3a6b2f94123247bed029071f5 | 741 | use crate::ffi::*;
use std::cell::RefCell;
use std::thread_local;
thread_local! {
pub static MODULE: RefCell<Option<*const NapiModule>> = RefCell::new(None);
}
type napi_addon_register_func =
extern "C" fn(env: napi_env, exports: napi_value) -> napi_value;
#[repr(C)]
#[derive(Debug, Clone)]
pub struct NapiModule {
pub nm_version: i32,
pub nm_flags: u32,
nm_filename: *const c_char,
pub nm_register_func: napi_addon_register_func,
nm_modname: *const c_char,
nm_priv: *mut c_void,
reserved: [*mut c_void; 4],
}
#[napi_sym]
fn napi_module_register(module: *const NapiModule) -> Result {
    // Stash the module pointer in this thread's MODULE slot. The slot must
    // be empty, i.e. each thread registers at most one module; a second
    // registration on the same thread trips the assert.
    MODULE.with(|cell| {
        let mut slot = cell.borrow_mut();
        assert!(slot.is_none());
        slot.replace(module);
    });
    Ok(())
}
| 22.454545 | 77 | 0.689609 |
038003a5e4e19e1caf2b23596811e480f3692a16 | 3,008 | //! Cross-platform interface to the `errno` variable.
#![cfg_attr(target_os = "wasi", feature(thread_local))]
#[cfg(unix)] extern crate libc;
#[cfg(windows)] extern crate winapi;
#[cfg(target_os = "dragonfly")] extern crate errno_dragonfly;
#[cfg(target_os = "wasi")] extern crate libc;
// FIXME(#10): Rust < 1.11 doesn't support cfg_attr on path
/*
#[cfg_attr(unix, path = "unix.rs")]
#[cfg_attr(windows, path = "windows.rs")]
mod sys;
*/
#[cfg(unix)] mod unix;
#[cfg(unix)] mod sys { pub use unix::*; }
#[cfg(windows)] mod windows;
#[cfg(windows)] mod sys { pub use windows::*; }
#[cfg(target_os = "wasi")] mod wasi;
#[cfg(target_os = "wasi")] mod sys { pub use wasi::*; }
use std::fmt;
use std::io;
/// Wraps a platform-specific error code.
///
/// The `Display` instance maps the code to a human-readable string. It
/// calls [`strerror_r`][1] under POSIX, and [`FormatMessageW`][2] on
/// Windows.
///
/// [1]: http://pubs.opengroup.org/onlinepubs/009695399/functions/strerror.html
/// [2]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms679351%28v=vs.85%29.aspx
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Hash)]
pub struct Errno(pub i32);
impl fmt::Debug for Errno {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
sys::with_description(*self, |desc| {
fmt.debug_struct("Errno")
.field("code", &self.0)
.field("description", &desc.ok())
.finish()
})
}
}
impl fmt::Display for Errno {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
sys::with_description(*self, |desc| match desc {
Ok(desc) => fmt.write_str(&desc),
Err(fm_err) => write!(
fmt, "OS error {} ({} returned error {})",
self.0, sys::STRERROR_NAME, fm_err.0),
})
}
}
impl Into<i32> for Errno {
fn into(self) -> i32 {
self.0
}
}
impl From<Errno> for io::Error {
fn from(errno: Errno) -> Self {
io::Error::from_raw_os_error(errno.0)
}
}
/// Returns the platform-specific value of `errno`.
pub fn errno() -> Errno {
sys::errno()
}
/// Sets the platform-specific value of `errno`.
pub fn set_errno(err: Errno) {
sys::set_errno(err)
}
#[test]
fn it_works() {
let x = errno();
set_errno(x);
let _ = x.to_string();
}
#[test]
fn check_description() {
let expect = if cfg!(windows) {
"Incorrect function."
} else if cfg!(target_os = "wasi") {
"Argument list too long"
} else {
"Operation not permitted"
};
set_errno(Errno(1));
assert_eq!(errno().to_string(), expect);
assert_eq!(
format!("{:?}", errno()),
format!("Errno {{ code: 1, description: Some({:?}) }}", expect));
}
#[test]
fn check_error_into_errno() {
const ERROR_CODE: i32 = 1;
let error = io::Error::from_raw_os_error(ERROR_CODE);
let new_error: io::Error = Errno(ERROR_CODE).into();
assert_eq!(error.kind(), new_error.kind());
}
| 26.156522 | 92 | 0.596077 |
bf269ee901b93682d73b30bc89063a0f81815a69 | 395 | // run-fail
// error-pattern:panic 1
// error-pattern:drop 2
// ignore-emscripten no processes
struct Droppable(u32);
impl Drop for Droppable {
fn drop(&mut self) {
if self.0 == 1 {
panic!("panic 1");
} else {
eprintln!("drop {}", self.0);
}
}
}
fn mir() {
    // Locals drop in reverse declaration order: `y` (value 1) drops first
    // and panics ("panic 1"); `x` (value 2) then drops during unwinding and
    // prints "drop 2" — matching the error-pattern annotations at the top.
    let x = Droppable(2);
    let y = Droppable(1);
}
fn main() {
mir();
}
| 15.8 | 41 | 0.511392 |
f898768578307e5855419b9dec8d63075742c3f3 | 1,478 | //! # funscheduler
//! Time based function execution scheduler
use std::time::Duration;
/// Timing configuration: the unit in which an interval is expressed.
pub enum Timing {
    /// Interval measured in seconds.
    Seconds(u64),
    /// Interval measured in minutes.
    Minutes(u64),
    /// Interval measured in hours.
    Hours(u64),
    /// Interval measured in days.
    Days(u64),
}
/// Different methods for running functions according to time.
pub struct FunScheduler;
impl FunScheduler {
    /// Execute a function in the specified time interval; the function is
    /// executed immediately, then again after each interval elapses.
    ///
    /// Each run is spawned on its own thread (and never joined), so a slow
    /// `job` can overlap the next tick. This method loops forever and
    /// never returns.
    pub fn interval(job: fn(), timing: Timing) {
        let time = calc_time(timing);
        loop {
            std::thread::spawn(move || {
                job();
            });
            std::thread::sleep(time);
        }
    }

    /// Like `interval` but waits one full interval before the first run
    /// instead of executing immediately. Also never returns.
    pub fn rinterval(job: fn(), timing: Timing) {
        let time = calc_time(timing);
        loop {
            std::thread::sleep(time);
            std::thread::spawn(move || {
                job();
            });
        }
    }

    /// Execute the function once after the specified amount of time,
    /// blocking the calling thread until the job has finished.
    pub fn after(job: fn(), timing: Timing) {
        std::thread::sleep(calc_time(timing));
        job();
    }
}
/// Convert a `Timing` specification into a `Duration` in whole seconds.
fn calc_time(timing: Timing) -> Duration {
    let seconds = match timing {
        Timing::Seconds(s) => s,
        Timing::Minutes(minutes) => minutes * 60,
        Timing::Hours(hours) => hours * 3600,
        Timing::Days(days) => days * 86_400,
    };
    Duration::from_secs(seconds)
}
| 24.633333 | 70 | 0.571719 |
08a09e699e02e6e00b080933dfa0935ab18e9331 | 2,622 | extern crate futures;
extern crate tokio;
pub use tokio::signal::unix::SignalKind;
pub type Result<T> = std::result::Result<T, tokio::io::Error>;
/// Build a single-threaded tokio runtime, block the current thread on `f`,
/// then shut the runtime down (waiting up to one second for background
/// tasks) and return the future's output.
pub fn run<F: futures::future::Future>(f: F) -> F::Output {
    let mut rt = tokio::runtime::Builder::new()
        .basic_scheduler()
        .enable_all()
        .build()
        .expect("runtime");
    let result = rt.block_on(f);
    rt.shutdown_timeout(std::time::Duration::from_secs(1));
    result
}
pub fn make_err<E>(e: E) -> tokio::io::Error
where
E: Into<Box<dyn std::error::Error + 'static + Sync + Send>>,
{
use tokio::io::{Error, ErrorKind};
Error::new(ErrorKind::Other, e)
}
/// Race `f` against a timer: if the timer fires first, `f` is dropped
/// (cancelled) and a "timeout" error is returned; otherwise `f`'s own
/// result is passed through.
pub async fn with_timeout<R>(
    f: impl futures::future::Future<Output = Result<R>>,
    timeout: std::time::Duration,
) -> Result<R> {
    tokio::select! {
        x = f => x,
        _ = tokio::time::delay_for(timeout) => {
            Err(make_err("timeout"))
        }
    }
}
/// Suspend until the given unix signal is delivered once, log it, and
/// return. Errors only if the signal handler could not be installed.
pub async fn wait_for_signal(kind: SignalKind) -> Result<()> {
    use tokio::signal::unix::signal;
    let mut sig = signal(kind)?;
    sig.recv().await;
    log::info!("received signal {:?}", kind);
    Ok(())
}
#[cfg(test)]
pub mod tests {
use super::*;
use futures::task::Poll;
use tokio::io::AsyncRead;
pub struct StringReader {
cursor: std::io::Cursor<String>,
}
impl StringReader {
pub fn new(buf: String) -> StringReader {
StringReader {
cursor: std::io::Cursor::new(buf),
}
}
}
impl AsyncRead for StringReader {
fn poll_read(
mut self: std::pin::Pin<&mut Self>,
_: &mut futures::task::Context,
mut buf: &mut [u8],
) -> Poll<std::io::Result<usize>> {
let r = std::io::Read::read(&mut self.cursor, &mut buf);
Poll::Ready(r)
}
}
#[test]
fn test_run() {
let r = run(futures::future::ready(42));
assert_eq!(42, r);
let r = run(async move { 43 });
assert_eq!(43, r);
}
#[test]
fn test_make_err() {
let err = make_err("booh!");
assert_eq!("booh!", format!("{}", err));
}
#[test]
fn test_with_timeout() {
let r = run(with_timeout(
futures::future::ready(Ok(42)),
std::time::Duration::from_secs(60),
));
assert_eq!(42, r.expect("ok"));
let r = run(with_timeout(
futures::future::pending::<Result<i32>>(),
std::time::Duration::from_nanos(1),
));
assert_eq!("timeout", format!("{}", r.expect_err("err")));
}
}
| 24.055046 | 68 | 0.529367 |
5d8bc31c2a3375be93898b38453a5a1fa918acd7 | 3,089 | // Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(stable_features)]
// ignore-windows - this is a unix-specific test
// ignore-cloudabi no processes
// ignore-emscripten no processes
#![feature(process_exec, libc)]
extern crate libc;
use std::env;
use std::io::Error;
use std::os::unix::process::CommandExt;
use std::process::Command;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
fn main() {
if let Some(arg) = env::args().nth(1) {
match &arg[..] {
"test1" => println!("hello2"),
"test2" => assert_eq!(env::var("FOO").unwrap(), "BAR"),
"test3" => assert_eq!(env::current_dir().unwrap()
.to_str().unwrap(), "/"),
"empty" => {}
_ => panic!("unknown argument: {}", arg),
}
return
}
let me = env::current_exe().unwrap();
let output = Command::new(&me).arg("test1").before_exec(|| {
println!("hello");
Ok(())
}).output().unwrap();
assert!(output.status.success());
assert!(output.stderr.is_empty());
assert_eq!(output.stdout, b"hello\nhello2\n");
let output = Command::new(&me).arg("test2").before_exec(|| {
env::set_var("FOO", "BAR");
Ok(())
}).output().unwrap();
assert!(output.status.success());
assert!(output.stderr.is_empty());
assert!(output.stdout.is_empty());
let output = Command::new(&me).arg("test3").before_exec(|| {
env::set_current_dir("/").unwrap();
Ok(())
}).output().unwrap();
assert!(output.status.success());
assert!(output.stderr.is_empty());
assert!(output.stdout.is_empty());
let output = Command::new(&me).arg("bad").before_exec(|| {
Err(Error::from_raw_os_error(102))
}).output().unwrap_err();
assert_eq!(output.raw_os_error(), Some(102));
let pid = unsafe { libc::getpid() };
assert!(pid >= 0);
let output = Command::new(&me).arg("empty").before_exec(move || {
let child = unsafe { libc::getpid() };
assert!(child >= 0);
assert!(pid != child);
Ok(())
}).output().unwrap();
assert!(output.status.success());
assert!(output.stderr.is_empty());
assert!(output.stdout.is_empty());
let mem = Arc::new(AtomicUsize::new(0));
let mem2 = mem.clone();
let output = Command::new(&me).arg("empty").before_exec(move || {
assert_eq!(mem2.fetch_add(1, Ordering::SeqCst), 0);
Ok(())
}).output().unwrap();
assert!(output.status.success());
assert!(output.stderr.is_empty());
assert!(output.stdout.is_empty());
assert_eq!(mem.load(Ordering::SeqCst), 0);
}
| 32.861702 | 69 | 0.595338 |
75d279782ec5666309086f2b3929e7700c8a8ef4 | 19,985 | #[doc = "Reader of register OUTINIT"]
pub type R = crate::R<u32, super::OUTINIT>;
#[doc = "Writer for register OUTINIT"]
pub type W = crate::W<u32, super::OUTINIT>;
#[doc = "Register OUTINIT `reset()`'s with value 0"]
impl crate::ResetValue for super::OUTINIT {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Channel 0 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH0OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH0OI_A> for bool {
#[inline(always)]
fn from(variant: CH0OI_A) -> Self {
match variant {
CH0OI_A::_0 => false,
CH0OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH0OI`"]
pub type CH0OI_R = crate::R<bool, CH0OI_A>;
impl CH0OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH0OI_A {
match self.bits {
false => CH0OI_A::_0,
true => CH0OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH0OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH0OI_A::_1
}
}
#[doc = "Write proxy for field `CH0OI`"]
pub struct CH0OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH0OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH0OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH0OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH0OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);
self.w
}
}
#[doc = "Channel 1 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH1OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH1OI_A> for bool {
#[inline(always)]
fn from(variant: CH1OI_A) -> Self {
match variant {
CH1OI_A::_0 => false,
CH1OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH1OI`"]
pub type CH1OI_R = crate::R<bool, CH1OI_A>;
impl CH1OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH1OI_A {
match self.bits {
false => CH1OI_A::_0,
true => CH1OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH1OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH1OI_A::_1
}
}
#[doc = "Write proxy for field `CH1OI`"]
pub struct CH1OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH1OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH1OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH1OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH1OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
self.w
}
}
#[doc = "Channel 2 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH2OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH2OI_A> for bool {
#[inline(always)]
fn from(variant: CH2OI_A) -> Self {
match variant {
CH2OI_A::_0 => false,
CH2OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH2OI`"]
pub type CH2OI_R = crate::R<bool, CH2OI_A>;
impl CH2OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH2OI_A {
match self.bits {
false => CH2OI_A::_0,
true => CH2OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH2OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH2OI_A::_1
}
}
#[doc = "Write proxy for field `CH2OI`"]
pub struct CH2OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH2OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH2OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH2OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH2OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Channel 3 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH3OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH3OI_A> for bool {
#[inline(always)]
fn from(variant: CH3OI_A) -> Self {
match variant {
CH3OI_A::_0 => false,
CH3OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH3OI`"]
pub type CH3OI_R = crate::R<bool, CH3OI_A>;
impl CH3OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH3OI_A {
match self.bits {
false => CH3OI_A::_0,
true => CH3OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH3OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH3OI_A::_1
}
}
#[doc = "Write proxy for field `CH3OI`"]
pub struct CH3OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH3OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH3OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH3OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH3OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Channel 4 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH4OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH4OI_A> for bool {
#[inline(always)]
fn from(variant: CH4OI_A) -> Self {
match variant {
CH4OI_A::_0 => false,
CH4OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH4OI`"]
pub type CH4OI_R = crate::R<bool, CH4OI_A>;
impl CH4OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH4OI_A {
match self.bits {
false => CH4OI_A::_0,
true => CH4OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH4OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH4OI_A::_1
}
}
#[doc = "Write proxy for field `CH4OI`"]
pub struct CH4OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH4OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH4OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH4OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH4OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Channel 5 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH5OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH5OI_A> for bool {
#[inline(always)]
fn from(variant: CH5OI_A) -> Self {
match variant {
CH5OI_A::_0 => false,
CH5OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH5OI`"]
pub type CH5OI_R = crate::R<bool, CH5OI_A>;
impl CH5OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH5OI_A {
match self.bits {
false => CH5OI_A::_0,
true => CH5OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH5OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH5OI_A::_1
}
}
#[doc = "Write proxy for field `CH5OI`"]
pub struct CH5OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH5OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH5OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH5OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH5OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Channel 6 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH6OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH6OI_A> for bool {
#[inline(always)]
fn from(variant: CH6OI_A) -> Self {
match variant {
CH6OI_A::_0 => false,
CH6OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH6OI`"]
pub type CH6OI_R = crate::R<bool, CH6OI_A>;
impl CH6OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH6OI_A {
match self.bits {
false => CH6OI_A::_0,
true => CH6OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH6OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH6OI_A::_1
}
}
#[doc = "Write proxy for field `CH6OI`"]
pub struct CH6OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH6OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH6OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH6OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH6OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Channel 7 Output Initialization Value\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum CH7OI_A {
#[doc = "0: The initialization value is 0."]
_0,
#[doc = "1: The initialization value is 1."]
_1,
}
impl From<CH7OI_A> for bool {
#[inline(always)]
fn from(variant: CH7OI_A) -> Self {
match variant {
CH7OI_A::_0 => false,
CH7OI_A::_1 => true,
}
}
}
#[doc = "Reader of field `CH7OI`"]
pub type CH7OI_R = crate::R<bool, CH7OI_A>;
impl CH7OI_R {
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> CH7OI_A {
match self.bits {
false => CH7OI_A::_0,
true => CH7OI_A::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline(always)]
pub fn is_0(&self) -> bool {
*self == CH7OI_A::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline(always)]
pub fn is_1(&self) -> bool {
*self == CH7OI_A::_1
}
}
#[doc = "Write proxy for field `CH7OI`"]
pub struct CH7OI_W<'a> {
w: &'a mut W,
}
impl<'a> CH7OI_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: CH7OI_A) -> &'a mut W {
{
self.bit(variant.into())
}
}
#[doc = "The initialization value is 0."]
#[inline(always)]
pub fn _0(self) -> &'a mut W {
self.variant(CH7OI_A::_0)
}
#[doc = "The initialization value is 1."]
#[inline(always)]
pub fn _1(self) -> &'a mut W {
self.variant(CH7OI_A::_1)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
impl R {
#[doc = "Bit 0 - Channel 0 Output Initialization Value"]
#[inline(always)]
pub fn ch0oi(&self) -> CH0OI_R {
CH0OI_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Channel 1 Output Initialization Value"]
#[inline(always)]
pub fn ch1oi(&self) -> CH1OI_R {
CH1OI_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - Channel 2 Output Initialization Value"]
#[inline(always)]
pub fn ch2oi(&self) -> CH2OI_R {
CH2OI_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Channel 3 Output Initialization Value"]
#[inline(always)]
pub fn ch3oi(&self) -> CH3OI_R {
CH3OI_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Channel 4 Output Initialization Value"]
#[inline(always)]
pub fn ch4oi(&self) -> CH4OI_R {
CH4OI_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Channel 5 Output Initialization Value"]
#[inline(always)]
pub fn ch5oi(&self) -> CH5OI_R {
CH5OI_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Channel 6 Output Initialization Value"]
#[inline(always)]
pub fn ch6oi(&self) -> CH6OI_R {
CH6OI_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Channel 7 Output Initialization Value"]
#[inline(always)]
pub fn ch7oi(&self) -> CH7OI_R {
CH7OI_R::new(((self.bits >> 7) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Channel 0 Output Initialization Value"]
#[inline(always)]
pub fn ch0oi(&mut self) -> CH0OI_W {
CH0OI_W { w: self }
}
#[doc = "Bit 1 - Channel 1 Output Initialization Value"]
#[inline(always)]
pub fn ch1oi(&mut self) -> CH1OI_W {
CH1OI_W { w: self }
}
#[doc = "Bit 2 - Channel 2 Output Initialization Value"]
#[inline(always)]
pub fn ch2oi(&mut self) -> CH2OI_W {
CH2OI_W { w: self }
}
#[doc = "Bit 3 - Channel 3 Output Initialization Value"]
#[inline(always)]
pub fn ch3oi(&mut self) -> CH3OI_W {
CH3OI_W { w: self }
}
#[doc = "Bit 4 - Channel 4 Output Initialization Value"]
#[inline(always)]
pub fn ch4oi(&mut self) -> CH4OI_W {
CH4OI_W { w: self }
}
#[doc = "Bit 5 - Channel 5 Output Initialization Value"]
#[inline(always)]
pub fn ch5oi(&mut self) -> CH5OI_W {
CH5OI_W { w: self }
}
#[doc = "Bit 6 - Channel 6 Output Initialization Value"]
#[inline(always)]
pub fn ch6oi(&mut self) -> CH6OI_W {
CH6OI_W { w: self }
}
#[doc = "Bit 7 - Channel 7 Output Initialization Value"]
#[inline(always)]
pub fn ch7oi(&mut self) -> CH7OI_W {
CH7OI_W { w: self }
}
}
| 27.718447 | 84 | 0.532349 |
03c9033ed33e088b8df2d9a4d211dd5ab4fb6a7e | 3,245 | use endpoint_plugin::{
AssetNode,
FileNode,
IAssetNode,
IFileNode,
};
use grapl_graph_descriptions::graph_description::*;
use serde::{
Deserialize,
Serialize,
};
use super::from_str;
use crate::parsers::OSQueryAction;
#[derive(Serialize, Deserialize, Debug, Clone, Hash)]
#[serde(rename_all = "camelCase")]
pub struct FileEvent {
host_identifier: String,
calendar_time: String,
unix_time: u64,
action: OSQueryAction,
columns: FileEventColumns,
}
/// See https://osquery.io/schema/4.5.0/#processes
#[derive(Serialize, Deserialize, Debug, Clone, Hash)]
pub struct FileEventColumns {
target_path: String,
action: OSQueryFileAction,
inode: String,
md5: String,
sha1: String,
sha256: String,
size: String,
#[serde(deserialize_with = "from_str")]
time: u64,
}
#[derive(Serialize, Deserialize, Debug, Clone, Hash)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub(crate) enum OSQueryFileAction {
Accessed,
AttributesModified,
Updated,
Created,
Deleted,
MovedFrom,
MovedTo,
Opened,
}
impl From<FileEvent> for GraphDescription {
#[tracing::instrument]
fn from(file_event: FileEvent) -> Self {
tracing::trace!(message = "Building Graph from FileEvent.");
let mut graph = GraphDescription::new();
let mut asset = AssetNode::new(AssetNode::static_strategy());
asset
.with_asset_id(file_event.host_identifier.clone())
.with_hostname(file_event.host_identifier.clone());
let mut subject_file = FileNode::new(FileNode::session_strategy());
subject_file
.with_asset_id(file_event.host_identifier.clone())
.with_file_path(file_event.columns.target_path.clone())
.with_last_seen_timestamp(file_event.columns.time);
/*
Technically this might not be 100% correct but the moved_to and moved_from events
seem like they could easily be represented by using create/deletes.
*/
match &file_event.columns.action {
OSQueryFileAction::Created | OSQueryFileAction::MovedFrom => {
subject_file.with_created_timestamp(file_event.columns.time)
}
OSQueryFileAction::Deleted | OSQueryFileAction::MovedTo => {
subject_file.with_deleted_timestamp(file_event.columns.time)
}
_ => subject_file.with_last_seen_timestamp(file_event.columns.time),
};
graph.add_edge(
"files_on_asset",
asset.clone_node_key(),
subject_file.clone_node_key(),
);
graph.add_node(asset);
graph.add_node(subject_file);
graph
}
}
#[cfg(test)]
mod tests {
use crate::parsers::OSQueryEvent;
#[test]
fn parse_pack_grapl_files_json() {
let test_json = std::fs::read_to_string("sample_data/unit/pack_grapl_files.json")
.expect("unable to read test file.");
let event: OSQueryEvent =
serde_json::from_str(&test_json).expect("serde_json::from_str failed.");
match event {
OSQueryEvent::File(_) => {}
_ => panic!("expected OSQueryEvent::File"),
};
}
}
| 28.217391 | 92 | 0.641294 |
1d3270959e2746a36233de2fd877a396a1b686a5 | 832 | //! Fault Mask Register
#[cfg(cortex_m)]
use core::arch::asm;
/// All exceptions are ...
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum Faultmask {
/// Active
Active,
/// Inactive, expect for NMI
Inactive,
}
impl Faultmask {
/// All exceptions are active
#[inline]
pub fn is_active(self) -> bool {
self == Faultmask::Active
}
/// All exceptions, except for NMI, are inactive
#[inline]
pub fn is_inactive(self) -> bool {
self == Faultmask::Inactive
}
}
/// Reads the CPU register
#[cfg(cortex_m)]
#[inline]
pub fn read() -> Faultmask {
let r: u32;
unsafe { asm!("mrs {}, FAULTMASK", out(reg) r, options(nomem, nostack, preserves_flags)) };
if r & (1 << 0) == (1 << 0) {
Faultmask::Inactive
} else {
Faultmask::Active
}
}
| 20.292683 | 95 | 0.576923 |
9148c93107f1c3b7550f1cfdbf55e0f5082b0b62 | 624 | use std::io;
use std::io::*;
pub fn pause() {
let mut input = String::new();
print!("\x1b[1;36mPress enter to continue: \x1b[0m");
io::stdout().flush().unwrap();
io::stdin().read_line(&mut input).expect("error: unable to read user input");
}
pub fn read(display: &str) -> String {
let mut input = String::new();
print!("\x1b[1;33m{}\x1b[0m",display);
io::stdout().flush().unwrap();
io::stdin().read_line(&mut input).expect("error: unable to read user input");
print!("\x1B[2J\x1B[1;1H");
println!("\x1b[1;35mExpression\x1b[0m \x1b[1;33m{}\x1b[0m \x1b[1;35m->\x1b[0m",&input.trim());
return input;
}
| 32.842105 | 96 | 0.626603 |
339e7dca666a4f552430cadfee00156342ea688f | 1,244 | use std::error::Error;
use std::fs::{create_dir, read, write};
use std::path::Path;
use std::process::Command;
pub fn populate_ssh_directory(
directory: String,
host_pub_key: Option<String>,
) -> Result<(), Box<dyn Error>> {
let pub_key_filename = format!("{}/id_rsa.pub", directory);
if !Path::new(&pub_key_filename).exists() {
create_dir(&directory)?;
let _ = Command::new("ssh-keygen")
.arg("-b")
.arg("2048")
.arg("-t")
.arg("rsa")
.arg("-f")
.arg(&format!("{}/id_rsa", directory))
.arg("-q")
.arg("-N")
.arg("")
.output()?;
}
let mut pub_key = read(pub_key_filename)?;
let mut authorized_keys_content = vec![];
if let Some(host_pub_key) = host_pub_key {
authorized_keys_content.append(&mut host_pub_key.into_bytes());
}
authorized_keys_content.extend("\n".to_string().into_bytes());
authorized_keys_content.append(&mut pub_key);
authorized_keys_content.extend("\n".to_string().into_bytes());
let authorized_keys_filename = format!("{}/authorized_keys", directory);
write(&authorized_keys_filename, authorized_keys_content)?;
Ok(())
}
| 33.621622 | 76 | 0.596463 |
90dd5f67a05b6059e4ef9b700f52586e7f87a7d6 | 3,458 | #[doc = "Register `PDMA_DSCT5_SA` reader"]
pub struct R(crate::R<PDMA_DSCT5_SA_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<PDMA_DSCT5_SA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<PDMA_DSCT5_SA_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<PDMA_DSCT5_SA_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `PDMA_DSCT5_SA` writer"]
pub struct W(crate::W<PDMA_DSCT5_SA_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<PDMA_DSCT5_SA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<PDMA_DSCT5_SA_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<PDMA_DSCT5_SA_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `SA` reader - PDMA Transfer Source Address\nThis field indicates a 32-bit source address of PDMA controller."]
pub struct SA_R(crate::FieldReader<u32, u32>);
impl SA_R {
pub(crate) fn new(bits: u32) -> Self {
SA_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SA_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SA` writer - PDMA Transfer Source Address\nThis field indicates a 32-bit source address of PDMA controller."]
pub struct SA_W<'a> {
w: &'a mut W,
}
impl<'a> SA_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !0xffff_ffff) | (value as u32 & 0xffff_ffff);
self.w
}
}
impl R {
#[doc = "Bits 0:31 - PDMA Transfer Source Address This field indicates a 32-bit source address of PDMA controller."]
#[inline(always)]
pub fn sa(&self) -> SA_R {
SA_R::new((self.bits & 0xffff_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:31 - PDMA Transfer Source Address This field indicates a 32-bit source address of PDMA controller."]
#[inline(always)]
pub fn sa(&mut self) -> SA_W {
SA_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "Source Address Register of PDMA Channel n\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pdma_dsct5_sa](index.html) module"]
pub struct PDMA_DSCT5_SA_SPEC;
impl crate::RegisterSpec for PDMA_DSCT5_SA_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [pdma_dsct5_sa::R](R) reader structure"]
impl crate::Readable for PDMA_DSCT5_SA_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pdma_dsct5_sa::W](W) writer structure"]
impl crate::Writable for PDMA_DSCT5_SA_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets PDMA_DSCT5_SA to value 0"]
impl crate::Resettable for PDMA_DSCT5_SA_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
| 33.572816 | 435 | 0.632157 |
8796ad4ee20e07c0ec951a8a7e07e5ffc5504b61 | 2,668 | use structopt::StructOpt;
use catalyst_toolbox::vote_check::{CheckNode, Error};
use jormungandr_lib::interfaces::VotePlanStatus;
use std::fs::File;
use std::path::PathBuf;
/// Verify that your votes were correctly tallied.
///
/// Requires Jormungandr to be installed in the system
#[derive(Debug, PartialEq, StructOpt)]
#[structopt(rename_all = "kebab-case")]
pub struct VoteCheck {
/// Path to folder containing the full blockchain history saved in Jormungandr
/// storage format.
#[structopt(short, long, parse(from_os_str))]
blockchain: PathBuf,
/// Genesis block hash
#[structopt(short, long)]
genesis_block_hash: String,
/// Ids of the transactions to check
#[structopt(short, long)]
transactions: Vec<String>,
/// Path to the expected results of the election, in Json format as returned by the /vote/active/plans endpoint
#[structopt(short, long)]
expected_results: PathBuf,
/// Path to the Jormungandr binary. If not provided, will look for 'jormungandr' in PATH
#[structopt(short, long)]
jormungandr_bin: Option<PathBuf>,
}
impl VoteCheck {
/// Vote verification follows this plan:
/// * Start a new node with the storage containing the full blockchain history to validate
/// that all ledger operations.
/// * Check that the election results obtained are the same as provided
/// * Check that the transactions containing your votes were indeed included in a block
/// in the main chain
///
pub fn exec(self) -> Result<(), Error> {
let node = CheckNode::spawn(
self.blockchain.clone(),
self.genesis_block_hash.clone(),
self.jormungandr_bin,
)?;
let expected_results: Vec<VotePlanStatus> =
serde_json::from_reader(File::open(self.expected_results)?)?;
let actual_results = node.active_vote_plans()?;
for vote_plan in expected_results {
if !actual_results.contains(&vote_plan) {
return Err(Error::ResultsDoNotMatch {
// no reason for failure when serializing to json a struct without a map
expected: serde_json::to_string_pretty(&vote_plan).unwrap(),
actual: actual_results
.iter()
.find(|act| act.id == vote_plan.id)
.map(|act| serde_json::to_string_pretty(act).unwrap())
.unwrap_or_default(),
});
}
}
node.check_transactions_on_chain(self.transactions)?;
println!("Vote(s) correctly validated!");
Ok(())
}
}
| 36.054054 | 115 | 0.628936 |
91c1ad9d926e722fe151af65b9feac68eaeaec4b | 6,301 | /*
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
use http::uri::{Authority, InvalidUri, Uri};
use std::borrow::Cow;
use std::str::FromStr;
/// API Endpoint
///
/// This implements an API endpoint as specified in the
/// [Smithy Endpoint Specification](https://awslabs.github.io/smithy/1.0/spec/core/endpoint-traits.html)
#[derive(Clone, Debug)]
pub struct Endpoint {
uri: http::Uri,
/// If true, endpointPrefix does ignored when setting the endpoint on a request
immutable: bool,
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct EndpointPrefix(String);
impl EndpointPrefix {
pub fn new(prefix: impl Into<String>) -> Result<Self, InvalidUri> {
let prefix = prefix.into();
let _ = Authority::from_str(&prefix)?;
Ok(EndpointPrefix(prefix))
}
pub fn as_str(&self) -> &str {
&self.0
}
}
#[non_exhaustive]
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum InvalidEndpoint {
EndpointMustHaveAuthority,
}
impl Endpoint {
/// Create a new endpoint from a URI
///
/// Certain protocols will attempt to prefix additional information onto an endpoint. If you
/// wish to ignore these prefixes (for example, when communicating with localhost), set `immutable` to `true`.
pub fn mutable(uri: Uri) -> Self {
Endpoint {
uri,
immutable: false,
}
}
/// Create a new immutable endpoint from a URI
///
/// ```rust
/// # use smithy_http::endpoint::Endpoint;
/// use http::Uri;
/// let endpoint = Endpoint::immutable(Uri::from_static("http://localhost:8000"));
/// ```
pub fn immutable(uri: Uri) -> Self {
Endpoint {
uri,
immutable: true,
}
}
/// Sets the endpoint on `uri`, potentially applying the specified `prefix` in the process.
pub fn set_endpoint(&self, uri: &mut http::Uri, prefix: Option<&EndpointPrefix>) {
let prefix = prefix.map(|p| p.0.as_str()).unwrap_or("");
let authority = self
.uri
.authority()
.as_ref()
.map(|auth| auth.as_str())
.unwrap_or("");
let authority = if !self.immutable && !prefix.is_empty() {
Authority::from_str(&format!("{}{}", prefix, authority)).expect("parts must be valid")
} else {
Authority::from_str(authority).expect("authority is valid")
};
let scheme = *self.uri.scheme().as_ref().expect("scheme must be provided");
let new_uri = Uri::builder()
.authority(authority)
.scheme(scheme.clone())
.path_and_query(Self::merge_paths(&self.uri, &uri).as_ref())
.build()
.expect("valid uri");
*uri = new_uri;
}
fn merge_paths<'a>(endpoint: &'a Uri, uri: &'a Uri) -> Cow<'a, str> {
if let Some(query) = endpoint.path_and_query().and_then(|pq| pq.query()) {
tracing::warn!(query = %query, "query specified in endpoint will be ignored during endpoint resolution");
}
let endpoint_path = endpoint.path();
let uri_path_and_query = uri.path_and_query().map(|pq| pq.as_str()).unwrap_or("");
if endpoint_path.is_empty() {
Cow::Borrowed(uri_path_and_query)
} else {
let ep_no_slash = endpoint_path.strip_suffix("/").unwrap_or(endpoint_path);
let uri_path_no_slash = uri_path_and_query
.strip_prefix("/")
.unwrap_or(uri_path_and_query);
Cow::Owned(format!("{}/{}", ep_no_slash, uri_path_no_slash))
}
}
}
#[cfg(test)]
mod test {
use crate::endpoint::{Endpoint, EndpointPrefix};
use http::Uri;
#[test]
fn prefix_endpoint() {
let ep = Endpoint::mutable(Uri::from_static("https://us-east-1.dynamo.amazonaws.com"));
let mut uri = Uri::from_static("/list_tables?k=v");
ep.set_endpoint(
&mut uri,
Some(&EndpointPrefix::new("subregion.").expect("valid prefix")),
);
assert_eq!(
uri,
Uri::from_static("https://subregion.us-east-1.dynamo.amazonaws.com/list_tables?k=v")
);
}
#[test]
fn prefix_endpoint_custom_port() {
let ep = Endpoint::mutable(Uri::from_static(
"https://us-east-1.dynamo.amazonaws.com:6443",
));
let mut uri = Uri::from_static("/list_tables?k=v");
ep.set_endpoint(
&mut uri,
Some(&EndpointPrefix::new("subregion.").expect("valid prefix")),
);
assert_eq!(
uri,
Uri::from_static(
"https://subregion.us-east-1.dynamo.amazonaws.com:6443/list_tables?k=v"
)
);
}
#[test]
fn prefix_immutable_endpoint() {
let ep = Endpoint::immutable(Uri::from_static("https://us-east-1.dynamo.amazonaws.com"));
let mut uri = Uri::from_static("/list_tables?k=v");
ep.set_endpoint(
&mut uri,
Some(&EndpointPrefix::new("subregion.").expect("valid prefix")),
);
assert_eq!(
uri,
Uri::from_static("https://us-east-1.dynamo.amazonaws.com/list_tables?k=v")
);
}
#[test]
fn endpoint_with_path() {
for uri in &[
// check that trailing slashes are properly normalized
"https://us-east-1.dynamo.amazonaws.com/private",
"https://us-east-1.dynamo.amazonaws.com/private/",
] {
let ep = Endpoint::immutable(Uri::from_static(uri));
let mut uri = Uri::from_static("/list_tables?k=v");
ep.set_endpoint(
&mut uri,
Some(&EndpointPrefix::new("subregion.").expect("valid prefix")),
);
assert_eq!(
uri,
Uri::from_static("https://us-east-1.dynamo.amazonaws.com/private/list_tables?k=v")
);
}
}
#[test]
fn set_endpoint_empty_path() {
let ep = Endpoint::immutable(Uri::from_static("http://localhost:8000"));
let mut uri = Uri::from_static("/");
ep.set_endpoint(&mut uri, None);
assert_eq!(uri, Uri::from_static("http://localhost:8000/"))
}
}
| 33.338624 | 117 | 0.573401 |
221bf57de3aa3998816db44ea02b133b5826fa12 | 5,499 | #![cfg(feature = "yaml")]
use clap::{load_yaml, App, ErrorKind, ValueHint};
#[test]
fn create_app_from_yaml() {
let yaml = load_yaml!("fixtures/app.yaml");
App::from(yaml);
}
// TODO: Uncomment to test yaml with 2 spaces https://github.com/chyh1990/yaml-rust/issues/101
// #[test]
// fn create_app_from_yaml_2spaces() {
// let yaml = load_yaml!("fixtures/app_2space.yaml");
// App::from(yaml);
// }
#[test]
fn help_message() {
let yaml = load_yaml!("fixtures/app.yaml");
let mut app = App::from(yaml);
// Generate the full help message!
let _ = app.try_get_matches_from_mut(Vec::<String>::new());
let mut help_buffer = Vec::new();
app.write_help(&mut help_buffer).unwrap();
let help_string = String::from_utf8(help_buffer).unwrap();
assert!(help_string
.contains("-h, --help prints help with a nonstandard description\n"));
}
#[test]
fn author() {
let yaml = load_yaml!("fixtures/app.yaml");
let mut app = App::from(yaml);
// Generate the full help message!
let _ = app.try_get_matches_from_mut(Vec::<String>::new());
let mut help_buffer = Vec::new();
app.write_help(&mut help_buffer).unwrap();
let help_string = String::from_utf8(help_buffer).unwrap();
assert!(help_string.contains("Kevin K. <[email protected]>"));
}
#[test]
fn app_settings() {
let yaml = load_yaml!("fixtures/app.yaml");
let app = App::from(yaml);
let m = app.try_get_matches_from(vec!["prog"]);
assert!(m.is_err());
assert_eq!(
m.unwrap_err().kind,
ErrorKind::DisplayHelpOnMissingArgumentOrSubcommand
);
}
#[test]
#[should_panic = "Unknown AppSetting 'random' found in YAML file for app"]
fn app_setting_invalid() {
let yaml = load_yaml!("fixtures/app_setting_invalid.yaml");
App::from(yaml);
}
#[test]
#[should_panic = "Unknown ArgSetting 'random' found in YAML file for arg 'option'"]
fn arg_setting_invalid() {
let yaml = load_yaml!("fixtures/arg_setting_invalid.yaml");
App::from(yaml);
}
// ValueHint must be parsed correctly from Yaml
#[test]
fn value_hint() {
let yml = load_yaml!("fixtures/app.yaml");
let app = App::from(yml);
let arg = app
.get_arguments()
.find(|a| a.get_name() == "value_hint")
.unwrap();
assert_eq!(arg.get_value_hint(), ValueHint::FilePath);
}
#[test]
fn default_value_if_not_triggered_by_argument() {
let yml = load_yaml!("fixtures/app.yaml");
let app = App::from(yml);
// Fixtures use "other" as value
let matches = app.try_get_matches_from(vec!["prog", "wrong"]).unwrap();
assert!(matches.value_of("positional2").is_none());
}
#[test]
fn default_value_if_triggered_by_matching_argument() {
let yml = load_yaml!("fixtures/app.yaml");
let app = App::from(yml);
let matches = app.try_get_matches_from(vec!["prog", "other"]).unwrap();
assert_eq!(matches.value_of("positional2").unwrap(), "something");
}
#[test]
fn default_value_if_triggered_by_flag() {
let yml = load_yaml!("fixtures/app.yaml");
let app = App::from(yml);
let matches = app
.try_get_matches_from(vec!["prog", "--flag", "flagvalue"])
.unwrap();
assert_eq!(matches.value_of("positional2").unwrap(), "some");
}
#[test]
fn default_value_if_triggered_by_flag_and_argument() {
let yml = load_yaml!("fixtures/app.yaml");
let app = App::from(yml);
let matches = app
.try_get_matches_from(vec!["prog", "--flag", "flagvalue", "other"])
.unwrap();
// First condition triggers, therefore "some"
assert_eq!(matches.value_of("positional2").unwrap(), "some");
}
#[test]
fn yaml_multiple_occurrences() {
let yaml = load_yaml!("fixtures/app.yaml");
let matches = App::from(yaml)
.try_get_matches_from(vec!["prog", "-vvv"])
.unwrap();
assert_eq!(matches.occurrences_of("verbose"), 3);
}
#[test]
fn yaml_multiple_values() {
let yaml = load_yaml!("fixtures/app.yaml");
let matches = App::from(yaml)
.try_get_matches_from(vec!["prog", "-s", "aaa", "bbb"])
.unwrap();
assert_eq!(
matches
.values_of("settings")
.unwrap()
.collect::<Vec<&str>>(),
vec!["aaa", "bbb"]
);
}
#[cfg(feature = "regex")]
#[test]
fn regex_with_invalid_string() {
let yml = load_yaml!("fixtures/app_regex.yaml");
let app = App::from(yml);
let res = app.try_get_matches_from(vec!["prog", "not a proper filter"]);
assert!(res.is_err());
}
#[cfg(feature = "regex")]
#[test]
fn regex_with_valid_string() {
let yml = load_yaml!("fixtures/app_regex.yaml");
let app = App::from(yml);
let matches = app.try_get_matches_from(vec!["prog", "*.txt"]).unwrap();
assert_eq!(matches.value_of("filter").unwrap(), "*.txt");
}
#[cfg(feature = "regex")]
#[test]
#[should_panic]
fn regex_with_invalid_yaml() {
let yml = load_yaml!("fixtures/app_regex_invalid.yaml");
App::from(yml);
}
#[test]
fn extra_fields() {
let yml = load_yaml!("fixtures/extra_fields.yaml");
App::from(yml);
}
#[test]
#[should_panic = "Unknown setting 'random' in YAML file for arg 'option'"]
fn extra_fields_invalid_arg() {
let yml = load_yaml!("fixtures/extra_fields_invalid_arg.yaml");
App::from(yml);
}
#[test]
#[should_panic = "Unknown setting 'random' in YAML file for subcommand 'info'"]
fn extra_fields_invalid_app() {
let yml = load_yaml!("fixtures/extra_fields_invalid_app.yaml");
App::from(yml);
}
| 27.08867 | 96 | 0.641026 |
e596057dc4fbf797f5ad3e137cf5fc1ac27ea2cc | 4,884 | use crate::test;
use std::io::Read;
use super::super::*;
#[test]
fn header_passing() {
test::set_handler("/header_passing", |unit| {
assert!(unit.has("X-Foo"));
assert_eq!(unit.header("X-Foo").unwrap(), "bar");
test::make_response(200, "OK", vec!["X-Bar: foo"], vec![])
});
let resp = get("test://host/header_passing").set("X-Foo", "bar").call();
assert_eq!(resp.status(), 200);
assert!(resp.has("X-Bar"));
assert_eq!(resp.header("X-Bar").unwrap(), "foo");
}
#[test]
fn repeat_non_x_header() {
test::set_handler("/repeat_non_x_header", |unit| {
assert!(unit.has("Accept"));
assert_eq!(unit.header("Accept").unwrap(), "baz");
test::make_response(200, "OK", vec![], vec![])
});
let resp = get("test://host/repeat_non_x_header")
.set("Accept", "bar")
.set("Accept", "baz")
.call();
assert_eq!(resp.status(), 200);
}
#[test]
fn repeat_x_header() {
test::set_handler("/repeat_x_header", |unit| {
assert!(unit.has("X-Forwarded-For"));
assert_eq!(unit.header("X-Forwarded-For").unwrap(), "130.240.19.2");
assert_eq!(
unit.all("X-Forwarded-For"),
vec!["130.240.19.2", "130.240.19.3"]
);
test::make_response(200, "OK", vec![], vec![])
});
let resp = get("test://host/repeat_x_header")
.set("X-Forwarded-For", "130.240.19.2")
.set("X-Forwarded-For", "130.240.19.3")
.call();
assert_eq!(resp.status(), 200);
}
#[test]
fn body_as_text() {
test::set_handler("/body_as_text", |_unit| {
test::make_response(200, "OK", vec![], "Hello World!".to_string().into_bytes())
});
let resp = get("test://host/body_as_text").call();
let text = resp.into_string().unwrap();
assert_eq!(text, "Hello World!");
}
#[test]
#[cfg(feature = "json")]
fn body_as_json() {
test::set_handler("/body_as_json", |_unit| {
test::make_response(
200,
"OK",
vec![],
"{\"hello\":\"world\"}".to_string().into_bytes(),
)
});
let resp = get("test://host/body_as_json").call();
let json = resp.into_json().unwrap();
assert_eq!(json["hello"], "world");
}
#[test]
fn body_as_reader() {
test::set_handler("/body_as_reader", |_unit| {
test::make_response(200, "OK", vec![], "abcdefgh".to_string().into_bytes())
});
let resp = get("test://host/body_as_reader").call();
let mut reader = resp.into_reader();
let mut text = String::new();
reader.read_to_string(&mut text).unwrap();
assert_eq!(text, "abcdefgh");
}
#[test]
fn escape_path() {
test::set_handler("/escape_path%20here", |_unit| {
test::make_response(200, "OK", vec![], vec![])
});
let resp = get("test://host/escape_path here").call();
let vec = resp.to_write_vec();
let s = String::from_utf8_lossy(&vec);
assert!(s.contains("GET /escape_path%20here HTTP/1.1"))
}
#[test]
fn request_debug() {
let req = get("/my/page")
.set("Authorization", "abcdef")
.set("Content-Length", "1234")
.set("Content-Type", "application/json")
.build();
let s = format!("{:?}", req);
assert_eq!(
s,
"Request(GET /my/page, [Authorization: abcdef, \
Content-Length: 1234, Content-Type: application/json])"
);
let req = get("/my/page?q=z")
.query("foo", "bar baz")
.set("Authorization", "abcdef")
.build();
let s = format!("{:?}", req);
assert_eq!(
s,
"Request(GET /my/page?q=z&foo=bar%20baz, [Authorization: abcdef])"
);
}
#[test]
fn non_ascii_header() {
test::set_handler("/non_ascii_header", |_unit| {
test::make_response(200, "OK", vec!["Wörse: Hädör"], vec![])
});
let resp = get("test://host/non_ascii_header")
.set("Bäd", "Headör")
.call();
// surprisingly, this is ok, because this lib is not about enforcing standards.
assert!(resp.ok());
assert_eq!(resp.status(), 200);
assert_eq!(resp.status_text(), "OK");
}
#[test]
// A status line whose reason phrase is empty must still parse as a 200 OK.
pub fn no_status_text() {
    // this one doesn't return the status text
    // let resp = get("https://www.okex.com/api/spot/v3/products")
    test::set_handler("/no_status_text", |_unit| {
        test::make_response(200, "", vec![], vec![])
    });
    let resp = get("test://host/no_status_text").call();
    assert!(resp.ok());
    assert_eq!(resp.status(), 200);
}
#[test]
pub fn header_with_spaces_before_value() {
test::set_handler("/space_before_value", |unit| {
assert!(unit.has("X-Test"));
assert_eq!(unit.header("X-Test").unwrap(), "value");
test::make_response(200, "OK", vec![], vec![])
});
let resp = get("test://host/space_before_value")
.set("X-Test", " value")
.call();
assert_eq!(resp.status(), 200);
}
| 29.245509 | 87 | 0.562449 |
1adde54cae19cf54e9648d4ab636ea56ce0729cf | 4,350 | use yaml_rust::Yaml;
/// An expectation parsed from the `mysql.expectation` section of the YAML
/// configuration.
#[derive(Debug, PartialEq)]
pub enum Expectation {
    /// Expected row count, read from the entry's `value` key.
    Rows(u16),
    /// Latency threshold, read from the entry's `threshold` key.
    Latency(u16),
    /// Pattern from the entry's `regex` key.
    Regex(String), // Still not properly implemented.
}
/// Parses the `mysql.expectation` array of a YAML document into a list of
/// [`Expectation`] values.
///
/// Recognized entry types are `ROWS` (reads `value`), `LATENCY` (reads
/// `threshold`) and `REGEX` (reads `regex`); entries with an unknown or
/// missing `type` are silently skipped.
///
/// # Panics
/// Panics if `mysql.expectation` is not an array, or if a recognized entry
/// is missing its data field or has it in the wrong YAML type.
pub fn parse_expectations(doc: &Yaml) -> Vec<Expectation> {
    let mut expectations = Vec::new();
    for current_expectation in doc["mysql"]["expectation"].as_vec().unwrap() {
        // Match directly on `Option<&str>` instead of the previous
        // `.as_str().as_ref()` double-reference pattern.
        // NOTE: the `as u16` casts below silently truncate values that do
        // not fit in a u16.
        match current_expectation["type"].as_str() {
            Some("ROWS") => expectations.push(Expectation::Rows(
                current_expectation["value"].as_i64().unwrap() as u16,
            )),
            Some("LATENCY") => expectations.push(Expectation::Latency(
                current_expectation["threshold"].as_i64().unwrap() as u16,
            )),
            Some("REGEX") => expectations.push(Expectation::Regex(
                current_expectation["regex"].as_str().unwrap().to_owned(),
            )),
            // Unknown or absent `type`: ignore the entry.
            _ => (),
        }
    }
    expectations
}
#[cfg(test)]
mod tests {
use super::*;
use indoc::indoc;
use yaml_rust::YamlLoader;
#[test]
fn test_parse_expectations_rows() {
assert_expectation(
indoc!(
"
mysql:
expectation:
- type: ROWS
value: 2
"
),
Some(Expectation::Rows(2)),
);
}
#[test]
fn test_parse_expectations_latency() {
assert_expectation(
indoc!(
"
mysql:
expectation:
- type: LATENCY
threshold: 10
"
),
Some(Expectation::Latency(10)),
);
}
#[test]
fn test_parse_expectations_regex() {
assert_expectation(
indoc!(
"
mysql:
expectation:
- type: REGEX
regex: \"foo.*\"
"
),
Some(Expectation::Regex("foo.*".to_owned())),
);
}
#[test]
fn test_parse_expectations_multiple() {
let mut expected: Vec<Expectation> = Vec::new();
expected.push(Expectation::Regex("foo.*".to_owned()));
expected.push(Expectation::Latency(10));
assert_expectations(
indoc!(
"
mysql:
expectation:
- type: REGEX
regex: \"foo.*\"
- type: LATENCY
threshold: 10
"
),
expected,
);
}
#[test]
fn test_parse_expectations_empty() {
assert_expectation(
indoc!(
"
mysql:
expectation: []
"
),
None,
);
}
#[test]
fn test_parse_expectations_unknown() {
assert_expectation(
indoc!(
"
mysql:
expectation:
- type: FOO
"
),
None,
);
}
/// Will parse the given str reference (yaml string) and verify it matches the given expected_expectation.
///
/// # Arguments
/// * `yaml_string` - The yaml string that should represent the mysql configuration, including the expectation array.
/// * `expected_expectation` - The expected Expectation enum, extracted from the yaml string.
fn assert_expectation(yaml_string: &str, expected_expectation: Option<Expectation>) {
let docs = YamlLoader::load_from_str(yaml_string).unwrap();
let expectations = parse_expectations(&docs[0]);
match expected_expectation {
None => assert!(expectations.is_empty()),
_ => assert!(expectations.contains(&expected_expectation.unwrap())),
}
}
/// Will parse the given str reference (yaml string) and verify if each one of the expectations in the vector
/// are contained inside the parsed expectations.
///
/// # Arguments
/// * `yaml_string` - The yaml string that should represent the mysql configuration, including the expectation array.
/// * `expected_expectation_vector` - The expected Expectation enum vector, extracted from the yaml string.
fn assert_expectations(yaml_string: &str, expected_expectation_vector: Vec<Expectation>) {
let docs = YamlLoader::load_from_str(yaml_string).unwrap();
let expectations = parse_expectations(&docs[0]);
for expected_expectation in expected_expectation_vector {
assert!(expectations.contains(&expected_expectation));
}
}
}
| 27.358491 | 121 | 0.566207 |
9b8eabe094e3643e287905f668246cf79ac8c687 | 2,463 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
// Register accessors in the usual svd2rust-generated style: `read`, `write`,
// `modify` and `reset` implement the standard read-modify-write pattern.
impl super::TMR3INV {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read once, let the closure derive the new value from the current
        // bits, then write the result back.
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Unlike `modify`, `write` starts from the reset value rather than
        // the register's current contents.
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
#[doc = r" Value of the field"]
pub struct TMR3R {
bits: u32,
}
impl TMR3R {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _TMR3W<'a> {
w: &'a mut W,
}
impl<'a> _TMR3W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:31"]
#[inline]
pub fn tmr3(&self) -> TMR3R {
let bits = {
const MASK: u32 = 4294967295;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u32
};
TMR3R { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:31"]
#[inline]
pub fn tmr3(&mut self) -> _TMR3W {
_TMR3W { w: self }
}
}
| 23.235849 | 59 | 0.490459 |
d68f8d5b459baf5a6ac17d08fc4fccc2c0deef1d | 615 | use pylon_cxx::HasProperties;
/// Enumerates every camera visible to the pylon transport-layer factory and
/// prints all of its properties to stdout.
fn main() -> anyhow::Result<()> {
    // Before using any pylon methods, the pylon runtime must be initialized.
    let pylon = pylon_cxx::Pylon::new();
    for device in pylon_cxx::TlFactory::instance(&pylon).enumerate_devices()? {
        // One header line per device, then one indented line per property.
        println!(
            "Device {} {} -------------",
            device.property_value("VendorName")?,
            device.property_value("SerialNumber")?
        );
        for name in device.property_names()? {
            let value = device.property_value(&name)?;
            println!(" {}: {}", name, value);
        }
    }
    Ok(())
}
| 30.75 | 79 | 0.55122 |
09a9a1a3f305898330e9e0428cdd3cb0fc6da6f1 | 5,687 | use serde::{Deserialize, Serialize};
use solana_sdk::fee_calculator::FeeCalculator;
use solana_sdk::hash::Hash;
use solana_sdk::timing::timestamp;
use std::collections::HashMap;
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
struct HashAge {
fee_calculator: FeeCalculator,
hash_height: u64,
timestamp: u64,
}
/// Low memory overhead, so can be cloned for every checkpoint
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BlockhashQueue {
/// updated whenever an hash is registered
hash_height: u64,
/// last hash to be registered
last_hash: Option<Hash>,
ages: HashMap<Hash, HashAge>,
/// hashes older than `max_age` will be dropped from the queue
max_age: usize,
}
impl BlockhashQueue {
    /// Creates an empty queue that keeps at most `max_age` hash entries.
    pub fn new(max_age: usize) -> Self {
        Self {
            ages: HashMap::new(),
            hash_height: 0,
            last_hash: None,
            max_age,
        }
    }
    #[allow(dead_code)]
    pub fn hash_height(&self) -> u64 {
        self.hash_height
    }
    /// Returns the most recently registered hash.
    ///
    /// # Panics
    /// Panics if no hash has been registered yet.
    pub fn last_hash(&self) -> Hash {
        self.last_hash.expect("no hash has been set")
    }
    /// Looks up the fee calculator registered together with `hash`.
    pub fn get_fee_calculator(&self, hash: &Hash) -> Option<&FeeCalculator> {
        self.ages.get(hash).map(|hash_age| &hash_age.fee_calculator)
    }
    /// Check if the age of the hash is within the max_age
    /// return false for any hashes with an age above max_age
    pub fn check_hash_age(&self, hash: &Hash, max_age: usize) -> bool {
        let hash_age = self.ages.get(hash);
        match hash_age {
            // Entries are never newer than `self.hash_height` (they are only
            // inserted at the current height), so this subtraction is safe.
            Some(age) => self.hash_height - age.hash_height <= max_age as u64,
            _ => false,
        }
    }
    /// check if hash is valid
    #[cfg(test)]
    pub fn check_hash(&self, hash: Hash) -> bool {
        self.ages.get(&hash).is_some()
    }
    /// Seeds the queue with the genesis hash at height 0 without bumping
    /// `hash_height`.
    pub fn genesis_hash(&mut self, hash: &Hash, fee_calculator: &FeeCalculator) {
        self.ages.insert(
            *hash,
            HashAge {
                fee_calculator: fee_calculator.clone(),
                hash_height: 0,
                timestamp: timestamp(),
            },
        );
        self.last_hash = Some(*hash);
    }
    // True when `age` is still within `max_age` of the given height.
    fn check_age(hash_height: u64, max_age: usize, age: &HashAge) -> bool {
        hash_height - age.hash_height <= max_age as u64
    }
    /// Registers a new hash: bumps the height, evicts entries older than
    /// `max_age` once the map is full, and records the new entry.
    pub fn register_hash(&mut self, hash: &Hash, fee_calculator: &FeeCalculator) {
        self.hash_height += 1;
        let hash_height = self.hash_height;
        // this clean up can be deferred until sigs gets larger
        // because we verify age.nth every place we check for validity
        let max_age = self.max_age;
        if self.ages.len() >= max_age {
            self.ages
                .retain(|_, age| Self::check_age(hash_height, max_age, age));
        }
        self.ages.insert(
            *hash,
            HashAge {
                fee_calculator: fee_calculator.clone(),
                hash_height,
                timestamp: timestamp(),
            },
        );
        self.last_hash = Some(*hash);
    }
    /// Maps a hash height to a timestamp
    // Linear scan over all retained entries; returns the first match found.
    pub fn hash_height_to_timestamp(&self, hash_height: u64) -> Option<u64> {
        for age in self.ages.values() {
            if age.hash_height == hash_height {
                return Some(age.timestamp);
            }
        }
        None
    }
    /// Iterates over the retained hashes as `(hash_height, hash)` pairs, in
    /// no particular order (HashMap iteration order).
    pub fn get_recent_blockhashes(&self) -> impl Iterator<Item = (u64, &Hash)> {
        (&self.ages).iter().map(|(k, v)| (v.hash_height, k))
    }
}
#[cfg(test)]
mod tests {
use super::*;
use bincode::serialize;
use solana_sdk::clock::MAX_RECENT_BLOCKHASHES;
use solana_sdk::hash::hash;
#[test]
fn test_register_hash() {
let last_hash = Hash::default();
let mut hash_queue = BlockhashQueue::new(100);
assert!(!hash_queue.check_hash(last_hash));
hash_queue.register_hash(&last_hash, &FeeCalculator::default());
assert!(hash_queue.check_hash(last_hash));
assert_eq!(hash_queue.hash_height(), 1);
}
#[test]
fn test_reject_old_last_hash() {
let mut hash_queue = BlockhashQueue::new(100);
let last_hash = hash(&serialize(&0).unwrap());
for i in 0..102 {
let last_hash = hash(&serialize(&i).unwrap());
hash_queue.register_hash(&last_hash, &FeeCalculator::default());
}
// Assert we're no longer able to use the oldest hash.
assert!(!hash_queue.check_hash(last_hash));
}
/// test that when max age is 0, that a valid last_hash still passes the age check
#[test]
fn test_queue_init_blockhash() {
let last_hash = Hash::default();
let mut hash_queue = BlockhashQueue::new(100);
hash_queue.register_hash(&last_hash, &FeeCalculator::default());
assert_eq!(last_hash, hash_queue.last_hash());
assert!(hash_queue.check_hash_age(&last_hash, 0));
}
#[test]
fn test_get_recent_blockhashes() {
let mut blockhash_queue = BlockhashQueue::new(MAX_RECENT_BLOCKHASHES);
let recent_blockhashes = blockhash_queue.get_recent_blockhashes();
// Sanity-check an empty BlockhashQueue
assert_eq!(recent_blockhashes.count(), 0);
for i in 0..MAX_RECENT_BLOCKHASHES {
let hash = hash(&serialize(&i).unwrap());
blockhash_queue.register_hash(&hash, &FeeCalculator::default());
}
let recent_blockhashes = blockhash_queue.get_recent_blockhashes();
// Verify that the returned hashes are most recent
for (_slot, hash) in recent_blockhashes {
assert!(blockhash_queue.check_hash_age(hash, MAX_RECENT_BLOCKHASHES));
}
}
}
| 32.3125 | 86 | 0.605416 |
383eb0620b400b315d3d1e85be6a3e53b10d7061 | 9,188 | use std::convert::TryFrom;
use std::ops::Deref;
use std::str::FromStr;
use serde::de::Deserializer;
use serde::ser::Serializer;
use serde::{Deserialize, Serialize};
#[cfg(feature = "druid")]
use druid::Data;
use crate::error::ErrorKind;
use crate::Error;
pub static PUBLIC_OBJECT_LIBS_KEY: &str = "public.objectLibs";
/// A Plist dictionary.
pub type Plist = plist::Dictionary;
/// An RGBA color, each component stored as an `f32`.
///
/// `FromStr`/`Serialize` round-trip it through the textual "r,g,b,a" form,
/// with every component restricted to the range 0.0..=1.0 on parse.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "druid", derive(Data))]
pub struct Color {
    pub red: f32,
    pub green: f32,
    pub blue: f32,
    pub alpha: f32,
}
impl FromStr for Color {
    type Err = ErrorKind;

    /// Parses an "r,g,b,a" string where every component is an `f32` in
    /// 0.0..=1.0. A missing, extra, out-of-range or unparsable component
    /// yields `BadColor`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let components = s
            .split(',')
            .map(|part| match part.parse::<f32>() {
                Ok(value) if (0.0..=1.0).contains(&value) => Ok(value),
                _ => Err(ErrorKind::BadColor),
            })
            .collect::<Result<Vec<f32>, ErrorKind>>()?;
        // Exactly four components are required.
        match components.as_slice() {
            &[red, green, blue, alpha] => Ok(Color { red, green, blue, alpha }),
            _ => Err(ErrorKind::BadColor),
        }
    }
}
impl Serialize for Color {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let color_string = format!("{},{},{},{}", self.red, self.green, self.blue, self.alpha);
serializer.serialize_str(&color_string)
}
}
impl<'de> Deserialize<'de> for Color {
fn deserialize<D>(deserializer: D) -> Result<Color, D::Error>
where
D: Deserializer<'de>,
{
let string = String::deserialize(deserializer)?;
Color::from_str(&string).map_err(|_| serde::de::Error::custom("Malformed color string."))
}
}
// Types used in fontinfo.plist.
pub type Integer = i32;
pub type NonNegativeInteger = u32;
pub type Float = f64;
pub type Bitlist = Vec<u8>;
/// IntegerOrFloat represents a number that can be an integer or float. It should
/// serialize to an integer if it effectively represents one.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct IntegerOrFloat(f64);
impl IntegerOrFloat {
pub fn new(value: f64) -> Self {
IntegerOrFloat(value)
}
pub fn get(&self) -> f64 {
self.0
}
pub fn set(&mut self, value: f64) {
self.0 = value
}
pub fn is_integer(&self) -> bool {
(self.0 - self.round()).abs() < std::f64::EPSILON
}
}
// Deref to the inner f64 so callers can use float methods/operators directly.
impl Deref for IntegerOrFloat {
    type Target = f64;
    fn deref(&self) -> &f64 {
        &self.0
    }
}
impl From<i32> for IntegerOrFloat {
fn from(value: i32) -> Self {
IntegerOrFloat(value as f64)
}
}
impl From<f64> for IntegerOrFloat {
fn from(value: f64) -> Self {
IntegerOrFloat(value)
}
}
impl Serialize for IntegerOrFloat {
    /// Emits an `i32` when the stored value is whole, otherwise an `f64`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if self.is_integer() {
            return serializer.serialize_i32(self.0 as i32);
        }
        serializer.serialize_f64(self.0)
    }
}
impl<'de> Deserialize<'de> for IntegerOrFloat {
fn deserialize<D>(deserializer: D) -> Result<IntegerOrFloat, D::Error>
where
D: Deserializer<'de>,
{
let value: f64 = Deserialize::deserialize(deserializer)?;
Ok(IntegerOrFloat(value))
}
}
/// NonNegativeIntegerOrFloat represents a number that can be a NonNegative integer or float.
/// It should serialize to an integer if it effectively represents one.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct NonNegativeIntegerOrFloat(f64);
impl NonNegativeIntegerOrFloat {
pub fn new(value: f64) -> Option<Self> {
if value.is_sign_positive() {
Some(NonNegativeIntegerOrFloat(value))
} else {
None
}
}
pub fn get(&self) -> f64 {
self.0
}
pub fn try_set(&mut self, value: f64) -> Result<(), Error> {
if value.is_sign_positive() {
self.0 = value;
Ok(())
} else {
Err(Error::ExpectedPositiveValue)
}
}
pub fn is_integer(&self) -> bool {
(self.0 - self.round()).abs() < std::f64::EPSILON
}
}
// Deref to the inner f64 so callers can use float methods/operators directly.
impl Deref for NonNegativeIntegerOrFloat {
    type Target = f64;
    fn deref(&self) -> &f64 {
        &self.0
    }
}
impl TryFrom<i32> for NonNegativeIntegerOrFloat {
type Error = Error;
fn try_from(value: i32) -> Result<Self, Self::Error> {
match NonNegativeIntegerOrFloat::new(value as f64) {
Some(v) => Ok(v),
_ => Err(Error::ExpectedPositiveValue),
}
}
}
impl TryFrom<f64> for NonNegativeIntegerOrFloat {
type Error = Error;
fn try_from(value: f64) -> Result<Self, Self::Error> {
match NonNegativeIntegerOrFloat::new(value) {
Some(v) => Ok(v),
_ => Err(Error::ExpectedPositiveValue),
}
}
}
impl TryFrom<IntegerOrFloat> for NonNegativeIntegerOrFloat {
type Error = Error;
fn try_from(value: IntegerOrFloat) -> Result<Self, Self::Error> {
match NonNegativeIntegerOrFloat::new(*value) {
Some(v) => Ok(v),
_ => Err(Error::ExpectedPositiveValue),
}
}
}
impl Serialize for NonNegativeIntegerOrFloat {
    /// Emits an `i32` when the stored value is whole, otherwise an `f64`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if self.is_integer() {
            return serializer.serialize_i32(self.0 as i32);
        }
        serializer.serialize_f64(self.0)
    }
}
impl<'de> Deserialize<'de> for NonNegativeIntegerOrFloat {
fn deserialize<D>(deserializer: D) -> Result<NonNegativeIntegerOrFloat, D::Error>
where
D: Deserializer<'de>,
{
let value: f64 = Deserialize::deserialize(deserializer)?;
match NonNegativeIntegerOrFloat::try_from(value) {
Ok(v) => Ok(v),
Err(_) => Err(serde::de::Error::custom("Value must be positive.")),
}
}
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use serde_test::{assert_tokens, Token};
use crate::{Color, Guideline, Identifier, IntegerOrFloat, Line, NonNegativeIntegerOrFloat};
#[test]
fn color_parsing() {
let c1 = Color { red: 1.0, green: 0.0, blue: 0.0, alpha: 1.0 };
assert_tokens(&c1, &[Token::Str("1,0,0,1")]);
let c2 = Color { red: 0.0, green: 0.5, blue: 0.0, alpha: 0.5 };
assert_tokens(&c2, &[Token::Str("0,0.5,0,0.5")]);
}
#[test]
fn identifier_parsing() {
let valid_chars = " !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~";
assert!(Identifier::new(valid_chars).is_ok());
let i2 = Identifier::new("0aAä");
assert!(i2.is_err());
let i3 = Identifier::new("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa");
assert!(i3.is_err());
}
#[test]
fn guideline_parsing() {
let g1 = Guideline::new(
Line::Angle { x: 10.0, y: 20.0, degrees: 360.0 },
Some("hello".to_string()),
Some(Color { red: 0.0, green: 0.5, blue: 0.0, alpha: 0.5 }),
Some(Identifier::new("abcABC123").unwrap()),
None,
);
assert_tokens(
&g1,
&[
Token::Struct { name: "RawGuideline", len: 6 },
Token::Str("x"),
Token::Some,
Token::F32(10.0),
Token::Str("y"),
Token::Some,
Token::F32(20.0),
Token::Str("angle"),
Token::Some,
Token::F32(360.0),
Token::Str("name"),
Token::Some,
Token::Str("hello"),
Token::Str("color"),
Token::Some,
Token::Str("0,0.5,0,0.5"),
Token::Str("identifier"),
Token::Some,
Token::Str("abcABC123"),
Token::StructEnd,
],
);
}
#[test]
fn test_integer_or_float_type() {
let n1 = IntegerOrFloat::new(1.1);
assert_tokens(&n1, &[Token::F64(1.1)]);
let n1 = IntegerOrFloat::new(1.0);
assert_tokens(&n1, &[Token::I32(1)]);
let n1 = IntegerOrFloat::new(-1.1);
assert_tokens(&n1, &[Token::F64(-1.1)]);
let n1 = IntegerOrFloat::new(-1.0);
assert_tokens(&n1, &[Token::I32(-1)]);
let n1 = NonNegativeIntegerOrFloat::new(1.1).unwrap();
assert_tokens(&n1, &[Token::F64(1.1)]);
let n1 = NonNegativeIntegerOrFloat::new(1.0).unwrap();
assert_tokens(&n1, &[Token::I32(1)]);
}
#[test]
fn test_positive_int_or_float() {
assert!(NonNegativeIntegerOrFloat::try_from(-1.0).is_err());
let mut v = NonNegativeIntegerOrFloat::try_from(1.0).unwrap();
assert!(v.try_set(-1.0).is_err());
assert!(v.try_set(1.0).is_ok());
}
}
| 27.674699 | 138 | 0.566391 |
8a2e4074beeee9bed0427c8d7277a438c9d0798f | 2,329 | use crate::{
cart::Cart,
ppu::{control_register::IncrementAmount, vram::IVram, write_latch::LatchState},
};
use std::cell::Cell;
// Test double for PPU VRAM: records the last bytes written through the
// address/data ports and which IVram methods have been invoked.
#[derive(Default)]
pub struct MockVram {
    // Last byte written through `write_ppu_addr`.
    pub mock_addr: Cell<u8>,
    // Byte stored by `write_ppu_data` and echoed by the data read methods.
    pub mock_data: Cell<u8>,
    // Call-tracking flags, one per mocked IVram method.
    pub scroll_write_called: Cell<bool>,
    pub control_write_called: Cell<bool>,
    pub coarse_x_increment_called: Cell<bool>,
    pub fine_y_increment_called: Cell<bool>,
    pub copy_horizontal_pos_to_addr_called: Cell<bool>,
    pub copy_vertical_pos_to_addr_called: Cell<bool>,
}
impl MockVram {
    /// Returns every recorded value and call-tracking flag to its default.
    pub fn reset_mock(&self) {
        self.mock_addr.set(0);
        self.mock_data.set(0);
        let flags = [
            &self.scroll_write_called,
            &self.control_write_called,
            &self.coarse_x_increment_called,
            &self.fine_y_increment_called,
            &self.copy_horizontal_pos_to_addr_called,
            &self.copy_vertical_pos_to_addr_called,
        ];
        for flag in &flags {
            flag.set(false);
        }
    }
}
impl IVram for MockVram {
    // Records the written byte; first and second latch writes are treated
    // identically by the mock.
    fn write_ppu_addr(&self, latch_state: LatchState) {
        let val = match latch_state {
            LatchState::FirstWrite(val) => val,
            LatchState::SecondWrite(val) => val,
        };
        self.mock_addr.set(val)
    }
    // The mock ignores the increment amount and the cart; it only stores the
    // single `mock_data` byte.
    fn write_ppu_data<C: Cart>(&mut self, val: u8, _: IncrementAmount, _: &mut C) {
        self.mock_data.set(val);
    }
    fn read_ppu_data<C: Cart>(&self, _: IncrementAmount, _: &C) -> u8 {
        self.mock_data.get()
    }
    fn ppu_data<C: Cart>(&self, _: &C) -> u8 {
        self.mock_data.get()
    }
    // Generic reads and address queries are stubbed to zero.
    fn read<C: Cart>(&self, _: u16, _: &C) -> u8 {
        0
    }
    fn read_palette(&self, _: u16) -> u8 {
        0
    }
    fn addr(&self) -> u16 {
        0
    }
    // The remaining methods only record that they were called.
    fn scroll_write(&self, _: LatchState) {
        self.scroll_write_called.set(true)
    }
    fn control_write(&self, _: u8) {
        self.control_write_called.set(true)
    }
    fn coarse_x_increment(&self) {
        self.coarse_x_increment_called.set(true)
    }
    fn fine_y_increment(&self) {
        self.fine_y_increment_called.set(true)
    }
    fn copy_horizontal_pos_to_addr(&self) {
        self.copy_horizontal_pos_to_addr_called.set(true)
    }
    fn copy_vertical_pos_to_addr(&self) {
        self.copy_vertical_pos_to_addr_called.set(true)
    }
    fn fine_x(&self) -> u8 {
        0
    }
}
| 24.776596 | 83 | 0.624302 |
0e85f2d2d45923188464be1be9b03520fb00c1b2 | 95,771 | //---------------------------------------------------------------------------//
// Copyright (c) 2017-2022 Ismael Gutiérrez González. All rights reserved.
//
// This file is part of the Rusted PackFile Manager (RPFM) project,
// which can be found here: https://github.com/Frodo45127/rpfm.
//
// This file is licensed under the MIT license, which can be found here:
// https://github.com/Frodo45127/rpfm/blob/master/LICENSE.
//---------------------------------------------------------------------------//
/*!
Module with the background loop.
Basically, this does the heavy load of the program.
!*/
use crossbeam::channel::Sender;
use log::info;
use open::that_in_background;
use rayon::prelude::*;
use uuid::Uuid;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::env::temp_dir;
use std::fs::File;
use std::io::{BufWriter, Read, Write};
use std::path::PathBuf;
use std::thread;
use rpfm_error::{Error, ErrorKind};
use rpfm_lib::assembly_kit::*;
use rpfm_lib::common::*;
use rpfm_lib::diagnostics::Diagnostics;
use rpfm_lib::dependencies::{Dependencies, DependenciesInfo};
use rpfm_lib::GAME_SELECTED;
use rpfm_lib::packfile::PFHFileType;
use rpfm_lib::packedfile::*;
use rpfm_lib::packedfile::animpack::AnimPack;
use rpfm_lib::packedfile::table::db::DB;
use rpfm_lib::packedfile::table::loc::{Loc, TSV_NAME_LOC};
use rpfm_lib::packedfile::text::{Text, TextType};
use rpfm_lib::packfile::{PackFile, PackFileInfo, packedfile::{PackedFile, PackedFileInfo, RawPackedFile}, PathType, PFHFlags, RESERVED_NAME_NOTES};
use rpfm_lib::schema::{*, patch::SchemaPatches};
use rpfm_lib::SCHEMA;
use rpfm_lib::SCHEMA_PATCHES;
use rpfm_lib::SETTINGS;
use rpfm_lib::SUPPORTED_GAMES;
use rpfm_lib::tips::Tips;
use crate::app_ui::NewPackedFile;
use crate::CENTRAL_COMMAND;
use crate::communications::{CentralCommand, Command, Response, THREADS_COMMUNICATION_ERROR};
use crate::locale::{tr, tre};
use crate::packedfile_views::DataSource;
use crate::RPFM_PATH;
use crate::views::table::TableType;
/// This is the background loop that's going to be executed in a parallel thread to the UI. No UI or "Unsafe" stuff here.
///
/// All communication between this and the UI thread is done use the `CENTRAL_COMMAND` static.
pub fn background_loop() {
//---------------------------------------------------------------------------------------//
// Initializing stuff...
//---------------------------------------------------------------------------------------//
// We need two PackFiles:
// - `pack_file_decoded`: This one will hold our opened PackFile.
// - `pack_files_decoded_extra`: This one will hold the PackFiles opened for the `add_from_packfile` feature, using their paths as keys.
let mut pack_file_decoded = PackFile::new();
let mut pack_files_decoded_extra = BTreeMap::new();
// Preload the default game's dependencies.
let mut dependencies = Dependencies::default();
// Load all the tips we have.
let mut tips = if let Ok(tips) = Tips::load() { tips } else { Tips::default() };
// Try to load the schema patchs. Ignore them if fails due to missing file.
if let Ok(schema_patches) = SchemaPatches::load() {
*SCHEMA_PATCHES.write().unwrap() = schema_patches;
}
//---------------------------------------------------------------------------------------//
// Looping forever and ever...
//---------------------------------------------------------------------------------------//
info!("Background Thread looping around…");
'background_loop: loop {
// Wait until you get something through the channel. This hangs the thread until we got something,
// so it doesn't use processing power until we send it a message.
let (sender, response): (Sender<Response>, Command) = CENTRAL_COMMAND.recv_background();
match response {
// Command to close the thread.
Command::Exit => return,
// In case we want to reset the PackFile to his original state (dummy)...
Command::ResetPackFile => pack_file_decoded = PackFile::new(),
// In case we want to remove a Secondary Packfile from memory...
Command::RemovePackFileExtra(path) => { pack_files_decoded_extra.remove(&path); },
// In case we want to create a "New PackFile"...
Command::NewPackFile => {
let pack_version = GAME_SELECTED.read().unwrap().get_pfh_version_by_file_type(PFHFileType::Mod);
pack_file_decoded = PackFile::new_with_name("unknown.pack", pack_version);
if let Ok(version_number) = get_game_selected_exe_version_number() {
pack_file_decoded.set_game_version(version_number);
}
}
// In case we want to "Open one or more PackFiles"...
Command::OpenPackFiles(paths) => {
match PackFile::open_packfiles(&paths, SETTINGS.read().unwrap().settings_bool["use_lazy_loading"], false, false) {
Ok(pack_file) => {
pack_file_decoded = pack_file;
// Force decoding of table/locs, so they're in memory for the diagnostics to work.
if let Some(ref schema) = *SCHEMA.read().unwrap() {
let mut packed_files = pack_file_decoded.get_ref_mut_packed_files_by_types(&[PackedFileType::DB, PackedFileType::Loc], false);
packed_files.par_iter_mut().for_each(|x| {
let _ = x.decode_no_locks(schema);
});
}
CentralCommand::send_back(&sender, Response::PackFileInfo(PackFileInfo::from(&pack_file_decoded)));
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// In case we want to "Open an Extra PackFile" (for "Add from PackFile")...
Command::OpenPackFileExtra(path) => {
match pack_files_decoded_extra.get(&path) {
Some(pack_file) => CentralCommand::send_back(&sender, Response::PackFileInfo(PackFileInfo::from(pack_file))),
None => match PackFile::open_packfiles(&[path.to_path_buf()], true, false, true) {
Ok(pack_file) => {
CentralCommand::send_back(&sender, Response::PackFileInfo(PackFileInfo::from(&pack_file)));
pack_files_decoded_extra.insert(path.to_path_buf(), pack_file);
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
}
// In case we want to "Load All CA PackFiles"...
Command::LoadAllCAPackFiles => {
match PackFile::open_all_ca_packfiles() {
Ok(pack_file) => {
pack_file_decoded = pack_file;
CentralCommand::send_back(&sender, Response::PackFileInfo(PackFileInfo::from(&pack_file_decoded)));
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// In case we want to "Save a PackFile"...
Command::SavePackFile => {
match pack_file_decoded.save(None) {
Ok(_) => CentralCommand::send_back(&sender, Response::PackFileInfo(From::from(&pack_file_decoded))),
Err(error) => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::SavePackFileGeneric(error.to_string())))),
}
}
// In case we want to "Save a PackFile As"...
Command::SavePackFileAs(path) => {
match pack_file_decoded.save(Some(path.to_path_buf())) {
Ok(_) => CentralCommand::send_back(&sender, Response::PackFileInfo(From::from(&pack_file_decoded))),
Err(error) => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::SavePackFileGeneric(error.to_string())))),
}
}
// If you want to perform a clean&save over a PackFile...
// Strips removable data from the PackFile before writing it to `path`.
Command::CleanAndSavePackFileAs(path) => {
    pack_file_decoded.clean_packfile();
    match pack_file_decoded.save(Some(path.to_path_buf())) {
        Ok(_) => CentralCommand::send_back(&sender, Response::PackFileInfo(From::from(&pack_file_decoded))),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::SavePackFileGeneric(error.to_string())))),
    }
}

// In case we want to change the current settings...
// Replaces the global SETTINGS, then persists them to disk.
Command::SetSettings(settings) => {
    *SETTINGS.write().unwrap() = settings;
    match SETTINGS.read().unwrap().save() {
        Ok(()) => CentralCommand::send_back(&sender, Response::Success),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to change the current shortcuts...
// NOTE(review): unlike SetSettings, no global state is updated here —
// presumably the UI side owns the in-memory shortcuts; confirm.
Command::SetShortcuts(shortcuts) => {
    match shortcuts.save() {
        Ok(()) => CentralCommand::send_back(&sender, Response::Success),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to get the data of a PackFile needed to form the TreeView...
Command::GetPackFileDataForTreeView => {
    // Get the name and the PackedFile list, and send it.
    CentralCommand::send_back(&sender, Response::PackFileInfoVecPackedFileInfo((
        From::from(&pack_file_decoded),
        pack_file_decoded.get_packed_files_all_info(),
    )));
}

// In case we want to get the data of a Secondary PackFile needed to form the TreeView...
Command::GetPackFileExtraDataForTreeView(path) => {
    // Get the name and the PackedFile list, and serialize it.
    // The secondary PackFile must have been opened (and cached) beforehand.
    match pack_files_decoded_extra.get(&path) {
        Some(pack_file) => CentralCommand::send_back(&sender, Response::PackFileInfoVecPackedFileInfo((
            From::from(pack_file),
            pack_file.get_packed_files_all_info(),
        ))),
        None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::CannotFindExtraPackFile(path).into())),
    }
}

// In case we want to get the info of one PackedFile from the TreeView.
Command::GetPackedFileInfo(path) => {
    CentralCommand::send_back(&sender, Response::OptionPackedFileInfo(
        pack_file_decoded.get_packed_file_info_by_path(&path)
    ));
}

// In case we want to get the info of more than one PackedFiles from the TreeView.
Command::GetPackedFilesInfo(paths) => {
    CentralCommand::send_back(&sender, Response::VecOptionPackedFileInfo(
        paths.iter().map(|x| pack_file_decoded.get_packed_file_info_by_path(x)).collect()
    ));
}

// In case we want to launch a global search on a `PackFile`...
// The search stores its results inside `global_search`, which is sent back.
Command::GlobalSearch(mut global_search) => {
    global_search.search(&mut pack_file_decoded, &dependencies);
    let packed_files_info = global_search.get_results_packed_file_info(&mut pack_file_decoded);
    CentralCommand::send_back(&sender, Response::GlobalSearchVecPackedFileInfo((global_search, packed_files_info)));
}
// In case we want to change the current `Game Selected`...
// NOTE(review): the `.unwrap()` panics if `game_selected` is not a supported
// game key — presumably the UI only sends valid keys; confirm.
Command::SetGameSelected(game_selected) => {
    *GAME_SELECTED.write().unwrap() = SUPPORTED_GAMES.get_supported_game_from_key(&game_selected).unwrap();

    // Try to load the Schema for this game but, before it, PURGE THE DAMN SCHEMA-RELATED CACHE AND REBUILD IT AFTERWARDS.
    // Order matters: encode DB tables with the OLD schema, swap the global
    // schema, then re-decode them with the NEW one.
    pack_file_decoded.get_ref_mut_packed_files_by_type(PackedFileType::DB, false).par_iter_mut().for_each(|x| { let _ = x.encode_and_clean_cache(); });
    *SCHEMA.write().unwrap() = Schema::load(GAME_SELECTED.read().unwrap().get_schema_name()).ok();
    if let Some(ref schema) = *SCHEMA.read().unwrap() {
        pack_file_decoded.get_ref_mut_packed_files_by_type(PackedFileType::DB, false).par_iter_mut().for_each(|x| { let _ = x.decode_no_locks(schema); });
    }

    // Send a response, so we can unlock the UI.
    CentralCommand::send_back(&sender, Response::Success);

    // If there is a PackFile open, change his id to match the one of the new `Game Selected`.
    if !pack_file_decoded.get_file_name().is_empty() {
        pack_file_decoded.set_pfh_version(GAME_SELECTED.read().unwrap().get_pfh_version_by_file_type(pack_file_decoded.get_pfh_file_type()));
        if let Ok(version_number) = get_game_selected_exe_version_number() {
            pack_file_decoded.set_game_version(version_number);
        }
    }
}

// In case we want to generate the dependencies cache for our Game Selected...
Command::GenerateDependenciesCache(path, version) => {
    match dependencies.generate_dependencies_cache(&path, version) {
        Ok(mut cache) => match cache.save_to_binary() {
            Ok(_) => {
                // Rebuild the in-memory dependencies from the freshly saved cache.
                let _ = dependencies.rebuild(pack_file_decoded.get_packfiles_list(), false);
                let dependencies_info = DependenciesInfo::from(&dependencies);
                CentralCommand::send_back(&sender, Response::DependenciesInfo(dependencies_info));
            },
            Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
        },
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to update the Schema for our Game Selected...
Command::UpdateCurrentSchemaFromAssKit(path) => {
    match update_schema_from_raw_files(path, &dependencies) {
        Ok(_) => CentralCommand::send_back(&sender, Response::Success),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}
// In case we want to optimize our PackFile...
// On success the UI receives the paths of the PackedFiles the optimizer removed.
Command::OptimizePackFile => {
    let response = match pack_file_decoded.optimize(&dependencies) {
        Ok(paths_to_delete) => Response::VecVecString(paths_to_delete),
        Err(error) => Response::Error(error),
    };
    CentralCommand::send_back(&sender, response);
}
// In case we want to Patch the SiegeAI of a PackFile...
Command::PatchSiegeAI => {
    match pack_file_decoded.patch_siege_ai() {
        Ok(result) => CentralCommand::send_back(&sender, Response::StringVecVecString(result)),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error))
    }
}

// NOTE(review): the setter arms below send no response back — presumably
// fire-and-forget from the UI's point of view; confirm before changing.

// In case we want to change the PackFile's Type...
Command::SetPackFileType(new_type) => pack_file_decoded.set_pfh_file_type(new_type),

// In case we want to change the "Include Last Modified Date" setting of the PackFile...
Command::ChangeIndexIncludesTimestamp(state) => pack_file_decoded.get_ref_mut_bitmask().set(PFHFlags::HAS_INDEX_WITH_TIMESTAMPS, state),

// In case we want to compress/decompress the PackedFiles of the currently open PackFile...
Command::ChangeDataIsCompressed(state) => pack_file_decoded.toggle_compression(state),

// In case we want to get the path of the currently open `PackFile`.
Command::GetPackFilePath => CentralCommand::send_back(&sender, Response::PathBuf(pack_file_decoded.get_file_path().to_path_buf())),

// In case we want to get the Dependency PackFiles of our PackFile...
Command::GetDependencyPackFilesList => CentralCommand::send_back(&sender, Response::VecString(pack_file_decoded.get_packfiles_list().to_vec())),

// In case we want to set the Dependency PackFiles of our PackFile...
Command::SetDependencyPackFilesList(pack_files) => pack_file_decoded.set_packfiles_list(&pack_files),

// In case we want to check if there is a Dependency Database loaded...
Command::IsThereADependencyDatabase(include_asskit) => CentralCommand::send_back(&sender, Response::Bool(dependencies.game_has_vanilla_data_loaded(include_asskit))),
// In case we want to create a PackedFile from scratch...
Command::NewPackedFile(path, new_packed_file) => {
    if let Some(ref schema) = *SCHEMA.read().unwrap() {
        // Build the empty decoded file for the requested type. Table-like
        // types need a definition from the schema, so those branches can bail
        // out of this command (send an error and `continue` the outer loop).
        let decoded = match new_packed_file {
            NewPackedFile::AnimPack(_) => {
                let packed_file = AnimPack::new();
                DecodedPackedFile::AnimPack(packed_file)
            },
            NewPackedFile::DB(_, table, version) => {
                match schema.get_ref_versioned_file_db(&table) {
                    Ok(versioned_file) => {
                        match versioned_file.get_version(version) {
                            Ok(definition) => DecodedPackedFile::DB(DB::new(&table, None, definition)),
                            Err(error) => {
                                CentralCommand::send_back(&sender, Response::Error(error));
                                continue;
                            }
                        }
                    }
                    Err(error) => {
                        CentralCommand::send_back(&sender, Response::Error(error));
                        continue;
                    }
                }
            },
            NewPackedFile::Loc(_) => {
                // Locs always use the latest known definition.
                match schema.get_ref_last_definition_loc() {
                    Ok(definition) => DecodedPackedFile::Loc(Loc::new(definition)),
                    Err(error) => {
                        CentralCommand::send_back(&sender, Response::Error(error));
                        continue;
                    }
                }
            }
            NewPackedFile::Text(_, text_type) => {
                let mut packed_file = Text::new();
                packed_file.set_text_type(text_type);
                DecodedPackedFile::Text(packed_file)
            },
        };
        // Wrap the decoded data in a PackedFile at `path` and add it
        // (without overwrite) to the open PackFile.
        let packed_file = PackedFile::new_from_decoded(&decoded, &path);
        match pack_file_decoded.add_packed_file(&packed_file, false) {
            Ok(_) => CentralCommand::send_back(&sender, Response::Success),
            Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
        }
    } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())); }
}
// When we want to add one or more PackedFiles to our PackFile.
//
// `source_paths` and `destination_paths` are parallel lists (disk path ->
// in-Pack path). After adding, the UI always receives the list of added
// paths, followed by either an Error or Success response.
Command::AddPackedFiles(source_paths, destination_paths, paths_to_ignore, import_tables_from_tsv) => {
    let mut added_paths = vec![];
    let mut it_broke = None;

    // If we're going to import TSV, drop any binary file that has a TSV twin
    // (same path with a `.tsv` extension) in the batch, so the TSV wins.
    let paths = if import_tables_from_tsv {
        source_paths.iter().zip(destination_paths.iter())
            .filter(|(source, _)| {
                // TSV files are always kept.
                if source.extension().map_or(false, |extension| extension == "tsv") {
                    true
                }

                // Anything else (including extension-less files) is kept only
                // if its `.tsv` twin is not part of the batch.
                else {
                    let mut path = source.to_path_buf();
                    path.set_extension("tsv");
                    source_paths.par_iter().all(|source| source != &path)
                }
            })
            .collect::<Vec<(&PathBuf, &Vec<String>)>>()
    } else {
        source_paths.iter().zip(destination_paths.iter()).collect::<Vec<(&PathBuf, &Vec<String>)>>()
    };

    for (source_path, destination_path) in paths {

        // Skip ignored paths.
        if let Some(ref paths_to_ignore) = paths_to_ignore {
            if paths_to_ignore.iter().any(|x| source_path.starts_with(x)) {
                continue;
            }
        }

        match pack_file_decoded.add_from_file(source_path, destination_path.to_vec(), true, import_tables_from_tsv) {
            Ok(path) => if !path.is_empty() { added_paths.push(PathType::File(path.to_vec())) },

            // NOTE: if several files fail, only the *last* error reaches the UI.
            Err(error) => it_broke = Some(error),
        }
    }

    // Always report what got added first, then the overall outcome.
    CentralCommand::send_back(&sender, Response::VecPathType(added_paths.to_vec()));
    match it_broke {
        Some(error) => CentralCommand::send_back(&sender, Response::Error(error)),
        None => CentralCommand::send_back(&sender, Response::Success),
    }

    // Force decoding of table/locs, so they're in memory for the diagnostics to work.
    if let Some(ref schema) = *SCHEMA.read().unwrap() {
        let paths = added_paths.iter().filter_map(|x| if let PathType::File(path) = x { Some(&**path) } else { None }).collect::<Vec<&[String]>>();
        let mut packed_files = pack_file_decoded.get_ref_mut_packed_files_by_paths(paths);
        packed_files.par_iter_mut()
            .filter(|x| [PackedFileType::DB, PackedFileType::Loc].contains(&x.get_packed_file_type(false)))
            .for_each(|x| {
                let _ = x.decode_no_locks(schema);
            });
    }
}
// In case we want to add one or more entire folders to our PackFile...
Command::AddPackedFilesFromFolder(paths, paths_to_ignore, import_tables_from_tsv) => {
    match pack_file_decoded.add_from_folders(&paths, &paths_to_ignore, true, import_tables_from_tsv) {
        Ok(paths) => {
            CentralCommand::send_back(&sender, Response::VecPathType(paths.iter().filter(|x| !x.is_empty()).map(|x| PathType::File(x.to_vec())).collect()));

            // Force decoding of table/locs, so they're in memory for the diagnostics to work.
            if let Some(ref schema) = *SCHEMA.read().unwrap() {
                let paths = paths.iter().map(|x| &**x).collect::<Vec<&[String]>>();
                let mut packed_files = pack_file_decoded.get_ref_mut_packed_files_by_paths(paths);
                packed_files.par_iter_mut()
                    .filter(|x| [PackedFileType::DB, PackedFileType::Loc].contains(&x.get_packed_file_type(false)))
                    .for_each(|x| {
                        let _ = x.decode_no_locks(schema);
                    });
            }
        }
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to move stuff from one PackFile to another...
// The source must be a previously opened "extra" PackFile.
Command::AddPackedFilesFromPackFile((pack_file_path, paths)) => {
    match pack_files_decoded_extra.get(&pack_file_path) {

        // Try to add the PackedFile to the main PackFile.
        Some(pack_file) => match pack_file_decoded.add_from_packfile(pack_file, &paths, true) {
            Ok(paths) => {
                CentralCommand::send_back(&sender, Response::VecPathType(paths.to_vec()));

                // Force decoding of table/locs, so they're in memory for the diagnostics to work.
                if let Some(ref schema) = *SCHEMA.read().unwrap() {
                    let paths = paths.iter().filter_map(|x| if let PathType::File(path) = x { Some(&**path) } else { None }).collect::<Vec<&[String]>>();
                    let mut packed_files = pack_file_decoded.get_ref_mut_packed_files_by_paths(paths);
                    packed_files.par_iter_mut()
                        .filter(|x| [PackedFileType::DB, PackedFileType::Loc].contains(&x.get_packed_file_type(false)))
                        .for_each(|x| {
                            let _ = x.decode_no_locks(schema);
                        });
                }
            }
            Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
        }
        None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::CannotFindExtraPackFile(pack_file_path).into())),
    }
}

// In case we want to move stuff from our PackFile to an Animpack...
// The PackedFile at `anim_pack_path` must already be decoded as an AnimPack.
Command::AddPackedFilesFromPackFileToAnimpack((anim_pack_path, paths)) => {
    let packed_files_to_add = pack_file_decoded.get_packed_files_by_path_type(&paths);
    match pack_file_decoded.get_ref_mut_packed_file_by_path(&anim_pack_path) {
        Some(packed_file) => {
            let packed_file_decoded = packed_file.get_ref_mut_decoded();
            match packed_file_decoded {
                DecodedPackedFile::AnimPack(anim_pack) => match anim_pack.add_packed_files(&packed_files_to_add) {
                    Ok(paths) => CentralCommand::send_back(&sender, Response::VecPathType(paths)),
                    Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                }
                _ => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileTypeIsNotWhatWeExpected(PackedFileType::AnimPack.to_string(), PackedFileType::from(&*packed_file_decoded).to_string()).into())),
            }
        }
        None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
    }
}

// In case we want to move stuff from an Animpack to our PackFile...
Command::AddPackedFilesFromAnimpack((anim_pack_path, paths)) => {
    // Pull the requested files out of the AnimPack first; bail out of this
    // command (`continue` the outer loop) if the path is missing or the
    // PackedFile is not an AnimPack.
    let packed_files_to_add = match pack_file_decoded.get_ref_packed_file_by_path(&anim_pack_path) {
        Some(packed_file) => {
            let packed_file_decoded = packed_file.get_ref_decoded();
            match packed_file_decoded {
                DecodedPackedFile::AnimPack(anim_pack) => anim_pack.get_anim_packed_as_packed_files(&paths),
                _ => {
                    CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileTypeIsNotWhatWeExpected(PackedFileType::AnimPack.to_string(), PackedFileType::from(&*packed_file_decoded).to_string()).into()));
                    continue;
                }
            }
        }
        None => {
            CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into()));
            continue;
        },
    };
    let packed_files_to_add = packed_files_to_add.iter().collect::<Vec<&PackedFile>>();
    match pack_file_decoded.add_packed_files(&packed_files_to_add, true, true) {
        Ok(paths) => CentralCommand::send_back(&sender, Response::VecPathType(paths.iter().map(|x| PathType::File(x.to_vec())).collect())),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to delete files from an Animpack...
Command::DeleteFromAnimpack((anim_pack_path, paths)) => {
    match pack_file_decoded.get_ref_mut_packed_file_by_path(&anim_pack_path) {
        Some(packed_file) => {
            let packed_file_decoded = packed_file.get_ref_mut_decoded();
            match packed_file_decoded {
                DecodedPackedFile::AnimPack(anim_pack) => {
                    anim_pack.remove_packed_file_by_path_types(&paths);
                    CentralCommand::send_back(&sender, Response::Success);
                }
                _ => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileTypeIsNotWhatWeExpected(PackedFileType::AnimPack.to_string(), PackedFileType::from(&*packed_file_decoded).to_string()).into())),
            }
        }
        None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
    }
}
// In case we want to decode a PackedFile (of any supported type) from one of
// the possible data sources...
//
// NOTE(review): the three PackFile/ParentFiles/GameFiles branches are
// near-identical; they differ only in where the PackedFile comes from and in
// borrow shape (`&**packed_file` vs `&packed_file`), which is why they are
// not factored into a helper here.
Command::DecodePackedFile(path, data_source) => {
    match data_source {
        DataSource::PackFile => {
            // The notes pseudo-file is backed by the PackFile's notes field,
            // not by a real PackedFile, so it's answered as a Markdown Text.
            if path == [RESERVED_NAME_NOTES.to_owned()] {
                let mut note = Text::new();
                note.set_text_type(TextType::Markdown);
                match pack_file_decoded.get_notes() {
                    Some(notes) => {
                        note.set_contents(notes);
                        CentralCommand::send_back(&sender, Response::Text(note));
                    }
                    None => CentralCommand::send_back(&sender, Response::Text(note)),
                }
            }
            else {

                // Find the PackedFile we want and send back the response.
                match pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
                    Some(ref mut packed_file) => {
                        match packed_file.decode_return_ref() {
                            Ok(packed_file_data) => {
                                // One Response variant per decoded type, each
                                // carrying the decoded data plus the PackedFile info.
                                match packed_file_data {
                                    DecodedPackedFile::AnimFragment(data) => CentralCommand::send_back(&sender, Response::AnimFragmentPackedFileInfo((data.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::AnimPack(data) => CentralCommand::send_back(&sender, Response::AnimPackPackedFileInfo((data.get_as_pack_file_info(&path), From::from(&**packed_file)))),
                                    DecodedPackedFile::AnimTable(data) => CentralCommand::send_back(&sender, Response::AnimTablePackedFileInfo((data.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::CaVp8(data) => CentralCommand::send_back(&sender, Response::CaVp8PackedFileInfo((data.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::ESF(data) => CentralCommand::send_back(&sender, Response::ESFPackedFileInfo((data.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::DB(table) => CentralCommand::send_back(&sender, Response::DBPackedFileInfo((table.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::Image(image) => CentralCommand::send_back(&sender, Response::ImagePackedFileInfo((image.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::Loc(table) => CentralCommand::send_back(&sender, Response::LocPackedFileInfo((table.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::MatchedCombat(data) => CentralCommand::send_back(&sender, Response::MatchedCombatPackedFileInfo((data.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::RigidModel(rigid_model) => CentralCommand::send_back(&sender, Response::RigidModelPackedFileInfo((rigid_model.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::Text(text) => CentralCommand::send_back(&sender, Response::TextPackedFileInfo((text.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::UIC(uic) => CentralCommand::send_back(&sender, Response::UICPackedFileInfo((uic.clone(), From::from(&**packed_file)))),
                                    DecodedPackedFile::UnitVariant(_) => CentralCommand::send_back(&sender, Response::DecodedPackedFilePackedFileInfo((packed_file_data.clone(), From::from(&**packed_file)))),
                                    _ => CentralCommand::send_back(&sender, Response::Unknown),
                                }
                            }
                            Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                        }
                    }
                    None => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::PackedFileNotFound))),
                }
            }
        }

        // Same as above, but the PackedFile comes from the parent mods.
        DataSource::ParentFiles => {
            match dependencies.get_packedfile_from_parent_files(&path) {
                Ok(mut packed_file) => {
                    match packed_file.decode_return_ref() {
                        Ok(packed_file_data) => {
                            match packed_file_data {
                                DecodedPackedFile::AnimFragment(data) => CentralCommand::send_back(&sender, Response::AnimFragmentPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::AnimPack(data) => CentralCommand::send_back(&sender, Response::AnimPackPackedFileInfo((data.get_as_pack_file_info(&path), From::from(&packed_file)))),
                                DecodedPackedFile::AnimTable(data) => CentralCommand::send_back(&sender, Response::AnimTablePackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::CaVp8(data) => CentralCommand::send_back(&sender, Response::CaVp8PackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::ESF(data) => CentralCommand::send_back(&sender, Response::ESFPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::DB(table) => CentralCommand::send_back(&sender, Response::DBPackedFileInfo((table.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Image(image) => CentralCommand::send_back(&sender, Response::ImagePackedFileInfo((image.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Loc(table) => CentralCommand::send_back(&sender, Response::LocPackedFileInfo((table.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::MatchedCombat(data) => CentralCommand::send_back(&sender, Response::MatchedCombatPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::RigidModel(rigid_model) => CentralCommand::send_back(&sender, Response::RigidModelPackedFileInfo((rigid_model.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Text(text) => CentralCommand::send_back(&sender, Response::TextPackedFileInfo((text.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::UIC(uic) => CentralCommand::send_back(&sender, Response::UICPackedFileInfo((uic.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::UnitVariant(_) => CentralCommand::send_back(&sender, Response::DecodedPackedFilePackedFileInfo((packed_file_data.clone(), From::from(&packed_file)))),
                                _ => CentralCommand::send_back(&sender, Response::Unknown),
                            }
                        }
                        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                    }
                }
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        }

        // Same again, but the PackedFile comes from the vanilla game files.
        DataSource::GameFiles => {
            match dependencies.get_packedfile_from_game_files(&path) {
                Ok(mut packed_file) => {
                    match packed_file.decode_return_ref() {
                        Ok(packed_file_data) => {
                            match packed_file_data {
                                DecodedPackedFile::AnimFragment(data) => CentralCommand::send_back(&sender, Response::AnimFragmentPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::AnimPack(data) => CentralCommand::send_back(&sender, Response::AnimPackPackedFileInfo((data.get_as_pack_file_info(&path), From::from(&packed_file)))),
                                DecodedPackedFile::AnimTable(data) => CentralCommand::send_back(&sender, Response::AnimTablePackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::CaVp8(data) => CentralCommand::send_back(&sender, Response::CaVp8PackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::ESF(data) => CentralCommand::send_back(&sender, Response::ESFPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::DB(table) => CentralCommand::send_back(&sender, Response::DBPackedFileInfo((table.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Image(image) => CentralCommand::send_back(&sender, Response::ImagePackedFileInfo((image.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Loc(table) => CentralCommand::send_back(&sender, Response::LocPackedFileInfo((table.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::MatchedCombat(data) => CentralCommand::send_back(&sender, Response::MatchedCombatPackedFileInfo((data.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::RigidModel(rigid_model) => CentralCommand::send_back(&sender, Response::RigidModelPackedFileInfo((rigid_model.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::Text(text) => CentralCommand::send_back(&sender, Response::TextPackedFileInfo((text.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::UIC(uic) => CentralCommand::send_back(&sender, Response::UICPackedFileInfo((uic.clone(), From::from(&packed_file)))),
                                DecodedPackedFile::UnitVariant(_) => CentralCommand::send_back(&sender, Response::DecodedPackedFilePackedFileInfo((packed_file_data.clone(), From::from(&packed_file)))),
                                _ => CentralCommand::send_back(&sender, Response::Unknown),
                            }
                        }
                        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                    }
                }
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        }

        // Assembly-Kit files are always DB tables, with no backing PackedFile
        // (hence the default PackedFileInfo).
        DataSource::AssKitFiles => {
            match dependencies.get_packedfile_from_asskit_files(&path) {
                Ok(db) => CentralCommand::send_back(&sender, Response::DBPackedFileInfo((db, PackedFileInfo::default()))),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        }

        // External files are handled entirely on the UI side; nothing to do here.
        DataSource::ExternalFile => {}
    }
}
// When we want to save a PackedFile from the view....
Command::SavePackedFileFromView(path, decoded_packed_file) => {
    // The notes pseudo-file is stored on the PackFile itself, not as a PackedFile.
    if path == [RESERVED_NAME_NOTES.to_owned()] {
        if let DecodedPackedFile::Text(data) = decoded_packed_file {
            // Empty contents clear the notes entirely.
            let note = if data.get_ref_contents().is_empty() { None } else { Some(data.get_ref_contents().to_owned()) };
            pack_file_decoded.set_notes(&note);
        }
    }
    else if let Some(packed_file) = pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
        *packed_file.get_ref_mut_decoded() = decoded_packed_file;
    }
    // NOTE(review): a missing path is silently ignored — Success is reported regardless.
    CentralCommand::send_back(&sender, Response::Success);
}

// In case we want to delete PackedFiles from a PackFile...
Command::DeletePackedFiles(item_types) => {
    CentralCommand::send_back(&sender, Response::VecPathType(pack_file_decoded.remove_packed_files_by_type(&item_types)));
}

// In case we want to extract PackedFiles from a PackFile...
Command::ExtractPackedFiles(item_types, path, extract_tables_to_tsv) => {
    match pack_file_decoded.extract_packed_files_by_type(&item_types, &path, extract_tables_to_tsv) {
        Ok(result) => CentralCommand::send_back(&sender, Response::String(tre("files_extracted_success", &[&result.to_string()]))),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to rename one or more PackedFiles...
Command::RenamePackedFiles(renaming_data) => {
    CentralCommand::send_back(&sender, Response::VecPathTypeVecString(pack_file_decoded.rename_packedfiles(&renaming_data, false)));
}

// In case we want to Mass-Import TSV Files...
Command::MassImportTSV(paths, name) => {
    match pack_file_decoded.mass_import_tsv(&paths, name, true) {
        Ok(result) => CentralCommand::send_back(&sender, Response::VecVecStringVecVecString(result)),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to Mass-Export TSV Files...
Command::MassExportTSV(path_types, path) => {
    match pack_file_decoded.mass_export_tsv(&path_types, &path) {
        Ok(result) => CentralCommand::send_back(&sender, Response::String(result)),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to know if a Folder exists, knowing his path...
Command::FolderExists(path) => {
    CentralCommand::send_back(&sender, Response::Bool(pack_file_decoded.folder_exists(&path)));
}

// In case we want to know if PackedFile exists, knowing his path...
Command::PackedFileExists(path) => {
    CentralCommand::send_back(&sender, Response::Bool(pack_file_decoded.packedfile_exists(&path)));
}

// In case we want to get the list of tables in the dependency database...
Command::GetTableListFromDependencyPackFile => {
    // NOTE(review): `get_path()[1]` presumably picks the table-folder name
    // out of `db/<table_name>/...` paths; panics if a path is shorter — confirm.
    let tables = if let Ok(tables) = dependencies.get_db_and_loc_tables_from_cache(true, false, true, true) {
        tables.iter().map(|x| x.get_path()[1].to_owned()).collect::<Vec<String>>()
    } else { vec![] };
    CentralCommand::send_back(&sender, Response::VecString(tables));
}

// In case we want to get the version of an specific table from the dependency database...
Command::GetTableVersionFromDependencyPackFile(table_name) => {
    if dependencies.game_has_vanilla_data_loaded(false) {
        if let Some(ref schema) = *SCHEMA.read().unwrap() {
            match schema.get_ref_last_definition_db(&table_name, &dependencies) {
                Ok(definition) => CentralCommand::send_back(&sender, Response::I32(definition.get_version())),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())); }
    } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::DependenciesCacheNotGeneratedorOutOfDate.into())); }
}

// In case we want to get the definition of an specific table from the dependency database...
// Same guards as the version query above, but returns the full Definition.
Command::GetTableDefinitionFromDependencyPackFile(table_name) => {
    if dependencies.game_has_vanilla_data_loaded(false) {
        if let Some(ref schema) = *SCHEMA.read().unwrap() {
            match schema.get_ref_last_definition_db(&table_name, &dependencies) {
                Ok(definition) => CentralCommand::send_back(&sender, Response::Definition(definition.clone())),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())); }
    } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::DependenciesCacheNotGeneratedorOutOfDate.into())); }
}

// In case we want to merge DB or Loc Tables from a PackFile...
Command::MergeTables(paths, name, delete_source_files) => {
    match pack_file_decoded.merge_tables(&paths, &name, delete_source_files) {
        Ok(data) => CentralCommand::send_back(&sender, Response::VecString(data)),
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to update a table...
Command::UpdateTable(path_type) => {
    if let Some(ref schema) = *SCHEMA.read().unwrap() {
        if let PathType::File(path) = path_type {
            if let Some(packed_file) = pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
                // NOTE(review): the local `packed_file_decoded` below is NOT the
                // outer PackFile variable of a similar name — it's this
                // PackedFile's decoded data. Careful when editing.
                match packed_file.decode_return_ref_mut_no_locks(schema) {
                    Ok(packed_file_decoded) => match packed_file_decoded.update_table(&dependencies) {
                        Ok(data) => {

                            // Save it to binary, so the decoder will load the proper data if we open it with it.
                            let _ = packed_file.encode_no_load();
                            CentralCommand::send_back(&sender, Response::I32I32(data))
                        },
                        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                    }
                    Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
                }
            } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())); }
        } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())); }
    } else { CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())); }
}

// In case we want to replace a specific set of matches from a Global Search...
Command::GlobalSearchReplaceMatches(mut global_search, matches) => {
    let _ = global_search.replace_matches(&mut pack_file_decoded, &matches);
    let packed_files_info = global_search.get_results_packed_file_info(&mut pack_file_decoded);
    CentralCommand::send_back(&sender, Response::GlobalSearchVecPackedFileInfo((global_search, packed_files_info)));
}

// In case we want to replace all matches in a Global Search...
Command::GlobalSearchReplaceAll(mut global_search) => {
    let _ = global_search.replace_all(&mut pack_file_decoded);
    let packed_files_info = global_search.get_results_packed_file_info(&mut pack_file_decoded);
    CentralCommand::send_back(&sender, Response::GlobalSearchVecPackedFileInfo((global_search, packed_files_info)));
}
// In case we want to get the reference data for a definition...
//
// Builds the lookup/reference data the table views use for their combo-box
// columns. The cheap path uses the per-table cached data; the expensive path
// rebuilds from the vanilla DB/Loc tables plus the Assembly-Kit-only tables.
Command::GetReferenceDataFromDefinition(table_name, definition, files_to_ignore) => {

    // This is a heavy function, so first check if we have the data we want in the cache.
    let dependency_data = if dependencies.get_ref_cached_data().read().unwrap().get(&table_name).is_some() {
        DB::get_dependency_data(
            &pack_file_decoded,
            &table_name,
            &definition,
            &[],
            &[],
            &dependencies,
            &files_to_ignore,
        )
    }

    // Cache miss: pull the vanilla tables from the dependencies cache.
    else if let Ok(dependencies_vanilla) = dependencies.get_db_and_loc_tables_from_cache(true, false, true, true) {
        DB::get_dependency_data(
            &pack_file_decoded,
            &table_name,
            &definition,
            &dependencies_vanilla,
            dependencies.get_ref_asskit_only_db_tables(),
            &dependencies,
            &files_to_ignore,
        )
    }

    // No usable cache at all: return an empty map.
    else { BTreeMap::new() };

    CentralCommand::send_back(&sender, Response::BTreeMapI32DependencyData(dependency_data));
}
// In case we want to return an entire PackedFile to the UI.
Command::GetPackedFile(path) => CentralCommand::send_back(&sender, Response::OptionPackedFile(pack_file_decoded.get_packed_file_by_path(&path))),

// In case we want to change the format of a ca_vp8 video...
Command::SetCaVp8Format((path, format)) => {
    match pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
        Some(ref mut packed_file) => {
            match packed_file.decode_return_ref_mut() {
                Ok(data) => {
                    if let DecodedPackedFile::CaVp8(ref mut data) = data {
                        data.set_format(format);
                    }
                    // TODO: Put an error here.
                    // NOTE(review): on success (and when the file is not a CaVp8)
                    // no response is sent, while the error paths do send one —
                    // confirm the UI does not block waiting on this command.
                }
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            }
        }
        None => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::PackedFileNotFound))),
    }
},

// In case we want to save an schema to disk...
Command::SaveSchema(mut schema) => {
    match schema.save(GAME_SELECTED.read().unwrap().get_schema_name()) {
        Ok(_) => {
            // Only replace the in-memory schema once it's safely on disk.
            *SCHEMA.write().unwrap() = Some(schema);
            CentralCommand::send_back(&sender, Response::Success);
        },
        Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
    }
}

// In case we want to clean the cache of one or more PackedFiles...
// Re-encodes them to binary and drops the decoded copies. Sends no response.
Command::CleanCache(paths) => {
    let mut packed_files = pack_file_decoded.get_ref_mut_packed_files_by_paths(paths.iter().map(|x| x.as_ref()).collect::<Vec<&[String]>>());
    packed_files.iter_mut().for_each(|x| { let _ = x.encode_and_clean_cache(); });
}

// In case we want to export a PackedFile as a TSV file...
Command::ExportTSV((internal_path, external_path)) => {
    match pack_file_decoded.get_ref_mut_packed_file_by_path(&internal_path) {
        Some(packed_file) => match packed_file.get_decoded() {
            // DB tables use their table-folder name (second path segment) as the TSV table name.
            DecodedPackedFile::DB(data) => match data.export_tsv(&external_path, &internal_path[1], &packed_file.get_path()) {
                Ok(_) => CentralCommand::send_back(&sender, Response::Success),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            },
            DecodedPackedFile::Loc(data) => match data.export_tsv(&external_path, TSV_NAME_LOC, &packed_file.get_path()) {
                Ok(_) => CentralCommand::send_back(&sender, Response::Success),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            },
            /*
            DecodedPackedFile::DependencyPackFileList(data) => match data.export_tsv(&[external_path]) {
                Ok(_) => CentralCommand::send_back(&sender, Response::Success),
                Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
            },*/
            // NOTE(review): any other decoded type panics here — confirm the UI
            // only offers TSV export for DB/Loc files.
            _ => unimplemented!()
        }
        None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
    }
}
// In case we want to import a TSV as a PackedFile...
Command::ImportTSV((internal_path, external_path)) => {
match *SCHEMA.read().unwrap() {
Some(ref schema) => {
match pack_file_decoded.get_ref_mut_packed_file_by_path(&internal_path) {
Some(packed_file) => match packed_file.get_packed_file_type(false) {
PackedFileType::DB => match DB::import_tsv(&schema, &external_path) {
Ok((data, _)) => CentralCommand::send_back(&sender, Response::TableType(TableType::DB(data))),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
},
PackedFileType::Loc => match Loc::import_tsv(&schema, &external_path) {
Ok((data, _)) => CentralCommand::send_back(&sender, Response::TableType(TableType::Loc(data))),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
},
_ => unimplemented!()
}
None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
}
}
None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())),
}
}
// In case we want to open a PackFile's location in the file manager...
Command::OpenContainingFolder => {
// If the path exists, try to open it. If not, throw an error.
if pack_file_decoded.get_file_path().exists() {
let mut temp_path = pack_file_decoded.get_file_path().to_path_buf();
temp_path.pop();
open::that_in_background(&temp_path);
CentralCommand::send_back(&sender, Response::Success);
}
else {
CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackFileIsNotAFile.into()));
}
},
// When we want to open a PackedFile in a external program...
Command::OpenPackedFileInExternalProgram(path) => {
match pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
Some(packed_file) => {
let extension = path.last().unwrap().rsplitn(2, '.').next().unwrap();
let name = format!("{}.{}", Uuid::new_v4(), extension);
let mut temporal_file_path = temp_dir();
temporal_file_path.push(name);
match packed_file.get_packed_file_type(false) {
// Tables we extract them as TSV.
PackedFileType::DB => {
match packed_file.decode_return_clean_cache() {
Ok(data) => {
if let DecodedPackedFile::DB(data) = data {
temporal_file_path.set_extension("tsv");
match data.export_tsv(&temporal_file_path, &path[1], &packed_file.get_path()) {
Ok(_) => {
that_in_background(&temporal_file_path);
CentralCommand::send_back(&sender, Response::PathBuf(temporal_file_path));
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
},
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
},
PackedFileType::Loc => {
match packed_file.decode_return_clean_cache() {
Ok(data) => {
if let DecodedPackedFile::Loc(data) = data {
temporal_file_path.set_extension("tsv");
match data.export_tsv(&temporal_file_path, TSV_NAME_LOC, &packed_file.get_path()) {
Ok(_) => {
that_in_background(&temporal_file_path);
CentralCommand::send_back(&sender, Response::PathBuf(temporal_file_path));
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
},
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
},
// The rest of the files, we extract them as we have them.
_ => {
match packed_file.get_raw_data_and_clean_cache() {
Ok(data) => {
match File::create(&temporal_file_path) {
Ok(mut file) => {
if file.write_all(&data).is_ok() {
that_in_background(&temporal_file_path);
CentralCommand::send_back(&sender, Response::PathBuf(temporal_file_path));
}
else {
CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::IOGenericWrite(vec![temporal_file_path.display().to_string();1]))));
}
}
Err(_) => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::IOGenericWrite(vec![temporal_file_path.display().to_string();1])))),
}
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
}
}
None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
}
}
// When we want to save a PackedFile from the external view....
Command::SavePackedFileFromExternalView((path, external_path)) => {
match pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
Some(packed_file) => {
match packed_file.get_packed_file_type(false) {
// Tables we extract them as TSV.
PackedFileType::DB | PackedFileType::Loc => {
match *SCHEMA.read().unwrap() {
Some(ref schema) => {
match packed_file.decode_return_ref_mut() {
Ok(data) => {
match data {
DecodedPackedFile::DB(ref mut data) => {
match DB::import_tsv(&schema, &external_path) {
Ok((new_data, _)) => {
*data = new_data;
match packed_file.encode_and_clean_cache() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
DecodedPackedFile::Loc(ref mut data) => {
match Loc::import_tsv(&schema, &external_path) {
Ok((new_data, _)) => {
*data = new_data;
match packed_file.encode_and_clean_cache() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
_ => unimplemented!(),
}
},
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaNotFound.into())),
}
},
_ => {
match File::open(external_path) {
Ok(mut file) => {
let mut data = vec![];
match file.read_to_end(&mut data) {
Ok(_) => {
packed_file.set_raw_data(&data);
CentralCommand::send_back(&sender, Response::Success);
}
Err(_) => CentralCommand::send_back(&sender, Response::Error(ErrorKind::IOGeneric.into())),
}
}
Err(_) => CentralCommand::send_back(&sender, Response::Error(ErrorKind::IOGeneric.into())),
}
}
}
}
None => CentralCommand::send_back(&sender, Response::Error(ErrorKind::PackedFileNotFound.into())),
}
}
// When we want to update our schemas...
Command::UpdateSchemas => {
match Schema::update_schema_repo() {
// If it worked, we have to update the currently open schema with the one we just downloaded and rebuild cache/dependencies with it.
Ok(_) => {
// Encode the decoded tables with the old schema, then re-decode them with the new one.
pack_file_decoded.get_ref_mut_packed_files_by_type(PackedFileType::DB, false).par_iter_mut().for_each(|x| { let _ = x.encode_and_clean_cache(); });
*SCHEMA.write().unwrap() = Schema::load(GAME_SELECTED.read().unwrap().get_schema_name()).ok();
if let Some(ref schema) = *SCHEMA.read().unwrap() {
pack_file_decoded.get_ref_mut_packed_files_by_type(PackedFileType::DB, false).par_iter_mut().for_each(|x| { let _ = x.decode_no_locks(schema); });
}
// Try to reload the schema patchs. Ignore them if fails due to missing file.
if let Ok(schema_patches) = SchemaPatches::load() {
*SCHEMA_PATCHES.write().unwrap() = schema_patches;
}
// Then rebuild the dependencies stuff.
if dependencies.game_has_dependencies_generated() {
match dependencies.rebuild(pack_file_decoded.get_packfiles_list(), false) {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(ErrorKind::SchemaUpdateRebuildError(error.to_string()).into())),
}
}
// Otherwise, just report the schema update success, and don't leave the ui waiting eternally again...
else {
CentralCommand::send_back(&sender, Response::Success);
}
},
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// When we want to update our messages...
Command::UpdateMessages => {
// TODO: Properly reload all loaded tips.
match Tips::update_from_repo() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// When we want to update our program...
Command::UpdateMainProgram => {
match rpfm_lib::updater::update_main_program() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// When we want to trigger a backup autosave of the currently open PackFile...
Command::TriggerBackupAutosave => {

    // Reuse (overwrite) the oldest file in the autosave folder.
    // The PackFile is cloned before saving — presumably so `save` can't mutate the
    // live in-memory PackFile (e.g. its file path); confirm against `save`'s impl.
    // NOTE(review): `get_backup_autosave_path().unwrap()` panics if the autosave
    // path can't be determined — confirm that's impossible by this point.
    // Note: we no longer notify the UI of success or error to not hang it up.
    if let Ok(Some(file)) = get_oldest_file_in_folder(&get_backup_autosave_path().unwrap()) {
        let _ = pack_file_decoded.clone().save(Some(file));
    }
}
// In case we want to run a full diagnostics check over the open PackFile...
Command::DiagnosticsCheck => {

    // Diagnostics can take a while, so run them on a separate thread over cloned
    // data, keeping this background loop free to answer other commands. The
    // spawned thread sends the result back on its own when it's done.
    thread::spawn(clone!(
        mut dependencies,
        mut pack_file_decoded => move || {
        let mut diag = Diagnostics::default();

        // Only Mod and Movie PackFiles are checked; for any other type an empty
        // (default) diagnostics result is returned.
        if pack_file_decoded.get_pfh_file_type() == PFHFileType::Mod ||
            pack_file_decoded.get_pfh_file_type() == PFHFileType::Movie {
            diag.check(&pack_file_decoded, &dependencies);
        }
        CentralCommand::send_back(&sender, Response::Diagnostics(diag));
    }));
}
// In case we want to update an existing diagnostics report, re-checking only the provided paths...
Command::DiagnosticsUpdate((mut diagnostics, path_types)) => {
    diagnostics.update(&pack_file_decoded, &path_types, &dependencies);

    // Also gather the UI info of the affected PackedFiles so their views can be refreshed.
    let packed_files_info = diagnostics.get_update_paths_packed_file_info(&pack_file_decoded, &path_types);
    CentralCommand::send_back(&sender, Response::DiagnosticsVecPackedFileInfo(diagnostics, packed_files_info));
}
// In case we want to get the open PackFile's Settings...
Command::GetPackFileSettings(_is_autosave) => {

    // NOTE(review): both branches of the original `if is_autosave { … } else { … }`
    // sent back the exact same data, so the branch has been collapsed. The flag is
    // kept in the pattern (unused) to preserve the command's interface.
    CentralCommand::send_back(&sender, Response::PackFileSettings(pack_file_decoded.get_settings().clone()));
}
// In case we want to replace the open PackFile's Settings...
Command::SetPackFileSettings(settings) => {
    // Fire-and-forget: no response is sent back to the UI for this command.
    pack_file_decoded.set_settings(&settings);
}
// In case we want a list of DB tables the current schema can't decode...
Command::GetMissingDefinitions => {

    // Test to see if every DB Table can be decoded. This is slow and only useful when
    // a new patch lands and you want to know what tables you need to decode. So, unless you want
    // to decode new tables, leave the setting as false.
    if SETTINGS.read().unwrap().settings_bool["check_for_missing_table_definitions"] {
        let mut counter = 0;
        let mut table_list = String::new();
        if let Some(ref schema) = *SCHEMA.read().unwrap() {
            for packed_file in pack_file_decoded.get_ref_mut_packed_files_by_type(PackedFileType::DB, false) {
                // A table counts as "missing a definition" only if it fails to decode
                // AND its raw header parses with a non-zero entry count (empty tables
                // are not worth decoding).
                if packed_file.decode_return_ref_no_locks(schema).is_err() {
                    if let Ok(raw_data) = packed_file.get_raw_data() {
                        if let Ok((_, _, _, entry_count, _)) = DB::read_header(&raw_data) {
                            if entry_count > 0 {
                                counter += 1;
                                table_list.push_str(&format!("{}, {:?}\n", counter, packed_file.get_path()))
                            }
                        }
                    }
                }
            }
        }

        // Dump the list to `missing_table_definitions.txt` in the RPFM folder.
        // Try to save the file. And I mean "try". Someone seems to love crashing here...
        // NOTE(review): no response is sent back for this command, success or failure —
        // confirm the UI does not wait on it.
        let path = RPFM_PATH.to_path_buf().join(PathBuf::from("missing_table_definitions.txt"));
        if let Ok(file) = File::create(path) {
            let mut file = BufWriter::new(file);
            let _ = file.write_all(table_list.as_bytes());
        }
    }
}
// Ignore errors for now.
Command::RebuildDependencies(rebuild_only_current_mod_dependencies) => {
let _ = dependencies.rebuild(pack_file_decoded.get_packfiles_list(), rebuild_only_current_mod_dependencies);
let dependencies_info = DependenciesInfo::from(&dependencies);
CentralCommand::send_back(&sender, Response::DependenciesInfo(dependencies_info));
},
Command::CascadeEdition(editions) => {
let edited_paths = DB::cascade_edition(&editions, &mut pack_file_decoded);
let edited_paths_2 = edited_paths.iter().map(|x| &**x).collect::<Vec<&[String]>>();
let packed_files_info = pack_file_decoded.get_ref_packed_files_by_paths(edited_paths_2).iter().map(|x| PackedFileInfo::from(*x)).collect::<Vec<PackedFileInfo>>();
CentralCommand::send_back(&sender, Response::VecVecStringVecPackedFileInfo(edited_paths, packed_files_info));
}
Command::GoToDefinition(ref_table, ref_column, ref_data) => {
let table_folder = vec!["db".to_owned(), ref_table + "_tables"];
let packed_files = pack_file_decoded.get_ref_packed_files_by_path_start(&table_folder);
let mut found = false;
for packed_file in &packed_files {
if let Ok(DecodedPackedFile::DB(data)) = packed_file.get_decoded_from_memory() {
if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data(&ref_column, &ref_data) {
CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::PackFile, packed_file.get_path().to_vec(), column_index, row_index));
found = true;
break;
}
}
}
if !found {
if let Ok(packed_files) = dependencies.get_db_and_loc_tables_from_cache(true, false, false, true) {
for packed_file in &packed_files {
if packed_file.get_path().starts_with(&table_folder) {
if let Ok(DecodedPackedFile::DB(data)) = packed_file.get_decoded_from_memory() {
if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data(&ref_column, &ref_data) {
CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::ParentFiles, packed_file.get_path().to_vec(), column_index, row_index));
found = true;
break;
}
}
}
}
}
}
if !found {
if let Ok(packed_files) = dependencies.get_db_and_loc_tables_from_cache(true, false, true, false) {
for packed_file in &packed_files {
if packed_file.get_path().starts_with(&table_folder) {
if let Ok(DecodedPackedFile::DB(data)) = packed_file.get_decoded_from_memory() {
if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data(&ref_column, &ref_data) {
CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::GameFiles, packed_file.get_path().to_vec(), column_index, row_index));
found = true;
break;
}
}
}
}
}
}
if !found {
let tables = dependencies.get_ref_asskit_only_db_tables();
for table in tables {
if table.get_ref_table_name() == table_folder[1] {
if let Some((column_index, row_index)) = table.get_ref_table().get_source_location_of_reference_data(&ref_column, &ref_data) {
let path = vec![table_folder[0].to_owned(), table_folder[1].to_owned(), "ak_data".to_owned()];
CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::AssKitFiles, path, column_index, row_index));
found = true;
break;
}
}
}
}
if !found {
CentralCommand::send_back(&sender, Response::Error(ErrorKind::GenericHTMLError(tr("source_data_for_field_not_found")).into()));
}
},
// In case we want to jump to the row that defines a specific Loc key...
Command::GoToLoc(loc_key) => {

    // Search order: the open PackFile's Loc tables first, then the parent
    // (dependency) PackFiles, then the vanilla game files. The first match wins
    // and its data source + path + cell coordinates are sent back.
    let packed_files = pack_file_decoded.get_ref_packed_files_by_type(PackedFileType::Loc, false);
    let mut found = false;
    for packed_file in &packed_files {
        if let Ok(DecodedPackedFile::Loc(data)) = packed_file.get_decoded_from_memory() {
            if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data("key", &loc_key) {
                CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::PackFile, packed_file.get_path().to_vec(), column_index, row_index));
                found = true;
                break;
            }
        }
    }

    // Not in the open PackFile: try the Loc tables cached from the parent files.
    if !found {
        if let Ok(packed_files) = dependencies.get_db_and_loc_tables_from_cache(false, true, false, true) {
            for packed_file in &packed_files {
                if let Ok(DecodedPackedFile::Loc(data)) = packed_file.get_decoded_from_memory() {
                    if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data("key", &loc_key) {
                        CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::ParentFiles, packed_file.get_path().to_vec(), column_index, row_index));
                        found = true;
                        break;
                    }
                }
            }
        }
    }

    // Still nothing: try the Loc tables cached from the game files.
    if !found {
        if let Ok(packed_files) = dependencies.get_db_and_loc_tables_from_cache(false, true, true, false) {
            for packed_file in &packed_files {
                if let Ok(DecodedPackedFile::Loc(data)) = packed_file.get_decoded_from_memory() {
                    if let Some((column_index, row_index)) = data.get_ref_table().get_source_location_of_reference_data("key", &loc_key) {
                        CentralCommand::send_back(&sender, Response::DataSourceVecStringUsizeUsize(DataSource::GameFiles, packed_file.get_path().to_vec(), column_index, row_index));
                        found = true;
                        break;
                    }
                }
            }
        }
    }

    // No match anywhere: report it back to the UI as an error.
    if !found {
        CentralCommand::send_back(&sender, Response::Error(ErrorKind::GenericHTMLError(tr("loc_key_not_found")).into()));
    }
},
Command::GetSourceDataFromLocKey(loc_key) => CentralCommand::send_back(&sender, Response::OptionStringStringString(Loc::get_source_location_of_loc_key(&loc_key, &dependencies))),
Command::GetPackedFileType(path) => {
let packed_file = RawPackedFile::read_from_vec(path, String::new(), 0, false, vec![]);
CentralCommand::send_back(&sender, Response::PackedFileType(PackedFileType::get_packed_file_type(&packed_file, false)));
}
Command::GetPackFileName => CentralCommand::send_back(&sender, Response::String(pack_file_decoded.get_file_name())),
Command::GetPackedFileRawData(path) => {
match pack_file_decoded.get_ref_mut_packed_file_by_path(&path) {
Some(ref mut packed_file) => {
match packed_file.get_ref_raw().get_raw_data() {
Ok(data) => CentralCommand::send_back(&sender, Response::VecU8(data.clone())),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
None => CentralCommand::send_back(&sender, Response::Error(Error::from(ErrorKind::PackedFileNotFound))),
}
},
Command::ImportDependenciesToOpenPackFile(paths_by_data_source) => {
let mut added_paths = vec![];
let mut error_paths = vec![];
for (data_source, paths) in &paths_by_data_source {
let packed_files: Vec<PackedFile> = match data_source {
DataSource::GameFiles => {
match dependencies.get_packedfiles_from_game_files(paths) {
Ok((packed_files, mut errors)) => {
error_paths.append(&mut errors);
packed_files
}
Err(error) => {
CentralCommand::send_back(&sender, Response::Error(error));
CentralCommand::send_back(&sender, Response::Success);
continue 'background_loop;
},
}
}
DataSource::ParentFiles => {
match dependencies.get_packedfiles_from_parent_files(paths) {
Ok((packed_files, mut errors)) => {
error_paths.append(&mut errors);
packed_files
}
Err(error) => {
CentralCommand::send_back(&sender, Response::Error(error));
CentralCommand::send_back(&sender, Response::Success);
continue 'background_loop;
},
}
},
_ => {
CentralCommand::send_back(&sender, Response::Error(ErrorKind::Generic.into()));
CentralCommand::send_back(&sender, Response::Success);
continue 'background_loop;
},
};
let packed_files_ref = packed_files.iter().collect::<Vec<&PackedFile>>();
added_paths.append(&mut pack_file_decoded.add_packed_files(&packed_files_ref, true, true).unwrap());
}
if !error_paths.is_empty() {
CentralCommand::send_back(&sender, Response::VecPathType(added_paths.iter().map(|x| PathType::File(x.to_vec())).collect()));
CentralCommand::send_back(&sender, Response::VecVecString(error_paths));
} else {
CentralCommand::send_back(&sender, Response::VecPathType(added_paths.iter().map(|x| PathType::File(x.to_vec())).collect()));
CentralCommand::send_back(&sender, Response::Success);
}
},
Command::GetPackedFilesFromAllSources(paths) => {
let mut packed_files = HashMap::new();
// Get PackedFiles requested from the Parent Files.
let mut packed_files_parent = HashMap::new();
if let Ok((packed_files_decoded, _)) = dependencies.get_packedfiles_from_parent_files_unicased(&paths) {
for packed_file in packed_files_decoded {
packed_files_parent.insert(packed_file.get_path().to_vec(), packed_file);
}
packed_files.insert(DataSource::ParentFiles, packed_files_parent);
}
// Get PackedFiles requested from the Game Files.
let mut packed_files_game = HashMap::new();
if let Ok((packed_files_decoded, _)) = dependencies.get_packedfiles_from_game_files_unicased(&paths) {
for packed_file in packed_files_decoded {
packed_files_game.insert(packed_file.get_path().to_vec(), packed_file);
}
packed_files.insert(DataSource::GameFiles, packed_files_game);
}
// Get PackedFiles requested from the AssKit Files.
//let mut packed_files_asskit = HashMap::new();
//if let Ok((packed_files_decoded, _)) = dependencies.get_packedfile_from_asskit_files(&paths) {
// for packed_file in packed_files_decoded {
// packed_files_asskit.insert(packed_file.get_path().to_vec(), packed_file);
// }
// packed_files.insert(DataSource::AssKitFiles, packed_files_asskit);
//}
// Get PackedFiles requested from the currently open PackFile, if any.
let mut packed_files_packfile = HashMap::new();
for packed_file in pack_file_decoded.get_packed_files_by_path_type_unicased(&paths) {
packed_files_packfile.insert(packed_file.get_path().to_vec(), packed_file );
}
packed_files.insert(DataSource::PackFile, packed_files_packfile);
// Return the full list of PackedFiles requested, split by source.
CentralCommand::send_back(&sender, Response::HashMapDataSourceHashMapVecStringPackedFile(packed_files));
},
Command::GetPackedFilesNamesStartingWitPathFromAllSources(path) => {
let mut packed_files = HashMap::new();
let base_path = if let PathType::Folder(ref path) = path { path.to_vec() } else { unimplemented!() };
// Get PackedFiles requested from the Parent Files.
let mut packed_files_parent = HashSet::new();
if let Ok((packed_files_decoded, _)) = dependencies.get_packedfiles_from_parent_files_unicased(&[path.clone()]) {
for packed_file in packed_files_decoded {
let packed_file_path = packed_file.get_path()[base_path.len() - 1..].to_vec();
packed_files_parent.insert(packed_file_path);
}
packed_files.insert(DataSource::ParentFiles, packed_files_parent);
}
// Get PackedFiles requested from the Game Files.
let mut packed_files_game = HashSet::new();
if let Ok((packed_files_decoded, _)) = dependencies.get_packedfiles_from_game_files_unicased(&[path.clone()]) {
for packed_file in packed_files_decoded {
let packed_file_path = packed_file.get_path()[base_path.len() - 1..].to_vec();
packed_files_game.insert(packed_file_path);
}
packed_files.insert(DataSource::GameFiles, packed_files_game);
}
// Get PackedFiles requested from the currently open PackFile, if any.
let mut packed_files_packfile = HashSet::new();
for packed_file in pack_file_decoded.get_packed_files_by_path_type_unicased(&[path]) {
let packed_file_path = packed_file.get_path()[base_path.len() - 1..].to_vec();
packed_files_packfile.insert(packed_file_path);
}
packed_files.insert(DataSource::PackFile, packed_files_packfile);
// Return the full list of PackedFile names requested, split by source.
CentralCommand::send_back(&sender, Response::HashMapDataSourceHashSetVecString(packed_files));
},
Command::SavePackedFilesToPackFileAndClean(packed_files) => {
// We receive a list of edited PackedFiles. The UI is the one that takes care of editing them to have the data we want where we want.
// Also, the UI is responsible for naming them in case they're new. Here we grab them and directly add them into the PackFile.
let packed_files = packed_files.iter().collect::<Vec<&PackedFile>>();
let mut added_paths = vec![];
if let Ok(mut paths) = pack_file_decoded.add_packed_files(&packed_files, true, true) {
added_paths.append(&mut paths);
}
// Clean up duplicates from overwrites.
added_paths.sort();
added_paths.dedup();
// Then, optimize the PackFile. This should remove any non-edited rows/files.
match pack_file_decoded.optimize(&dependencies) {
Ok(paths_to_delete) => CentralCommand::send_back(&sender, Response::VecVecStringVecVecString((added_paths, paths_to_delete))),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
},
Command::GetTipsForPath(path) => {
let local_tips = tips.get_local_tips_for_path(&path);
let remote_tips = tips.get_remote_tips_for_path(&path);
CentralCommand::send_back(&sender, Response::VecTipVecTip(local_tips, remote_tips));
}
Command::AddTipToLocalTips(tip) => {
tips.add_tip_to_local_tips(tip);
match tips.save() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Command::DeleteTipById(id) => {
tips.delete_tip_by_id(id);
match tips.save() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Command::PublishTipById(id) => {
match tips.publish_tip_by_id(id) {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Command::UploadSchemaPatch(patch) => {
match patch.upload() {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
Command::ImportSchemaPatch(patch) => {
match SCHEMA_PATCHES.write().unwrap().import(patch) {
Ok(_) => CentralCommand::send_back(&sender, Response::Success),
Err(error) => CentralCommand::send_back(&sender, Response::Error(error)),
}
}
// These two belong to the network thread, not to this one!!!!
Command::CheckUpdates | Command::CheckSchemaUpdates | Command::CheckMessageUpdates => panic!("{}{:?}", THREADS_COMMUNICATION_ERROR, response),
}
}
}
| 61.907563 | 233 | 0.514498 |
9043a2c1660c9d85037c11ec975e3f15c8343ad8 | 51,574 | use std::fmt;
use std::path::PathBuf;
use super::ExitCode;
use crate::style::{text_width, tool_version};
use crate::tool;
use crate::tool::package::PackageManager;
use textwrap::{fill, indent};
// Call-to-action appended to internal errors, asking the user to re-run with
// debug logging enabled and file an issue. Continuation lines of the literal
// are intentionally flush-left so no extra indentation ends up in the message.
const REPORT_BUG_CTA: &str =
    "Please rerun the command that triggered this error with the environment
variable `VOLTA_LOGLEVEL` set to `debug` and open an issue at
https://github.com/volta-cli/volta/issues with the details!";

// Call-to-action appended to errors caused by filesystem permission problems.
const PERMISSIONS_CTA: &str = "Please ensure you have correct permissions to the Volta directory.";
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub enum ErrorKind {
/// Thrown when package tries to install a binary that is already installed.
BinaryAlreadyInstalled {
bin_name: String,
existing_package: String,
new_package: String,
},
/// Thrown when executing an external binary fails
BinaryExecError,
/// Thrown when a binary could not be found in the local inventory
BinaryNotFound {
name: String,
},
/// Thrown when building the virtual environment path fails
BuildPathError,
/// Thrown when unable to launch a command with VOLTA_BYPASS set
BypassError {
command: String,
},
/// Thrown when a user tries to `volta fetch` something other than node/yarn/npm.
CannotFetchPackage {
package: String,
},
/// Thrown when a user tries to `volta pin` something other than node/yarn/npm.
CannotPinPackage {
package: String,
},
/// Thrown when the Completions out-dir is not a directory
CompletionsOutFileError {
path: PathBuf,
},
/// Thrown when the containing directory could not be determined
ContainingDirError {
path: PathBuf,
},
CouldNotDetermineTool,
/// Thrown when unable to start the migration executable
CouldNotStartMigration,
CreateDirError {
dir: PathBuf,
},
/// Thrown when unable to create the layout file
CreateLayoutFileError {
file: PathBuf,
},
/// Thrown when unable to create a link to the shared global library directory
CreateSharedLinkError {
name: String,
},
/// Thrown when creating a temporary directory fails
CreateTempDirError {
in_dir: PathBuf,
},
/// Thrown when creating a temporary file fails
CreateTempFileError {
in_dir: PathBuf,
},
CurrentDirError,
/// Thrown when deleting a directory fails
DeleteDirectoryError {
directory: PathBuf,
},
/// Thrown when deleting a file fails
DeleteFileError {
file: PathBuf,
},
DeprecatedCommandError {
command: String,
advice: String,
},
DownloadToolNetworkError {
tool: tool::Spec,
from_url: String,
},
/// Thrown when unable to execute a hook command
ExecuteHookError {
command: String,
},
/// Thrown when `volta.extends` keys result in an infinite cycle
ExtensionCycleError {
paths: Vec<PathBuf>,
duplicate: PathBuf,
},
/// Thrown when determining the path to an extension manifest fails
ExtensionPathError {
path: PathBuf,
},
/// Thrown when a hook command returns a non-zero exit code
HookCommandFailed {
command: String,
},
/// Thrown when a hook contains multiple fields (prefix, template, or bin)
HookMultipleFieldsSpecified,
/// Thrown when a hook doesn't contain any of the known fields (prefix, template, or bin)
HookNoFieldsSpecified,
/// Thrown when determining the path to a hook fails
HookPathError {
command: String,
},
/// Thrown when determining the name of a newly-installed package fails
InstalledPackageNameError,
InvalidHookCommand {
command: String,
},
/// Thrown when output from a hook command could not be read
InvalidHookOutput {
command: String,
},
/// Thrown when a user does e.g. `volta install node 12` instead of
/// `volta install node@12`.
InvalidInvocation {
action: String,
name: String,
version: String,
},
/// Thrown when a user does e.g. `volta install 12` instead of
/// `volta install node@12`.
InvalidInvocationOfBareVersion {
action: String,
version: String,
},
/// Thrown when a format other than "npm" or "github" is given for yarn.index in the hooks
InvalidRegistryFormat {
format: String,
},
/// Thrown when a tool name is invalid per npm's rules.
InvalidToolName {
name: String,
errors: Vec<String>,
},
/// Thrown when unable to acquire a lock on the Volta directory
LockAcquireError,
/// Thrown when pinning or installing npm@bundled and couldn't detect the bundled version
NoBundledNpm {
command: String,
},
/// Thrown when Yarn is not set at the command-line
NoCommandLineYarn,
/// Thrown when a user tries to install a Yarn or npm version before installing a Node version.
NoDefaultNodeVersion {
tool: String,
},
/// Thrown when there is no Node version matching a requested semver specifier.
NodeVersionNotFound {
matching: String,
},
NoHomeEnvironmentVar,
/// Thrown when the install dir could not be determined
NoInstallDir,
NoLocalDataDir,
/// Thrown when a user tries to pin a Yarn or npm version before pinning a Node version.
NoPinnedNodeVersion {
tool: String,
},
/// Thrown when the platform (Node version) could not be determined
NoPlatform,
/// Thrown when parsing the project manifest and there is a `"volta"` key without Node
NoProjectNodeInManifest,
/// Thrown when Yarn is not set in a project
NoProjectYarn,
/// Thrown when no shell profiles could be found
NoShellProfile {
env_profile: String,
bin_dir: PathBuf,
},
/// Thrown when the user tries to pin Node or Yarn versions outside of a package.
NotInPackage,
/// Thrown when default Yarn is not set
NoDefaultYarn,
/// Thrown when `npm link` is called with a package that isn't available
NpmLinkMissingPackage {
package: String,
},
/// Thrown when `npm link` is called with a package that was not installed / linked with npm
NpmLinkWrongManager {
package: String,
},
/// Thrown when there is no npm version matching the requested Semver/Tag
NpmVersionNotFound {
matching: String,
},
NpxNotAvailable {
version: String,
},
/// Thrown when the command to install a global package is not successful
PackageInstallFailed {
package: String,
},
/// Thrown when parsing the package manifest fails
PackageManifestParseError {
package: String,
},
/// Thrown when reading the package manifest fails
PackageManifestReadError {
package: String,
},
/// Thrown when a specified package could not be found on the npm registry
PackageNotFound {
package: String,
},
/// Thrown when parsing a package manifest fails
PackageParseError {
file: PathBuf,
},
/// Thrown when reading a package manifest fails
PackageReadError {
file: PathBuf,
},
/// Thrown when a package has been unpacked but is not formed correctly.
PackageUnpackError,
/// Thrown when writing a package manifest fails
PackageWriteError {
file: PathBuf,
},
/// Thrown when unable to parse a bin config file
ParseBinConfigError,
/// Thrown when unable to parse a hooks.json file
ParseHooksError {
file: PathBuf,
},
/// Thrown when unable to parse the node index cache
ParseNodeIndexCacheError,
/// Thrown when unable to parse the node index
ParseNodeIndexError {
from_url: String,
},
/// Thrown when unable to parse the node index cache expiration
ParseNodeIndexExpiryError,
/// Thrown when unable to parse the npm manifest file from a node install
ParseNpmManifestError,
/// Thrown when unable to parse a package configuration
ParsePackageConfigError,
/// Thrown when unable to parse the platform.json file
ParsePlatformError,
/// Thrown when unable to parse a tool spec (`<tool>[@<version>]`)
ParseToolSpecError {
tool_spec: String,
},
/// Thrown when persisting an archive to the inventory fails
PersistInventoryError {
tool: String,
},
/// Thrown when executing a project-local binary fails
ProjectLocalBinaryExecError {
command: String,
},
/// Thrown when a project-local binary could not be found
ProjectLocalBinaryNotFound {
command: String,
},
/// Thrown when a publish hook contains both the url and bin fields
PublishHookBothUrlAndBin,
/// Thrown when a publish hook contains neither url nor bin fields
PublishHookNeitherUrlNorBin,
/// Thrown when there was an error reading the user bin directory
ReadBinConfigDirError {
dir: PathBuf,
},
/// Thrown when there was an error reading the config for a binary
ReadBinConfigError {
file: PathBuf,
},
/// Thrown when unable to read the default npm version file
ReadDefaultNpmError {
file: PathBuf,
},
/// Thrown when unable to read the contents of a directory
ReadDirError {
dir: PathBuf,
},
/// Thrown when there was an error opening a hooks.json file
ReadHooksError {
file: PathBuf,
},
/// Thrown when there was an error reading the Node Index Cache
ReadNodeIndexCacheError {
file: PathBuf,
},
/// Thrown when there was an error reading the Node Index Cache Expiration
ReadNodeIndexExpiryError {
file: PathBuf,
},
/// Thrown when there was an error reading the npm manifest file
ReadNpmManifestError,
/// Thrown when there was an error reading a package configuration file
ReadPackageConfigError {
file: PathBuf,
},
/// Thrown when there was an error opening the user platform file
ReadPlatformError {
file: PathBuf,
},
/// Thrown when unable to read the user Path environment variable from the registry
#[cfg(windows)]
ReadUserPathError,
/// Thrown when the public registry for Node or Yarn could not be downloaded.
RegistryFetchError {
tool: String,
from_url: String,
},
/// Thrown when the shim binary is called directly, not through a symlink
RunShimDirectly,
/// Thrown when there was an error setting a tool to executable
SetToolExecutable {
tool: String,
},
/// Thrown when there was an error copying an unpacked tool to the image directory
SetupToolImageError {
tool: String,
version: String,
dir: PathBuf,
},
/// Thrown when Volta is unable to create a shim
ShimCreateError {
name: String,
},
/// Thrown when Volta is unable to remove a shim
ShimRemoveError {
name: String,
},
/// Thrown when serializnig a bin config to JSON fails
StringifyBinConfigError,
/// Thrown when serializnig a package config to JSON fails
StringifyPackageConfigError,
/// Thrown when serializing the platform to JSON fails
StringifyPlatformError,
/// Thrown when a given feature has not yet been implemented
Unimplemented {
feature: String,
},
/// Thrown when unpacking an archive (tarball or zip) fails
UnpackArchiveError {
tool: String,
version: String,
},
/// Thrown when a package to upgrade was not found
UpgradePackageNotFound {
package: String,
manager: PackageManager,
},
/// Thrown when a package to upgrade was installed with a different package manager
UpgradePackageWrongManager {
package: String,
manager: PackageManager,
},
VersionParseError {
version: String,
},
/// Thrown when there was an error writing a bin config file
WriteBinConfigError {
file: PathBuf,
},
/// Thrown when there was an error writing the default npm to file
WriteDefaultNpmError {
file: PathBuf,
},
/// Thrown when there was an error writing the npm launcher
WriteLauncherError {
tool: String,
},
/// Thrown when there was an error writing the node index cache
WriteNodeIndexCacheError {
file: PathBuf,
},
/// Thrown when there was an error writing the node index expiration
WriteNodeIndexExpiryError {
file: PathBuf,
},
/// Thrown when there was an error writing a package config
WritePackageConfigError {
file: PathBuf,
},
/// Thrown when writing the platform.json file fails
WritePlatformError {
file: PathBuf,
},
/// Thrown when unable to write the user PATH environment variable
#[cfg(windows)]
WriteUserPathError,
/// Thrown when a user attempts to install a version of Yarn2
Yarn2NotSupported,
/// Thrown when there is an error fetching the latest version of Yarn
YarnLatestFetchError {
from_url: String,
},
/// Thrown when there is no Yarn version matching a requested semver specifier.
YarnVersionNotFound {
matching: String,
},
}
impl fmt::Display for ErrorKind {
    /// Render the user-facing message for each error variant.
    ///
    /// Every message follows the same shape: a short description of what went
    /// wrong, followed by a call to action telling the user how to proceed
    /// (often one of the shared `PERMISSIONS_CTA` / `REPORT_BUG_CTA` constants).
    /// Multi-line string literals intentionally start their continuation lines
    /// at column 0 so no stray indentation appears in terminal output.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ErrorKind::BinaryAlreadyInstalled {
                bin_name,
                existing_package,
                new_package,
            } => write!(
                f,
                "Executable '{}' is already installed by {}
Please remove {} before installing {}",
                bin_name, existing_package, existing_package, new_package
            ),
            ErrorKind::BinaryExecError => write!(
                f,
                "Could not execute command.
See `volta help install` and `volta help pin` for info about making tools available."
            ),
            ErrorKind::BinaryNotFound { name } => write!(
                f,
                r#"Could not find executable "{}"
Use `volta install` to add a package to your toolchain (see `volta help install` for more info)."#,
                name
            ),
            ErrorKind::BuildPathError => write!(
                f,
                "Could not create execution environment.
Please ensure your PATH is valid."
            ),
            ErrorKind::BypassError { command } => write!(
                f,
                "Could not execute command '{}'
VOLTA_BYPASS is enabled, please ensure that the command exists on your system or unset VOLTA_BYPASS",
                command,
            ),
            ErrorKind::CannotFetchPackage { package } => write!(
                f,
                "Fetching packages without installing them is not supported.
Use `volta install {}` to update the default version.",
                package
            ),
            ErrorKind::CannotPinPackage { package } => write!(
                f,
                "Only node and yarn can be pinned in a project
Use `npm install` or `yarn add` to select a version of {} for this project.",
                package
            ),
            ErrorKind::CompletionsOutFileError { path } => write!(
                f,
                "Completions file `{}` already exists.
Please remove the file or pass `-f` or `--force` to override.",
                path.display()
            ),
            ErrorKind::ContainingDirError { path } => write!(
                f,
                "Could not create the containing directory for {}
{}",
                path.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::CouldNotDetermineTool => write!(
                f,
                "Could not determine tool name
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::CouldNotStartMigration => write!(
                f,
                "Could not start migration process to upgrade your Volta directory.
Please ensure you have 'volta-migrate' on your PATH and run it directly."
            ),
            ErrorKind::CreateDirError { dir } => write!(
                f,
                "Could not create directory {}
Please ensure that you have the correct permissions.",
                dir.display()
            ),
            ErrorKind::CreateLayoutFileError { file } => write!(
                f,
                "Could not create layout file {}
{}",
                file.display(), PERMISSIONS_CTA
            ),
            ErrorKind::CreateSharedLinkError { name } => write!(
                f,
                "Could not create shared environment for package '{}'
{}",
                name, PERMISSIONS_CTA
            ),
            ErrorKind::CreateTempDirError { in_dir } => write!(
                f,
                "Could not create temporary directory
in {}
{}",
                in_dir.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::CreateTempFileError { in_dir } => write!(
                f,
                "Could not create temporary file
in {}
{}",
                in_dir.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::CurrentDirError => write!(
                f,
                "Could not determine current directory
Please ensure that you have the correct permissions."
            ),
            ErrorKind::DeleteDirectoryError { directory } => write!(
                f,
                "Could not remove directory
at {}
{}",
                directory.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::DeleteFileError { file } => write!(
                f,
                "Could not remove file
at {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::DeprecatedCommandError { command, advice } => {
                write!(f, "The subcommand `{}` is deprecated.\n{}", command, advice)
            }
            ErrorKind::DownloadToolNetworkError { tool, from_url } => write!(
                f,
                "Could not download {}
from {}
Please verify your internet connection and ensure the correct version is specified.",
                tool, from_url
            ),
            ErrorKind::ExecuteHookError { command } => write!(
                f,
                "Could not execute hook command: '{}'
Please ensure that the correct command is specified.",
                command
            ),
            ErrorKind::ExtensionCycleError { paths, duplicate } => {
                // Detected infinite loop in project workspace:
                //
                // --> /home/user/workspace/project/package.json
                //     /home/user/workspace/package.json
                // --> /home/user/workspace/project/package.json
                //
                // Please ensure that project workspaces do not depend on each other.
                f.write_str("Detected infinite loop in project workspace:\n\n")?;

                for path in paths {
                    if path == duplicate {
                        f.write_str("--> ")?;
                    } else {
                        f.write_str("    ")?;
                    }

                    writeln!(f, "{}", path.display())?;
                }

                writeln!(f, "--> {}", duplicate.display())?;
                writeln!(f)?;

                f.write_str("Please ensure that project workspaces do not depend on each other.")
            }
            ErrorKind::ExtensionPathError { path } => write!(
                f,
                "Could not determine path to project workspace: '{}'
Please ensure that the file exists and is accessible.",
                path.display(),
            ),
            ErrorKind::HookCommandFailed { command } => write!(
                f,
                "Hook command '{}' indicated a failure.
Please verify the requested tool and version.",
                command
            ),
            ErrorKind::HookMultipleFieldsSpecified => write!(
                f,
                "Hook configuration includes multiple hook types.
Please include only one of 'bin', 'prefix', or 'template'"
            ),
            ErrorKind::HookNoFieldsSpecified => write!(
                f,
                "Hook configuration includes no hook types.
Please include one of 'bin', 'prefix', or 'template'"
            ),
            ErrorKind::HookPathError { command } => write!(
                f,
                "Could not determine path to hook command: '{}'
Please ensure that the correct command is specified.",
                command
            ),
            ErrorKind::InstalledPackageNameError => write!(
                f,
                "Could not determine the name of the package that was just installed.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::InvalidHookCommand { command } => write!(
                f,
                "Invalid hook command: '{}'
Please ensure that the correct command is specified.",
                command
            ),
            ErrorKind::InvalidHookOutput { command } => write!(
                f,
                "Could not read output from hook command: '{}'
Please ensure that the command output is valid UTF-8 text.",
                command
            ),

            ErrorKind::InvalidInvocation {
                action,
                name,
                version,
            } => {
                let error = format!(
                    "`volta {action} {name} {version}` is not supported.",
                    action = action,
                    name = name,
                    version = version
                );

                let call_to_action = format!(
"To {action} '{name}' version '{version}', please run `volta {action} {formatted}`. \
To {action} the packages '{name}' and '{version}', please {action} them in separate commands, or with explicit versions.",
                    action=action,
                    name=name,
                    version=version,
                    formatted=tool_version(name, version)
                );

                let wrapped_cta = match text_width() {
                    Some(width) => fill(&call_to_action, width),
                    None => call_to_action,
                };

                write!(f, "{}\n\n{}", error, wrapped_cta)
            }

            ErrorKind::InvalidInvocationOfBareVersion {
                action,
                version,
            } => {
                let error = format!(
                    "`volta {action} {version}` is not supported.",
                    action = action,
                    version = version
                );

                let call_to_action = format!(
"To {action} node version '{version}', please run `volta {action} {formatted}`. \
To {action} the package '{version}', please use an explicit version such as '{version}@latest'.",
                    action=action,
                    version=version,
                    formatted=tool_version("node", version)
                );

                let wrapped_cta = match text_width() {
                    Some(width) => fill(&call_to_action, width),
                    None => call_to_action,
                };

                write!(f, "{}\n\n{}", error, wrapped_cta)
            }

            ErrorKind::InvalidRegistryFormat { format } => write!(
                f,
                "Unrecognized index registry format: '{}'
Please specify either 'npm' or 'github' for the format.",
                format
            ),

            ErrorKind::InvalidToolName { name, errors } => {
                let indentation = "    ";
                let wrapped = match text_width() {
                    Some(width) => fill(&errors.join("\n"), width - indentation.len()),
                    None => errors.join("\n"),
                };
                let formatted_errs = indent(&wrapped, indentation);

                let call_to_action = if errors.len() > 1 {
                    "Please fix the following errors:"
                } else {
                    "Please fix the following error:"
                };

                write!(
                    f,
                    "Invalid tool name `{}`\n\n{}\n{}",
                    name, call_to_action, formatted_errs
                )
            }
            // Note: No CTA as this error is purely informational and shouldn't be exposed to the user
            ErrorKind::LockAcquireError => write!(
                f,
                "Unable to acquire lock on Volta directory"
            ),
            ErrorKind::NoBundledNpm { command } => write!(
                f,
                "Could not detect bundled npm version.
Please ensure you have a Node version selected with `volta {} node` (see `volta help {0}` for more info).",
                command
            ),
            ErrorKind::NoCommandLineYarn => write!(
                f,
                "No Yarn version specified.
Use `volta run --yarn` to select a version (see `volta help run` for more info)."
            ),
            ErrorKind::NoDefaultNodeVersion { tool } => write!(
                f,
                "Cannot install {} because the default Node version is not set.
Use `volta install node` to select a default Node first, then install a {0} version.",
                tool
            ),
            ErrorKind::NodeVersionNotFound { matching } => write!(
                f,
                r#"Could not find Node version matching "{}" in the version registry.
Please verify that the version is correct."#,
                matching
            ),
            ErrorKind::NoHomeEnvironmentVar => write!(
                f,
                "Could not determine home directory.
Please ensure the environment variable 'HOME' is set."
            ),
            ErrorKind::NoInstallDir => write!(
                f,
                "Could not determine Volta install directory.
Please ensure Volta was installed correctly"
            ),
            ErrorKind::NoLocalDataDir => write!(
                f,
                "Could not determine LocalAppData directory.
Please ensure the directory is available."
            ),
            ErrorKind::NoPinnedNodeVersion { tool } => write!(
                f,
                "Cannot pin {} because the Node version is not pinned in this project.
Use `volta pin node` to pin Node first, then pin a {0} version.",
                tool
            ),
            ErrorKind::NoPlatform => write!(
                f,
                "Node is not available.
To run any Node command, first set a default version using `volta install node`"
            ),
            ErrorKind::NoProjectNodeInManifest => write!(
                f,
                "No Node version found in this project.
Use `volta pin node` to select a version (see `volta help pin` for more info)."
            ),
            ErrorKind::NoProjectYarn => write!(
                f,
                "No Yarn version found in this project.
Use `volta pin yarn` to select a version (see `volta help pin` for more info)."
            ),
            ErrorKind::NoShellProfile { env_profile, bin_dir } => write!(
                f,
                "Could not locate user profile.
Tried $PROFILE ({}), ~/.bashrc, ~/.bash_profile, ~/.zshrc, ~/.profile, and ~/.config/fish/config.fish
Please create one of these and try again; or you can edit your profile manually to add '{}' to your PATH",
                env_profile, bin_dir.display()
            ),
            ErrorKind::NotInPackage => write!(
                f,
                "Not in a node package.
Use `volta install` to select a default version of a tool."
            ),
            ErrorKind::NoDefaultYarn => write!(
                f,
                "Yarn is not available.
Use `volta install yarn` to select a default version (see `volta help install` for more info)."
            ),
            ErrorKind::NpmLinkMissingPackage { package } => write!(
                f,
                "Could not locate the package '{}'
Please ensure it is available by running `npm link` in its source directory.",
                package
            ),
            ErrorKind::NpmLinkWrongManager { package } => write!(
                f,
                "The package '{}' was not installed using npm and cannot be linked with `npm link`
Please ensure it is linked with `npm link` or installed with `npm i -g {0}`.",
                package
            ),
            // Note: message refers to npm (not Node) — this variant is for npm lookups
            ErrorKind::NpmVersionNotFound { matching } => write!(
                f,
                r#"Could not find npm version matching "{}" in the version registry.
Please verify that the version is correct."#,
                matching
            ),
            ErrorKind::NpxNotAvailable { version } => write!(
                f,
                "'npx' is only available with npm >= 5.2.0
This project is configured to use version {} of npm.",
                version
            ),
            ErrorKind::PackageInstallFailed { package } => write!(
                f,
                "Could not install package '{}'
Please confirm the package is valid and run with `--verbose` for more diagnostics.",
                package
            ),
            ErrorKind::PackageManifestParseError { package } => write!(
                f,
                "Could not parse package.json manifest for {}
Please ensure the package includes a valid manifest file.",
                package
            ),
            ErrorKind::PackageManifestReadError { package } => write!(
                f,
                "Could not read package.json manifest for {}
Please ensure the package includes a valid manifest file.",
                package
            ),
            ErrorKind::PackageNotFound { package } => write!(
                f,
                "Could not find '{}' in the package registry.
Please verify the requested package is correct.",
                package
            ),
            ErrorKind::PackageParseError { file } => write!(
                f,
                "Could not parse project manifest
at {}
Please ensure that the file is correctly formatted.",
                file.display()
            ),
            ErrorKind::PackageReadError { file } => write!(
                f,
                "Could not read project manifest
from {}
Please ensure that the file exists.",
                file.display()
            ),
            ErrorKind::PackageUnpackError => write!(
                f,
                "Could not determine package directory layout.
Please ensure the package is correctly formatted."
            ),
            ErrorKind::PackageWriteError { file } => write!(
                f,
                "Could not write project manifest
to {}
Please ensure you have correct permissions.",
                file.display()
            ),
            ErrorKind::ParseBinConfigError => write!(
                f,
                "Could not parse executable configuration file.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::ParseHooksError { file } => write!(
                f,
                "Could not parse hooks configuration file.
from {}
Please ensure the file is correctly formatted.",
                file.display()
            ),
            ErrorKind::ParseNodeIndexCacheError => write!(
                f,
                "Could not parse Node index cache file.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::ParseNodeIndexError { from_url } => write!(
                f,
                "Could not parse Node version index
from {}
Please verify your internet connection.",
                from_url
            ),
            ErrorKind::ParseNodeIndexExpiryError => write!(
                f,
                "Could not parse Node index cache expiration file.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::ParseNpmManifestError => write!(
                f,
                "Could not parse package.json file for bundled npm.
Please ensure the version of Node is correct."
            ),
            ErrorKind::ParsePackageConfigError => write!(
                f,
                "Could not parse package configuration file.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::ParsePlatformError => write!(
                f,
                "Could not parse platform settings file.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::ParseToolSpecError { tool_spec } => write!(
                f,
                "Could not parse tool spec `{}`
Please supply a spec in the format `<tool name>[@<version>]`.",
                tool_spec
            ),
            ErrorKind::PersistInventoryError { tool } => write!(
                f,
                "Could not store {} archive in inventory cache
{}",
                tool, PERMISSIONS_CTA
            ),
            ErrorKind::ProjectLocalBinaryExecError { command } => write!(
                f,
                "Could not execute `{}`
Please ensure you have correct permissions to access the file.",
                command
            ),
            ErrorKind::ProjectLocalBinaryNotFound { command } => write!(
                f,
                "Could not locate executable `{}` in your project.
Please ensure that all project dependencies are installed with `npm install` or `yarn install`",
                command
            ),
            ErrorKind::PublishHookBothUrlAndBin => write!(
                f,
                "Publish hook configuration includes both hook types.
Please include only one of 'bin' or 'url'"
            ),
            ErrorKind::PublishHookNeitherUrlNorBin => write!(
                f,
                "Publish hook configuration includes no hook types.
Please include one of 'bin' or 'url'"
            ),
            ErrorKind::ReadBinConfigDirError { dir } => write!(
                f,
                "Could not read executable metadata directory
at {}
{}",
                dir.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadBinConfigError { file } => write!(
                f,
                "Could not read executable configuration
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadDefaultNpmError { file } => write!(
                f,
                "Could not read default npm version
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadDirError { dir } => write!(
                f,
                "Could not read contents from directory {}
{}",
                dir.display(), PERMISSIONS_CTA
            ),
            ErrorKind::ReadHooksError { file } => write!(
                f,
                "Could not read hooks file
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadNodeIndexCacheError { file } => write!(
                f,
                "Could not read Node index cache
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadNodeIndexExpiryError { file } => write!(
                f,
                "Could not read Node index cache expiration
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadNpmManifestError => write!(
                f,
                "Could not read package.json file for bundled npm.
Please ensure the version of Node is correct."
            ),
            ErrorKind::ReadPackageConfigError { file } => write!(
                f,
                "Could not read package configuration file
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ReadPlatformError { file } => write!(
                f,
                "Could not read default platform file
from {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            #[cfg(windows)]
            ErrorKind::ReadUserPathError => write!(
                f,
                "Could not read user Path environment variable.
Please ensure you have access to your environment variables."
            ),
            ErrorKind::RegistryFetchError { tool, from_url } => write!(
                f,
                "Could not download {} version registry
from {}
Please verify your internet connection.",
                tool, from_url
            ),
            ErrorKind::RunShimDirectly => write!(
                f,
                "'volta-shim' should not be called directly.
Please use the existing shims provided by Volta (node, yarn, etc.) to run tools."
            ),
            ErrorKind::SetToolExecutable { tool } => write!(
                f,
                r#"Could not set "{}" to executable
{}"#,
                tool, PERMISSIONS_CTA
            ),
            ErrorKind::SetupToolImageError { tool, version, dir } => write!(
                f,
                "Could not create environment for {} v{}
at {}
{}",
                tool,
                version,
                dir.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::ShimCreateError { name } => write!(
                f,
                r#"Could not create shim for "{}"
{}"#,
                name, PERMISSIONS_CTA
            ),
            ErrorKind::ShimRemoveError { name } => write!(
                f,
                r#"Could not remove shim for "{}"
{}"#,
                name, PERMISSIONS_CTA
            ),
            ErrorKind::StringifyBinConfigError => write!(
                f,
                "Could not serialize executable configuration.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::StringifyPackageConfigError => write!(
                f,
                "Could not serialize package configuration.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::StringifyPlatformError => write!(
                f,
                "Could not serialize platform settings.
{}",
                REPORT_BUG_CTA
            ),
            ErrorKind::Unimplemented { feature } => {
                write!(f, "{} is not supported yet.", feature)
            }
            ErrorKind::UnpackArchiveError { tool, version } => write!(
                f,
                "Could not unpack {} v{}
Please ensure the correct version is specified.",
                tool, version
            ),
            ErrorKind::UpgradePackageNotFound { package, manager } => write!(
                f,
                r#"Could not locate the package '{}' to upgrade.
Please ensure it is installed with `{} {0}`"#,
                package,
                match manager {
                    PackageManager::Npm => "npm i -g",
                    PackageManager::Yarn => "yarn global add",
                }
            ),
            ErrorKind::UpgradePackageWrongManager { package, manager } => {
                let (name, command) = match manager {
                    PackageManager::Npm => ("npm", "npm update -g"),
                    PackageManager::Yarn => ("Yarn", "yarn global upgrade"),
                };
                write!(
                    f,
                    r#"The package '{}' was installed using {}.
To upgrade it, please use the command `{} {0}`"#,
                    package, name, command
                )
            }
            ErrorKind::VersionParseError { version } => write!(
                f,
                r#"Could not parse version "{}"
Please verify the intended version."#,
                version
            ),
            ErrorKind::WriteBinConfigError { file } => write!(
                f,
                "Could not write executable configuration
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::WriteDefaultNpmError { file } => write!(
                f,
                "Could not write bundled npm version
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::WriteLauncherError { tool } => write!(
                f,
                "Could not set up launcher for {}
This is most likely an intermittent failure, please try again.",
                tool
            ),
            ErrorKind::WriteNodeIndexCacheError { file } => write!(
                f,
                "Could not write Node index cache
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::WriteNodeIndexExpiryError { file } => write!(
                f,
                "Could not write Node index cache expiration
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::WritePackageConfigError { file } => write!(
                f,
                "Could not write package configuration
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            ErrorKind::WritePlatformError { file } => write!(
                f,
                "Could not save platform settings
to {}
{}",
                file.display(),
                PERMISSIONS_CTA
            ),
            #[cfg(windows)]
            ErrorKind::WriteUserPathError => write!(
                f,
                "Could not write Path environment variable.
Please ensure you have permissions to edit your environment variables."
            ),
            ErrorKind::Yarn2NotSupported => write!(
                f,
                "Yarn version 2 is not recommended for use, and not supported by Volta.
Please use version 3 or greater instead."
            ),
            ErrorKind::YarnLatestFetchError { from_url } => write!(
                f,
                "Could not fetch latest version of Yarn
from {}
Please verify your internet connection.",
                from_url
            ),
            ErrorKind::YarnVersionNotFound { matching } => write!(
                f,
                r#"Could not find Yarn version matching "{}" in the version registry.
Please verify that the version is correct."#,
                matching
            ),
        }
    }
}
impl ErrorKind {
    /// Translate this error into the exit code the process should report.
    ///
    /// Arms are grouped by failure category (or-patterns) rather than listed
    /// one variant per line; the variant-to-code mapping is unchanged.
    pub fn exit_code(&self) -> ExitCode {
        match self {
            // Malformed or unsupported command-line usage
            ErrorKind::CannotFetchPackage { .. }
            | ErrorKind::CannotPinPackage { .. }
            | ErrorKind::CompletionsOutFileError { .. }
            | ErrorKind::DeprecatedCommandError { .. }
            | ErrorKind::InvalidInvocation { .. }
            | ErrorKind::InvalidInvocationOfBareVersion { .. }
            | ErrorKind::InvalidToolName { .. }
            | ErrorKind::PackageNotFound { .. }
            | ErrorKind::ParseToolSpecError { .. }
            | ErrorKind::RunShimDirectly => ExitCode::InvalidArguments,

            // A required executable could not be located
            ErrorKind::BinaryNotFound { .. }
            | ErrorKind::InvalidHookCommand { .. }
            | ErrorKind::NpxNotAvailable { .. } => ExitCode::ExecutableNotFound,

            // Launching or running another process failed
            ErrorKind::BinaryExecError
            | ErrorKind::BypassError { .. }
            | ErrorKind::ExecuteHookError { .. }
            | ErrorKind::InvalidHookOutput { .. }
            | ErrorKind::ProjectLocalBinaryExecError { .. } => ExitCode::ExecutionFailure,

            // Problems with the host environment (HOME, PATH, profiles, ...)
            ErrorKind::BuildPathError
            | ErrorKind::CouldNotStartMigration
            | ErrorKind::CurrentDirError
            | ErrorKind::NoHomeEnvironmentVar
            | ErrorKind::NoInstallDir
            | ErrorKind::NoLocalDataDir
            | ErrorKind::NoShellProfile { .. } => ExitCode::EnvironmentError,
            #[cfg(windows)]
            ErrorKind::ReadUserPathError | ErrorKind::WriteUserPathError => {
                ExitCode::EnvironmentError
            }

            // Network fetches and downloads
            ErrorKind::DownloadToolNetworkError { .. }
            | ErrorKind::ParseNodeIndexError { .. }
            | ErrorKind::RegistryFetchError { .. }
            | ErrorKind::YarnLatestFetchError { .. } => ExitCode::NetworkError,

            // No tool version satisfied the requested specifier
            ErrorKind::NodeVersionNotFound { .. }
            | ErrorKind::NpmVersionNotFound { .. }
            | ErrorKind::VersionParseError { .. }
            | ErrorKind::Yarn2NotSupported
            | ErrorKind::YarnVersionNotFound { .. } => ExitCode::NoVersionMatch,

            // Invalid or missing Volta / project configuration
            ErrorKind::ExtensionCycleError { .. }
            | ErrorKind::HookCommandFailed { .. }
            | ErrorKind::HookMultipleFieldsSpecified
            | ErrorKind::HookNoFieldsSpecified
            | ErrorKind::HookPathError { .. }
            | ErrorKind::InvalidRegistryFormat { .. }
            | ErrorKind::NoBundledNpm { .. }
            | ErrorKind::NoCommandLineYarn
            | ErrorKind::NoDefaultNodeVersion { .. }
            | ErrorKind::NoPinnedNodeVersion { .. }
            | ErrorKind::NoPlatform
            | ErrorKind::NoProjectNodeInManifest
            | ErrorKind::NoProjectYarn
            | ErrorKind::NotInPackage
            | ErrorKind::NoDefaultYarn
            | ErrorKind::NpmLinkMissingPackage { .. }
            | ErrorKind::NpmLinkWrongManager { .. }
            | ErrorKind::PackageManifestParseError { .. }
            | ErrorKind::PackageParseError { .. }
            | ErrorKind::PackageUnpackError
            | ErrorKind::ParseHooksError { .. }
            | ErrorKind::ParsePlatformError
            | ErrorKind::PublishHookBothUrlAndBin
            | ErrorKind::PublishHookNeitherUrlNorBin
            | ErrorKind::UpgradePackageNotFound { .. }
            | ErrorKind::UpgradePackageWrongManager { .. } => ExitCode::ConfigurationError,

            // Reading or writing files and directories failed
            ErrorKind::BinaryAlreadyInstalled { .. }
            | ErrorKind::ContainingDirError { .. }
            | ErrorKind::CreateDirError { .. }
            | ErrorKind::CreateLayoutFileError { .. }
            | ErrorKind::CreateSharedLinkError { .. }
            | ErrorKind::CreateTempDirError { .. }
            | ErrorKind::CreateTempFileError { .. }
            | ErrorKind::DeleteDirectoryError { .. }
            | ErrorKind::DeleteFileError { .. }
            | ErrorKind::ExtensionPathError { .. }
            | ErrorKind::LockAcquireError
            | ErrorKind::PackageManifestReadError { .. }
            | ErrorKind::PackageReadError { .. }
            | ErrorKind::PackageWriteError { .. }
            | ErrorKind::PersistInventoryError { .. }
            | ErrorKind::ProjectLocalBinaryNotFound { .. }
            | ErrorKind::ReadBinConfigDirError { .. }
            | ErrorKind::ReadBinConfigError { .. }
            | ErrorKind::ReadDefaultNpmError { .. }
            | ErrorKind::ReadDirError { .. }
            | ErrorKind::ReadHooksError { .. }
            | ErrorKind::ReadNodeIndexCacheError { .. }
            | ErrorKind::ReadNodeIndexExpiryError { .. }
            | ErrorKind::ReadPackageConfigError { .. }
            | ErrorKind::ReadPlatformError { .. }
            | ErrorKind::SetupToolImageError { .. }
            | ErrorKind::SetToolExecutable { .. }
            | ErrorKind::ShimCreateError { .. }
            | ErrorKind::ShimRemoveError { .. }
            | ErrorKind::WriteBinConfigError { .. }
            | ErrorKind::WriteDefaultNpmError { .. }
            | ErrorKind::WriteLauncherError { .. }
            | ErrorKind::WriteNodeIndexCacheError { .. }
            | ErrorKind::WriteNodeIndexExpiryError { .. }
            | ErrorKind::WritePackageConfigError { .. }
            | ErrorKind::WritePlatformError { .. } => ExitCode::FileSystemError,

            // Unexpected internal failures (likely bugs in Volta itself)
            ErrorKind::CouldNotDetermineTool
            | ErrorKind::InstalledPackageNameError
            | ErrorKind::PackageInstallFailed { .. }
            | ErrorKind::ParseBinConfigError
            | ErrorKind::ParseNodeIndexCacheError
            | ErrorKind::ParseNodeIndexExpiryError
            | ErrorKind::ParseNpmManifestError
            | ErrorKind::ParsePackageConfigError
            | ErrorKind::ReadNpmManifestError
            | ErrorKind::StringifyBinConfigError
            | ErrorKind::StringifyPackageConfigError
            | ErrorKind::StringifyPlatformError
            | ErrorKind::Unimplemented { .. }
            | ErrorKind::UnpackArchiveError { .. } => ExitCode::UnknownError,
        }
    }
}
| 33.53316 | 122 | 0.559138 |
50b9ae592d8fd758b5e467da4f26101d1aece3d9 | 14,364 | // Copyright 2019 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use byte_unit::Byte;
use error_chain::bail;
use flate2::read::GzDecoder;
use nix::unistd::isatty;
use std::fs::{remove_file, File, OpenOptions};
use std::io::{self, copy, stderr, BufRead, BufReader, BufWriter, Read, Seek, SeekFrom, Write};
use std::num::{NonZeroU32, NonZeroU64};
use std::os::unix::io::AsRawFd;
use std::path::{Path, PathBuf};
use std::result;
use std::time::{Duration, Instant};
use xz2::read::XzDecoder;
use crate::blockdev::detect_formatted_sector_size_start;
use crate::cmdline::*;
use crate::errors::*;
use crate::io::*;
use crate::source::*;
use crate::verify::*;
/// Download all artifacts for an image and verify their signatures.
pub fn download(config: &DownloadConfig) -> Result<()> {
    // Enumerate the artifacts available at the requested location.
    let mut sources = config.location.sources()?;
    if sources.is_empty() {
        bail!("no artifacts found");
    }

    for source in sources.iter_mut() {
        // Refuse unsigned artifacts unless the user opted out of
        // verification with --insecure.
        if source.signature.is_none() {
            if config.insecure {
                eprintln!("Signature not found; skipping verification as requested");
            } else {
                bail!("--insecure not specified and signature not found");
            }
        }

        // Compute the output filename.  When decompressing, drop any
        // compression suffix from the artifact name.  Hacky.
        let filename = if config.decompress {
            source
                .filename
                .trim_end_matches(".gz")
                .trim_end_matches(".xz")
                .to_string()
        } else {
            source.filename.to_string()
        };
        let mut path = PathBuf::from(&config.directory);
        path.push(&filename);
        let sig_path = path.with_file_name(format!("{}.sig", &filename));

        // If the existing image and signature check out, skip the
        // redownload.  If we decompressed last time, the call will fail
        // because we can't check the old signature.  If we didn't
        // decompress last time but are decompressing this time, we're not
        // smart enough to decompress the existing file.
        if !config.decompress && check_image_and_sig(source, &path, &sig_path).is_ok() {
            // report the output file path and keep going
            println!("{}", path.display());
            continue;
        }

        // Fetch image and signature; on failure, clean up whatever
        // partial output files may exist before propagating the error.
        if let Err(err) = write_image_and_sig(source, &path, &sig_path, config.decompress) {
            let _ = remove_file(&path);
            let _ = remove_file(&sig_path);
            return Err(err);
        }

        // report the output file path
        println!("{}", path.display());
    }
    Ok(())
}
// Check an existing image and signature for validity. The image cannot
// have been decompressed after downloading. Return an error if invalid for
// any reason.
fn check_image_and_sig(source: &ImageSource, path: &Path, sig_path: &Path) -> Result<()> {
// ensure we have something to check
if source.signature.is_none() {
return Err("no signature available; can't check existing file".into());
}
let signature = source.signature.as_ref().unwrap();
// compare signature to expectation
let mut sig_file = OpenOptions::new()
.read(true)
.open(sig_path)
.chain_err(|| format!("opening {}", sig_path.display()))?;
let mut buf = Vec::new();
sig_file
.read_to_end(&mut buf)
.chain_err(|| format!("reading {}", sig_path.display()))?;
if &buf != signature {
return Err("signature file doesn't match source".into());
}
// open image file
let mut file = OpenOptions::new()
.read(true)
.open(path)
.chain_err(|| format!("opening {}", path.display()))?;
// perform GPG verification
GpgReader::new(&mut file, signature)?.consume()?;
Ok(())
}
/// Copy the image to disk, and also the signature if appropriate.
fn write_image_and_sig(
    source: &mut ImageSource,
    path: &Path,
    sig_path: &Path,
    decompress: bool,
) -> Result<()> {
    // Create (or truncate) the image output file.
    let mut dest = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open(path)
        .chain_err(|| format!("opening {}", path.display()))?;

    // Download and verify the image.  No byte limit and no sector-size
    // check are requested for plain downloads.
    write_image(
        source,
        &mut dest,
        path,
        image_copy_default,
        decompress,
        None,
        None,
    )?;

    // A detached signature only applies to the unmodified artifact, so
    // write it out only when we did not decompress.
    if !decompress {
        if let Some(signature) = source.signature.as_ref() {
            let mut sig_dest = OpenOptions::new()
                .write(true)
                .create(true)
                .truncate(true)
                .open(sig_path)
                .chain_err(|| format!("opening {}", sig_path.display()))?;
            sig_dest
                .write_all(signature)
                .chain_err(|| "writing signature data")?;
        }
    }

    Ok(())
}
/// Copy the image to disk and verify its signature.
///
/// `image_copy` is invoked with the already-buffered first MiB of the
/// (possibly decompressed) image, a reader for the remainder of the data,
/// the destination file, and its path; it performs the actual writing.
/// `decompress` enables transparent gzip/xz decompression based on magic
/// bytes.  `byte_limit` optionally caps the number of bytes read, paired
/// with a human-readable explanation used in the error.
/// `expected_sector_size` optionally requests that the sector size
/// detected from the image's first MiB (when detectable) match the
/// destination's.
pub fn write_image<F>(
    source: &mut ImageSource,
    dest: &mut File,
    dest_path: &Path,
    image_copy: F,
    decompress: bool,
    byte_limit: Option<(u64, String)>, // limit and explanation
    expected_sector_size: Option<NonZeroU32>,
) -> Result<()>
where
    F: FnOnce(&[u8], &mut dyn Read, &mut File, &Path) -> Result<()>,
{
    // wrap source for GPG verification
    let mut verify_reader: Box<dyn Read> = {
        if let Some(signature) = source.signature.as_ref() {
            Box::new(GpgReader::new(&mut source.reader, signature)?)
        } else {
            Box::new(&mut source.reader)
        }
    };
    // wrap again for progress reporting
    let mut progress_reader = ProgressReader::new(
        &mut verify_reader,
        source.length_hint,
        &source.artifact_type,
    )?;
    // Wrap in a BufReader so we can peek at the first few bytes for
    // format sniffing, and to amortize read overhead. Don't trust the
    // content-type since the server may not be configured correctly, or
    // the file might be local. Then wrap in a reader for decompression.
    let mut buf_reader = BufReader::with_capacity(BUFFER_SIZE, &mut progress_reader);
    let decompress_reader: Box<dyn Read> = {
        // fill_buf() peeks at buffered bytes without consuming them
        let sniff = buf_reader.fill_buf().chain_err(|| "sniffing input")?;
        if !decompress {
            Box::new(buf_reader)
        } else if sniff.len() > 2 && &sniff[0..2] == b"\x1f\x8b" {
            // gzip magic number
            Box::new(GzDecoder::new(buf_reader))
        } else if sniff.len() > 6 && &sniff[0..6] == b"\xfd7zXZ\x00" {
            // xz magic number
            Box::new(XzDecoder::new(buf_reader))
        } else {
            Box::new(buf_reader)
        }
    };
    // Wrap again for limit checking.
    let mut limit_reader: Box<dyn Read> = match byte_limit {
        None => Box::new(decompress_reader),
        Some((limit, reason)) => Box::new(LimitReader::new(decompress_reader, limit, reason)),
    };
    // Read the first MiB of input and, if requested, check it against the
    // image's formatted sector size.
    let mut first_mb = [0u8; 1024 * 1024];
    limit_reader
        .read_exact(&mut first_mb)
        .chain_err(|| "decoding first MiB of image")?;
    // Were we asked to check sector size?
    if let Some(expected) = expected_sector_size {
        // Can we derive one from source data?
        if let Some(actual) = detect_formatted_sector_size_start(&first_mb) {
            // Do they match?
            if expected != actual {
                bail!(
                    "source has sector size {} but destination has sector size {}",
                    actual.get(),
                    expected.get()
                );
            }
        }
    }
    // call the callback to copy the image
    image_copy(&first_mb, &mut limit_reader, dest, dest_path)?;
    // flush
    dest.flush().chain_err(|| "flushing data to disk")?;
    dest.sync_all().chain_err(|| "syncing data to disk")?;
    Ok(())
}
/// Default `image_copy` callback for `write_image()`.
///
/// Writes zeroes in place of the first MiB until the entire stream (and
/// therefore the GPG signature, checked by the wrapping reader) has been
/// consumed, then seeks back and backfills the real first MiB.  This
/// ordering ensures a partially-written or unverified disk image is never
/// bootable/usable.
pub fn image_copy_default(
    first_mb: &[u8],
    source: &mut dyn Read,
    dest_file: &mut File,
    _dest_path: &Path,
) -> Result<()> {
    // Amortize write overhead. The decompressor will produce bytes in
    // whatever chunk size it chooses.
    let mut dest = BufWriter::with_capacity(BUFFER_SIZE, dest_file);
    // Cache the first MiB and write zeroes to dest instead. This ensures
    // that the disk image can't be used accidentally before its GPG signature
    // is verified.
    dest.write_all(&[0u8; 1024 * 1024])
        .chain_err(|| "clearing first MiB of disk")?;
    // do the rest of the copy
    // This physically writes any runs of zeroes, rather than sparsifying,
    // but sparsifying is unsafe. We can't trust that all runs of zeroes in
    // the image represent unallocated blocks, so we must ensure that zero
    // blocks are actually stored as zeroes to avoid image corruption.
    // Discard is insufficient for this: even if our discard request
    // succeeds, discard is not guaranteed to zero blocks (see kernel
    // commits 98262f2762f0 and 48920ff2a5a9). Ideally we could use
    // BLKZEROOUT to perform hardware-accelerated zeroing and then
    // sparse-copy the image, falling back to non-sparse copy if hardware
    // acceleration is unavailable. But BLKZEROOUT doesn't support
    // BLKDEV_ZERO_NOFALLBACK, so we'd risk gigabytes of redundant I/O.
    copy(source, &mut dest).chain_err(|| "decoding and writing image")?;
    // verify_reader has now checked the signature, so fill in the first MiB
    dest.seek(SeekFrom::Start(0))
        .chain_err(|| "seeking to start of disk")?;
    dest.write_all(first_mb)
        .chain_err(|| "writing to first MiB of disk")?;
    // Flush buffer.
    dest.flush().chain_err(|| "flushing data to disk")?;
    Ok(())
}
/// Fetch `url` into an anonymous temporary file and return it, rewound to
/// the beginning so the caller can read from the start.
pub fn download_to_tempfile(url: &str) -> Result<File> {
    let mut file = tempfile::tempfile()?;

    let client = new_http_client()?;
    let mut resp = client
        .get(url)
        .send()
        .chain_err(|| format!("sending request for '{}'", url))?
        .error_for_status()
        .chain_err(|| format!("fetching '{}'", url))?;

    {
        // Buffer both sides of the copy; the scope ends the mutable
        // borrow of `file` when the writer is dropped.
        let mut sink = BufWriter::with_capacity(BUFFER_SIZE, &mut file);
        let mut stream = BufReader::with_capacity(BUFFER_SIZE, &mut resp);
        copy(&mut stream, &mut sink).chain_err(|| format!("couldn't copy '{}'", url))?;
        sink.flush()
            .chain_err(|| format!("couldn't write '{}' to disk", url))?;
    }

    file.seek(SeekFrom::Start(0))
        .chain_err(|| format!("rewinding file for '{}'", url))?;
    Ok(file)
}
/// Reader adapter that reports read progress on stderr.
struct ProgressReader<'a, R: Read> {
    /// underlying data source
    source: R,
    /// expected total size, paired with its pre-formatted human-readable form
    length: Option<(NonZeroU64, String)>,
    /// human-readable label for the artifact being read
    artifact_type: &'a str,
    /// bytes read so far
    position: u64,
    /// time of the last progress report, used for throttling
    last_report: Instant,
    /// whether stderr is a terminal (controls in-place status updates)
    tty: bool,
    /// text written before each status report
    prologue: &'static str,
    /// text written after each status report
    epilogue: &'static str,
}
impl<'a, R: Read> ProgressReader<'a, R> {
    /// Wrap `source` in a reader that reports progress on stderr.
    ///
    /// `length` is the expected total size in bytes, if known, used for
    /// percentage reporting; `artifact_type` is the human-readable label
    /// shown in status lines.
    fn new(source: R, length: Option<u64>, artifact_type: &'a str) -> Result<Self> {
        // If the TTY check itself fails, log it and fall back to
        // non-interactive (line-per-report) output.
        let tty = isatty(stderr().as_raw_fd()).unwrap_or_else(|e| {
            eprintln!("checking if stderr is a TTY: {}", e);
            false
        });
        // disable percentage reporting for zero-length files to avoid
        // division by zero
        // (and_then replaces the equivalent but less idiomatic
        // map(..).flatten())
        let length = length.and_then(NonZeroU64::new);
        Ok(ProgressReader {
            source,
            // cache the formatted total so it isn't re-rendered per report
            length: length.map(|l| (l, Self::format_bytes(l.get()))),
            artifact_type,
            position: 0,
            last_report: Instant::now(),
            tty,
            // If stderr is a tty, draw a status line that updates itself in
            // place.  The prologue leaves a place for the cursor to rest
            // between updates.  The epilogue writes three spaces to cover
            // the switch from e.g. 1000 KiB to 1 MiB, and then uses CR to
            // return to the start of the line.
            //
            // Otherwise, stderr is being read by another process, e.g.
            // journald, and fanciness may confuse it.  Just log regular
            // lines.
            prologue: if tty { "> " } else { "" },
            epilogue: if tty { "   \r" } else { "\n" },
        })
    }

    /// Format a size in bytes as a human-readable string.
    fn format_bytes(count: u64) -> String {
        Byte::from_bytes(count.into())
            .get_appropriate_unit(true)
            .format(1)
    }
}
impl<'a, R: Read> Read for ProgressReader<'a, R> {
fn read(&mut self, buf: &mut [u8]) -> result::Result<usize, io::Error> {
let count = self.source.read(buf)?;
self.position += count as u64;
if self.last_report.elapsed() >= Duration::from_secs(1)
|| self.length.as_ref().map(|(l, _)| l.get()) == Some(self.position)
{
self.last_report = Instant::now();
match self.length {
Some((length, ref length_str)) => eprint!(
"{}Read {} {}/{} ({}%){}",
self.prologue,
self.artifact_type,
Self::format_bytes(self.position),
length_str,
100 * self.position / length.get(),
self.epilogue
),
None => eprint!(
"{}Read {} {}{}",
self.prologue,
self.artifact_type,
Self::format_bytes(self.position),
self.epilogue
),
}
let _ = std::io::stdout().flush();
}
Ok(count)
}
}
impl<'a, R: Read> Drop for ProgressReader<'a, R> {
    /// When progress was drawn in place with CR on a TTY, terminate the
    /// status line with a final newline so subsequent output starts fresh.
    fn drop(&mut self) {
        if !self.tty {
            return;
        }
        eprintln!();
    }
}
| 34.2 | 97 | 0.585143 |
229d1a07caaabbe4f79262e5d9566d657eafcae9 | 9,748 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Utilities that leverage libuv's `uv_timer_*` API
use uv;
use uv::iotask;
use uv::iotask::IoTask;
use core::either;
use core::libc;
use core::libc::c_void;
use core::cast::transmute;
use core::comm::{stream, Chan, SharedChan, Port, select2i};
use core::prelude::*;
use core::ptr;
/**
* Wait for timeout period then send provided value over a channel
*
* This call returns immediately. Useful as the building block for a number
* of higher-level timer functions.
*
* Is not guaranteed to wait for exactly the specified time, but will wait
* for *at least* that period of time.
*
* # Arguments
*
* * `hl_loop` - a `uv::hl::high_level_loop` that the tcp request will run on
* * msecs - a timeout period, in milliseconds, to wait
* * ch - a channel of type T to send a `val` on
* * val - a value of type T to send over the provided `ch`
*/
pub fn delayed_send<T:Owned>(iotask: &IoTask,
                             msecs: uint,
                             ch: &Chan<T>,
                             val: T) {
    unsafe {
        // The timer callback signals this port twice: once when the timer
        // fires, and once after libuv has closed the timer handle.
        let (timer_done_po, timer_done_ch) = stream::<()>();
        let timer_done_ch = SharedChan(timer_done_ch);
        let timer = uv::ll::timer_t();
        let timer_ptr = ptr::addr_of(&timer);
        // Register and start the timer on the event-loop thread.
        do iotask::interact(iotask) |loop_ptr| {
            unsafe {
                let init_result = uv::ll::timer_init(loop_ptr, timer_ptr);
                if (init_result == 0i32) {
                    let start_result = uv::ll::timer_start(
                        timer_ptr, delayed_send_cb, msecs, 0u);
                    if (start_result == 0i32) {
                        // Note: putting the channel into a ~
                        // to cast to *c_void
                        let timer_done_ch_clone = ~timer_done_ch.clone();
                        let timer_done_ch_ptr = transmute::<
                            ~SharedChan<()>, *c_void>(
                            timer_done_ch_clone);
                        // Stash the channel on the handle so the callback
                        // can recover it later.
                        uv::ll::set_data_for_uv_handle(
                            timer_ptr,
                            timer_done_ch_ptr);
                    } else {
                        let error_msg = uv::ll::get_last_err_info(
                            loop_ptr);
                        fail!(~"timer::delayed_send() start failed: " +
                              error_msg);
                    }
                } else {
                    let error_msg = uv::ll::get_last_err_info(loop_ptr);
                    fail!(~"timer::delayed_send() init failed: " +
                          error_msg);
                }
            }
        };
        // delayed_send_cb has been processed by libuv
        timer_done_po.recv();
        // notify the caller immediately
        ch.send(val);
        // uv_close for this timer has been processed
        timer_done_po.recv();
    };
}
/**
* Blocks the current task for (at least) the specified time period.
*
* Is not guaranteed to sleep for exactly the specified time, but will sleep
* for *at least* that period of time.
*
* # Arguments
*
* * `iotask` - a `uv::iotask` that the tcp request will run on
* * msecs - an amount of time, in milliseconds, for the current task to block
*/
pub fn sleep(iotask: &IoTask, msecs: uint) {
    // Block by scheduling a delayed send to ourselves and waiting for it.
    let (exit_po, exit_ch) = stream::<()>();
    delayed_send(iotask, msecs, &exit_ch, ());
    exit_po.recv();
}
/**
* Receive on a port for (up to) a specified time, then return an `Option<T>`
*
* This call will block to receive on the provided port for up to the
* specified timeout. Depending on whether the provided port receives in that
* time period, `recv_timeout` will return an `Option<T>` representing the
* result.
*
* # Arguments
*
* * `iotask' - `uv::iotask` that the tcp request will run on
* * msecs - an mount of time, in milliseconds, to wait to receive
* * wait_port - a `core::comm::port<T>` to receive on
*
* # Returns
*
* An `Option<T>` representing the outcome of the call. If the call `recv`'d
* on the provided port in the allotted timeout period, then the result will
* be a `Some(T)`. If not, then `None` will be returned.
*/
pub fn recv_timeout<T:Copy + Owned>(iotask: &IoTask,
                                    msecs: uint,
                                    wait_po: &Port<T>)
                                    -> Option<T> {
    // Race a one-shot timeout timer against the caller's port.
    let (timeout_po, timeout_ch) = stream::<()>();
    delayed_send(iotask, msecs, &timeout_ch, ());
    // FIXME: This could be written clearer (#2618)
    // select2i returns Left if the timeout fired first, Right if the
    // caller's port became readable first.
    either::either(
        |_| {
            None
        }, |_| {
            Some(wait_po.recv())
        }, &select2i(&timeout_po, wait_po)
    )
}
// INTERNAL API
// libuv timer callback: fires once when the timer expires.  Recovers the
// completion channel stashed on the handle, signals the waiting task, and
// asks libuv to close the handle.
extern fn delayed_send_cb(handle: *uv::ll::uv_timer_t,
                          status: libc::c_int) {
    unsafe {
        debug!(
            "delayed_send_cb handle %? status %?", handle, status);
        // Faking a borrowed pointer to our ~SharedChan
        let timer_done_ch_ptr: &*c_void = &uv::ll::get_data_for_uv_handle(
            handle);
        let timer_done_ch_ptr = transmute::<&*c_void, &~SharedChan<()>>(
            timer_done_ch_ptr);
        let stop_result = uv::ll::timer_stop(handle);
        if (stop_result == 0i32) {
            // First of the two completion signals delayed_send() waits on.
            timer_done_ch_ptr.send(());
            uv::ll::close(handle, delayed_send_close_cb);
        } else {
            let loop_ptr = uv::ll::get_loop_for_uv_handle(handle);
            let error_msg = uv::ll::get_last_err_info(loop_ptr);
            fail!(~"timer::sleep() init failed: "+error_msg);
        }
    }
}
// libuv close callback: runs after the timer handle is fully closed.
// Takes back ownership of the boxed channel (freeing it when dropped) and
// sends the second completion signal delayed_send() waits on.
extern fn delayed_send_close_cb(handle: *uv::ll::uv_timer_t) {
    unsafe {
        debug!("delayed_send_close_cb handle %?", handle);
        let timer_done_ch_ptr = uv::ll::get_data_for_uv_handle(handle);
        let timer_done_ch = transmute::<*c_void, ~SharedChan<()>>(
            timer_done_ch_ptr);
        timer_done_ch.send(());
    }
}
#[cfg(test)]
mod test {
    use core::prelude::*;
    use timer::*;
    use uv;
    use core::iter;
    use core::rand::RngUtil;
    use core::rand;
    use core::task;
    use core::pipes::{stream, SharedChan};
    // Smoke test: a 1 ms sleep on the global loop must return.
    #[test]
    pub fn test_gl_timer_simple_sleep_test() {
        let hl_loop = &uv::global_loop::get();
        sleep(hl_loop, 1u);
    }
    // Many short sleeps in sequence on the same loop.
    #[test]
    pub fn test_gl_timer_sleep_stress1() {
        let hl_loop = &uv::global_loop::get();
        for iter::repeat(50u) {
            sleep(hl_loop, 1u);
        }
    }
    // Many concurrent tasks sleeping random durations; each task signals
    // completion on a shared channel, and the test waits for all of them.
    #[test]
    pub fn test_gl_timer_sleep_stress2() {
        let (po, ch) = stream();
        let ch = SharedChan(ch);
        let hl_loop = &uv::global_loop::get();
        let repeat = 20u;
        let spec = {
            ~[(1u, 20u),
              (10u, 10u),
              (20u, 2u)]
        };
        for iter::repeat(repeat) {
            let ch = ch.clone();
            for spec.each |spec| {
                let (times, maxms) = *spec;
                let ch = ch.clone();
                let hl_loop_clone = hl_loop.clone();
                do task::spawn {
                    use core::rand::*;
                    let rng = Rng();
                    for iter::repeat(times) {
                        sleep(&hl_loop_clone, rng.next() as uint % maxms);
                    }
                    ch.send(());
                }
            }
        }
        for iter::repeat(repeat * spec.len()) {
            po.recv()
        }
    }
    // Because valgrind serializes multithreaded programs it can
    // make timing-sensitive tests fail in weird ways. In the next
    // tests we run them many times and expect them to pass
    // the majority of tries.
    #[test]
    #[cfg(ignore)]
    pub fn test_gl_timer_recv_timeout_before_time_passes() {
        let times = 100;
        let mut successes = 0;
        let mut failures = 0;
        let hl_loop = uv::global_loop::get();
        for iter::repeat(times as uint) {
            task::yield();
            let expected = rand::rng().gen_str(16u);
            let (test_po, test_ch) = stream::<~str>();
            do task::spawn() {
                delayed_send(hl_loop, 1u, &test_ch, expected);
            };
            // The 10 ms timeout should comfortably outlast the 1 ms send.
            match recv_timeout(hl_loop, 10u, &test_po) {
                Some(val) => {
                    assert!(val == expected);
                    successes += 1;
                }
                _ => failures += 1
            };
        }
        assert!(successes > times / 2);
    }
    #[test]
    pub fn test_gl_timer_recv_timeout_after_time_passes() {
        let times = 100;
        let mut successes = 0;
        let mut failures = 0;
        let hl_loop = uv::global_loop::get();
        for iter::repeat(times as uint) {
            let expected = rand::Rng().gen_str(16u);
            let (test_po, test_ch) = stream::<~str>();
            let hl_loop_clone = hl_loop.clone();
            do task::spawn() {
                delayed_send(&hl_loop_clone, 50u, &test_ch, expected);
            };
            // The 1 ms timeout should usually fire before the 50 ms send.
            match recv_timeout(&hl_loop, 1u, &test_po) {
                None => successes += 1,
                _ => failures += 1
            };
        }
        assert!(successes > times / 2);
    }
}
| 32.711409 | 78 | 0.52739 |
d5bc6b85424f0893bf583b4975790664d018d45e | 98,075 | // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Windows file path handling
use ascii::AsciiCast;
use c_str::{CString, ToCStr};
use cast;
use cmp::Eq;
use from_str::FromStr;
use iter::{AdditiveIterator, DoubleEndedIterator, Extendable, Invert, Iterator, Map};
use option::{Option, Some, None};
use str;
use str::{CharSplitIterator, OwnedStr, Str, StrVector};
use to_bytes::IterBytes;
use vec::Vector;
use super::{contains_nul, BytesContainer, GenericPath, GenericPathUnsafe};
/// Iterator that yields successive components of a Path as &str
///
/// Each component is yielded as Option<&str> for compatibility with PosixPath, but
/// every component in WindowsPath is guaranteed to be Some.
pub type StrComponentIter<'self> = Map<'self, &'self str, Option<&'self str>,
CharSplitIterator<'self, char>>;
/// Iterator that yields components of a Path in reverse as &str
///
/// Each component is yielded as Option<&str> for compatibility with PosixPath, but
/// every component in WindowsPath is guaranteed to be Some.
pub type RevStrComponentIter<'self> = Invert<Map<'self, &'self str, Option<&'self str>,
CharSplitIterator<'self, char>>>;
/// Iterator that yields successive components of a Path as &[u8]
pub type ComponentIter<'self> = Map<'self, Option<&'self str>, &'self [u8],
StrComponentIter<'self>>;
/// Iterator that yields components of a Path in reverse as &[u8]
pub type RevComponentIter<'self> = Map<'self, Option<&'self str>, &'self [u8],
RevStrComponentIter<'self>>;
/// Represents a Windows path
// Notes for Windows path impl:
// The MAX_PATH is 260, but 253 is the practical limit due to some API bugs
// See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247.aspx for good information
// about windows paths.
// That same page puts a bunch of restrictions on allowed characters in a path.
// `\foo.txt` means "relative to current drive", but will not be considered to be absolute here
// as `∃P | P.join("\foo.txt") != "\foo.txt"`.
// `C:` is interesting, that means "the current directory on drive C".
// Long absolute paths need to have \\?\ prefix (or, for UNC, \\?\UNC\). I think that can be
// ignored for now, though, and only added in a hypothetical .to_pwstr() function.
// However, if a path is parsed that has \\?\, this needs to be preserved as it disables the
// processing of "." and ".." components and / as a separator.
// Experimentally, \\?\foo is not the same thing as \foo.
// Also, \\foo is not valid either (certainly not equivalent to \foo).
// Similarly, C:\\Users is not equivalent to C:\Users, although C:\Users\\foo is equivalent
// to C:\Users\foo. In fact the command prompt treats C:\\foo\bar as UNC path. But it might be
// best to just ignore that and normalize it to C:\foo\bar.
//
// Based on all this, I think the right approach is to do the following:
// * Require valid utf-8 paths. Windows API may use WCHARs, but we don't, and utf-8 is convertible
// to UTF-16 anyway (though does Windows use UTF-16 or UCS-2? Not sure).
// * Parse the prefixes \\?\UNC\, \\?\, and \\.\ explicitly.
// * If \\?\UNC\, treat following two path components as server\share. Don't error for missing
// server\share.
// * If \\?\, parse disk from following component, if present. Don't error for missing disk.
// * If \\.\, treat rest of path as just regular components. I don't know how . and .. are handled
// here, they probably aren't, but I'm not going to worry about that.
// * Else if starts with \\, treat following two components as server\share. Don't error for missing
// server\share.
// * Otherwise, attempt to parse drive from start of path.
//
// The only error condition imposed here is valid utf-8. All other invalid paths are simply
// preserved by the data structure; let the Windows API error out on them.
#[deriving(Clone, DeepClone)]
pub struct Path {
    priv repr: ~str, // assumed to never be empty
    priv prefix: Option<PathPrefix>, // parsed Windows prefix (disk, UNC, verbatim, ...), if any
    priv sepidx: Option<uint> // index of the final separator in the non-prefix portion of repr
}
impl Eq for Path {
    #[inline]
    fn eq(&self, other: &Path) -> bool {
        // Paths are normalized on construction (see new_unchecked), so
        // comparing the internal representation compares the paths.
        self.repr == other.repr
    }
}
impl FromStr for Path {
    // Delegates to Path::new_opt, which returns None for strings
    // containing embedded NULs.
    fn from_str(s: &str) -> Option<Path> {
        Path::new_opt(s)
    }
}
impl ToCStr for Path {
    #[inline]
    fn to_c_str(&self) -> CString {
        // The Path impl guarantees no embedded NULs
        unsafe { self.as_vec().to_c_str_unchecked() }
    }

    #[inline]
    unsafe fn to_c_str_unchecked(&self) -> CString {
        self.as_vec().to_c_str_unchecked()
    }
}
impl IterBytes for Path {
    // Hash/serialize the normalized string representation.
    #[inline]
    fn iter_bytes(&self, lsb0: bool, f: |&[u8]| -> bool) -> bool {
        self.repr.iter_bytes(lsb0, f)
    }
}
// A Windows Path is always valid UTF-8, so the str-based accessors of
// BytesContainer never fail.
impl BytesContainer for Path {
    #[inline]
    fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
        self.as_vec()
    }
    #[inline]
    fn container_into_owned_bytes(self) -> ~[u8] {
        self.into_vec()
    }
    #[inline]
    fn container_as_str<'a>(&'a self) -> &'a str {
        // as_str() always returns Some for WindowsPath
        self.as_str().unwrap()
    }
    #[inline]
    fn container_as_str_opt<'a>(&'a self) -> Option<&'a str> {
        self.as_str()
    }
    #[inline]
    fn is_str(_: Option<Path>) -> bool { true }
}
// Same accessors for borrowed Paths; see the impl for Path above.
impl<'self> BytesContainer for &'self Path {
    #[inline]
    fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
        self.as_vec()
    }
    #[inline]
    fn container_as_str<'a>(&'a self) -> &'a str {
        // as_str() always returns Some for WindowsPath
        self.as_str().unwrap()
    }
    #[inline]
    fn container_as_str_opt<'a>(&'a self) -> Option<&'a str> {
        self.as_str()
    }
    #[inline]
    fn is_str(_: Option<&'self Path>) -> bool { true }
}
impl GenericPathUnsafe for Path {
    /// See `GenericPathUnsafe::from_vec_unchecked`.
    ///
    /// # Failure
    ///
    /// Raises the `str::not_utf8` condition if not valid UTF-8.
    #[inline]
    unsafe fn new_unchecked<T: BytesContainer>(path: T) -> Path {
        // Normalize the path text and parse any Windows prefix up front;
        // the other methods rely on `repr` being normalized.
        let (prefix, path) = Path::normalize_(path.container_as_str());
        assert!(!path.is_empty());
        let mut ret = Path{ repr: path, prefix: prefix, sepidx: None };
        ret.update_sepidx();
        ret
    }

    /// See `GenericPathUnsafe::set_filename_unchecekd`.
    ///
    /// # Failure
    ///
    /// Raises the `str::not_utf8` condition if not valid UTF-8.
    unsafe fn set_filename_unchecked<T: BytesContainer>(&mut self, filename: T) {
        let filename = filename.container_as_str();
        match self.sepidx_or_prefix_len() {
            // no separator and path is "..": append "\<filename>"
            None if ".." == self.repr => {
                let mut s = str::with_capacity(3 + filename.len());
                s.push_str("..");
                s.push_char(sep);
                s.push_str(filename);
                self.update_normalized(s);
            }
            // no separator: the whole path is the filename
            None => {
                self.update_normalized(filename);
            }
            // final component is "..": append rather than replace it
            Some((_,idxa,end)) if self.repr.slice(idxa,end) == ".." => {
                let mut s = str::with_capacity(end + 1 + filename.len());
                s.push_str(self.repr.slice_to(end));
                s.push_char(sep);
                s.push_str(filename);
                self.update_normalized(s);
            }
            // volume-relative disk path (e.g. "C:foo"): splice the new
            // filename directly after the prefix, with no separator
            Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
                let mut s = str::with_capacity(idxb + filename.len());
                s.push_str(self.repr.slice_to(idxb));
                s.push_str(filename);
                self.update_normalized(s);
            }
            // general case: replace everything after the last separator
            Some((idxb,_,_)) => {
                let mut s = str::with_capacity(idxb + 1 + filename.len());
                s.push_str(self.repr.slice_to(idxb));
                s.push_char(sep);
                s.push_str(filename);
                self.update_normalized(s);
            }
        }
    }

    /// See `GenericPathUnsafe::push_unchecked`.
    ///
    /// Concatenating two Windows Paths is rather complicated.
    /// For the most part, it will behave as expected, except in the case of
    /// pushing a volume-relative path, e.g. `C:foo.txt`. Because we have no
    /// concept of per-volume cwds like Windows does, we can't behave exactly
    /// like Windows will. Instead, if the receiver is an absolute path on
    /// the same volume as the new path, it will be treated as the cwd that
    /// the new path is relative to. Otherwise, the new path will be treated
    /// as if it were absolute and will replace the receiver outright.
    unsafe fn push_unchecked<T: BytesContainer>(&mut self, path: T) {
        let path = path.container_as_str();
        // helper: does a DiskPrefix path have a separator right after the
        // prefix (i.e. is it volume-absolute like "C:\foo")?
        fn is_vol_abs(path: &str, prefix: Option<PathPrefix>) -> bool {
            // assume prefix is Some(DiskPrefix)
            let rest = path.slice_from(prefix_len(prefix));
            !rest.is_empty() && rest[0].is_ascii() && is_sep(rest[0] as char)
        }
        // helper: does `path`'s drive letter match self's (case-insensitive)?
        fn shares_volume(me: &Path, path: &str) -> bool {
            // path is assumed to have a prefix of Some(DiskPrefix)
            match me.prefix {
                Some(DiskPrefix) => me.repr[0] == path[0].to_ascii().to_upper().to_byte(),
                Some(VerbatimDiskPrefix) => me.repr[4] == path[0].to_ascii().to_upper().to_byte(),
                _ => false
            }
        }
        // helper: separator test honoring verbatim-prefix rules ('/' is not
        // a separator inside verbatim paths)
        fn is_sep_(prefix: Option<PathPrefix>, u: u8) -> bool {
            if prefix_is_verbatim(prefix) { is_sep_verbatim(u as char) }
            else { is_sep(u as char) }
        }
        // helper: discard self entirely and adopt the (normalized) new path
        fn replace_path(me: &mut Path, path: &str, prefix: Option<PathPrefix>) {
            let newpath = Path::normalize__(path, prefix);
            me.repr = match newpath {
                Some(p) => p,
                None => path.to_owned()
            };
            me.prefix = prefix;
            me.update_sepidx();
        }
        // helper: append `path` after self, inserting a separator if needed
        fn append_path(me: &mut Path, path: &str) {
            // appends a path that has no prefix
            // if me is verbatim, we need to pre-normalize the new path
            let path_ = if is_verbatim(me) { Path::normalize__(path, None) }
                        else { None };
            let pathlen = path_.as_ref().map_default(path.len(), |p| p.len());
            let mut s = str::with_capacity(me.repr.len() + 1 + pathlen);
            s.push_str(me.repr);
            let plen = me.prefix_len();
            if !(me.repr.len() > plen && me.repr[me.repr.len()-1] == sep as u8) {
                s.push_char(sep);
            }
            match path_ {
                None => s.push_str(path),
                Some(p) => s.push_str(p)
            };
            me.update_normalized(s)
        }

        if !path.is_empty() {
            let prefix = parse_prefix(path);
            match prefix {
                Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => {
                    // cwd-relative path, self is on the same volume
                    append_path(self, path.slice_from(prefix_len(prefix)));
                }
                Some(_) => {
                    // absolute path, or cwd-relative and self is not same volume
                    replace_path(self, path, prefix);
                }
                None if !path.is_empty() && is_sep_(self.prefix, path[0]) => {
                    // volume-relative path
                    if self.prefix.is_some() {
                        // truncate self down to the prefix, then append
                        let n = self.prefix_len();
                        self.repr.truncate(n);
                        append_path(self, path);
                    } else {
                        // we have no prefix, so nothing to be relative to
                        replace_path(self, path, prefix);
                    }
                }
                None => {
                    // relative path
                    append_path(self, path);
                }
            }
        }
    }
}
impl GenericPath for Path {
#[inline]
fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
let s = path.container_as_str_opt();
match s {
None => None,
Some(s) => {
if contains_nul(s.as_bytes()) {
None
} else {
Some(unsafe { GenericPathUnsafe::new_unchecked(s) })
}
}
}
}
/// See `GenericPath::as_str` for info.
/// Always returns a `Some` value.
#[inline]
fn as_str<'a>(&'a self) -> Option<&'a str> {
Some(self.repr.as_slice())
}
#[inline]
fn as_vec<'a>(&'a self) -> &'a [u8] {
self.repr.as_bytes()
}
#[inline]
fn into_vec(self) -> ~[u8] {
self.repr.into_bytes()
}
#[inline]
fn dirname<'a>(&'a self) -> &'a [u8] {
self.dirname_str().unwrap().as_bytes()
}
/// See `GenericPath::dirname_str` for info.
/// Always returns a `Some` value.
fn dirname_str<'a>(&'a self) -> Option<&'a str> {
Some(match self.sepidx_or_prefix_len() {
None if ".." == self.repr => self.repr.as_slice(),
None => ".",
Some((_,idxa,end)) if self.repr.slice(idxa, end) == ".." => {
self.repr.as_slice()
}
Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => {
self.repr.as_slice()
}
Some((0,idxa,_)) => self.repr.slice_to(idxa),
Some((idxb,idxa,_)) => {
match self.prefix {
Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => {
self.repr.slice_to(idxa)
}
_ => self.repr.slice_to(idxb)
}
}
})
}
#[inline]
fn filename<'a>(&'a self) -> Option<&'a [u8]> {
self.filename_str().map(|x| x.as_bytes())
}
/// See `GenericPath::filename_str` for info.
/// Always returns a `Some` value if `filename` returns a `Some` value.
fn filename_str<'a>(&'a self) -> Option<&'a str> {
match self.sepidx_or_prefix_len() {
None if "." == self.repr || ".." == self.repr => None,
None => Some(self.repr.as_slice()),
Some((_,idxa,end)) if self.repr.slice(idxa, end) == ".." => None,
Some((_,idxa,end)) if idxa == end => None,
Some((_,idxa,end)) => Some(self.repr.slice(idxa, end))
}
}
/// See `GenericPath::filestem_str` for info.
/// Always returns a `Some` value if `filestem` returns a `Some` value.
#[inline]
fn filestem_str<'a>(&'a self) -> Option<&'a str> {
// filestem() returns a byte vector that's guaranteed valid UTF-8
self.filestem().map(cast::transmute)
}
#[inline]
fn extension_str<'a>(&'a self) -> Option<&'a str> {
// extension() returns a byte vector that's guaranteed valid UTF-8
self.extension().map(cast::transmute)
}
fn dir_path(&self) -> Path {
unsafe { GenericPathUnsafe::new_unchecked(self.dirname_str().unwrap()) }
}
    /// Removes the last component, returning `true` if anything changed.
    /// Roots and bare prefixes (e.g. "\\", "C:\\", "\\\\server\\share") are
    /// not popped.
    #[inline]
    fn pop(&mut self) -> bool {
        match self.sepidx_or_prefix_len() {
            None if "." == self.repr => false,
            // single relative component: popping leaves "."
            None => {
                self.repr = ~".";
                self.sepidx = None;
                true
            }
            // nothing after the prefix: nothing to pop
            Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false,
            // path is just the root "\": nothing to pop
            Some((idxb,_,end)) if self.repr.slice(idxb, end) == "\\" => false,
            Some((idxb,idxa,_)) => {
                let trunc = match self.prefix {
                    Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => {
                        let plen = self.prefix_len();
                        // keep the root slash of e.g. "C:\" when popping the
                        // first component after it
                        if idxb == plen { idxa } else { idxb }
                    }
                    _ => idxb
                };
                self.repr.truncate(trunc);
                self.update_sepidx();
                true
            }
        }
    }
    /// Returns the root portion of the path, if any: the prefix (plus the
    /// root slash for disk prefixes) for absolute paths, "\" for
    /// volume-relative paths, `None` for relative paths.
    fn root_path(&self) -> Option<Path> {
        if self.is_absolute() {
            Some(Path::new(match self.prefix {
                // include the trailing '\' so the result is "C:\" not "C:"
                Some(VerbatimDiskPrefix)|Some(DiskPrefix) => {
                    self.repr.slice_to(self.prefix_len()+1)
                }
                _ => self.repr.slice_to(self.prefix_len())
            }))
        } else if is_vol_relative(self) {
            Some(Path::new(self.repr.slice_to(1)))
        } else {
            None
        }
    }
    /// See `GenericPath::is_absolute` for info.
    ///
    /// A Windows Path is considered absolute only if it has a non-volume prefix,
    /// or if it has a volume prefix and the path starts with '\'.
    /// A path of `\foo` is not considered absolute because it's actually
    /// relative to the "current volume". A separate method `Path::is_vol_relative`
    /// is provided to indicate this case. Similarly a path of `C:foo` is not
    /// considered absolute because it's relative to the cwd on volume C:. A
    /// separate method `Path::is_cwd_relative` is provided to indicate this case.
    #[inline]
    fn is_absolute(&self) -> bool {
        match self.prefix {
            // disk prefix ("C:") is absolute only with a following '\'
            Some(DiskPrefix) => {
                let rest = self.repr.slice_from(self.prefix_len());
                rest.len() > 0 && rest[0] == sep as u8
            }
            // UNC, verbatim, and device prefixes are inherently absolute
            Some(_) => true,
            None => false
        }
    }
    /// True only for plain relative paths: no prefix and not volume-relative.
    /// Note this is NOT simply `!is_absolute()`; cwd-relative paths like
    /// "C:foo" are neither absolute nor relative by these definitions.
    #[inline]
    fn is_relative(&self) -> bool {
        self.prefix.is_none() && !is_vol_relative(self)
    }
    /// Returns whether `self` is an ancestor of `other`. Requires equivalent
    /// prefixes and matching absolute/volume-relative status, then compares
    /// components, treating trailing ".." components in `self` as "any
    /// ancestor".
    fn is_ancestor_of(&self, other: &Path) -> bool {
        if !self.equiv_prefix(other) {
            false
        } else if self.is_absolute() != other.is_absolute() ||
                  is_vol_relative(self) != is_vol_relative(other) {
            false
        } else {
            let mut ita = self.str_component_iter().map(|x|x.unwrap());
            let mut itb = other.str_component_iter().map(|x|x.unwrap());
            if "." == self.repr {
                // "." is an ancestor of everything except paths escaping
                // upward via a leading ".."
                return itb.next() != Some("..");
            }
            loop {
                match (ita.next(), itb.next()) {
                    (None, _) => break,
                    (Some(a), Some(b)) if a == b => { continue },
                    (Some(a), _) if a == ".." => {
                        // if ita contains only .. components, it's an ancestor
                        return ita.all(|x| x == "..");
                    }
                    _ => return false
                }
            }
            true
        }
    }
    /// Computes a path that, joined onto `base`, yields `self`. Returns
    /// `None` when no such relative path exists (different drives, or
    /// `base` escapes via ".." in a way that can't be reversed).
    /// Verbatim paths add a wrinkle: components like "." / ".." / anything
    /// containing '/' cannot be expressed relative to a verbatim path, in
    /// which case `self` is returned as-is.
    fn path_relative_from(&self, base: &Path) -> Option<Path> {
        // a component that cannot appear literally inside a verbatim path
        fn comp_requires_verbatim(s: &str) -> bool {
            s == "." || s == ".." || s.contains_char(sep2)
        }
        if !self.equiv_prefix(base) {
            // prefixes differ
            if self.is_absolute() {
                Some(self.clone())
            } else if self.prefix == Some(DiskPrefix) && base.prefix == Some(DiskPrefix) {
                // both drives, drive letters must differ or they'd be equiv
                Some(self.clone())
            } else {
                None
            }
        } else if self.is_absolute() != base.is_absolute() {
            if self.is_absolute() {
                Some(self.clone())
            } else {
                None
            }
        } else if is_vol_relative(self) != is_vol_relative(base) {
            if is_vol_relative(self) {
                Some(self.clone())
            } else {
                None
            }
        } else {
            let mut ita = self.str_component_iter().map(|x|x.unwrap());
            let mut itb = base.str_component_iter().map(|x|x.unwrap());
            let mut comps = ~[];
            let a_verb = is_verbatim(self);
            let b_verb = is_verbatim(base);
            loop {
                match (ita.next(), itb.next()) {
                    (None, None) => break,
                    (Some(a), None) if a_verb && comp_requires_verbatim(a) => {
                        return Some(self.clone())
                    }
                    (Some(a), None) => {
                        comps.push(a);
                        if !a_verb {
                            comps.extend(&mut ita);
                            break;
                        }
                    }
                    // base has extra components: climb out of each with ".."
                    (None, _) => comps.push(".."),
                    // shared leading components cancel out
                    (Some(a), Some(b)) if comps.is_empty() && a == b => (),
                    (Some(a), Some(b)) if !b_verb && b == "." => {
                        if a_verb && comp_requires_verbatim(a) {
                            return Some(self.clone())
                        } else { comps.push(a) }
                    }
                    // a ".." in (non-verbatim) base can't be reversed
                    (Some(_), Some(b)) if !b_verb && b == ".." => return None,
                    (Some(a), Some(_)) if a_verb && comp_requires_verbatim(a) => {
                        return Some(self.clone())
                    }
                    (Some(a), Some(_)) => {
                        comps.push("..");
                        for _ in itb {
                            comps.push("..");
                        }
                        comps.push(a);
                        if !a_verb {
                            comps.extend(&mut ita);
                            break;
                        }
                    }
                }
            }
            Some(Path::new(comps.connect("\\")))
        }
    }
    /// Returns whether `self` ends with the (relative) path `child`,
    /// comparing components back-to-front. Non-relative children never
    /// match.
    fn ends_with_path(&self, child: &Path) -> bool {
        if !child.is_relative() { return false; }
        let mut selfit = self.str_component_iter().invert();
        let mut childit = child.str_component_iter().invert();
        loop {
            match (selfit.next(), childit.next()) {
                (Some(a), Some(b)) => if a != b { return false; },
                // child exhausted first: everything matched
                (Some(_), None) => break,
                // child is longer than self: cannot be a suffix
                (None, Some(_)) => return false,
                (None, None) => break
            }
        }
        true
    }
}
impl Path {
    /// Returns a new Path from a byte vector or string
    ///
    /// # Failure
    ///
    /// Raises the `null_byte` condition if the vector contains a NUL.
    /// Raises the `str::not_utf8` condition if invalid UTF-8.
    #[inline]
    pub fn new<T: BytesContainer>(path: T) -> Path {
        GenericPath::new(path)
    }
    /// Returns a new Path from a byte vector or string, if possible
    #[inline]
    pub fn new_opt<T: BytesContainer>(path: T) -> Option<Path> {
        GenericPath::new_opt(path)
    }
    /// Returns an iterator that yields each component of the path in turn as a Option<&str>.
    /// Every component is guaranteed to be Some.
    /// Does not yield the path prefix (including server/share components in UNC paths).
    /// Does not distinguish between volume-relative and relative paths, e.g.
    /// \a\b\c and a\b\c.
    /// Does not distinguish between absolute and cwd-relative paths, e.g.
    /// C:\foo and C:foo.
    pub fn str_component_iter<'a>(&'a self) -> StrComponentIter<'a> {
        let s = match self.prefix {
            Some(_) => {
                let plen = self.prefix_len();
                // skip the separator that immediately follows the prefix
                if self.repr.len() > plen && self.repr[plen] == sep as u8 {
                    self.repr.slice_from(plen+1)
                } else { self.repr.slice_from(plen) }
            }
            // volume-relative path: skip the leading '\'
            None if self.repr[0] == sep as u8 => self.repr.slice_from(1),
            None => self.repr.as_slice()
        };
        let ret = s.split_terminator_iter(sep).map(Some);
        ret
    }
    /// Returns an iterator that yields each component of the path in reverse as an Option<&str>
    /// See str_component_iter() for details.
    pub fn rev_str_component_iter<'a>(&'a self) -> RevStrComponentIter<'a> {
        self.str_component_iter().invert()
    }
    /// Returns an iterator that yields each component of the path in turn as a &[u8].
    /// See str_component_iter() for details.
    pub fn component_iter<'a>(&'a self) -> ComponentIter<'a> {
        fn convert<'a>(x: Option<&'a str>) -> &'a [u8] {
            #[inline];
            x.unwrap().as_bytes()
        }
        self.str_component_iter().map(convert)
    }
    /// Returns an iterator that yields each component of the path in reverse as a &[u8].
    /// See str_component_iter() for details.
    pub fn rev_component_iter<'a>(&'a self) -> RevComponentIter<'a> {
        fn convert<'a>(x: Option<&'a str>) -> &'a [u8] {
            #[inline];
            x.unwrap().as_bytes()
        }
        self.rev_str_component_iter().map(convert)
    }
    // Returns whether two paths have equivalent prefixes: identical prefixes,
    // or a disk/UNC prefix matching its verbatim counterpart (drive letters
    // compared case-insensitively).
    fn equiv_prefix(&self, other: &Path) -> bool {
        match (self.prefix, other.prefix) {
            (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
                // drive letter lives at index 0 for "C:" and index 4 for "\\?\C:"
                self.is_absolute() &&
                    self.repr[0].to_ascii().eq_ignore_case(other.repr[4].to_ascii())
            }
            (Some(VerbatimDiskPrefix), Some(DiskPrefix)) => {
                other.is_absolute() &&
                    self.repr[4].to_ascii().eq_ignore_case(other.repr[0].to_ascii())
            }
            (Some(VerbatimDiskPrefix), Some(VerbatimDiskPrefix)) => {
                self.repr[4].to_ascii().eq_ignore_case(other.repr[4].to_ascii())
            }
            (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => {
                // compare "server\share" after "\\" vs after "\\?\UNC\"
                self.repr.slice(2, self.prefix_len()) == other.repr.slice(8, other.prefix_len())
            }
            (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => {
                self.repr.slice(8, self.prefix_len()) == other.repr.slice(2, other.prefix_len())
            }
            (None, None) => true,
            (a, b) if a == b => {
                self.repr.slice_to(self.prefix_len()) == other.repr.slice_to(other.prefix_len())
            }
            _ => false
        }
    }
    // Parses the prefix of `s` and normalizes the rest, returning the parsed
    // prefix together with the (possibly unchanged) owned repr.
    fn normalize_<S: Str>(s: S) -> (Option<PathPrefix>, ~str) {
        // make borrowck happy
        let (prefix, val) = {
            let prefix = parse_prefix(s.as_slice());
            let path = Path::normalize__(s.as_slice(), prefix);
            (prefix, path)
        };
        (prefix, match val {
            None => s.into_owned(),
            Some(val) => val
        })
    }
    // Core normalization: canonicalizes separators, resolves "." and
    // resolvable ".." components, upper-cases drive letters. Returns None
    // when `s` is already normalized (so the caller can reuse it unchanged).
    fn normalize__(s: &str, prefix: Option<PathPrefix>) -> Option<~str> {
        if prefix_is_verbatim(prefix) {
            // don't do any normalization
            match prefix {
                Some(VerbatimUNCPrefix(x, 0)) if s.len() == 8 + x => {
                    // the server component has no trailing '\'
                    let mut s = s.into_owned();
                    s.push_char(sep);
                    Some(s)
                }
                _ => None
            }
        } else {
            let (is_abs, comps) = normalize_helper(s, prefix);
            let mut comps = comps;
            match (comps.is_some(),prefix) {
                (false, Some(DiskPrefix)) => {
                    // lower-case drive letter still needs upper-casing
                    if s[0] >= 'a' as u8 && s[0] <= 'z' as u8 {
                        comps = Some(~[]);
                    }
                }
                (false, Some(VerbatimDiskPrefix)) => {
                    // FIX: the drive letter of "\\?\c:" is at s[4]; the
                    // original upper-bound test erroneously checked s[0]
                    // (always '\\'). Both bounds now test s[4], mirroring
                    // the DiskPrefix arm above.
                    if s[4] >= 'a' as u8 && s[4] <= 'z' as u8 {
                        comps = Some(~[]);
                    }
                }
                _ => ()
            }
            match comps {
                None => None,
                Some(comps) => {
                    if prefix.is_some() && comps.is_empty() {
                        match prefix.unwrap() {
                            DiskPrefix => {
                                let len = prefix_len(prefix) + is_abs as uint;
                                let mut s = s.slice_to(len).to_owned();
                                unsafe {
                                    str::raw::as_owned_vec(&mut s)[0] =
                                        s[0].to_ascii().to_upper().to_byte();
                                }
                                if is_abs {
                                    // normalize C:/ to C:\
                                    unsafe {
                                        str::raw::as_owned_vec(&mut s)[2] = sep as u8;
                                    }
                                }
                                Some(s)
                            }
                            VerbatimDiskPrefix => {
                                let len = prefix_len(prefix) + is_abs as uint;
                                let mut s = s.slice_to(len).to_owned();
                                unsafe {
                                    str::raw::as_owned_vec(&mut s)[4] =
                                        s[4].to_ascii().to_upper().to_byte();
                                }
                                Some(s)
                            }
                            _ => {
                                let plen = prefix_len(prefix);
                                // strip anything trailing the prefix
                                if s.len() > plen {
                                    Some(s.slice_to(plen).to_owned())
                                } else { None }
                            }
                        }
                    } else if is_abs && comps.is_empty() {
                        // absolute path with no components: the root "\"
                        Some(str::from_char(sep))
                    } else {
                        // rebuild the string: prefix + separated components
                        let prefix_ = s.slice_to(prefix_len(prefix));
                        let n = prefix_.len() +
                                if is_abs { comps.len() } else { comps.len() - 1} +
                                comps.iter().map(|v| v.len()).sum();
                        let mut s = str::with_capacity(n);
                        match prefix {
                            Some(DiskPrefix) => {
                                s.push_char(prefix_[0].to_ascii().to_upper().to_char());
                                s.push_char(':');
                            }
                            Some(VerbatimDiskPrefix) => {
                                s.push_str(prefix_.slice_to(4));
                                s.push_char(prefix_[4].to_ascii().to_upper().to_char());
                                s.push_str(prefix_.slice_from(5));
                            }
                            Some(UNCPrefix(a,b)) => {
                                s.push_str("\\\\");
                                s.push_str(prefix_.slice(2, a+2));
                                s.push_char(sep);
                                s.push_str(prefix_.slice(3+a, 3+a+b));
                            }
                            Some(_) => s.push_str(prefix_),
                            None => ()
                        }
                        let mut it = comps.move_iter();
                        if !is_abs {
                            // relative: first component has no leading separator
                            match it.next() {
                                None => (),
                                Some(comp) => s.push_str(comp)
                            }
                        }
                        for comp in it {
                            s.push_char(sep);
                            s.push_str(comp);
                        }
                        Some(s)
                    }
                }
            }
        }
    }
    // Recomputes the cached index of the final separator after a mutation.
    fn update_sepidx(&mut self) {
        let s = if self.has_nonsemantic_trailing_slash() {
            self.repr.slice_to(self.repr.len()-1)
        } else { self.repr.as_slice() };
        // verbatim paths only treat '\' as a separator
        let idx = s.rfind(if !prefix_is_verbatim(self.prefix) { is_sep }
                          else { is_sep_verbatim });
        let prefixlen = self.prefix_len();
        // separators inside the prefix don't count
        self.sepidx = idx.and_then(|x| if x < prefixlen { None } else { Some(x) });
    }
    fn prefix_len(&self) -> uint {
        prefix_len(self.prefix)
    }
    // Returns a tuple (before, after, end) where before is the index of the separator
    // and after is the index just after the separator.
    // end is the length of the string, normally, or the index of the final character if it is
    // a non-semantic trailing separator in a verbatim string.
    // If the prefix is considered the separator, before and after are the same.
    fn sepidx_or_prefix_len(&self) -> Option<(uint,uint,uint)> {
        match self.sepidx {
            None => match self.prefix_len() { 0 => None, x => Some((x,x,self.repr.len())) },
            Some(x) => {
                if self.has_nonsemantic_trailing_slash() {
                    Some((x,x+1,self.repr.len()-1))
                } else { Some((x,x+1,self.repr.len())) }
            }
        }
    }
    // Verbatim paths may keep a trailing '\' that carries no meaning; this
    // detects it so the component logic can ignore it.
    fn has_nonsemantic_trailing_slash(&self) -> bool {
        is_verbatim(self) && self.repr.len() > self.prefix_len()+1 &&
            self.repr[self.repr.len()-1] == sep as u8
    }
    // Replaces the repr with the normalized form of `s`, refreshing the
    // cached prefix and separator index.
    fn update_normalized<S: Str>(&mut self, s: S) {
        let (prefix, path) = Path::normalize_(s);
        self.repr = path;
        self.prefix = prefix;
        self.update_sepidx();
    }
}
/// Returns whether the path is considered "volume-relative", which means a path
/// that looks like "\foo". Paths of this form are relative to the current volume,
/// but absolute within that volume.
#[inline]
pub fn is_vol_relative(path: &Path) -> bool {
    // no prefix and a leading separator byte
    path.prefix.is_none() && is_sep_byte(&path.repr[0])
}
/// Returns whether the path is considered "cwd-relative", which means a path
/// with a volume prefix that is not absolute. These look like "C:foo.txt". Paths
/// of this form are relative to the cwd on the given volume.
#[inline]
pub fn is_cwd_relative(path: &Path) -> bool {
    path.prefix == Some(DiskPrefix) && !path.is_absolute()
}
/// Returns the PathPrefix for this Path, or `None` for plain relative /
/// volume-relative paths.
#[inline]
pub fn prefix(path: &Path) -> Option<PathPrefix> {
    path.prefix
}
/// Returns whether the Path's prefix is a verbatim prefix, i.e. \\?\
/// (verbatim paths undergo no separator translation or normalization).
#[inline]
pub fn is_verbatim(path: &Path) -> bool {
    prefix_is_verbatim(path.prefix)
}
/// The standard path separator character; normalization canonicalizes
/// non-verbatim paths to use this.
pub static sep: char = '\\';
/// The alternative path separator character, accepted on input for
/// non-verbatim paths.
pub static sep2: char = '/';
/// Returns whether the given char is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
#[inline]
pub fn is_sep(c: char) -> bool {
    c == sep || c == sep2
}
/// Returns whether the given char is a path separator.
/// Only allows the primary separator '\'; use is_sep to allow '/'.
/// This is the predicate used for verbatim (\\?\) paths.
#[inline]
pub fn is_sep_verbatim(c: char) -> bool {
    c == sep
}
/// Returns whether the given byte is a path separator.
/// Allows both the primary separator '\' and the alternative separator '/'.
/// Byte-level variant of `is_sep` (both separators are ASCII).
#[inline]
pub fn is_sep_byte(u: &u8) -> bool {
    *u as char == sep || *u as char == sep2
}
/// Returns whether the given byte is a path separator.
/// Only allows the primary separator '\'; use is_sep_byte to allow '/'.
#[inline]
pub fn is_sep_byte_verbatim(u: &u8) -> bool {
    *u as char == sep
}
/// Prefix types for Path.
/// The stored lengths count only the component text itself, excluding the
/// prefix literal and any separators (see `prefix_len` for total lengths).
#[deriving(Eq, Clone, DeepClone)]
pub enum PathPrefix {
    /// Prefix `\\?\`, uint is the length of the following component
    VerbatimPrefix(uint),
    /// Prefix `\\?\UNC\`, uints are the lengths of the UNC components
    VerbatimUNCPrefix(uint, uint),
    /// Prefix `\\?\C:\` (for any alphabetic character)
    VerbatimDiskPrefix,
    /// Prefix `\\.\`, uint is the length of the following component
    DeviceNSPrefix(uint),
    /// UNC prefix `\\server\share`, uints are the lengths of the server/share
    UNCPrefix(uint, uint),
    /// Prefix `C:` for any alphabetic character
    DiskPrefix
}
// FIXME (#8169): Make private once visibility is fixed
// Parses the Windows path prefix (if any) from the front of `path`.
// Recognizes, in order: verbatim UNC (\\?\UNC\server\share), verbatim
// disk (\\?\C:\), generic verbatim (\\?\x), device namespace (\\.\x),
// plain UNC (\\server\share), and disk (C:). Returns None for plain paths.
// Note: verbatim forms only treat '\' as a separator; non-verbatim forms
// also accept '/'.
fn parse_prefix<'a>(mut path: &'a str) -> Option<PathPrefix> {
    if path.starts_with("\\\\") {
        // \\
        path = path.slice_from(2);
        if path.starts_with("?\\") {
            // \\?\
            path = path.slice_from(2);
            if path.starts_with("UNC\\") {
                // \\?\UNC\server\share
                path = path.slice_from(4);
                let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) {
                    Some(x) => x,
                    None => (path.len(), 0)
                };
                return Some(VerbatimUNCPrefix(idx_a, idx_b));
            } else {
                // \\?\path
                let idx = path.find('\\');
                if idx == Some(2) && path[1] == ':' as u8 {
                    let c = path[0];
                    if c.is_ascii() && ::char::is_alphabetic(c as char) {
                        // \\?\C:\ path
                        return Some(VerbatimDiskPrefix);
                    }
                }
                let idx = idx.unwrap_or(path.len());
                return Some(VerbatimPrefix(idx));
            }
        } else if path.starts_with(".\\") {
            // \\.\path
            path = path.slice_from(2);
            let idx = path.find('\\').unwrap_or(path.len());
            return Some(DeviceNSPrefix(idx));
        }
        match parse_two_comps(path, is_sep) {
            Some((idx_a, idx_b)) if idx_a > 0 && idx_b > 0 => {
                // \\server\share
                return Some(UNCPrefix(idx_a, idx_b));
            }
            _ => ()
        }
    } else if path.len() > 1 && path[1] == ':' as u8 {
        // C:
        let c = path[0];
        if c.is_ascii() && ::char::is_alphabetic(c as char) {
            return Some(DiskPrefix);
        }
    }
    return None;

    // Splits the front of `path` into two components at the first separator
    // accepted by `f`, returning their lengths. None if no separator exists.
    fn parse_two_comps<'a>(mut path: &'a str, f: |char| -> bool)
                       -> Option<(uint, uint)> {
        let idx_a = match path.find(|x| f(x)) {
            None => return None,
            Some(x) => x
        };
        path = path.slice_from(idx_a+1);
        let idx_b = path.find(f).unwrap_or(path.len());
        Some((idx_a, idx_b))
    }
}
// None result means the string didn't need normalizing
// Returns (is_absolute, components): splits the non-prefix part of `s` into
// components, dropping empty and "." components and resolving ".." against
// preceding components where possible (keeping unresolvable leading ".."s
// for relative paths).
fn normalize_helper<'a>(s: &'a str, prefix: Option<PathPrefix>) -> (bool,Option<~[&'a str]>) {
    // verbatim paths only split on '\'
    let f = if !prefix_is_verbatim(prefix) { is_sep } else { is_sep_verbatim };
    let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix)));
    let s_ = s.slice_from(prefix_len(prefix));
    let s_ = if is_abs { s_.slice_from(1) } else { s_ };

    if is_abs && s_.is_empty() {
        return (is_abs, match prefix {
            // a lone "/" root still needs rewriting to "\"
            Some(DiskPrefix) | None => (if is_sep_verbatim(s.char_at(prefix_len(prefix))) { None }
                                        else { Some(~[]) }),
            Some(_) => Some(~[]), // need to trim the trailing separator
        });
    }
    let mut comps: ~[&'a str] = ~[];
    let mut n_up = 0u;      // count of retained leading ".." components
    let mut changed = false;
    for comp in s_.split_iter(f) {
        if comp.is_empty() { changed = true }
        else if comp == "." { changed = true }
        else if comp == ".." {
            let has_abs_prefix = match prefix {
                Some(DiskPrefix) => false,
                Some(_) => true,
                None => false
            };
            // ".." at an absolute root is a no-op and gets dropped
            if (is_abs || has_abs_prefix) && comps.is_empty() { changed = true }
            else if comps.len() == n_up { comps.push(".."); n_up += 1 }
            else { comps.pop(); changed = true }
        } else { comps.push(comp) }
    }
    if !changed && !prefix_is_verbatim(prefix) {
        // '/' separators must still be rewritten to '\'
        changed = s.find(is_sep).is_some();
    }
    if changed {
        if comps.is_empty() && !is_abs && prefix.is_none() {
            if s == "." {
                return (is_abs, None);
            }
            comps.push(".");
        }
        (is_abs, Some(comps))
    } else {
        (is_abs, None)
    }
}
// Returns whether the prefix denotes a verbatim path (one that bypasses
// normalization and treats only '\' as a separator).
fn prefix_is_verbatim(p: Option<PathPrefix>) -> bool {
    match p {
        Some(VerbatimPrefix(_)) | Some(VerbatimUNCPrefix(_,_)) | Some(VerbatimDiskPrefix) => true,
        Some(DeviceNSPrefix(_)) => true, // not really sure, but I think so
        _ => false
    }
}
// Total byte length of the prefix, including its literal characters and
// internal separators (e.g. UNCPrefix(x,y) covers "\\" + server + "\" + share).
fn prefix_len(p: Option<PathPrefix>) -> uint {
    match p {
        None => 0,
        Some(VerbatimPrefix(x)) => 4 + x,           // "\\?\" + comp
        Some(VerbatimUNCPrefix(x,y)) => 8 + x + 1 + y, // "\\?\UNC\" + server + "\" + share
        Some(VerbatimDiskPrefix) => 6,              // "\\?\C:"
        Some(UNCPrefix(x,y)) => 2 + x + 1 + y,      // "\\" + server + "\" + share
        Some(DeviceNSPrefix(x)) => 4 + x,           // "\\.\" + comp
        Some(DiskPrefix) => 2                       // "C:"
    }
}
// Returns whether byte `c` counts as a separator under prefix `p`'s rules
// (verbatim prefixes accept only '\'; others also accept '/').
fn prefix_is_sep(p: Option<PathPrefix>, c: u8) -> bool {
    c.is_ascii() && if !prefix_is_verbatim(p) { is_sep(c as char) }
                    else { is_sep_verbatim(c as char) }
}
#[cfg(test)]
mod tests {
use super::*;
use super::parse_prefix;
use option::{Some,None};
use iter::Iterator;
use vec::Vector;
    // Test helper: `t!(s: path, exp)` asserts the str repr, `t!(v: path, exp)`
    // asserts the byte-vector repr.
    macro_rules! t(
        (s: $path:expr, $exp:expr) => (
            {
                let path = $path;
                assert_eq!(path.as_str(), Some($exp));
            }
        );
        (v: $path:expr, $exp:expr) => (
            {
                let path = $path;
                assert_eq!(path.as_vec(), $exp);
            }
        )
    )
    // Test helper: shorthand for a byte-string literal.
    macro_rules! b(
        ($($arg:expr),+) => (
            bytes!($($arg),+)
        )
    )
    // Exercises parse_prefix across UNC, verbatim, device-namespace, and
    // drive-letter forms, including '/'-separator and malformed variants.
    #[test]
    fn test_parse_prefix() {
        macro_rules! t(
            ($path:expr, $exp:expr) => (
                {
                    let path = $path;
                    let exp = $exp;
                    let res = parse_prefix(path);
                    assert!(res == exp,
                            "parse_prefix(\"{}\"): expected {:?}, found {:?}", path, exp, res);
                }
            )
        )
        t!("\\\\SERVER\\share\\foo", Some(UNCPrefix(6,5)));
        t!("\\\\", None);
        t!("\\\\SERVER", None);
        t!("\\\\SERVER\\", None);
        t!("\\\\SERVER\\\\", None);
        t!("\\\\SERVER\\\\foo", None);
        t!("\\\\SERVER\\share", Some(UNCPrefix(6,5)));
        t!("\\\\SERVER/share/foo", Some(UNCPrefix(6,5)));
        t!("\\\\SERVER\\share/foo", Some(UNCPrefix(6,5)));
        t!("//SERVER/share/foo", None);
        t!("\\\\\\a\\b\\c", None);
        t!("\\\\?\\a\\b\\c", Some(VerbatimPrefix(1)));
        t!("\\\\?\\a/b/c", Some(VerbatimPrefix(5)));
        t!("//?/a/b/c", None);
        t!("\\\\.\\a\\b", Some(DeviceNSPrefix(1)));
        t!("\\\\.\\a/b", Some(DeviceNSPrefix(3)));
        t!("//./a/b", None);
        t!("\\\\?\\UNC\\server\\share\\foo", Some(VerbatimUNCPrefix(6,5)));
        t!("\\\\?\\UNC\\\\share\\foo", Some(VerbatimUNCPrefix(0,5)));
        t!("\\\\?\\UNC\\", Some(VerbatimUNCPrefix(0,0)));
        t!("\\\\?\\UNC\\server/share/foo", Some(VerbatimUNCPrefix(16,0)));
        t!("\\\\?\\UNC\\server", Some(VerbatimUNCPrefix(6,0)));
        t!("\\\\?\\UNC\\server\\", Some(VerbatimUNCPrefix(6,0)));
        t!("\\\\?\\UNC/server/share", Some(VerbatimPrefix(16)));
        t!("\\\\?\\UNC", Some(VerbatimPrefix(3)));
        t!("\\\\?\\C:\\a\\b.txt", Some(VerbatimDiskPrefix));
        t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
        t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
        t!("\\\\?\\C:a.txt", Some(VerbatimPrefix(7)));
        t!("\\\\?\\C:a\\b.txt", Some(VerbatimPrefix(3)));
        t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
        t!("C:\\foo", Some(DiskPrefix));
        t!("z:/foo", Some(DiskPrefix));
        t!("d:", Some(DiskPrefix));
        t!("ab:", None);
        t!("ü:\\foo", None);
        t!("3:\\foo", None);
        t!(" :\\foo", None);
        t!("::\\foo", None);
        t!("\\\\?\\C:", Some(VerbatimPrefix(2)));
        t!("\\\\?\\z:\\", Some(VerbatimDiskPrefix));
        t!("\\\\?\\ab:\\", Some(VerbatimPrefix(3)));
        t!("\\\\?\\C:\\a", Some(VerbatimDiskPrefix));
        t!("\\\\?\\C:/a", Some(VerbatimPrefix(4)));
        t!("\\\\?\\C:\\a/b", Some(VerbatimDiskPrefix));
    }
#[test]
fn test_paths() {
let empty: &[u8] = [];
t!(v: Path::new(empty), b!("."));
t!(v: Path::new(b!("\\")), b!("\\"));
t!(v: Path::new(b!("a\\b\\c")), b!("a\\b\\c"));
t!(s: Path::new(""), ".");
t!(s: Path::new("\\"), "\\");
t!(s: Path::new("hi"), "hi");
t!(s: Path::new("hi\\"), "hi");
t!(s: Path::new("\\lib"), "\\lib");
t!(s: Path::new("\\lib\\"), "\\lib");
t!(s: Path::new("hi\\there"), "hi\\there");
t!(s: Path::new("hi\\there.txt"), "hi\\there.txt");
t!(s: Path::new("/"), "\\");
t!(s: Path::new("hi/"), "hi");
t!(s: Path::new("/lib"), "\\lib");
t!(s: Path::new("/lib/"), "\\lib");
t!(s: Path::new("hi/there"), "hi\\there");
t!(s: Path::new("hi\\there\\"), "hi\\there");
t!(s: Path::new("hi\\..\\there"), "there");
t!(s: Path::new("hi/../there"), "there");
t!(s: Path::new("..\\hi\\there"), "..\\hi\\there");
t!(s: Path::new("\\..\\hi\\there"), "\\hi\\there");
t!(s: Path::new("/../hi/there"), "\\hi\\there");
t!(s: Path::new("foo\\.."), ".");
t!(s: Path::new("\\foo\\.."), "\\");
t!(s: Path::new("\\foo\\..\\.."), "\\");
t!(s: Path::new("\\foo\\..\\..\\bar"), "\\bar");
t!(s: Path::new("\\.\\hi\\.\\there\\."), "\\hi\\there");
t!(s: Path::new("\\.\\hi\\.\\there\\.\\.."), "\\hi");
t!(s: Path::new("foo\\..\\.."), "..");
t!(s: Path::new("foo\\..\\..\\.."), "..\\..");
t!(s: Path::new("foo\\..\\..\\bar"), "..\\bar");
assert_eq!(Path::new(b!("foo\\bar")).into_vec(), b!("foo\\bar").to_owned());
assert_eq!(Path::new(b!("\\foo\\..\\..\\bar")).into_vec(),
b!("\\bar").to_owned());
t!(s: Path::new("\\\\a"), "\\a");
t!(s: Path::new("\\\\a\\"), "\\a");
t!(s: Path::new("\\\\a\\b"), "\\\\a\\b");
t!(s: Path::new("\\\\a\\b\\"), "\\\\a\\b");
t!(s: Path::new("\\\\a\\b/"), "\\\\a\\b");
t!(s: Path::new("\\\\\\b"), "\\b");
t!(s: Path::new("\\\\a\\\\b"), "\\a\\b");
t!(s: Path::new("\\\\a\\b\\c"), "\\\\a\\b\\c");
t!(s: Path::new("\\\\server\\share/path"), "\\\\server\\share\\path");
t!(s: Path::new("\\\\server/share/path"), "\\\\server\\share\\path");
t!(s: Path::new("C:a\\b.txt"), "C:a\\b.txt");
t!(s: Path::new("C:a/b.txt"), "C:a\\b.txt");
t!(s: Path::new("z:\\a\\b.txt"), "Z:\\a\\b.txt");
t!(s: Path::new("z:/a/b.txt"), "Z:\\a\\b.txt");
t!(s: Path::new("ab:/a/b.txt"), "ab:\\a\\b.txt");
t!(s: Path::new("C:\\"), "C:\\");
t!(s: Path::new("C:"), "C:");
t!(s: Path::new("q:"), "Q:");
t!(s: Path::new("C:/"), "C:\\");
t!(s: Path::new("C:\\foo\\.."), "C:\\");
t!(s: Path::new("C:foo\\.."), "C:");
t!(s: Path::new("C:\\a\\"), "C:\\a");
t!(s: Path::new("C:\\a/"), "C:\\a");
t!(s: Path::new("C:\\a\\b\\"), "C:\\a\\b");
t!(s: Path::new("C:\\a\\b/"), "C:\\a\\b");
t!(s: Path::new("C:a\\"), "C:a");
t!(s: Path::new("C:a/"), "C:a");
t!(s: Path::new("C:a\\b\\"), "C:a\\b");
t!(s: Path::new("C:a\\b/"), "C:a\\b");
t!(s: Path::new("\\\\?\\z:\\a\\b.txt"), "\\\\?\\z:\\a\\b.txt");
t!(s: Path::new("\\\\?\\C:/a/b.txt"), "\\\\?\\C:/a/b.txt");
t!(s: Path::new("\\\\?\\C:\\a/b.txt"), "\\\\?\\C:\\a/b.txt");
t!(s: Path::new("\\\\?\\test\\a\\b.txt"), "\\\\?\\test\\a\\b.txt");
t!(s: Path::new("\\\\?\\foo\\bar\\"), "\\\\?\\foo\\bar\\");
t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
t!(s: Path::new("\\\\.\\"), "\\\\.\\");
t!(s: Path::new("\\\\?\\UNC\\server\\share\\foo"), "\\\\?\\UNC\\server\\share\\foo");
t!(s: Path::new("\\\\?\\UNC\\server/share"), "\\\\?\\UNC\\server/share\\");
t!(s: Path::new("\\\\?\\UNC\\server"), "\\\\?\\UNC\\server\\");
t!(s: Path::new("\\\\?\\UNC\\"), "\\\\?\\UNC\\\\");
t!(s: Path::new("\\\\?\\UNC"), "\\\\?\\UNC");
// I'm not sure whether \\.\foo/bar should normalize to \\.\foo\bar
// as information is sparse and this isn't really googleable.
// I'm going to err on the side of not normalizing it, as this skips the filesystem
t!(s: Path::new("\\\\.\\foo/bar"), "\\\\.\\foo/bar");
t!(s: Path::new("\\\\.\\foo\\bar"), "\\\\.\\foo\\bar");
}
#[test]
fn test_opt_paths() {
assert_eq!(Path::new_opt(b!("foo\\bar", 0)), None);
assert_eq!(Path::new_opt(b!("foo\\bar", 0x80)), None);
t!(v: Path::new_opt(b!("foo\\bar")).unwrap(), b!("foo\\bar"));
assert_eq!(Path::new_opt("foo\\bar\0"), None);
t!(s: Path::new_opt("foo\\bar").unwrap(), "foo\\bar");
}
    // The null_byte condition must fire for NUL-containing input to new(),
    // set_filename(), and push(), and the trapped replacement value is used.
    #[test]
    fn test_null_byte() {
        use path::null_byte::cond;

        let mut handled = false;
        let mut p = do cond.trap(|v| {
            handled = true;
            assert_eq!(v.as_slice(), b!("foo\\bar", 0));
            (b!("\\bar").to_owned())
        }).inside {
            Path::new(b!("foo\\bar", 0))
        };
        assert!(handled);
        assert_eq!(p.as_vec(), b!("\\bar"));

        handled = false;
        do cond.trap(|v| {
            handled = true;
            assert_eq!(v.as_slice(), b!("f", 0, "o"));
            (b!("foo").to_owned())
        }).inside {
            p.set_filename(b!("f", 0, "o"))
        };
        assert!(handled);
        assert_eq!(p.as_vec(), b!("\\foo"));

        handled = false;
        do cond.trap(|v| {
            handled = true;
            assert_eq!(v.as_slice(), b!("f", 0, "o"));
            (b!("foo").to_owned())
        }).inside {
            p.push(b!("f", 0, "o"));
        };
        assert!(handled);
        assert_eq!(p.as_vec(), b!("\\foo\\foo"));
    }
    // If the null_byte condition handler itself returns a NUL-containing
    // replacement, the operation must fail (checked via supervised tasks).
    #[test]
    fn test_null_byte_fail() {
        use path::null_byte::cond;
        use task;

        macro_rules! t(
            ($name:expr => $code:block) => (
                {
                    let mut t = task::task();
                    t.supervised();
                    t.name($name);
                    let res = do t.try $code;
                    assert!(res.is_err());
                }
            )
        )

        t!(~"from_vec() w\\nul" => {
            do cond.trap(|_| {
                (b!("null", 0).to_owned())
            }).inside {
                Path::new(b!("foo\\bar", 0))
            };
        })

        t!(~"set_filename w\\nul" => {
            let mut p = Path::new(b!("foo\\bar"));
            do cond.trap(|_| {
                (b!("null", 0).to_owned())
            }).inside {
                p.set_filename(b!("foo", 0))
            };
        })

        t!(~"push w\\nul" => {
            let mut p = Path::new(b!("foo\\bar"));
            do cond.trap(|_| {
                (b!("null", 0).to_owned())
            }).inside {
                p.push(b!("foo", 0))
            };
        })
    }
#[test]
#[should_fail]
fn test_not_utf8_fail() {
Path::new(b!("hello", 0x80, ".txt"));
}
    // display()/filename_display() string conversion, including the
    // with_str callback form and the no-filename case ("\\").
    #[test]
    fn test_display_str() {
        let path = Path::new("foo");
        assert_eq!(path.display().to_str(), ~"foo");
        let path = Path::new(b!("\\"));
        assert_eq!(path.filename_display().to_str(), ~"");

        let mut called = false;
        let path = Path::new("foo");
        do path.display().with_str |s| {
            assert_eq!(s, "foo");
            called = true;
        };
        assert!(called);
        called = false;
        let path = Path::new(b!("\\"));
        do path.filename_display().with_str |s| {
            assert_eq!(s, "");
            called = true;
        }
        assert!(called);
    }
    // display()/filename_display() through the format! machinery.
    #[test]
    fn test_display() {
        macro_rules! t(
            ($path:expr, $exp:expr, $expf:expr) => (
                {
                    let path = Path::new($path);
                    let f = format!("{}", path.display());
                    assert_eq!(f.as_slice(), $exp);
                    let f = format!("{}", path.filename_display());
                    assert_eq!(f.as_slice(), $expf);
                }
            )
        )

        t!("foo", "foo", "foo");
        t!("foo\\bar", "foo\\bar", "bar");
        t!("\\", "\\", "");
    }
    // filename/dirname/filestem/extension accessors across relative,
    // rooted, drive, UNC, verbatim, and device-namespace paths.
    #[test]
    fn test_components() {
        macro_rules! t(
            (s: $path:expr, $op:ident, $exp:expr) => (
                {
                    let path = Path::new($path);
                    assert_eq!(path.$op(), Some($exp));
                }
            );
            (s: $path:expr, $op:ident, $exp:expr, opt) => (
                {
                    let path = Path::new($path);
                    let left = path.$op();
                    assert_eq!(left, $exp);
                }
            );
            (v: $path:expr, $op:ident, $exp:expr) => (
                {
                    let path = Path::new($path);
                    assert_eq!(path.$op(), $exp);
                }
            )
        )

        t!(v: b!("a\\b\\c"), filename, Some(b!("c")));
        t!(s: "a\\b\\c", filename_str, "c");
        t!(s: "\\a\\b\\c", filename_str, "c");
        t!(s: "a", filename_str, "a");
        t!(s: "\\a", filename_str, "a");
        t!(s: ".", filename_str, None, opt);
        t!(s: "\\", filename_str, None, opt);
        t!(s: "..", filename_str, None, opt);
        t!(s: "..\\..", filename_str, None, opt);
        t!(s: "c:\\foo.txt", filename_str, "foo.txt");
        t!(s: "C:\\", filename_str, None, opt);
        t!(s: "C:", filename_str, None, opt);
        t!(s: "\\\\server\\share\\foo.txt", filename_str, "foo.txt");
        t!(s: "\\\\server\\share", filename_str, None, opt);
        t!(s: "\\\\server", filename_str, "server");
        t!(s: "\\\\?\\bar\\foo.txt", filename_str, "foo.txt");
        t!(s: "\\\\?\\bar", filename_str, None, opt);
        t!(s: "\\\\?\\", filename_str, None, opt);
        t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", filename_str, "foo.txt");
        t!(s: "\\\\?\\UNC\\server", filename_str, None, opt);
        t!(s: "\\\\?\\UNC\\", filename_str, None, opt);
        t!(s: "\\\\?\\C:\\foo.txt", filename_str, "foo.txt");
        t!(s: "\\\\?\\C:\\", filename_str, None, opt);
        t!(s: "\\\\?\\C:", filename_str, None, opt);
        t!(s: "\\\\?\\foo/bar", filename_str, None, opt);
        t!(s: "\\\\?\\C:/foo", filename_str, None, opt);
        t!(s: "\\\\.\\foo\\bar", filename_str, "bar");
        t!(s: "\\\\.\\foo", filename_str, None, opt);
        t!(s: "\\\\.\\foo/bar", filename_str, None, opt);
        t!(s: "\\\\.\\foo\\bar/baz", filename_str, "bar/baz");
        t!(s: "\\\\.\\", filename_str, None, opt);
        t!(s: "\\\\?\\a\\b\\", filename_str, "b");

        t!(v: b!("a\\b\\c"), dirname, b!("a\\b"));
        t!(s: "a\\b\\c", dirname_str, "a\\b");
        t!(s: "\\a\\b\\c", dirname_str, "\\a\\b");
        t!(s: "a", dirname_str, ".");
        t!(s: "\\a", dirname_str, "\\");
        t!(s: ".", dirname_str, ".");
        t!(s: "\\", dirname_str, "\\");
        t!(s: "..", dirname_str, "..");
        t!(s: "..\\..", dirname_str, "..\\..");
        t!(s: "c:\\foo.txt", dirname_str, "C:\\");
        t!(s: "C:\\", dirname_str, "C:\\");
        t!(s: "C:", dirname_str, "C:");
        t!(s: "C:foo.txt", dirname_str, "C:");
        t!(s: "\\\\server\\share\\foo.txt", dirname_str, "\\\\server\\share");
        t!(s: "\\\\server\\share", dirname_str, "\\\\server\\share");
        t!(s: "\\\\server", dirname_str, "\\");
        t!(s: "\\\\?\\bar\\foo.txt", dirname_str, "\\\\?\\bar");
        t!(s: "\\\\?\\bar", dirname_str, "\\\\?\\bar");
        t!(s: "\\\\?\\", dirname_str, "\\\\?\\");
        t!(s: "\\\\?\\UNC\\server\\share\\foo.txt", dirname_str, "\\\\?\\UNC\\server\\share");
        t!(s: "\\\\?\\UNC\\server", dirname_str, "\\\\?\\UNC\\server\\");
        t!(s: "\\\\?\\UNC\\", dirname_str, "\\\\?\\UNC\\\\");
        t!(s: "\\\\?\\C:\\foo.txt", dirname_str, "\\\\?\\C:\\");
        t!(s: "\\\\?\\C:\\", dirname_str, "\\\\?\\C:\\");
        t!(s: "\\\\?\\C:", dirname_str, "\\\\?\\C:");
        t!(s: "\\\\?\\C:/foo/bar", dirname_str, "\\\\?\\C:/foo/bar");
        t!(s: "\\\\?\\foo/bar", dirname_str, "\\\\?\\foo/bar");
        t!(s: "\\\\.\\foo\\bar", dirname_str, "\\\\.\\foo");
        t!(s: "\\\\.\\foo", dirname_str, "\\\\.\\foo");
        t!(s: "\\\\?\\a\\b\\", dirname_str, "\\\\?\\a");

        t!(v: b!("hi\\there.txt"), filestem, Some(b!("there")));
        t!(s: "hi\\there.txt", filestem_str, "there");
        t!(s: "hi\\there", filestem_str, "there");
        t!(s: "there.txt", filestem_str, "there");
        t!(s: "there", filestem_str, "there");
        t!(s: ".", filestem_str, None, opt);
        t!(s: "\\", filestem_str, None, opt);
        t!(s: "foo\\.bar", filestem_str, ".bar");
        t!(s: ".bar", filestem_str, ".bar");
        t!(s: "..bar", filestem_str, ".");
        t!(s: "hi\\there..txt", filestem_str, "there.");
        t!(s: "..", filestem_str, None, opt);
        t!(s: "..\\..", filestem_str, None, opt);
        // filestem is based on filename, so we don't need the full set of prefix tests

        t!(v: b!("hi\\there.txt"), extension, Some(b!("txt")));
        t!(v: b!("hi\\there"), extension, None);
        t!(s: "hi\\there.txt", extension_str, Some("txt"), opt);
        t!(s: "hi\\there", extension_str, None, opt);
        t!(s: "there.txt", extension_str, Some("txt"), opt);
        t!(s: "there", extension_str, None, opt);
        t!(s: ".", extension_str, None, opt);
        t!(s: "\\", extension_str, None, opt);
        t!(s: "foo\\.bar", extension_str, None, opt);
        t!(s: ".bar", extension_str, None, opt);
        t!(s: "..bar", extension_str, Some("bar"), opt);
        t!(s: "hi\\there..txt", extension_str, Some("txt"), opt);
        t!(s: "..", extension_str, None, opt);
        t!(s: "..\\..", extension_str, None, opt);
        // extension is based on filename, so we don't need the full set of prefix tests
    }
    // push() must agree with join(), and must re-parse the prefix when
    // pushing turns e.g. "\\?\C:" into a verbatim disk path.
    #[test]
    fn test_push() {
        macro_rules! t(
            (s: $path:expr, $join:expr) => (
                {
                    let path = ($path);
                    let join = ($join);
                    let mut p1 = Path::new(path);
                    let p2 = p1.clone();
                    p1.push(join);
                    assert_eq!(p1, p2.join(join));
                }
            )
        )

        t!(s: "a\\b\\c", "..");
        t!(s: "\\a\\b\\c", "d");
        t!(s: "a\\b", "c\\d");
        t!(s: "a\\b", "\\c\\d");
        // this is just a sanity-check test. push and join share an implementation,
        // so there's no need for the full set of prefix tests

        // we do want to check one odd case though to ensure the prefix is re-parsed
        let mut p = Path::new("\\\\?\\C:");
        assert_eq!(prefix(&p), Some(VerbatimPrefix(2)));
        p.push("foo");
        assert_eq!(prefix(&p), Some(VerbatimDiskPrefix));
        assert_eq!(p.as_str(), Some("\\\\?\\C:\\foo"));

        // and another with verbatim non-normalized paths
        let mut p = Path::new("\\\\?\\C:\\a\\");
        p.push("foo");
        assert_eq!(p.as_str(), Some("\\\\?\\C:\\a\\foo"));
    }
    // push() with a Path argument: relative pushes append; absolute,
    // drive-changing, or prefix-changing pushes replace; verbatim targets
    // keep ".." literally.
    #[test]
    fn test_push_path() {
        macro_rules! t(
            (s: $path:expr, $push:expr, $exp:expr) => (
                {
                    let mut p = Path::new($path);
                    let push = Path::new($push);
                    p.push(&push);
                    assert_eq!(p.as_str(), Some($exp));
                }
            )
        )

        t!(s: "a\\b\\c", "d", "a\\b\\c\\d");
        t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
        t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
        t!(s: "a\\b", "\\c\\d", "\\c\\d");
        t!(s: "a\\b", ".", "a\\b");
        t!(s: "a\\b", "..\\c", "a\\c");
        t!(s: "a\\b", "C:a.txt", "C:a.txt");
        t!(s: "a\\b", "..\\..\\..\\c", "..\\c");
        t!(s: "a\\b", "C:\\a.txt", "C:\\a.txt");
        t!(s: "C:\\a", "C:\\b.txt", "C:\\b.txt");
        t!(s: "C:\\a\\b\\c", "C:d", "C:\\a\\b\\c\\d");
        t!(s: "C:a\\b\\c", "C:d", "C:a\\b\\c\\d");
        t!(s: "C:a\\b", "..\\..\\..\\c", "C:..\\c");
        t!(s: "C:\\a\\b", "..\\..\\..\\c", "C:\\c");
        t!(s: "\\\\server\\share\\foo", "bar", "\\\\server\\share\\foo\\bar");
        t!(s: "\\\\server\\share\\foo", "..\\..\\bar", "\\\\server\\share\\bar");
        t!(s: "\\\\server\\share\\foo", "C:baz", "C:baz");
        t!(s: "\\\\?\\C:\\a\\b", "C:c\\d", "\\\\?\\C:\\a\\b\\c\\d");
        t!(s: "\\\\?\\C:a\\b", "C:c\\d", "C:c\\d");
        t!(s: "\\\\?\\C:\\a\\b", "C:\\c\\d", "C:\\c\\d");
        t!(s: "\\\\?\\foo\\bar", "baz", "\\\\?\\foo\\bar\\baz");
        t!(s: "\\\\?\\C:\\a\\b", "..\\..\\..\\c", "\\\\?\\C:\\a\\b\\..\\..\\..\\c");
        t!(s: "\\\\?\\foo\\bar", "..\\..\\c", "\\\\?\\foo\\bar\\..\\..\\c");
        t!(s: "\\\\?\\", "foo", "\\\\?\\\\foo");
        t!(s: "\\\\?\\UNC\\server\\share\\foo", "bar", "\\\\?\\UNC\\server\\share\\foo\\bar");
        t!(s: "\\\\?\\UNC\\server\\share", "C:\\a", "C:\\a");
        t!(s: "\\\\?\\UNC\\server\\share", "C:a", "C:a");
        t!(s: "\\\\?\\UNC\\server", "foo", "\\\\?\\UNC\\server\\\\foo");
        t!(s: "C:\\a", "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share");
        t!(s: "\\\\.\\foo\\bar", "baz", "\\\\.\\foo\\bar\\baz");
        t!(s: "\\\\.\\foo\\bar", "C:a", "C:a");
        // again, not sure about the following, but I'm assuming \\.\ should be verbatim
        t!(s: "\\\\.\\foo", "..\\bar", "\\\\.\\foo\\..\\bar");

        t!(s: "\\\\?\\C:", "foo", "\\\\?\\C:\\foo"); // this is a weird one
    }
#[test]
fn test_push_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many($push);
assert_eq!(p.as_str(), Some($exp));
}
);
(v: $path:expr, $push:expr, $exp:expr) => (
{
let mut p = Path::new($path);
p.push_many($push);
assert_eq!(p.as_vec(), $exp);
}
)
)
t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", ["d", "\\e"], "\\e");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e");
t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [b!("d"), b!("\\e"), b!("f")], b!("\\e\\f"));
t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a\\b\\c\\d\\e"));
}
#[test]
fn test_pop() {
macro_rules! t(
(s: $path:expr, $left:expr, $right:expr) => (
{
let pstr = $path;
let mut p = Path::new(pstr);
let result = p.pop();
let left = $left;
assert!(p.as_str() == Some(left),
"`{}`.pop() failed; expected remainder `{}`, found `{}`",
pstr, left, p.as_str().unwrap());
assert_eq!(result, $right);
}
);
(v: [$($path:expr),+], [$($left:expr),+], $right:expr) => (
{
let mut p = Path::new(b!($($path),+));
let result = p.pop();
assert_eq!(p.as_vec(), b!($($left),+));
assert_eq!(result, $right);
}
)
)
t!(s: "a\\b\\c", "a\\b", true);
t!(s: "a", ".", true);
t!(s: ".", ".", false);
t!(s: "\\a", "\\", true);
t!(s: "\\", "\\", false);
t!(v: ["a\\b\\c"], ["a\\b"], true);
t!(v: ["a"], ["."], true);
t!(v: ["."], ["."], false);
t!(v: ["\\a"], ["\\"], true);
t!(v: ["\\"], ["\\"], false);
t!(s: "C:\\a\\b", "C:\\a", true);
t!(s: "C:\\a", "C:\\", true);
t!(s: "C:\\", "C:\\", false);
t!(s: "C:a\\b", "C:a", true);
t!(s: "C:a", "C:", true);
t!(s: "C:", "C:", false);
t!(s: "\\\\server\\share\\a\\b", "\\\\server\\share\\a", true);
t!(s: "\\\\server\\share\\a", "\\\\server\\share", true);
t!(s: "\\\\server\\share", "\\\\server\\share", false);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", true);
t!(s: "\\\\?\\a\\b", "\\\\?\\a", true);
t!(s: "\\\\?\\a", "\\\\?\\a", false);
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", true);
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\", true);
t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\", false);
t!(s: "\\\\?\\UNC\\server\\share\\a\\b", "\\\\?\\UNC\\server\\share\\a", true);
t!(s: "\\\\?\\UNC\\server\\share\\a", "\\\\?\\UNC\\server\\share", true);
t!(s: "\\\\?\\UNC\\server\\share", "\\\\?\\UNC\\server\\share", false);
t!(s: "\\\\.\\a\\b\\c", "\\\\.\\a\\b", true);
t!(s: "\\\\.\\a\\b", "\\\\.\\a", true);
t!(s: "\\\\.\\a", "\\\\.\\a", false);
t!(s: "\\\\?\\a\\b\\", "\\\\?\\a", true);
}
#[test]
fn test_root_path() {
assert_eq!(Path::new("a\\b\\c").root_path(), None);
assert_eq!(Path::new("\\a\\b\\c").root_path(), Some(Path::new("\\")));
assert_eq!(Path::new("C:a").root_path(), None);
assert_eq!(Path::new("C:\\a").root_path(), Some(Path::new("C:\\")));
assert_eq!(Path::new("\\\\a\\b\\c").root_path(), Some(Path::new("\\\\a\\b")));
assert_eq!(Path::new("\\\\?\\a\\b").root_path(), Some(Path::new("\\\\?\\a")));
assert_eq!(Path::new("\\\\?\\C:\\a").root_path(), Some(Path::new("\\\\?\\C:\\")));
assert_eq!(Path::new("\\\\?\\UNC\\a\\b\\c").root_path(),
Some(Path::new("\\\\?\\UNC\\a\\b")));
assert_eq!(Path::new("\\\\.\\a\\b").root_path(), Some(Path::new("\\\\.\\a")));
}
#[test]
fn test_join() {
t!(s: Path::new("a\\b\\c").join(".."), "a\\b");
t!(s: Path::new("\\a\\b\\c").join("d"), "\\a\\b\\c\\d");
t!(s: Path::new("a\\b").join("c\\d"), "a\\b\\c\\d");
t!(s: Path::new("a\\b").join("\\c\\d"), "\\c\\d");
t!(s: Path::new(".").join("a\\b"), "a\\b");
t!(s: Path::new("\\").join("a\\b"), "\\a\\b");
t!(v: Path::new(b!("a\\b\\c")).join(b!("..")), b!("a\\b"));
t!(v: Path::new(b!("\\a\\b\\c")).join(b!("d")), b!("\\a\\b\\c\\d"));
// full join testing is covered under test_push_path, so no need for
// the full set of prefix tests
}
#[test]
fn test_join_path() {
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let join = Path::new($join);
let res = path.join(&join);
assert_eq!(res.as_str(), Some($exp));
}
)
)
t!(s: "a\\b\\c", "..", "a\\b");
t!(s: "\\a\\b\\c", "d", "\\a\\b\\c\\d");
t!(s: "a\\b", "c\\d", "a\\b\\c\\d");
t!(s: "a\\b", "\\c\\d", "\\c\\d");
t!(s: ".", "a\\b", "a\\b");
t!(s: "\\", "a\\b", "\\a\\b");
// join is implemented using push, so there's no need for
// the full set of prefix tests
}
#[test]
fn test_join_many() {
use to_man = at_vec::to_managed_move;
macro_rules! t(
(s: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many($join);
assert_eq!(res.as_str(), Some($exp));
}
);
(v: $path:expr, $join:expr, $exp:expr) => (
{
let path = Path::new($path);
let res = path.join_many($join);
assert_eq!(res.as_vec(), $exp);
}
)
)
t!(s: "a\\b\\c", ["d", "e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", ["..", "d"], "a\\b\\d");
t!(s: "a\\b\\c", ["d", "\\e", "f"], "\\e\\f");
t!(s: "a\\b\\c", [~"d", ~"e"], "a\\b\\c\\d\\e");
t!(s: "a\\b\\c", [@"d", @"e"], "a\\b\\c\\d\\e");
t!(v: b!("a\\b\\c"), [b!("d"), b!("e")], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [b!("d").to_owned(), b!("e").to_owned()], b!("a\\b\\c\\d\\e"));
t!(v: b!("a\\b\\c"), [to_man(b!("d").to_owned()), to_man(b!("e").to_owned())],
b!("a\\b\\c\\d\\e"));
}
#[test]
fn test_with_helpers() {
macro_rules! t(
(s: $path:expr, $op:ident, $arg:expr, $res:expr) => (
{
let pstr = $path;
let path = Path::new(pstr);
let arg = $arg;
let res = path.$op(arg);
let exp = $res;
assert!(res.as_str() == Some(exp),
"`{}`.{}(\"{}\"): Expected `{}`, found `{}`",
pstr, stringify!($op), arg, exp, res.as_str().unwrap());
}
)
)
t!(s: "a\\b\\c", with_filename, "d", "a\\b\\d");
t!(s: ".", with_filename, "foo", "foo");
t!(s: "\\a\\b\\c", with_filename, "d", "\\a\\b\\d");
t!(s: "\\", with_filename, "foo", "\\foo");
t!(s: "\\a", with_filename, "foo", "\\foo");
t!(s: "foo", with_filename, "bar", "bar");
t!(s: "\\", with_filename, "foo\\", "\\foo");
t!(s: "\\a", with_filename, "foo\\", "\\foo");
t!(s: "a\\b\\c", with_filename, "", "a\\b");
t!(s: "a\\b\\c", with_filename, ".", "a\\b");
t!(s: "a\\b\\c", with_filename, "..", "a");
t!(s: "\\a", with_filename, "", "\\");
t!(s: "foo", with_filename, "", ".");
t!(s: "a\\b\\c", with_filename, "d\\e", "a\\b\\d\\e");
t!(s: "a\\b\\c", with_filename, "\\d", "a\\b\\d");
t!(s: "..", with_filename, "foo", "..\\foo");
t!(s: "..\\..", with_filename, "foo", "..\\..\\foo");
t!(s: "..", with_filename, "", "..");
t!(s: "..\\..", with_filename, "", "..\\..");
t!(s: "C:\\foo\\bar", with_filename, "baz", "C:\\foo\\baz");
t!(s: "C:\\foo", with_filename, "bar", "C:\\bar");
t!(s: "C:\\", with_filename, "foo", "C:\\foo");
t!(s: "C:foo\\bar", with_filename, "baz", "C:foo\\baz");
t!(s: "C:foo", with_filename, "bar", "C:bar");
t!(s: "C:", with_filename, "foo", "C:foo");
t!(s: "C:\\foo", with_filename, "", "C:\\");
t!(s: "C:foo", with_filename, "", "C:");
t!(s: "C:\\foo\\bar", with_filename, "..", "C:\\");
t!(s: "C:\\foo", with_filename, "..", "C:\\");
t!(s: "C:\\", with_filename, "..", "C:\\");
t!(s: "C:foo\\bar", with_filename, "..", "C:");
t!(s: "C:foo", with_filename, "..", "C:..");
t!(s: "C:", with_filename, "..", "C:..");
t!(s: "\\\\server\\share\\foo", with_filename, "bar", "\\\\server\\share\\bar");
t!(s: "\\\\server\\share", with_filename, "foo", "\\\\server\\share\\foo");
t!(s: "\\\\server\\share\\foo", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "", "\\\\server\\share");
t!(s: "\\\\server\\share\\foo", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\server\\share", with_filename, "..", "\\\\server\\share");
t!(s: "\\\\?\\C:\\foo\\bar", with_filename, "baz", "\\\\?\\C:\\foo\\baz");
t!(s: "\\\\?\\C:\\foo", with_filename, "bar", "\\\\?\\C:\\bar");
t!(s: "\\\\?\\C:\\", with_filename, "foo", "\\\\?\\C:\\foo");
t!(s: "\\\\?\\C:\\foo", with_filename, "..", "\\\\?\\C:\\..");
t!(s: "\\\\?\\foo\\bar", with_filename, "baz", "\\\\?\\foo\\baz");
t!(s: "\\\\?\\foo", with_filename, "bar", "\\\\?\\foo\\bar");
t!(s: "\\\\?\\", with_filename, "foo", "\\\\?\\\\foo");
t!(s: "\\\\?\\foo\\bar", with_filename, "..", "\\\\?\\foo\\..");
t!(s: "\\\\.\\foo\\bar", with_filename, "baz", "\\\\.\\foo\\baz");
t!(s: "\\\\.\\foo", with_filename, "bar", "\\\\.\\foo\\bar");
t!(s: "\\\\.\\foo\\bar", with_filename, "..", "\\\\.\\foo\\..");
t!(s: "hi\\there.txt", with_extension, "exe", "hi\\there.exe");
t!(s: "hi\\there.txt", with_extension, "", "hi\\there");
t!(s: "hi\\there.txt", with_extension, ".", "hi\\there..");
t!(s: "hi\\there.txt", with_extension, "..", "hi\\there...");
t!(s: "hi\\there", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\there", with_extension, ".", "hi\\there..");
t!(s: "hi\\there", with_extension, "..", "hi\\there...");
t!(s: "hi\\there.", with_extension, "txt", "hi\\there.txt");
t!(s: "hi\\.foo", with_extension, "txt", "hi\\.foo.txt");
t!(s: "hi\\there.txt", with_extension, ".foo", "hi\\there..foo");
t!(s: "\\", with_extension, "txt", "\\");
t!(s: "\\", with_extension, ".", "\\");
t!(s: "\\", with_extension, "..", "\\");
t!(s: ".", with_extension, "txt", ".");
// extension setter calls filename setter internally, no need for extended tests
}
#[test]
fn test_setters() {
macro_rules! t(
(s: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert_eq!(p1, p2.$with(arg));
}
);
(v: $path:expr, $set:ident, $with:ident, $arg:expr) => (
{
let path = $path;
let arg = $arg;
let mut p1 = Path::new(path);
p1.$set(arg);
let p2 = Path::new(path);
assert_eq!(p1, p2.$with(arg));
}
)
)
t!(v: b!("a\\b\\c"), set_filename, with_filename, b!("d"));
t!(v: b!("\\"), set_filename, with_filename, b!("foo"));
t!(s: "a\\b\\c", set_filename, with_filename, "d");
t!(s: "\\", set_filename, with_filename, "foo");
t!(s: ".", set_filename, with_filename, "foo");
t!(s: "a\\b", set_filename, with_filename, "");
t!(s: "a", set_filename, with_filename, "");
t!(v: b!("hi\\there.txt"), set_extension, with_extension, b!("exe"));
t!(s: "hi\\there.txt", set_extension, with_extension, "exe");
t!(s: "hi\\there.", set_extension, with_extension, "txt");
t!(s: "hi\\there", set_extension, with_extension, "txt");
t!(s: "hi\\there.txt", set_extension, with_extension, "");
t!(s: "hi\\there", set_extension, with_extension, "");
t!(s: ".", set_extension, with_extension, "txt");
// with_ helpers use the setter internally, so the tests for the with_ helpers
// will suffice. No need for the full set of prefix tests.
}
#[test]
fn test_getters() {
macro_rules! t(
(s: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
let path = $path;
let filename = $filename;
assert!(path.filename_str() == filename,
"`{}`.filename_str(): Expected `{:?}`, found `{:?}`",
path.as_str().unwrap(), filename, path.filename_str());
let dirname = $dirname;
assert!(path.dirname_str() == dirname,
"`{}`.dirname_str(): Expected `{:?}`, found `{:?}`",
path.as_str().unwrap(), dirname, path.dirname_str());
let filestem = $filestem;
assert!(path.filestem_str() == filestem,
"`{}`.filestem_str(): Expected `{:?}`, found `{:?}`",
path.as_str().unwrap(), filestem, path.filestem_str());
let ext = $ext;
assert!(path.extension_str() == ext,
"`{}`.extension_str(): Expected `{:?}`, found `{:?}`",
path.as_str().unwrap(), ext, path.extension_str());
}
);
(v: $path:expr, $filename:expr, $dirname:expr, $filestem:expr, $ext:expr) => (
{
let path = $path;
assert_eq!(path.filename(), $filename);
assert_eq!(path.dirname(), $dirname);
assert_eq!(path.filestem(), $filestem);
assert_eq!(path.extension(), $ext);
}
)
)
t!(v: Path::new(b!("a\\b\\c")), Some(b!("c")), b!("a\\b"), Some(b!("c")), None);
t!(s: Path::new("a\\b\\c"), Some("c"), Some("a\\b"), Some("c"), None);
t!(s: Path::new("."), None, Some("."), None, None);
t!(s: Path::new("\\"), None, Some("\\"), None, None);
t!(s: Path::new(".."), None, Some(".."), None, None);
t!(s: Path::new("..\\.."), None, Some("..\\.."), None, None);
t!(s: Path::new("hi\\there.txt"), Some("there.txt"), Some("hi"),
Some("there"), Some("txt"));
t!(s: Path::new("hi\\there"), Some("there"), Some("hi"), Some("there"), None);
t!(s: Path::new("hi\\there."), Some("there."), Some("hi"),
Some("there"), Some(""));
t!(s: Path::new("hi\\.there"), Some(".there"), Some("hi"), Some(".there"), None);
t!(s: Path::new("hi\\..there"), Some("..there"), Some("hi"),
Some("."), Some("there"));
// these are already tested in test_components, so no need for extended tests
}
#[test]
fn test_dir_path() {
t!(s: Path::new("hi\\there").dir_path(), "hi");
t!(s: Path::new("hi").dir_path(), ".");
t!(s: Path::new("\\hi").dir_path(), "\\");
t!(s: Path::new("\\").dir_path(), "\\");
t!(s: Path::new("..").dir_path(), "..");
t!(s: Path::new("..\\..").dir_path(), "..\\..");
// dir_path is just dirname interpreted as a path.
// No need for extended tests
}
#[test]
fn test_is_absolute() {
macro_rules! t(
($path:expr, $abs:expr, $vol:expr, $cwd:expr, $rel:expr) => (
{
let path = Path::new($path);
let (abs, vol, cwd, rel) = ($abs, $vol, $cwd, $rel);
let b = path.is_absolute();
assert!(b == abs, "Path '{}'.is_absolute(): expected {:?}, found {:?}",
path.as_str().unwrap(), abs, b);
let b = is_vol_relative(&path);
assert!(b == vol, "is_vol_relative('{}'): expected {:?}, found {:?}",
path.as_str().unwrap(), vol, b);
let b = is_cwd_relative(&path);
assert!(b == cwd, "is_cwd_relative('{}'): expected {:?}, found {:?}",
path.as_str().unwrap(), cwd, b);
let b = path.is_relative();
assert!(b == rel, "Path '{}'.is_relativf(): expected {:?}, found {:?}",
path.as_str().unwrap(), rel, b);
}
)
)
t!("a\\b\\c", false, false, false, true);
t!("\\a\\b\\c", false, true, false, false);
t!("a", false, false, false, true);
t!("\\a", false, true, false, false);
t!(".", false, false, false, true);
t!("\\", false, true, false, false);
t!("..", false, false, false, true);
t!("..\\..", false, false, false, true);
t!("C:a\\b.txt", false, false, true, false);
t!("C:\\a\\b.txt", true, false, false, false);
t!("\\\\server\\share\\a\\b.txt", true, false, false, false);
t!("\\\\?\\a\\b\\c.txt", true, false, false, false);
t!("\\\\?\\C:\\a\\b.txt", true, false, false, false);
t!("\\\\?\\C:a\\b.txt", true, false, false, false); // NB: not equivalent to C:a\b.txt
t!("\\\\?\\UNC\\server\\share\\a\\b.txt", true, false, false, false);
t!("\\\\.\\a\\b", true, false, false, false);
}
#[test]
fn test_is_ancestor_of() {
macro_rules! t(
(s: $path:expr, $dest:expr, $exp:expr) => (
{
let path = Path::new($path);
let dest = Path::new($dest);
let exp = $exp;
let res = path.is_ancestor_of(&dest);
assert!(res == exp,
"`{}`.is_ancestor_of(`{}`): Expected {:?}, found {:?}",
path.as_str().unwrap(), dest.as_str().unwrap(), exp, res);
}
)
)
t!(s: "a\\b\\c", "a\\b\\c\\d", true);
t!(s: "a\\b\\c", "a\\b\\c", true);
t!(s: "a\\b\\c", "a\\b", false);
t!(s: "\\a\\b\\c", "\\a\\b\\c", true);
t!(s: "\\a\\b", "\\a\\b\\c", true);
t!(s: "\\a\\b\\c\\d", "\\a\\b\\c", false);
t!(s: "\\a\\b", "a\\b\\c", false);
t!(s: "a\\b", "\\a\\b\\c", false);
t!(s: "a\\b\\c", "a\\b\\d", false);
t!(s: "..\\a\\b\\c", "a\\b\\c", false);
t!(s: "a\\b\\c", "..\\a\\b\\c", false);
t!(s: "a\\b\\c", "a\\b\\cd", false);
t!(s: "a\\b\\cd", "a\\b\\c", false);
t!(s: "..\\a\\b", "..\\a\\b\\c", true);
t!(s: ".", "a\\b", true);
t!(s: ".", ".", true);
t!(s: "\\", "\\", true);
t!(s: "\\", "\\a\\b", true);
t!(s: "..", "a\\b", true);
t!(s: "..\\..", "a\\b", true);
t!(s: "foo\\bar", "foobar", false);
t!(s: "foobar", "foo\\bar", false);
t!(s: "foo", "C:foo", false);
t!(s: "C:foo", "foo", false);
t!(s: "C:foo", "C:foo\\bar", true);
t!(s: "C:foo\\bar", "C:foo", false);
t!(s: "C:\\foo", "C:\\foo\\bar", true);
t!(s: "C:", "C:", true);
t!(s: "C:", "C:\\", false);
t!(s: "C:\\", "C:", false);
t!(s: "C:\\", "C:\\", true);
t!(s: "C:\\foo\\bar", "C:\\foo", false);
t!(s: "C:foo\\bar", "C:foo", false);
t!(s: "C:\\foo", "\\foo", false);
t!(s: "\\foo", "C:\\foo", false);
t!(s: "\\\\server\\share\\foo", "\\\\server\\share\\foo\\bar", true);
t!(s: "\\\\server\\share", "\\\\server\\share\\foo", true);
t!(s: "\\\\server\\share\\foo", "\\\\server\\share", false);
t!(s: "C:\\foo", "\\\\server\\share\\foo", false);
t!(s: "\\\\server\\share\\foo", "C:\\foo", false);
t!(s: "\\\\?\\foo\\bar", "\\\\?\\foo\\bar\\baz", true);
t!(s: "\\\\?\\foo\\bar\\baz", "\\\\?\\foo\\bar", false);
t!(s: "\\\\?\\foo\\bar", "\\foo\\bar\\baz", false);
t!(s: "\\foo\\bar", "\\\\?\\foo\\bar\\baz", false);
t!(s: "\\\\?\\C:\\foo\\bar", "\\\\?\\C:\\foo\\bar\\baz", true);
t!(s: "\\\\?\\C:\\foo\\bar\\baz", "\\\\?\\C:\\foo\\bar", false);
t!(s: "\\\\?\\C:\\", "\\\\?\\C:\\foo", true);
t!(s: "\\\\?\\C:", "\\\\?\\C:\\", false); // this is a weird one
t!(s: "\\\\?\\C:\\", "\\\\?\\C:", false);
t!(s: "\\\\?\\C:\\a", "\\\\?\\c:\\a\\b", true);
t!(s: "\\\\?\\c:\\a", "\\\\?\\C:\\a\\b", true);
t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a\\b", false);
t!(s: "\\\\?\\foo", "\\\\?\\foobar", false);
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", true);
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\", true);
t!(s: "\\\\?\\a\\b\\", "\\\\?\\a\\b", true);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b\\", false);
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c\\d", true);
t!(s: "\\\\?\\UNC\\a\\b\\c\\d", "\\\\?\\UNC\\a\\b\\c", false);
t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", true);
t!(s: "\\\\.\\foo\\bar", "\\\\.\\foo\\bar\\baz", true);
t!(s: "\\\\.\\foo\\bar\\baz", "\\\\.\\foo\\bar", false);
t!(s: "\\\\.\\foo", "\\\\.\\foo\\bar", true);
t!(s: "\\\\.\\foo", "\\\\.\\foobar", false);
t!(s: "\\a\\b", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b", "\\a\\b", false);
t!(s: "\\a\\b", "\\\\?\\C:\\a\\b", false);
t!(s: "\\\\?\\C:\\a\\b", "\\a\\b", false);
t!(s: "Z:\\a\\b", "\\\\?\\z:\\a\\b", true);
t!(s: "C:\\a\\b", "\\\\?\\D:\\a\\b", false);
t!(s: "a\\b", "\\\\?\\a\\b", false);
t!(s: "\\\\?\\a\\b", "a\\b", false);
t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b", true);
t!(s: "\\\\?\\C:\\a\\b", "C:\\a\\b", true);
t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", false);
t!(s: "C:a\\b", "\\\\?\\C:a\\b", false);
t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", false);
t!(s: "\\\\?\\C:a\\b", "C:a\\b", false);
t!(s: "C:\\a\\b", "\\\\?\\C:\\a\\b\\", true);
t!(s: "\\\\?\\C:\\a\\b\\", "C:\\a\\b", true);
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b\\c", true);
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b\\c", true);
}
#[test]
fn test_ends_with_path() {
macro_rules! t(
(s: $path:expr, $child:expr, $exp:expr) => (
{
let path = Path::new($path);
let child = Path::new($child);
assert_eq!(path.ends_with_path(&child), $exp);
}
);
)
t!(s: "a\\b\\c", "c", true);
t!(s: "a\\b\\c", "d", false);
t!(s: "foo\\bar\\quux", "bar", false);
t!(s: "foo\\bar\\quux", "barquux", false);
t!(s: "a\\b\\c", "b\\c", true);
t!(s: "a\\b\\c", "a\\b\\c", true);
t!(s: "a\\b\\c", "foo\\a\\b\\c", false);
t!(s: "\\a\\b\\c", "a\\b\\c", true);
t!(s: "\\a\\b\\c", "\\a\\b\\c", false); // child must be relative
t!(s: "\\a\\b\\c", "foo\\a\\b\\c", false);
t!(s: "a\\b\\c", "", false);
t!(s: "", "", true);
t!(s: "\\a\\b\\c", "d\\e\\f", false);
t!(s: "a\\b\\c", "a\\b", false);
t!(s: "a\\b\\c", "b", false);
t!(s: "C:\\a\\b", "b", true);
t!(s: "C:\\a\\b", "C:b", false);
t!(s: "C:\\a\\b", "C:a\\b", false);
}
#[test]
fn test_path_relative_from() {
macro_rules! t(
(s: $path:expr, $other:expr, $exp:expr) => (
{
let path = Path::new($path);
let other = Path::new($other);
let res = path.path_relative_from(&other);
let exp = $exp;
assert!(res.as_ref().and_then(|x| x.as_str()) == exp,
"`{}`.path_relative_from(`{}`): Expected {:?}, got {:?}",
path.as_str().unwrap(), other.as_str().unwrap(), exp,
res.as_ref().and_then(|x| x.as_str()));
}
)
)
t!(s: "a\\b\\c", "a\\b", Some("c"));
t!(s: "a\\b\\c", "a\\b\\d", Some("..\\c"));
t!(s: "a\\b\\c", "a\\b\\c\\d", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "a\\b\\c", "a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "a\\b\\c", "a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "a\\b\\c", "d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "a\\b\\c", "\\a\\b\\c", None);
t!(s: "\\a\\b\\c", "a\\b\\c", Some("\\a\\b\\c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d", Some(".."));
t!(s: "\\a\\b\\c", "\\a\\b", Some("c"));
t!(s: "\\a\\b\\c", "\\a\\b\\c\\d\\e", Some("..\\.."));
t!(s: "\\a\\b\\c", "\\a\\d\\e", Some("..\\..\\b\\c"));
t!(s: "\\a\\b\\c", "\\d\\e\\f", Some("..\\..\\..\\a\\b\\c"));
t!(s: "hi\\there.txt", "hi\\there", Some("..\\there.txt"));
t!(s: ".", "a", Some(".."));
t!(s: ".", "a\\b", Some("..\\.."));
t!(s: ".", ".", Some("."));
t!(s: "a", ".", Some("a"));
t!(s: "a\\b", ".", Some("a\\b"));
t!(s: "..", ".", Some(".."));
t!(s: "a\\b\\c", "a\\b\\c", Some("."));
t!(s: "\\a\\b\\c", "\\a\\b\\c", Some("."));
t!(s: "\\", "\\", Some("."));
t!(s: "\\", ".", Some("\\"));
t!(s: "..\\..\\a", "b", Some("..\\..\\..\\a"));
t!(s: "a", "..\\..\\b", None);
t!(s: "..\\..\\a", "..\\..\\b", Some("..\\a"));
t!(s: "..\\..\\a", "..\\..\\a\\b", Some(".."));
t!(s: "..\\..\\a\\b", "..\\..\\a", Some("b"));
t!(s: "C:a\\b\\c", "C:a\\b", Some("c"));
t!(s: "C:a\\b", "C:a\\b\\c", Some(".."));
t!(s: "C:" ,"C:a\\b", Some("..\\.."));
t!(s: "C:a\\b", "C:c\\d", Some("..\\..\\a\\b"));
t!(s: "C:a\\b", "D:c\\d", Some("C:a\\b"));
t!(s: "C:a\\b", "C:..\\c", None);
t!(s: "C:..\\a", "C:b\\c", Some("..\\..\\..\\a"));
t!(s: "C:\\a\\b\\c", "C:\\a\\b", Some("c"));
t!(s: "C:\\a\\b", "C:\\a\\b\\c", Some(".."));
t!(s: "C:\\", "C:\\a\\b", Some("..\\.."));
t!(s: "C:\\a\\b", "C:\\c\\d", Some("..\\..\\a\\b"));
t!(s: "C:\\a\\b", "C:a\\b", Some("C:\\a\\b"));
t!(s: "C:a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:\\a\\b", None);
t!(s: "\\a\\b", "C:a\\b", None);
t!(s: "a\\b", "C:\\a\\b", None);
t!(s: "a\\b", "C:a\\b", None);
t!(s: "\\\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\a\\b\\c\\e", "\\\\a\\b\\c\\d", Some("..\\e"));
t!(s: "\\\\a\\c\\d", "\\\\a\\b\\d", Some("\\\\a\\c\\d"));
t!(s: "\\\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\b\\c\\d"));
t!(s: "\\\\a\\b\\c", "\\d\\e", Some("\\\\a\\b\\c"));
t!(s: "\\d\\e", "\\\\a\\b\\c", None);
t!(s: "d\\e", "\\\\a\\b\\c", None);
t!(s: "C:\\a\\b\\c", "\\\\a\\b\\c", Some("C:\\a\\b\\c"));
t!(s: "C:\\c", "\\\\a\\b\\c", Some("C:\\c"));
t!(s: "\\\\?\\a\\b", "\\a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "a\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "b", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a\\b", "\\\\?\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\b", Some("c"));
t!(s: "\\\\?\\a\\b", "\\\\?\\c\\d", Some("\\\\?\\a\\b"));
t!(s: "\\\\?\\a", "\\\\?\\b", Some("\\\\?\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "\\\\?\\C:\\b", Some("..\\a"));
t!(s: "\\\\?\\C:\\a", "\\\\?\\D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\C:\\a\\b", "\\\\?\\c:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a", "C:\\a\\b", Some(".."));
t!(s: "C:\\a\\b", "\\\\?\\C:\\a", Some("b"));
t!(s: "C:\\a", "\\\\?\\C:\\a\\b", Some(".."));
t!(s: "\\\\?\\C:\\a", "D:\\a", Some("\\\\?\\C:\\a"));
t!(s: "\\\\?\\c:\\a\\b", "C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\a\\b", "C:a\\b", Some("\\\\?\\C:\\a\\b"));
t!(s: "\\\\?\\C:\\a\\.\\b", "C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "C:a\\b", "\\\\?\\C:\\a\\b", None);
t!(s: "\\\\?\\C:\\a\\.\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\.\\b"));
t!(s: "\\\\?\\C:\\a\\b/c", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\b/c"));
t!(s: "\\\\?\\C:\\a\\..\\b", "\\\\?\\C:\\a", Some("\\\\?\\C:\\a\\..\\b"));
t!(s: "\\\\?\\C:\\a\\b\\", "\\\\?\\C:\\a", Some("b"));
t!(s: "\\\\?\\C:\\.\\b", "\\\\?\\C:\\.", Some("b"));
t!(s: "C:\\b", "\\\\?\\C:\\.", Some("..\\b"));
t!(s: "\\\\?\\a\\.\\b\\c", "\\\\?\\a\\.\\b", Some("c"));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\.\\d", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\a\\..\\b", "\\\\?\\a\\..", Some("b"));
t!(s: "\\\\?\\a\\b\\..", "\\\\?\\a\\b", Some("\\\\?\\a\\b\\.."));
t!(s: "\\\\?\\a\\b\\c", "\\\\?\\a\\..\\b", Some("..\\..\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\?\\UNC\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\?\\UNC\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\?\\C:\\a\\b\\c", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\?\\UNC\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\b", Some("c"));
t!(s: "\\\\?\\UNC\\a\\b", "\\\\a\\b\\c", Some(".."));
t!(s: "\\\\?\\UNC\\a\\b\\c", "\\\\a\\c\\d", Some("\\\\?\\UNC\\a\\b\\c"));
t!(s: "\\\\?\\UNC\\b\\c\\d", "\\\\a\\c\\d", Some("\\\\?\\UNC\\b\\c\\d"));
t!(s: "\\\\?\\UNC\\a\\b\\.", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\."));
t!(s: "\\\\?\\UNC\\a\\b\\c/d", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\c/d"));
t!(s: "\\\\?\\UNC\\a\\b\\..", "\\\\a\\b", Some("\\\\?\\UNC\\a\\b\\.."));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\b", Some("c"));
t!(s: "\\\\a\\b\\c", "\\\\?\\UNC\\a\\c\\d", Some("\\\\a\\b\\c"));
}
#[test]
fn test_str_component_iter() {
macro_rules! t(
(s: $path:expr, $exp:expr) => (
{
let path = Path::new($path);
let comps = path.str_component_iter().map(|x|x.unwrap()).to_owned_vec();
let exp: &[&str] = $exp;
assert!(comps.as_slice() == exp,
"str_component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
let comps = path.rev_str_component_iter().map(|x|x.unwrap()).to_owned_vec();
let exp = exp.rev_iter().map(|&x|x).to_owned_vec();
assert!(comps.as_slice() == exp,
"rev_str_component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
}
);
(v: [$($arg:expr),+], $exp:expr) => (
{
let path = Path::new(b!($($arg),+));
let comps = path.str_component_iter().map(|x|x.unwrap()).to_owned_vec();
let exp: &[&str] = $exp;
assert!(comps.as_slice() == exp,
"str_component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
let comps = path.rev_str_component_iter().map(|x|x.unwrap()).to_owned_vec();
let exp = exp.rev_iter().map(|&x|x).to_owned_vec();
assert!(comps.as_slice() == exp,
"rev_str_component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
}
)
)
t!(v: ["a\\b\\c"], ["a", "b", "c"]);
t!(s: "a\\b\\c", ["a", "b", "c"]);
t!(s: "a\\b\\d", ["a", "b", "d"]);
t!(s: "a\\b\\cd", ["a", "b", "cd"]);
t!(s: "\\a\\b\\c", ["a", "b", "c"]);
t!(s: "a", ["a"]);
t!(s: "\\a", ["a"]);
t!(s: "\\", []);
t!(s: ".", ["."]);
t!(s: "..", [".."]);
t!(s: "..\\..", ["..", ".."]);
t!(s: "..\\..\\foo", ["..", "..", "foo"]);
t!(s: "C:foo\\bar", ["foo", "bar"]);
t!(s: "C:foo", ["foo"]);
t!(s: "C:", []);
t!(s: "C:\\foo\\bar", ["foo", "bar"]);
t!(s: "C:\\foo", ["foo"]);
t!(s: "C:\\", []);
t!(s: "\\\\server\\share\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\server\\share\\foo", ["foo"]);
t!(s: "\\\\server\\share", []);
t!(s: "\\\\?\\foo\\bar\\baz", ["bar", "baz"]);
t!(s: "\\\\?\\foo\\bar", ["bar"]);
t!(s: "\\\\?\\foo", []);
t!(s: "\\\\?\\", []);
t!(s: "\\\\?\\a\\b", ["b"]);
t!(s: "\\\\?\\a\\b\\", ["b"]);
t!(s: "\\\\?\\foo\\bar\\\\baz", ["bar", "", "baz"]);
t!(s: "\\\\?\\C:\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\?\\C:\\foo", ["foo"]);
t!(s: "\\\\?\\C:\\", []);
t!(s: "\\\\?\\C:\\foo\\", ["foo"]);
t!(s: "\\\\?\\UNC\\server\\share\\foo\\bar", ["foo", "bar"]);
t!(s: "\\\\?\\UNC\\server\\share\\foo", ["foo"]);
t!(s: "\\\\?\\UNC\\server\\share", []);
t!(s: "\\\\.\\foo\\bar\\baz", ["bar", "baz"]);
t!(s: "\\\\.\\foo\\bar", ["bar"]);
t!(s: "\\\\.\\foo", []);
}
#[test]
fn test_component_iter() {
macro_rules! t(
(s: $path:expr, $exp:expr) => (
{
let path = Path::new($path);
let comps = path.component_iter().to_owned_vec();
let exp: &[&[u8]] = $exp;
assert!(comps.as_slice() == exp, "component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
let comps = path.rev_component_iter().to_owned_vec();
let exp = exp.rev_iter().map(|&x|x).to_owned_vec();
assert!(comps.as_slice() == exp,
"rev_component_iter: Expected {:?}, found {:?}",
comps.as_slice(), exp);
}
)
)
t!(s: "a\\b\\c", [b!("a"), b!("b"), b!("c")]);
t!(s: ".", [b!(".")]);
// since this is really a wrapper around str_component_iter, those tests suffice
}
}
| 41.522015 | 100 | 0.415672 |
e6a9f334829c6a2a88bde79e9526d64efab6dd78 | 252 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
mod events;
mod keyboard2;
mod keyboard3;
pub mod service;
pub use service::Service;
| 21 | 73 | 0.753968 |
79420ee206d25dc5cea4b3de6668dc7d4a22ae64 | 9,623 | // Copyright 2019 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! This is the Fuchsia Installer implementation that talks to fuchsia.update.installer FIDL API.
use crate::install_plan::FuchsiaInstallPlan;
use anyhow::anyhow;
use fidl::endpoints::create_proxy;
use fidl_fuchsia_update_installer::{
Initiator, InstallerMarker, InstallerProxy, MonitorEvent, MonitorMarker, MonitorOptions,
Options, State,
};
use fuchsia_component::client::connect_to_service;
use fuchsia_zircon as zx;
use futures::future::BoxFuture;
use futures::prelude::*;
use log::info;
use omaha_client::{
installer::{Installer, ProgressObserver},
protocol::request::InstallSource,
};
use thiserror::Error;
#[derive(Debug, Error)]
pub enum FuchsiaInstallError {
#[error("generic error: {}", _0)]
Failure(anyhow::Error),
#[error("FIDL error: {}", _0)]
FIDL(fidl::Error),
#[error("System update installer failed")]
Installer,
}
impl From<anyhow::Error> for FuchsiaInstallError {
fn from(e: anyhow::Error) -> FuchsiaInstallError {
FuchsiaInstallError::Failure(e)
}
}
impl From<fidl::Error> for FuchsiaInstallError {
fn from(e: fidl::Error) -> FuchsiaInstallError {
FuchsiaInstallError::FIDL(e)
}
}
#[derive(Debug)]
pub struct FuchsiaInstaller {
proxy: InstallerProxy,
}
impl FuchsiaInstaller {
// Unused until temp_installer.rs is removed.
#[allow(dead_code)]
pub fn new() -> Result<Self, anyhow::Error> {
let proxy = fuchsia_component::client::connect_to_service::<InstallerMarker>()?;
Ok(FuchsiaInstaller { proxy })
}
#[cfg(test)]
fn new_mock() -> (Self, fidl_fuchsia_update_installer::InstallerRequestStream) {
let (proxy, stream) =
fidl::endpoints::create_proxy_and_stream::<InstallerMarker>().unwrap();
(FuchsiaInstaller { proxy }, stream)
}
}
impl Installer for FuchsiaInstaller {
    type InstallPlan = FuchsiaInstallPlan;
    type Error = FuchsiaInstallError;
    /// Starts an update via `Installer.StartUpdate` and drives the monitor
    /// event stream until a terminal state is observed.
    ///
    /// Resolves `Ok(())` on `State::Complete`, `FuchsiaInstallError::Installer`
    /// on `State::Fail` or if the event stream ends early, and transport
    /// failures as `FuchsiaInstallError::FIDL` (via `?`).
    fn perform_install(
        &mut self,
        install_plan: &FuchsiaInstallPlan,
        _observer: Option<&dyn ProgressObserver>,
    ) -> BoxFuture<'_, Result<(), FuchsiaInstallError>> {
        let url = install_plan.url.to_string();
        let options = Options {
            // Map Omaha's install source onto the installer's initiator.
            initiator: Some(match install_plan.install_source {
                InstallSource::ScheduledTask => Initiator::Service,
                InstallSource::OnDemand => Initiator::User,
            }),
        };
        async move {
            let (monitor_proxy, server_end) = create_proxy::<MonitorMarker>()?;
            // Ask the installer to push state-change notifications.
            let monitor_options = MonitorOptions { should_notify: Some(true) };
            let attempt_id =
                self.proxy.start_update(&url, options, server_end, monitor_options).await?;
            info!("Update started with attempt id: {}", attempt_id);
            let mut stream = monitor_proxy.take_event_stream();
            while let Some(event) = stream.try_next().await? {
                match event {
                    MonitorEvent::OnStateEnter { state } => {
                        // TODO: report progress to ProgressObserver
                        info!("Installer entered state: {}", state_to_string(state));
                        match state {
                            State::Complete => {
                                return Ok(());
                            }
                            State::Fail => {
                                return Err(FuchsiaInstallError::Installer);
                            }
                            _ => {}
                        }
                    }
                }
            }
            // Stream closed without reaching a terminal state: treat as failure.
            Err(FuchsiaInstallError::Installer)
        }
        .boxed()
    }
    /// Reboots the device by asking the device manager to suspend with the
    /// REBOOT flag; only resolves (with an error) if the suspend call fails.
    fn perform_reboot(&mut self) -> BoxFuture<'_, Result<(), anyhow::Error>> {
        async move {
            zx::Status::ok(
                connect_to_service::<fidl_fuchsia_device_manager::AdministratorMarker>()?
                    .suspend(fidl_fuchsia_device_manager::SUSPEND_FLAG_REBOOT)
                    .await?,
            )
            .map_err(|e| anyhow!("Suspend error: {}", e))
        }
        .boxed()
    }
}
/// Convert fuchsia.update.installer/State to string for ProgressObserver.
fn state_to_string(state: State) -> &'static str {
match state {
State::Prepare => "Prepare",
State::Download => "Download",
State::Stage => "Stage",
State::Reboot => "Reboot",
State::Finalize => "Finalize",
State::Complete => "Complete",
State::Fail => "Fail",
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use fidl_fuchsia_update_installer::InstallerRequest;
    use fuchsia_async as fasync;
    const TEST_URL: &str = "fuchsia-pkg://fuchsia.com/update/0";
    // Happy path: StartUpdate is issued with the expected arguments and the
    // install resolves Ok once the monitor reports State::Complete.
    #[fasync::run_singlethreaded(test)]
    async fn test_start_update() {
        let (mut installer, mut stream) = FuchsiaInstaller::new_mock();
        let plan = FuchsiaInstallPlan {
            url: TEST_URL.parse().unwrap(),
            install_source: InstallSource::OnDemand,
        };
        let installer_fut = async move {
            installer.perform_install(&plan, None).await.unwrap();
        };
        // Plays the service side: verify the request, ack it, then drive the
        // monitor straight to Complete.
        let stream_fut = async move {
            match stream.next().await.unwrap() {
                Ok(InstallerRequest::StartUpdate {
                    url,
                    options: Options { initiator },
                    monitor,
                    monitor_options: MonitorOptions { should_notify },
                    responder,
                }) => {
                    assert_eq!(url, TEST_URL);
                    assert_eq!(initiator, Some(Initiator::User));
                    assert_eq!(should_notify, Some(true));
                    responder.send("00000000-0000-0000-0000-000000000001").unwrap();
                    let (_stream, handle) = monitor.into_stream_and_control_handle().unwrap();
                    handle.send_on_state_enter(State::Complete).unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        };
        future::join(installer_fut, stream_fut).await;
    }
    // State::Fail from the monitor must surface as FuchsiaInstallError::Installer.
    #[fasync::run_singlethreaded(test)]
    async fn test_install_error() {
        let (mut installer, mut stream) = FuchsiaInstaller::new_mock();
        let plan = FuchsiaInstallPlan {
            url: TEST_URL.parse().unwrap(),
            install_source: InstallSource::OnDemand,
        };
        let installer_fut = async move {
            match installer.perform_install(&plan, None).await {
                Err(FuchsiaInstallError::Installer) => {} // expected
                result => panic!("Unexpected result: {:?}", result),
            }
        };
        let stream_fut = async move {
            match stream.next().await.unwrap() {
                Ok(InstallerRequest::StartUpdate { monitor, responder, .. }) => {
                    responder.send("00000000-0000-0000-0000-000000000002").unwrap();
                    let (_stream, handle) = monitor.into_stream_and_control_handle().unwrap();
                    handle.send_on_state_enter(State::Fail).unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        };
        future::join(installer_fut, stream_fut).await;
    }
    // Dropping the responder without replying makes `start_update` fail with
    // a FIDL transport error.
    #[fasync::run_singlethreaded(test)]
    async fn test_fidl_error() {
        let (mut installer, mut stream) = FuchsiaInstaller::new_mock();
        let plan = FuchsiaInstallPlan {
            url: TEST_URL.parse().unwrap(),
            install_source: InstallSource::OnDemand,
        };
        let installer_fut = async move {
            match installer.perform_install(&plan, None).await {
                Err(FuchsiaInstallError::FIDL(_)) => {} // expected
                result => panic!("Unexpected result: {:?}", result),
            }
        };
        let stream_fut = async move {
            match stream.next().await.unwrap() {
                Ok(InstallerRequest::StartUpdate { .. }) => {
                    // Don't send attempt id.
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        };
        future::join(installer_fut, stream_fut).await;
    }
    // If the monitor stream closes after only non-terminal states, the
    // install must fail rather than hang or succeed.
    #[fasync::run_singlethreaded(test)]
    async fn test_server_close_unexpectedly() {
        let (mut installer, mut stream) = FuchsiaInstaller::new_mock();
        let plan = FuchsiaInstallPlan {
            url: TEST_URL.parse().unwrap(),
            install_source: InstallSource::OnDemand,
        };
        let installer_fut = async move {
            match installer.perform_install(&plan, None).await {
                Err(FuchsiaInstallError::Installer) => {} // expected
                result => panic!("Unexpected result: {:?}", result),
            }
        };
        let stream_fut = async move {
            match stream.next().await.unwrap() {
                Ok(InstallerRequest::StartUpdate { monitor, responder, .. }) => {
                    responder.send("00000000-0000-0000-0000-000000000003").unwrap();
                    let (_stream, handle) = monitor.into_stream_and_control_handle().unwrap();
                    handle.send_on_state_enter(State::Prepare).unwrap();
                    handle.send_on_state_enter(State::Download).unwrap();
                }
                request => panic!("Unexpected request: {:?}", request),
            }
        };
        future::join(installer_fut, stream_fut).await;
    }
}
| 36.869732 | 97 | 0.565312 |
ef544dd7ce8b5148b55620d59a2978424ab4df83 | 6,056 | use std::slice::Iter;
/// One lexical token of a snailfish number.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
enum Token {
    Open,       // '['
    Sep,        // ','
    Close,      // ']'
    Number(u8), // a literal value
}
use Token::*;
/// A snailfish number, stored as its flat token sequence.
#[derive(Clone)]
struct SF(Vec<Token>);
impl SF {
    /// Wraps a token stream, asserting it forms one well-formed snailfish
    /// number with no trailing tokens.
    fn new(v: Vec<Token>) -> Self {
        let mut it = v.iter();
        assert!(
            validate_iter(&mut it) && it.next().is_none(),
            "Invalid SnailFish number"
        );
        SF(v)
    }
    /// Parses text such as `[[1,2],3]`.
    ///
    /// Only single-digit literals are accepted (sufficient for puzzle
    /// input); any other character panics.
    fn parse(s: &str) -> SF {
        let mut v = Vec::new();
        // Iterate characters directly rather than `while let` on an
        // iterator (clippy: while_let_on_iterator).
        for c in s.chars() {
            match c {
                '[' => v.push(Open),
                ']' => v.push(Close),
                ',' => v.push(Sep),
                _ => {
                    if let Some(d) = c.to_digit(10) {
                        v.push(Number(d as u8));
                    } else {
                        panic!("Unexpected char");
                    }
                }
            }
        }
        SF::new(v)
    }
    /// Renders the number back to its bracketed text form.
    fn to_string(&self) -> String {
        use std::fmt::Write;
        let mut s = String::new();
        for t in &self.0 {
            match t {
                Open => s.push('['),
                Sep => s.push(','),
                Close => s.push(']'),
                &Number(n) => write!(&mut s, "{}", n).unwrap(),
            }
        }
        s
    }
    /// One explode reduction step, or `None` if no pair is nested 5 deep.
    fn explode(&self) -> Option<SF> {
        explode_raw(&self.0).map(SF::new)
    }
    /// One split reduction step, or `None` if no literal is >= 10.
    fn split(&self) -> Option<SF> {
        split_raw(&self.0).map(SF::new)
    }
    /// Recursive magnitude: `3 * left + 2 * right` for pairs.
    fn magnitude(&self) -> u64 {
        let mut it = self.0.iter();
        let x = magnitude_iter(&mut it);
        assert!(it.next().is_none());
        x
    }
}
#[test]
fn test_explode() {
    // The leftmost pair nested four deep explodes into its neighbours.
    let input = SF::parse("[[[[[9,8],1],2],3],4]");
    let exploded = input.explode().map(|sf| sf.to_string());
    assert_eq!(exploded, Some("[[[[0,9],2],3],4]".to_string()));
}
/// Consumes one complete snailfish number from `it`; true iff well-formed.
fn validate_iter(it: &mut Iter<Token>) -> bool {
    match it.next() {
        // A bare literal is a valid number on its own.
        Some(Number(_)) => true,
        // A pair: `[` number `,` number `]`, validated recursively.
        Some(Open) => {
            validate_iter(it)
                && matches!(it.next(), Some(&Sep))
                && validate_iter(it)
                && matches!(it.next(), Some(&Close))
        }
        _ => false,
    }
}
/// Finds the leftmost pair nested more than four deep and explodes it,
/// returning the rewritten token stream, or `None` if no pair is that deep.
fn explode_raw(v: &[Token]) -> Option<Vec<Token>> {
    let n = v.len();
    // A literal pair takes 5 tokens, so anything shorter cannot explode.
    // This also guards `n - 4` below against usize underflow (e.g. a bare
    // literal such as `SF::parse("5")` has n == 1, and `0..(n - 4)` would
    // panic in debug builds or produce a huge range in release builds).
    if n < 5 {
        return None;
    }
    let mut depth = 0;
    for i in 0..(n - 4) {
        match v[i] {
            Open => {
                depth += 1;
                // Depth > 4 and the next four tokens close a literal pair.
                if depth > 4 && v[i + 4] == Close {
                    match (v[i + 1], v[i + 2], v[i + 3]) {
                        (Number(x), Sep, Number(y)) => {
                            return Some(explode_raw2(&v[..i], x, y, &v[i + 5..]));
                        }
                        _ => panic!("This should not happen"),
                    }
                }
            }
            Close => depth -= 1,
            _ => (),
        }
    }
    None
}
/// Rebuilds the token stream after exploding the pair `[x,y]`: `x` is added
/// to the nearest literal on the left, `y` to the nearest on the right, and
/// the pair itself is replaced by a literal `0`.
fn explode_raw2(pre: &[Token], x: u8, y: u8, post: &[Token]) -> Vec<Token> {
    let mut out = pre.to_vec();
    // Add x to the right-most literal before the exploded pair, if any.
    if let Some(Number(z)) = out.iter_mut().rev().find(|t| matches!(t, Number(_))) {
        *z += x;
    }
    out.push(Number(0));
    let tail_start = out.len();
    out.extend_from_slice(post);
    // Add y to the left-most literal after the exploded pair, if any.
    if let Some(Number(z)) = out[tail_start..].iter_mut().find(|t| matches!(t, Number(_))) {
        *z += y;
    }
    out
}
/// Splits the leftmost literal >= 10 into a pair (left rounds down, right
/// rounds up), or returns `None` if no literal needs splitting.
fn split_raw(v: &[Token]) -> Option<Vec<Token>> {
    // Locate the leftmost literal that is at least 10.
    let (i, n) = v.iter().enumerate().find_map(|(i, t)| match t {
        &Number(n) if n >= 10 => Some((i, n)),
        _ => None,
    })?;
    let mut w = Vec::with_capacity(v.len() + 4);
    w.extend_from_slice(&v[..i]);
    w.extend_from_slice(&[Open, Number(n / 2), Sep, Number(n - n / 2), Close]);
    w.extend_from_slice(&v[i + 1..]);
    Some(w)
}
/// Repeatedly applies reduction steps until the number is fully reduced.
/// Explode always takes priority over split, per the puzzle rules.
fn reduce(mut v: SF) -> SF {
    loop {
        match v.explode().or_else(|| v.split()) {
            Some(w) => v = w,
            None => break v,
        }
    }
}
/// Snailfish addition: wrap the two operands in a new pair, then reduce.
fn add(x: &SF, y: &SF) -> SF {
    let mut tokens = Vec::with_capacity(x.0.len() + y.0.len() + 3);
    tokens.push(Open);
    tokens.extend_from_slice(&x.0);
    tokens.push(Sep);
    tokens.extend_from_slice(&y.0);
    tokens.push(Close);
    reduce(SF::new(tokens))
}
/// Parses one snailfish number per line.
///
/// Each line is trimmed individually and blank lines are skipped: the
/// previous version only trimmed the input as a whole, so the indented
/// inline fixture in `test_homework` would reach `SF::parse` with leading
/// spaces and panic on "Unexpected char".
fn parse_many(input: &str) -> Vec<SF> {
    input
        .lines()
        .map(str::trim)
        .filter(|line| !line.is_empty())
        .map(SF::parse)
        .collect()
}
/// Sums the numbers left-to-right with [`add`].
///
/// # Panics
/// Panics if `v` is empty. A single element is returned unchanged (the
/// previous version indexed `v[1]` directly and panicked on singleton
/// slices as well as empty ones).
fn add_many(v: &[SF]) -> SF {
    let mut it = v.iter();
    let first = it.next().expect("add_many: empty slice").clone();
    it.fold(first, |acc, x| add(&acc, x))
}
#[test]
fn test_add() {
    // Worked addition example from the puzzle description.
    let lhs = SF::parse("[[[[4,3],4],4],[7,[[8,4],9]]]");
    let rhs = SF::parse("[1,1]");
    let sum = add(&lhs, &rhs);
    assert_eq!(sum.to_string(), "[[[[0,7],4],[[7,8],[6,0]]],[8,1]]");
}
// End-to-end check against the worked "homework" example from the puzzle
// text: part 1 sums all ten numbers, part 2 maximizes over ordered pairs.
#[test]
fn test_homework() {
    let v = parse_many(
        "
            [[[0,[5,8]],[[1,7],[9,6]]],[[4,[1,2]],[[1,4],2]]]
            [[[5,[2,8]],4],[5,[[9,9],0]]]
            [6,[[[6,2],[5,6]],[[7,6],[4,7]]]]
            [[[6,[0,7]],[0,9]],[4,[9,[9,0]]]]
            [[[7,[6,4]],[3,[1,3]]],[[[5,5],1],9]]
            [[6,[[7,3],[3,2]]],[[[3,8],[5,7]],4]]
            [[[[5,4],[7,7]],8],[[8,3],8]]
            [[9,3],[[9,9],[6,[4,9]]]]
            [[2,[[7,7],7]],[[5,8],[[9,3],[0,2]]]]
            [[[[5,2],5],[8,[3,7]]],[[5,[7,5]],[4,4]]]
            ",
    );
    assert_eq!(part1(&v), 4140);
    assert_eq!(part2(&v), 3993);
}
/// Consumes one number from the token stream and returns its magnitude:
/// literals count as themselves, pairs as `3 * left + 2 * right`.
fn magnitude_iter(it: &mut Iter<Token>) -> u64 {
    match it.next() {
        Some(&Number(n)) => u64::from(n),
        Some(Open) => {
            let left = magnitude_iter(it);
            it.next(); // skip the Sep token
            let right = magnitude_iter(it);
            it.next(); // skip the Close token
            3 * left + 2 * right
        }
        _ => panic!(),
    }
}
// Regression test pinning the answers for the real puzzle input; requires
// `day18.txt` to exist next to this source file at compile time.
#[test]
fn answers() {
    let v = parse_many(include_str!("day18.txt"));
    assert_eq!(part1(&v), 3734);
    assert_eq!(part2(&v), 4837);
}
/// Part 1: magnitude of the left-to-right sum of all numbers.
fn part1(v: &[SF]) -> u64 {
    // `v` is already a slice reference; the previous `&v` was a needless
    // extra borrow that only compiled via deref coercion.
    add_many(v).magnitude()
}
/// Part 2: largest magnitude obtainable by adding any ordered pair of
/// distinct numbers (snailfish addition is not commutative, so both
/// orderings of each pair are tried).
fn part2(v: &[SF]) -> u64 {
    v.iter()
        .enumerate()
        .flat_map(move |(i, a)| {
            v.iter()
                .enumerate()
                .filter(move |&(j, _)| i != j)
                .map(move |(_, b)| add(a, b).magnitude())
        })
        .max()
        .unwrap_or(0)
}
| 23.292308 | 82 | 0.380614 |
f501427507138dfee59fb82f9f7768fd1d81e562 | 1,021 | #![warn(rust_2018_idioms)]
#![cfg(feature = "full")]
use tokio::runtime::Runtime;
use tokio::sync::oneshot;
use tokio::task::{self, LocalSet};
// NOTE(review): despite the name, no mutex appears here directly — this
// presumably exercises lock acquisition inside LocalSet's Drop path when
// tasks are still pending; confirm against tokio's issue history.
#[test]
fn acquire_mutex_in_drop() {
    use futures::future::pending;
    let (tx1, rx1) = oneshot::channel();
    let (tx2, rx2) = oneshot::channel();
    let mut rt = rt();
    let local = LocalSet::new();
    // This task can only complete if tx2 fires — which it never does.
    local.spawn_local(async move {
        let _ = rx2.await;
        unreachable!();
    });
    // This task waits on tx1, which is held by the never-ready task below.
    local.spawn_local(async move {
        let _ = rx1.await;
        let _ = tx2.send(()).unwrap();
        unreachable!();
    });
    // Spawn a task that will never notify
    local.spawn_local(async move {
        pending::<()>().await;
        tx1.send(()).unwrap();
    });
    // Tick the loop
    local.block_on(&mut rt, async {
        task::yield_now().await;
    });
    // Drop the LocalSet
    drop(local);
}
/// Builds a single-threaded ("basic scheduler") runtime with all drivers
/// (I/O, time) enabled.
fn rt() -> Runtime {
    let build_result = tokio::runtime::Builder::new()
        .basic_scheduler()
        .enable_all()
        .build();
    build_result.unwrap()
}
| 20.019608 | 42 | 0.549461 |
149778712f08d92cc2f30a43a4f1a9844ffdd456 | 944 | #![deny(warnings)]
extern crate mpi;
use mpi::traits::*;
use mpi::datatype::PartitionMut;
use mpi::Count;
/// Demonstrates `all_gather_varcount_into`: rank r contributes the message
/// `[0, 1, ..., r-1]`, and every rank gathers the concatenation of all
/// messages into a displacement-partitioned buffer.
fn main() {
    let universe = mpi::initialize().unwrap();
    let world = universe.world();
    let rank = world.rank();
    let size = world.size();
    // Rank r sends exactly r items: 0..r.
    let msg: Vec<_> = (0..rank).collect();
    // Per-rank receive counts match the send sizes above.
    let counts: Vec<Count> = (0..size).collect();
    // Displacements are the exclusive prefix sum of the counts.
    let mut displs: Vec<Count> = Vec::with_capacity(size as usize);
    let mut offset = 0;
    for &c in &counts {
        displs.push(offset);
        offset += c;
    }
    // Total gathered items: 0 + 1 + ... + (size - 1).
    let mut buf = vec![0; (size * (size - 1) / 2) as usize];
    {
        let mut partition = PartitionMut::new(&mut buf[..], counts, &displs[..]);
        world.all_gather_varcount_into(&msg[..], &mut partition);
    }
    // Every rank should now hold 0..0 ++ 0..1 ++ ... ++ 0..(size-1).
    assert!(
        buf.iter()
            .zip((0..size).flat_map(|r| 0..r))
            .all(|(&i, j)| i == j)
    );
    println!("Process {} got message {:?}", rank, buf);
}
| 23.6 | 81 | 0.497881 |
ff3493eb6de6d642aa24cc0cddc79c91e0ef00ae | 63,601 | //! # Type Coercion
//!
//! Under certain circumstances we will coerce from one type to another,
//! for example by auto-borrowing. This occurs in situations where the
//! compiler has a firm 'expected type' that was supplied from the user,
//! and where the actual type is similar to that expected type in purpose
//! but not in representation (so actual subtyping is inappropriate).
//!
//! ## Reborrowing
//!
//! Note that if we are expecting a reference, we will *reborrow*
//! even if the argument provided was already a reference. This is
//! useful for freezing mut/const things (that is, when the expected is &T
//! but you have &const T or &mut T) and also for avoiding the linearity
//! of mut things (when the expected is &mut T and you have &mut T). See
//! the various `src/test/ui/coerce-reborrow-*.rs` tests for
//! examples of where this is useful.
//!
//! ## Subtle note
//!
//! When deciding what type coercions to consider, we do not attempt to
//! resolve any type variables we may encounter. This is because `b`
//! represents the expected type "as the user wrote it", meaning that if
//! the user defined a generic function like
//!
//! fn foo<A>(a: A, b: A) { ... }
//!
//! and then we wrote `foo(&1, @2)`, we will not auto-borrow
//! either argument. In older code we went to some lengths to
//! resolve the `b` variable, which could mean that we'd
//! auto-borrow later arguments but not earlier ones, which
//! seems very confusing.
//!
//! ## Subtler note
//!
//! However, right now, if the user manually specifies the
//! values for the type variables, as so:
//!
//! foo::<&int>(@1, @2)
//!
//! then we *will* auto-borrow, because we can't distinguish this from a
//! function that declared `&int`. This is inconsistent but it's easiest
//! at the moment. The right thing to do, I think, is to consider the
//! *unsubstituted* type when deciding whether to auto-borrow, but the
//! *substituted* type when considering the bounds and so forth. But most
//! of our methods don't give access to the unsubstituted type, and
//! rightly so because they'd be error-prone. So maybe the thing to do is
//! to actually determine the kind of coercions that should occur
//! separately and pass them in. Or maybe it's ok as is. Anyway, it's
//! sort of a minor point so I've opted to leave it for later -- after all,
//! we may want to adjust precisely when coercions occur.
use crate::astconv::AstConv;
use crate::check::{FnCtxt, Needs};
use rustc_errors::{struct_span_err, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::{Coercion, InferOk, InferResult};
use rustc_middle::ty::adjustment::{
Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast,
};
use rustc_middle::ty::error::TypeError;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::relate::RelateResult;
use rustc_middle::ty::subst::SubstsRef;
use rustc_middle::ty::{self, Ty, TypeAndMut};
use rustc_session::parse::feature_err;
use rustc_span::symbol::sym;
use rustc_span::{self, Span};
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode};
use smallvec::{smallvec, SmallVec};
use std::ops::Deref;
/// One coercion attempt: the function context plus the cause/span of the
/// expression being coerced and the relation mode (plain subtyping vs LUB).
struct Coerce<'a, 'tcx> {
    fcx: &'a FnCtxt<'a, 'tcx>,
    /// Why this coercion is happening; carries the span used for errors
    /// and for any inference variables created along the way.
    cause: ObligationCause<'tcx>,
    /// When true, `unify` relates types with LUB instead of requiring the
    /// expected type to be a supertype of the actual one.
    use_lub: bool,
    /// Determines whether or not allow_two_phase_borrow is set on any
    /// autoref adjustments we create while coercing. We don't want to
    /// allow deref coercions to create two-phase borrows, at least initially,
    /// but we do need two-phase borrows for function argument reborrows.
    /// See #47489 and #48598
    /// See docs on the "AllowTwoPhase" type for a more detailed discussion
    allow_two_phase: AllowTwoPhase,
}
/// Lets `Coerce` transparently expose all `FnCtxt` methods via auto-deref.
impl<'a, 'tcx> Deref for Coerce<'a, 'tcx> {
    type Target = FnCtxt<'a, 'tcx>;
    fn deref(&self) -> &Self::Target {
        // `fcx` is already a reference; return it directly.
        self.fcx
    }
}
/// Result of one coercion attempt: the adjustment steps to record plus the
/// final target type, or a `TypeError`.
type CoerceResult<'tcx> = InferResult<'tcx, (Vec<Adjustment<'tcx>>, Ty<'tcx>)>;
/// Checks whether a coercion between the two mutabilities is allowed.
/// The only forbidden direction is gaining mutability (`&T -> &mut T`);
/// dropping it (`&mut T -> &T`) and identity are both fine.
fn coerce_mutbls<'tcx>(
    from_mutbl: hir::Mutability,
    to_mutbl: hir::Mutability,
) -> RelateResult<'tcx, ()> {
    match (from_mutbl, to_mutbl) {
        (hir::Mutability::Not, hir::Mutability::Mut) => Err(TypeError::Mutability),
        _ => Ok(()),
    }
}
/// The identity coercion: no adjustment steps at all.
fn identity(_: Ty<'_>) -> Vec<Adjustment<'_>> {
    Vec::new()
}
/// Builds a coercion consisting of exactly one adjustment of `kind`
/// targeting whatever type the closure is eventually given.
fn simple(kind: Adjust<'tcx>) -> impl FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>> {
    move |target| {
        let step = Adjustment { kind, target };
        vec![step]
    }
}
/// Wraps a successful coercion outcome — the adjustment steps `adj`
/// producing `target` — together with the obligations it incurred.
fn success<'tcx>(
    adj: Vec<Adjustment<'tcx>>,
    target: Ty<'tcx>,
    obligations: traits::PredicateObligations<'tcx>,
) -> CoerceResult<'tcx> {
    let value = (adj, target);
    Ok(InferOk { value, obligations })
}
impl<'f, 'tcx> Coerce<'f, 'tcx> {
fn new(
fcx: &'f FnCtxt<'f, 'tcx>,
cause: ObligationCause<'tcx>,
allow_two_phase: AllowTwoPhase,
) -> Self {
Coerce { fcx, cause, allow_two_phase, use_lub: false }
}
    /// Relates `a` (actual) with `b` (expected) inside a `commit_if_ok`
    /// snapshot, so failed attempts leave no residue in the inference
    /// context. In LUB mode the computed type is returned; in sub mode,
    /// `b` must be a supertype of `a` and the result is remapped to `a`.
    fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
        self.commit_if_ok(|_| {
            if self.use_lub {
                self.at(&self.cause, self.fcx.param_env).lub(b, a)
            } else {
                self.at(&self.cause, self.fcx.param_env)
                    .sup(b, a)
                    .map(|InferOk { value: (), obligations }| InferOk { value: a, obligations })
            }
        })
    }
/// Unify two types (using sub or lub) and produce a specific coercion.
fn unify_and<F>(&self, a: Ty<'tcx>, b: Ty<'tcx>, f: F) -> CoerceResult<'tcx>
where
F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
{
self.unify(&a, &b)
.and_then(|InferOk { value: ty, obligations }| success(f(ty), ty, obligations))
}
    /// Performs one coercion attempt from `a` (the actual type) to `b`
    /// (the expected type), dispatching on the shapes of the two types:
    /// error types and `!` first, then unsizing, then pointer/reference
    /// targets, then fn items/pointers and closures, and finally plain
    /// unification as the fallback.
    fn coerce(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
        let a = self.shallow_resolve(a);
        debug!("Coerce.tys({:?} => {:?})", a, b);
        // Just ignore error types.
        if a.references_error() || b.references_error() {
            return success(vec![], self.fcx.tcx.types.err, vec![]);
        }
        if a.is_never() {
            // Subtle: If we are coercing from `!` to `?T`, where `?T` is an unbound
            // type variable, we want `?T` to fallback to `!` if not
            // otherwise constrained. An example where this arises:
            //
            //     let _: Option<?T> = Some({ return; });
            //
            // here, we would coerce from `!` to `?T`.
            let b = self.shallow_resolve(b);
            return if self.shallow_resolve(b).is_ty_var() {
                // Micro-optimization: no need for this if `b` is
                // already resolved in some way.
                let diverging_ty = self.next_diverging_ty_var(TypeVariableOrigin {
                    kind: TypeVariableOriginKind::AdjustmentType,
                    span: self.cause.span,
                });
                self.unify_and(&b, &diverging_ty, simple(Adjust::NeverToAny))
            } else {
                success(simple(Adjust::NeverToAny)(b), b, vec![])
            };
        }
        // Consider coercing the subtype to a DST
        //
        // NOTE: this is wrapped in a `commit_if_ok` because it creates
        // a "spurious" type variable, and we don't want to have that
        // type variable in memory if the coercion fails.
        let unsize = self.commit_if_ok(|_| self.coerce_unsized(a, b));
        match unsize {
            Ok(_) => {
                debug!("coerce: unsize successful");
                return unsize;
            }
            Err(TypeError::ObjectUnsafeCoercion(did)) => {
                debug!("coerce: unsize not object safe");
                return Err(TypeError::ObjectUnsafeCoercion(did));
            }
            Err(_) => {}
        }
        debug!("coerce: unsize failed");
        // Examine the supertype and consider auto-borrowing.
        //
        // Note: does not attempt to resolve type variables we encounter.
        // See above for details.
        match b.kind {
            ty::RawPtr(mt_b) => {
                return self.coerce_unsafe_ptr(a, b, mt_b.mutbl);
            }
            ty::Ref(r_b, ty, mutbl) => {
                let mt_b = ty::TypeAndMut { ty, mutbl };
                return self.coerce_borrowed_pointer(a, b, r_b, mt_b);
            }
            _ => {}
        }
        match a.kind {
            ty::FnDef(..) => {
                // Function items are coercible to any closure
                // type; function pointers are not (that would
                // require double indirection).
                // Additionally, we permit coercion of function
                // items to drop the unsafe qualifier.
                self.coerce_from_fn_item(a, b)
            }
            ty::FnPtr(a_f) => {
                // We permit coercion of fn pointers to drop the
                // unsafe qualifier.
                self.coerce_from_fn_pointer(a, a_f, b)
            }
            ty::Closure(_, substs_a) => {
                // Non-capturing closures are coercible to
                // function pointers or unsafe function pointers.
                // It cannot convert closures that require unsafe.
                self.coerce_closure_to_fn(a, substs_a, b)
            }
            _ => {
                // Otherwise, just use unification rules.
                self.unify_and(a, b, identity)
            }
        }
    }
    /// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
    /// To match `A` with `B`, autoderef will be performed,
    /// calling `deref`/`deref_mut` where necessary.
    ///
    /// `r_b`/`mt_b` are the region and referent of the expected reference
    /// type `b`. On success, the returned adjustments consist of the
    /// autoderef steps followed by a final autoref producing `b`'s shape
    /// (except for the no-op `&T -> &T` case, which returns no adjustments).
    fn coerce_borrowed_pointer(
        &self,
        a: Ty<'tcx>,
        b: Ty<'tcx>,
        r_b: ty::Region<'tcx>,
        mt_b: TypeAndMut<'tcx>,
    ) -> CoerceResult<'tcx> {
        debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b);
        // If we have a parameter of type `&M T_a` and the value
        // provided is `expr`, we will be adding an implicit borrow,
        // meaning that we convert `f(expr)` to `f(&M *expr)`. Therefore,
        // to type check, we will construct the type that `&M*expr` would
        // yield.
        let (r_a, mt_a) = match a.kind {
            ty::Ref(r_a, ty, mutbl) => {
                let mt_a = ty::TypeAndMut { ty, mutbl };
                coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
                (r_a, mt_a)
            }
            // Non-reference sources can't be reborrowed; fall back to unification.
            _ => return self.unify_and(a, b, identity),
        };
        let span = self.cause.span;
        let mut first_error = None;
        let mut r_borrow_var = None;
        let mut autoderef = self.autoderef(span, a);
        let mut found = None;
        for (referent_ty, autoderefs) in autoderef.by_ref() {
            if autoderefs == 0 {
                // Don't let this pass, otherwise it would cause
                // &T to autoref to &&T.
                continue;
            }
            // At this point, we have deref'd `a` to `referent_ty`. So
            // imagine we are coercing from `&'a mut Vec<T>` to `&'b mut [T]`.
            // In the autoderef loop for `&'a mut Vec<T>`, we would get
            // three callbacks:
            //
            // - `&'a mut Vec<T>` -- 0 derefs, just ignore it
            // - `Vec<T>` -- 1 deref
            // - `[T]` -- 2 deref
            //
            // At each point after the first callback, we want to
            // check to see whether this would match out target type
            // (`&'b mut [T]`) if we autoref'd it. We can't just
            // compare the referent types, though, because we still
            // have to consider the mutability. E.g., in the case
            // we've been considering, we have an `&mut` reference, so
            // the `T` in `[T]` needs to be unified with equality.
            //
            // Therefore, we construct reference types reflecting what
            // the types will be after we do the final auto-ref and
            // compare those. Note that this means we use the target
            // mutability [1], since it may be that we are coercing
            // from `&mut T` to `&U`.
            //
            // One fine point concerns the region that we use. We
            // choose the region such that the region of the final
            // type that results from `unify` will be the region we
            // want for the autoref:
            //
            // - if in sub mode, that means we want to use `'b` (the
            //   region from the target reference) for both
            //   pointers [2]. This is because sub mode (somewhat
            //   arbitrarily) returns the subtype region.  In the case
            //   where we are coercing to a target type, we know we
            //   want to use that target type region (`'b`) because --
            //   for the program to type-check -- it must be the
            //   smaller of the two.
            //   - One fine point. It may be surprising that we can
            //     use `'b` without relating `'a` and `'b`. The reason
            //     that this is ok is that what we produce is
            //     effectively a `&'b *x` expression (if you could
            //     annotate the region of a borrow), and regionck has
            //     code that adds edges from the region of a borrow
            //     (`'b`, here) into the regions in the borrowed
            //     expression (`*x`, here).  (Search for "link".)
            // - if in lub mode, things can get fairly complicated. The
            //   easiest thing is just to make a fresh
            //   region variable [4], which effectively means we defer
            //   the decision to region inference (and regionck, which will add
            //   some more edges to this variable). However, this can wind up
            //   creating a crippling number of variables in some cases --
            //   e.g., #32278 -- so we optimize one particular case [3].
            //   Let me try to explain with some examples:
            //   - The "running example" above represents the simple case,
            //     where we have one `&` reference at the outer level and
            //     ownership all the rest of the way down. In this case,
            //     we want `LUB('a, 'b)` as the resulting region.
            //   - However, if there are nested borrows, that region is
            //     too strong. Consider a coercion from `&'a &'x Rc<T>` to
            //     `&'b T`. In this case, `'a` is actually irrelevant.
            //     The pointer we want is `LUB('x, 'b`). If we choose `LUB('a,'b)`
            //     we get spurious errors (`ui/regions-lub-ref-ref-rc.rs`).
            //     (The errors actually show up in borrowck, typically, because
            //     this extra edge causes the region `'a` to be inferred to something
            //     too big, which then results in borrowck errors.)
            //   - We could track the innermost shared reference, but there is already
            //     code in regionck that has the job of creating links between
            //     the region of a borrow and the regions in the thing being
            //     borrowed (here, `'a` and `'x`), and it knows how to handle
            //     all the various cases. So instead we just make a region variable
            //     and let regionck figure it out.
            let r = if !self.use_lub {
                r_b // [2] above
            } else if autoderefs == 1 {
                r_a // [3] above
            } else {
                if r_borrow_var.is_none() {
                    // create var lazilly, at most once
                    let coercion = Coercion(span);
                    let r = self.next_region_var(coercion);
                    r_borrow_var = Some(r); // [4] above
                }
                r_borrow_var.unwrap()
            };
            let derefd_ty_a = self.tcx.mk_ref(
                r,
                TypeAndMut {
                    ty: referent_ty,
                    mutbl: mt_b.mutbl, // [1] above
                },
            );
            match self.unify(derefd_ty_a, b) {
                Ok(ok) => {
                    found = Some(ok);
                    break;
                }
                Err(err) => {
                    if first_error.is_none() {
                        first_error = Some(err);
                    }
                }
            }
        }
        // Extract type or return an error. We return the first error
        // we got, which should be from relating the "base" type
        // (e.g., in example above, the failure from relating `Vec<T>`
        // to the target type), since that should be the least
        // confusing.
        let InferOk { value: ty, mut obligations } = match found {
            Some(d) => d,
            None => {
                let err = first_error.expect("coerce_borrowed_pointer had no error");
                debug!("coerce_borrowed_pointer: failed with err = {:?}", err);
                return Err(err);
            }
        };
        if ty == a && mt_a.mutbl == hir::Mutability::Not && autoderef.step_count() == 1 {
            // As a special case, if we would produce `&'a *x`, that's
            // a total no-op. We end up with the type `&'a T` just as
            // we started with.  In that case, just skip it
            // altogether. This is just an optimization.
            //
            // Note that for `&mut`, we DO want to reborrow --
            // otherwise, this would be a move, which might be an
            // error. For example `foo(self.x)` where `self` and
            // `self.x` both have `&mut `type would be a move of
            // `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
            // which is a borrow.
            assert_eq!(mt_b.mutbl, hir::Mutability::Not); // can only coerce &T -> &U
            return success(vec![], ty, obligations);
        }
        let needs = Needs::maybe_mut_place(mt_b.mutbl);
        let InferOk { value: mut adjustments, obligations: o } =
            autoderef.adjust_steps_as_infer_ok(self, needs);
        obligations.extend(o);
        obligations.extend(autoderef.into_obligations());
        // Now apply the autoref. We have to extract the region out of
        // the final ref type we got.
        let r_borrow = match ty.kind {
            ty::Ref(r_borrow, _, _) => r_borrow,
            _ => span_bug!(span, "expected a ref type, got {:?}", ty),
        };
        let mutbl = match mt_b.mutbl {
            hir::Mutability::Not => AutoBorrowMutability::Not,
            hir::Mutability::Mut => {
                AutoBorrowMutability::Mut { allow_two_phase_borrow: self.allow_two_phase }
            }
        };
        adjustments.push(Adjustment {
            kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)),
            target: ty,
        });
        debug!("coerce_borrowed_pointer: succeeded ty={:?} adjustments={:?}", ty, adjustments);
        success(adjustments, ty, obligations)
    }
// &[T; n] or &mut [T; n] -> &[T]
// or &mut [T; n] -> &mut [T]
// or &Concrete -> &Trait, etc.
fn coerce_unsized(&self, source: Ty<'tcx>, target: Ty<'tcx>) -> CoerceResult<'tcx> {
debug!("coerce_unsized(source={:?}, target={:?})", source, target);
let traits =
(self.tcx.lang_items().unsize_trait(), self.tcx.lang_items().coerce_unsized_trait());
let (unsize_did, coerce_unsized_did) = if let (Some(u), Some(cu)) = traits {
(u, cu)
} else {
debug!("missing Unsize or CoerceUnsized traits");
return Err(TypeError::Mismatch);
};
// Note, we want to avoid unnecessary unsizing. We don't want to coerce to
// a DST unless we have to. This currently comes out in the wash since
// we can't unify [T] with U. But to properly support DST, we need to allow
// that, at which point we will need extra checks on the target here.
// Handle reborrows before selecting `Source: CoerceUnsized<Target>`.
let reborrow = match (&source.kind, &target.kind) {
(&ty::Ref(_, ty_a, mutbl_a), &ty::Ref(_, _, mutbl_b)) => {
coerce_mutbls(mutbl_a, mutbl_b)?;
let coercion = Coercion(self.cause.span);
let r_borrow = self.next_region_var(coercion);
let mutbl = match mutbl_b {
hir::Mutability::Not => AutoBorrowMutability::Not,
hir::Mutability::Mut => AutoBorrowMutability::Mut {
// We don't allow two-phase borrows here, at least for initial
// implementation. If it happens that this coercion is a function argument,
// the reborrow in coerce_borrowed_ptr will pick it up.
allow_two_phase_borrow: AllowTwoPhase::No,
},
};
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(r_borrow, mutbl)),
target: self
.tcx
.mk_ref(r_borrow, ty::TypeAndMut { mutbl: mutbl_b, ty: ty_a }),
},
))
}
(&ty::Ref(_, ty_a, mt_a), &ty::RawPtr(ty::TypeAndMut { mutbl: mt_b, .. })) => {
coerce_mutbls(mt_a, mt_b)?;
Some((
Adjustment { kind: Adjust::Deref(None), target: ty_a },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::RawPtr(mt_b)),
target: self.tcx.mk_ptr(ty::TypeAndMut { mutbl: mt_b, ty: ty_a }),
},
))
}
_ => None,
};
let coerce_source = reborrow.as_ref().map_or(source, |&(_, ref r)| r.target);
// Setup either a subtyping or a LUB relationship between
// the `CoerceUnsized` target type and the expected type.
// We only have the latter, so we use an inference variable
// for the former and let type inference do the rest.
let origin = TypeVariableOrigin {
kind: TypeVariableOriginKind::MiscVariable,
span: self.cause.span,
};
let coerce_target = self.next_ty_var(origin);
let mut coercion = self.unify_and(coerce_target, target, |target| {
let unsize = Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), target };
match reborrow {
None => vec![unsize],
Some((ref deref, ref autoref)) => vec![deref.clone(), autoref.clone(), unsize],
}
})?;
let mut selcx = traits::SelectionContext::new(self);
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::new(
self.cause.span,
self.body_id,
ObligationCauseCode::Coercion { source, target },
);
// Use a FIFO queue for this custom fulfillment procedure.
//
// A Vec (or SmallVec) is not a natural choice for a queue. However,
// this code path is hot, and this queue usually has a max length of 1
// and almost never more than 3. By using a SmallVec we avoid an
// allocation, at the (very small) cost of (occasionally) having to
// shift subsequent elements down when removing the front element.
let mut queue: SmallVec<[_; 4]> = smallvec![traits::predicate_for_trait_def(
self.tcx,
self.fcx.param_env,
cause,
coerce_unsized_did,
0,
coerce_source,
&[coerce_target.into()]
)];
let mut has_unsized_tuple_coercion = false;
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
// inference might unify those two inner type variables later.
let traits = [coerce_unsized_did, unsize_did];
while !queue.is_empty() {
let obligation = queue.remove(0);
debug!("coerce_unsized resolve step: {:?}", obligation);
let trait_pred = match obligation.predicate {
ty::Predicate::Trait(trait_pred, _) if traits.contains(&trait_pred.def_id()) => {
if unsize_did == trait_pred.def_id() {
let unsize_ty = trait_pred.skip_binder().trait_ref.substs[1].expect_ty();
if let ty::Tuple(..) = unsize_ty.kind {
debug!("coerce_unsized: found unsized tuple coercion");
has_unsized_tuple_coercion = true;
}
}
trait_pred
}
_ => {
coercion.obligations.push(obligation);
continue;
}
};
match selcx.select(&obligation.with(trait_pred)) {
// Uncertain or unimplemented.
Ok(None) => {
if trait_pred.def_id() == unsize_did {
let trait_pred = self.resolve_vars_if_possible(&trait_pred);
let self_ty = trait_pred.skip_binder().self_ty();
let unsize_ty = trait_pred.skip_binder().trait_ref.substs[1].expect_ty();
debug!("coerce_unsized: ambiguous unsize case for {:?}", trait_pred);
match (&self_ty.kind, &unsize_ty.kind) {
(ty::Infer(ty::TyVar(v)), ty::Dynamic(..))
if self.type_var_is_sized(*v) =>
{
debug!("coerce_unsized: have sized infer {:?}", v);
coercion.obligations.push(obligation);
// `$0: Unsize<dyn Trait>` where we know that `$0: Sized`, try going
// for unsizing.
}
_ => {
// Some other case for `$0: Unsize<Something>`. Note that we
// hit this case even if `Something` is a sized type, so just
// don't do the coercion.
debug!("coerce_unsized: ambiguous unsize");
return Err(TypeError::Mismatch);
}
}
} else {
debug!("coerce_unsized: early return - ambiguous");
return Err(TypeError::Mismatch);
}
}
Err(traits::Unimplemented) => {
debug!("coerce_unsized: early return - can't prove obligation");
return Err(TypeError::Mismatch);
}
// Object safety violations or miscellaneous.
Err(err) => {
self.report_selection_error(&obligation, &err, false, false);
// Treat this like an obligation and follow through
// with the unsizing - the lack of a coercion should
// be silent, as it causes a type mismatch later.
}
Ok(Some(vtable)) => queue.extend(vtable.nested_obligations()),
}
}
if has_unsized_tuple_coercion && !self.tcx.features().unsized_tuple_coercion {
feature_err(
&self.tcx.sess.parse_sess,
sym::unsized_tuple_coercion,
self.cause.span,
"unsized tuple coercion is not stable enough for use and is subject to change",
)
.emit();
}
Ok(coercion)
}
fn coerce_from_safe_fn<F, G>(
&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
b: Ty<'tcx>,
to_unsafe: F,
normal: G,
) -> CoerceResult<'tcx>
where
F: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
G: FnOnce(Ty<'tcx>) -> Vec<Adjustment<'tcx>>,
{
if let ty::FnPtr(fn_ty_b) = b.kind {
if let (hir::Unsafety::Normal, hir::Unsafety::Unsafe) =
(fn_ty_a.unsafety(), fn_ty_b.unsafety())
{
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
return self.unify_and(unsafe_a, b, to_unsafe);
}
}
self.unify_and(a, b, normal)
}
fn coerce_from_fn_pointer(
&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
b: Ty<'tcx>,
) -> CoerceResult<'tcx> {
//! Attempts to coerce from the type of a Rust function item
//! into a closure or a `proc`.
//!
let b = self.shallow_resolve(b);
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
self.coerce_from_safe_fn(
a,
fn_ty_a,
b,
simple(Adjust::Pointer(PointerCast::UnsafeFnPointer)),
identity,
)
}
    fn coerce_from_fn_item(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
        //! Attempts to coerce from the type of a Rust function item
        //! into a function pointer (reifying it, and additionally making
        //! it unsafe if the target requires an unsafe fn pointer).
        let b = self.shallow_resolve(b);
        debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b);
        match b.kind {
            ty::FnPtr(_) => {
                let a_sig = a.fn_sig(self.tcx);
                // Intrinsics are not coercible to function pointers
                if a_sig.abi() == Abi::RustIntrinsic || a_sig.abi() == Abi::PlatformIntrinsic {
                    return Err(TypeError::IntrinsicCast);
                }
                // Normalize any associated types in the signature before
                // building the reified pointer type; keep the obligations
                // this produces so they can be merged with the coercion's.
                let InferOk { value: a_sig, mut obligations } =
                    self.normalize_associated_types_in_as_infer_ok(self.cause.span, &a_sig);
                let a_fn_pointer = self.tcx.mk_fn_ptr(a_sig);
                // Unify the reified pointer with `b`. If the target is an
                // unsafe fn pointer, two adjustments are needed (reify,
                // then safe->unsafe); otherwise just the reification.
                let InferOk { value, obligations: o2 } = self.coerce_from_safe_fn(
                    a_fn_pointer,
                    a_sig,
                    b,
                    |unsafe_ty| {
                        vec![
                            Adjustment {
                                kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
                                target: a_fn_pointer,
                            },
                            Adjustment {
                                kind: Adjust::Pointer(PointerCast::UnsafeFnPointer),
                                target: unsafe_ty,
                            },
                        ]
                    },
                    simple(Adjust::Pointer(PointerCast::ReifyFnPointer)),
                )?;
                // Merge normalization obligations with coercion obligations.
                obligations.extend(o2);
                Ok(InferOk { value, obligations })
            }
            // Target is not a fn pointer: fall back to plain unification.
            _ => self.unify_and(a, b, identity),
        }
    }
    fn coerce_closure_to_fn(
        &self,
        a: Ty<'tcx>,
        substs_a: SubstsRef<'tcx>,
        b: Ty<'tcx>,
    ) -> CoerceResult<'tcx> {
        //! Attempts to coerce from the type of a non-capturing closure
        //! into a function pointer.
        //!
        let b = self.shallow_resolve(b);
        match b.kind {
            // Only closures with no upvars (i.e. non-capturing closures)
            // may become fn pointers.
            ty::FnPtr(fn_ty) if substs_a.as_closure().upvar_tys().next().is_none() => {
                // We coerce the closure, which has fn type
                //     `extern "rust-call" fn((arg0,arg1,...)) -> _`
                // to
                //     `fn(arg0,arg1,...) -> _`
                // or
                //     `unsafe fn(arg0,arg1,...) -> _`
                let closure_sig = substs_a.as_closure().sig();
                // The pointer inherits its safety from the *target* type.
                let unsafety = fn_ty.unsafety();
                let pointer_ty = self.tcx.coerce_closure_fn_ty(closure_sig, unsafety);
                debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})", a, b, pointer_ty);
                self.unify_and(
                    pointer_ty,
                    b,
                    simple(Adjust::Pointer(PointerCast::ClosureFnPointer(unsafety))),
                )
            }
            // Capturing closure or non-fn-pointer target: plain unification.
            _ => self.unify_and(a, b, identity),
        }
    }
fn coerce_unsafe_ptr(
&self,
a: Ty<'tcx>,
b: Ty<'tcx>,
mutbl_b: hir::Mutability,
) -> CoerceResult<'tcx> {
debug!("coerce_unsafe_ptr(a={:?}, b={:?})", a, b);
let (is_ref, mt_a) = match a.kind {
ty::Ref(_, ty, mutbl) => (true, ty::TypeAndMut { ty, mutbl }),
ty::RawPtr(mt) => (false, mt),
_ => return self.unify_and(a, b, identity),
};
// Check that the types which they point at are compatible.
let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut { mutbl: mutbl_b, ty: mt_a.ty });
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
// Although references and unsafe ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
if is_ref {
self.unify_and(a_unsafe, b, |target| {
vec![
Adjustment { kind: Adjust::Deref(None), target: mt_a.ty },
Adjustment { kind: Adjust::Borrow(AutoBorrow::RawPtr(mutbl_b)), target },
]
})
} else if mt_a.mutbl != mutbl_b {
self.unify_and(a_unsafe, b, simple(Adjust::Pointer(PointerCast::MutToConstPointer)))
} else {
self.unify_and(a_unsafe, b, identity)
}
}
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Attempt to coerce an expression to a type, and return the
/// adjusted type of the expression, if successful.
/// Adjustments are only recorded if the coercion succeeded.
/// The expressions *must not* have any pre-existing adjustments.
pub fn try_coerce(
&self,
expr: &hir::Expr<'_>,
expr_ty: Ty<'tcx>,
target: Ty<'tcx>,
allow_two_phase: AllowTwoPhase,
) -> RelateResult<'tcx, Ty<'tcx>> {
let source = self.resolve_vars_with_obligations(expr_ty);
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable);
let coerce = Coerce::new(self, cause, allow_two_phase);
let ok = self.commit_if_ok(|_| coerce.coerce(source, target))?;
let (adjustments, _) = self.register_infer_ok_obligations(ok);
self.apply_adjustments(expr, adjustments);
Ok(if expr_ty.references_error() { self.tcx.types.err } else { target })
}
/// Same as `try_coerce()`, but without side-effects.
pub fn can_coerce(&self, expr_ty: Ty<'tcx>, target: Ty<'tcx>) -> bool {
let source = self.resolve_vars_with_obligations(expr_ty);
debug!("coercion::can({:?} -> {:?})", source, target);
let cause = self.cause(rustc_span::DUMMY_SP, ObligationCauseCode::ExprAssignable);
// We don't ever need two-phase here since we throw out the result of the coercion
let coerce = Coerce::new(self, cause, AllowTwoPhase::No);
self.probe(|_| coerce.coerce(source, target)).is_ok()
}
/// Given some expressions, their known unified type and another expression,
/// tries to unify the types, potentially inserting coercions on any of the
/// provided expressions and returns their LUB (aka "common supertype").
///
/// This is really an internal helper. From outside the coercion
/// module, you should instantiate a `CoerceMany` instance.
    fn try_find_coercion_lub<E>(
        &self,
        cause: &ObligationCause<'tcx>,
        exprs: &[E],
        prev_ty: Ty<'tcx>,
        new: &hir::Expr<'_>,
        new_ty: Ty<'tcx>,
    ) -> RelateResult<'tcx, Ty<'tcx>>
    where
        E: AsCoercionSite,
    {
        let prev_ty = self.resolve_vars_with_obligations(prev_ty);
        let new_ty = self.resolve_vars_with_obligations(new_ty);
        debug!("coercion::try_find_coercion_lub({:?}, {:?})", prev_ty, new_ty);
        // Special-case that coercion alone cannot handle:
        // Two function item types of differing IDs or InternalSubsts.
        if let (&ty::FnDef(..), &ty::FnDef(..)) = (&prev_ty.kind, &new_ty.kind) {
            // Don't reify if the function types have a LUB, i.e., they
            // are the same function and their parameters have a LUB.
            let lub_ty = self
                .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
                .map(|ok| self.register_infer_ok_obligations(ok));
            if lub_ty.is_ok() {
                // We have a LUB of prev_ty and new_ty, just return it.
                return lub_ty;
            }
            // Different fn items: reify both sides to fn pointers.
            // The signature must match.
            let a_sig = prev_ty.fn_sig(self.tcx);
            let a_sig = self.normalize_associated_types_in(new.span, &a_sig);
            let b_sig = new_ty.fn_sig(self.tcx);
            let b_sig = self.normalize_associated_types_in(new.span, &b_sig);
            let sig = self
                .at(cause, self.param_env)
                .trace(prev_ty, new_ty)
                .lub(&a_sig, &b_sig)
                .map(|ok| self.register_infer_ok_obligations(ok))?;
            // Reify both sides and return the reified fn pointer type.
            let fn_ptr = self.tcx.mk_fn_ptr(sig);
            for expr in exprs.iter().map(|e| e.as_coercion_site()).chain(Some(new)) {
                // The only adjustment that can produce an fn item is
                // `NeverToAny`, so this should always be valid.
                self.apply_adjustments(
                    expr,
                    vec![Adjustment {
                        kind: Adjust::Pointer(PointerCast::ReifyFnPointer),
                        target: fn_ptr,
                    }],
                );
            }
            return Ok(fn_ptr);
        }
        // Configure a Coerce instance to compute the LUB.
        // We don't allow two-phase borrows on any autorefs this creates since we
        // probably aren't processing function arguments here and even if we were,
        // they're going to get autorefed again anyway and we can apply 2-phase borrows
        // at that time.
        let mut coerce = Coerce::new(self, cause.clone(), AllowTwoPhase::No);
        coerce.use_lub = true;
        // First try to coerce the new expression to the type of the previous ones,
        // but only if the new expression has no coercion already applied to it.
        let mut first_error = None;
        if !self.tables.borrow().adjustments().contains_key(new.hir_id) {
            let result = self.commit_if_ok(|_| coerce.coerce(new_ty, prev_ty));
            match result {
                Ok(ok) => {
                    let (adjustments, target) = self.register_infer_ok_obligations(ok);
                    self.apply_adjustments(new, adjustments);
                    return Ok(target);
                }
                // Remember this error: if the reverse direction also
                // fails, this is the error we report.
                Err(e) => first_error = Some(e),
            }
        }
        // Then try to coerce the previous expressions to the type of the new one.
        // This requires ensuring there are no coercions applied to *any* of the
        // previous expressions, other than noop reborrows (ignoring lifetimes).
        for expr in exprs {
            let expr = expr.as_coercion_site();
            let noop = match self.tables.borrow().expr_adjustments(expr) {
                &[Adjustment { kind: Adjust::Deref(_), .. }, Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mutbl_adj)), .. }] =>
                {
                    match self.node_ty(expr.hir_id).kind {
                        ty::Ref(_, _, mt_orig) => {
                            let mutbl_adj: hir::Mutability = mutbl_adj.into();
                            // Reborrow that we can safely ignore, because
                            // the next adjustment can only be a Deref
                            // which will be merged into it.
                            mutbl_adj == mt_orig
                        }
                        _ => false,
                    }
                }
                &[Adjustment { kind: Adjust::NeverToAny, .. }] | &[] => true,
                _ => false,
            };
            // A previous expression carries a non-noop coercion that we
            // cannot rewrite: give up on re-coercing and just take the LUB.
            if !noop {
                return self
                    .commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
                    .map(|ok| self.register_infer_ok_obligations(ok));
            }
        }
        match self.commit_if_ok(|_| coerce.coerce(prev_ty, new_ty)) {
            Err(_) => {
                // Avoid giving strange errors on failed attempts.
                if let Some(e) = first_error {
                    Err(e)
                } else {
                    self.commit_if_ok(|_| self.at(cause, self.param_env).lub(prev_ty, new_ty))
                        .map(|ok| self.register_infer_ok_obligations(ok))
                }
            }
            Ok(ok) => {
                // Coercing the previous expressions succeeded: apply the
                // same adjustments to every site seen so far.
                let (adjustments, target) = self.register_infer_ok_obligations(ok);
                for expr in exprs {
                    let expr = expr.as_coercion_site();
                    self.apply_adjustments(expr, adjustments.clone());
                }
                Ok(target)
            }
        }
    }
}
/// CoerceMany encapsulates the pattern you should use when you have
/// many expressions that are all getting coerced to a common
/// type. This arises, for example, when you have a match (the result
/// of each arm is coerced to a common type). It also arises in less
/// obvious places, such as when you have many `break foo` expressions
/// that target the same loop, or the various `return` expressions in
/// a function.
///
/// The basic protocol is as follows:
///
/// - Instantiate the `CoerceMany` with an initial `expected_ty`.
/// This will also serve as the "starting LUB". The expectation is
/// that this type is something which all of the expressions *must*
/// be coercible to. Use a fresh type variable if needed.
/// - For each expression whose result is to be coerced, invoke `coerce()` with.
/// - In some cases we wish to coerce "non-expressions" whose types are implicitly
/// unit. This happens for example if you have a `break` with no expression,
/// or an `if` with no `else`. In that case, invoke `coerce_forced_unit()`.
/// - `coerce()` and `coerce_forced_unit()` may report errors. They hide this
/// from you so that you don't have to worry your pretty head about it.
/// But if an error is reported, the final type will be `err`.
/// - Invoking `coerce()` may cause us to go and adjust the "adjustments" on
/// previously coerced expressions.
/// - When all done, invoke `complete()`. This will return the LUB of
/// all your expressions.
/// - WARNING: I don't believe this final type is guaranteed to be
/// related to your initial `expected_ty` in any particular way,
/// although it will typically be a subtype, so you should check it.
/// - Invoking `complete()` may cause us to go and adjust the "adjustments" on
/// previously coerced expressions.
///
/// Example:
///
/// ```
/// let mut coerce = CoerceMany::new(expected_ty);
/// for expr in exprs {
/// let expr_ty = fcx.check_expr_with_expectation(expr, expected);
/// coerce.coerce(fcx, &cause, expr, expr_ty);
/// }
/// let final_ty = coerce.complete(fcx);
/// ```
pub struct CoerceMany<'tcx, 'exprs, E: AsCoercionSite> {
    /// The type supplied at construction, to which all expressions must
    /// be coercible; also serves as the starting LUB.
    expected_ty: Ty<'tcx>,
    /// Current best-guess merged (LUB) type; `None` until the first
    /// expression or forced unit has been processed.
    final_ty: Option<Ty<'tcx>>,
    /// The coercion sites seen so far — either buffered dynamically or
    /// supplied up front by the caller.
    expressions: Expressions<'tcx, 'exprs, E>,
    /// Number of coercion sites successfully processed so far.
    pushed: usize,
}
/// The type of a `CoerceMany` that is storing up the expressions into
/// a buffer. We use this in `check/mod.rs` for things like `break`.
pub type DynamicCoerceMany<'tcx> = CoerceMany<'tcx, 'tcx, &'tcx hir::Expr<'tcx>>;
/// How the set of coercion sites is stored.
enum Expressions<'tcx, 'exprs, E: AsCoercionSite> {
    /// Sites are accumulated into an owned buffer as they are coerced.
    Dynamic(Vec<&'tcx hir::Expr<'tcx>>),
    /// Sites were provided as a pre-existing slice by the caller.
    UpFront(&'exprs [E]),
}
impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
/// The usual case; collect the set of expressions dynamically.
/// If the full set of coercion sites is known before hand,
/// consider `with_coercion_sites()` instead to avoid allocation.
    pub fn new(expected_ty: Ty<'tcx>) -> Self {
        Self::make(expected_ty, Expressions::Dynamic(vec![]))
    }
    /// As an optimization, you can create a `CoerceMany` with a
    /// pre-existing slice of expressions. In this case, you are
    /// expected to pass each element in the slice to `coerce(...)` in
    /// order. This is used with arrays in particular to avoid
    /// needlessly cloning the slice.
    pub fn with_coercion_sites(expected_ty: Ty<'tcx>, coercion_sites: &'exprs [E]) -> Self {
        Self::make(expected_ty, Expressions::UpFront(coercion_sites))
    }
    /// Shared constructor backing `new` and `with_coercion_sites`.
    fn make(expected_ty: Ty<'tcx>, expressions: Expressions<'tcx, 'exprs, E>) -> Self {
        CoerceMany { expected_ty, final_ty: None, expressions, pushed: 0 }
    }
/// Returns the "expected type" with which this coercion was
/// constructed. This represents the "downward propagated" type
/// that was given to us at the start of typing whatever construct
/// we are typing (e.g., the match expression).
///
/// Typically, this is used as the expected type when
/// type-checking each of the alternative expressions whose types
/// we are trying to merge.
    pub fn expected_ty(&self) -> Ty<'tcx> {
        self.expected_ty
    }
    /// Returns the current "merged type", representing our best-guess
    /// at the LUB of the expressions we've seen so far (if any). This
    /// isn't *final* until you call `complete()`, which will return
    /// the merged type.
    pub fn merged_ty(&self) -> Ty<'tcx> {
        // Before any expression is processed, the merged type is just the
        // expected type.
        self.final_ty.unwrap_or(self.expected_ty)
    }
/// Indicates that the value generated by `expression`, which is
/// of type `expression_ty`, is one of the possibilities that we
/// could coerce from. This will record `expression`, and later
/// calls to `coerce` may come back and add adjustments and things
/// if necessary.
    pub fn coerce<'a>(
        &mut self,
        fcx: &FnCtxt<'a, 'tcx>,
        cause: &ObligationCause<'tcx>,
        expression: &'tcx hir::Expr<'tcx>,
        expression_ty: Ty<'tcx>,
    ) {
        // Delegate to the shared engine: no error augmentation, and the
        // expression (not the merged type) is labeled as "found".
        self.coerce_inner(fcx, cause, Some(expression), expression_ty, None, false)
    }
/// Indicates that one of the inputs is a "forced unit". This
/// occurs in a case like `if foo { ... };`, where the missing else
/// generates a "forced unit". Another example is a `loop { break;
/// }`, where the `break` has no argument expression. We treat
/// these cases slightly differently for error-reporting
/// purposes. Note that these tend to correspond to cases where
/// the `()` expression is implicit in the source, and hence we do
/// not take an expression argument.
///
/// The `augment_error` gives you a chance to extend the error
/// message, in case any results (e.g., we use this to suggest
/// removing a `;`).
    pub fn coerce_forced_unit<'a>(
        &mut self,
        fcx: &FnCtxt<'a, 'tcx>,
        cause: &ObligationCause<'tcx>,
        augment_error: &mut dyn FnMut(&mut DiagnosticBuilder<'_>),
        label_unit_as_expected: bool,
    ) {
        // Delegate to the shared engine with no expression and an
        // implicit `()` type (the "forced unit" case).
        self.coerce_inner(
            fcx,
            cause,
            None,
            fcx.tcx.mk_unit(),
            Some(augment_error),
            label_unit_as_expected,
        )
    }
/// The inner coercion "engine". If `expression` is `None`, this
/// is a forced-unit case, and hence `expression_ty` must be
/// `Nil`.
    fn coerce_inner<'a>(
        &mut self,
        fcx: &FnCtxt<'a, 'tcx>,
        cause: &ObligationCause<'tcx>,
        expression: Option<&'tcx hir::Expr<'tcx>>,
        mut expression_ty: Ty<'tcx>,
        augment_error: Option<&mut dyn FnMut(&mut DiagnosticBuilder<'_>)>,
        label_expression_as_expected: bool,
    ) {
        // Incorporate whatever type inference information we have
        // until now; in principle we might also want to process
        // pending obligations, but doing so should only improve
        // compatibility (hopefully that is true) by helping us
        // uncover never types better.
        if expression_ty.is_ty_var() {
            expression_ty = fcx.infcx.shallow_resolve(expression_ty);
        }
        // If we see any error types, just propagate that error
        // upwards.
        if expression_ty.references_error() || self.merged_ty().references_error() {
            self.final_ty = Some(fcx.tcx.types.err);
            return;
        }
        // Handle the actual type unification etc.
        let result = if let Some(expression) = expression {
            if self.pushed == 0 {
                // Special-case the first expression we are coercing.
                // To be honest, I'm not entirely sure why we do this.
                // We don't allow two-phase borrows, see comment in try_find_coercion_lub for why
                fcx.try_coerce(expression, expression_ty, self.expected_ty, AllowTwoPhase::No)
            } else {
                // Later expressions: compute the LUB with everything seen
                // so far, passing the already-processed coercion sites.
                match self.expressions {
                    Expressions::Dynamic(ref exprs) => fcx.try_find_coercion_lub(
                        cause,
                        exprs,
                        self.merged_ty(),
                        expression,
                        expression_ty,
                    ),
                    Expressions::UpFront(ref coercion_sites) => fcx.try_find_coercion_lub(
                        cause,
                        &coercion_sites[0..self.pushed],
                        self.merged_ty(),
                        expression,
                        expression_ty,
                    ),
                }
            }
        } else {
            // this is a hack for cases where we default to `()` because
            // the expression etc has been omitted from the source. An
            // example is an `if let` without an else:
            //
            //     if let Some(x) = ... { }
            //
            // we wind up with a second match arm that is like `_ =>
            // ()`.  That is the case we are considering here. We take
            // a different path to get the right "expected, found"
            // message and so forth (and because we know that
            // `expression_ty` will be unit).
            //
            // Another example is `break` with no argument expression.
            assert!(expression_ty.is_unit(), "if let hack without unit type");
            fcx.at(cause, fcx.param_env)
                .eq_exp(label_expression_as_expected, expression_ty, self.merged_ty())
                .map(|infer_ok| {
                    fcx.register_infer_ok_obligations(infer_ok);
                    expression_ty
                })
        };
        match result {
            Ok(v) => {
                // Record the new merged type and remember the site so
                // later calls can revisit its adjustments.
                self.final_ty = Some(v);
                if let Some(e) = expression {
                    match self.expressions {
                        Expressions::Dynamic(ref mut buffer) => buffer.push(e),
                        Expressions::UpFront(coercion_sites) => {
                            // if the user gave us an array to validate, check that we got
                            // the next expression in the list, as expected
                            assert_eq!(
                                coercion_sites[self.pushed].as_coercion_site().hir_id,
                                e.hir_id
                            );
                        }
                    }
                    self.pushed += 1;
                }
            }
            Err(coercion_error) => {
                let (expected, found) = if label_expression_as_expected {
                    // In the case where this is a "forced unit", like
                    // `break`, we want to call the `()` "expected"
                    // since it is implied by the syntax.
                    // (Note: not all force-units work this way.)
                    (expression_ty, self.final_ty.unwrap_or(self.expected_ty))
                } else {
                    // Otherwise, the "expected" type for error
                    // reporting is the current unification type,
                    // which is basically the LUB of the expressions
                    // we've seen so far (combined with the expected
                    // type)
                    (self.final_ty.unwrap_or(self.expected_ty), expression_ty)
                };
                let mut err;
                let mut unsized_return = false;
                // Build a cause-specific diagnostic; return-related causes
                // get extra suggestions about the fn's return type.
                match cause.code {
                    ObligationCauseCode::ReturnNoExpression => {
                        err = struct_span_err!(
                            fcx.tcx.sess,
                            cause.span,
                            E0069,
                            "`return;` in a function whose return type is not `()`"
                        );
                        err.span_label(cause.span, "return type is not `()`");
                    }
                    ObligationCauseCode::BlockTailExpression(blk_id) => {
                        let parent_id = fcx.tcx.hir().get_parent_node(blk_id);
                        err = self.report_return_mismatched_types(
                            cause,
                            expected,
                            found,
                            coercion_error,
                            fcx,
                            parent_id,
                            expression.map(|expr| (expr, blk_id)),
                        );
                        if !fcx.tcx.features().unsized_locals {
                            unsized_return = self.is_return_ty_unsized(fcx, blk_id);
                        }
                    }
                    ObligationCauseCode::ReturnValue(id) => {
                        err = self.report_return_mismatched_types(
                            cause,
                            expected,
                            found,
                            coercion_error,
                            fcx,
                            id,
                            None,
                        );
                        if !fcx.tcx.features().unsized_locals {
                            let id = fcx.tcx.hir().get_parent_node(id);
                            unsized_return = self.is_return_ty_unsized(fcx, id);
                        }
                    }
                    _ => {
                        err = fcx.report_mismatched_types(cause, expected, found, coercion_error);
                    }
                }
                if let Some(augment_error) = augment_error {
                    augment_error(&mut err);
                }
                if let Some(expr) = expression {
                    fcx.emit_coerce_suggestions(&mut err, expr, found, expected);
                }
                // Error possibly reported in `check_assign` so avoid emitting error again.
                let assign_to_bool = expression
                    // #67273: Use initial expected type as opposed to `expected`.
                    // Otherwise we end up using prior coercions in e.g. a `match` expression:
                    // ```
                    // match i {
                    //     0 => true, // Because of this...
                    //     1 => i = 1, // ...`expected == bool` now, but not when checking `i = 1`.
                    //     _ => (),
                    // };
                    // ```
                    .filter(|e| fcx.is_assign_to_bool(e, self.expected_ty()))
                    .is_some();
                err.emit_unless(assign_to_bool || unsized_return);
                self.final_ty = Some(fcx.tcx.types.err);
            }
        }
    }
    /// Builds (without emitting) the mismatched-types diagnostic for a
    /// return-position coercion failure, attaching tail-expression and
    /// missing-return-type suggestions where they apply.
    fn report_return_mismatched_types<'a>(
        &self,
        cause: &ObligationCause<'tcx>,
        expected: Ty<'tcx>,
        found: Ty<'tcx>,
        ty_err: TypeError<'tcx>,
        fcx: &FnCtxt<'a, 'tcx>,
        id: hir::HirId,
        expression: Option<(&'tcx hir::Expr<'tcx>, hir::HirId)>,
    ) -> DiagnosticBuilder<'a> {
        let mut err = fcx.report_mismatched_types(cause, expected, found, ty_err);
        let mut pointing_at_return_type = false;
        let mut fn_output = None;
        // Verify that this is a tail expression of a function, otherwise the
        // label pointing out the cause for the type coercion will be wrong
        // as prior return coercions would not be relevant (#57664).
        let parent_id = fcx.tcx.hir().get_parent_node(id);
        let fn_decl = if let Some((expr, blk_id)) = expression {
            pointing_at_return_type = fcx.suggest_mismatched_types_on_tail(
                &mut err, expr, expected, found, cause.span, blk_id,
            );
            let parent = fcx.tcx.hir().get(parent_id);
            // If this failure comes from an `if`/`match` arm whose expected
            // type is `()`, suggest adding a semicolon instead.
            if let (Some(match_expr), true, false) = (
                fcx.tcx.hir().get_match_if_cause(expr.hir_id),
                expected.is_unit(),
                pointing_at_return_type,
            ) {
                if match_expr.span.desugaring_kind().is_none() {
                    err.span_label(match_expr.span, "expected this to be `()`");
                    fcx.suggest_semicolon_at_end(match_expr.span, &mut err);
                }
            }
            fcx.get_node_fn_decl(parent).map(|(fn_decl, _, is_main)| (fn_decl, is_main))
        } else {
            fcx.get_fn_decl(parent_id)
        };
        if let (Some((fn_decl, can_suggest)), _) = (fn_decl, pointing_at_return_type) {
            if expression.is_none() {
                // `return;` with no expression: suggest declaring a
                // return type if one is missing.
                pointing_at_return_type |= fcx.suggest_missing_return_type(
                    &mut err,
                    &fn_decl,
                    expected,
                    found,
                    can_suggest,
                );
            }
            if !pointing_at_return_type {
                fn_output = Some(&fn_decl.output); // `impl Trait` return type
            }
        }
        // If there is a recorded return-coercion span, explain how the
        // `impl Trait` return type constrains all returned values.
        if let (Some(sp), Some(fn_output)) = (fcx.ret_coercion_span.borrow().as_ref(), fn_output) {
            self.add_impl_trait_explanation(&mut err, fcx, expected, *sp, fn_output);
        }
        err
    }
    /// Annotates `err` with an explanation of why all values returned from
    /// an `impl Trait` function must have the same type, and suggests
    /// `Box<dyn Trait>` when the traits involved are object safe.
    fn add_impl_trait_explanation<'a>(
        &self,
        err: &mut DiagnosticBuilder<'a>,
        fcx: &FnCtxt<'a, 'tcx>,
        expected: Ty<'tcx>,
        sp: Span,
        fn_output: &hir::FnRetTy<'_>,
    ) {
        let return_sp = fn_output.span();
        err.span_label(return_sp, "expected because this return type...");
        err.span_label(
            sp,
            format!("...is found to be `{}` here", fcx.resolve_vars_with_obligations(expected)),
        );
        let impl_trait_msg = "for information on `impl Trait`, see \
                <https://doc.rust-lang.org/book/ch10-02-traits.html\
                #returning-types-that-implement-traits>";
        let trait_obj_msg = "for information on trait objects, see \
                <https://doc.rust-lang.org/book/ch17-02-trait-objects.html\
                #using-trait-objects-that-allow-for-values-of-different-types>";
        err.note("to return `impl Trait`, all returned values must be of the same type");
        err.note(impl_trait_msg);
        let snippet = fcx
            .tcx
            .sess
            .source_map()
            .span_to_snippet(return_sp)
            .unwrap_or_else(|_| "dyn Trait".to_string());
        let mut snippet_iter = snippet.split_whitespace();
        let has_impl = snippet_iter.next().map_or(false, |s| s == "impl");
        // Only suggest `Box<dyn Trait>` if `Trait` in `impl Trait` is object safe.
        let mut is_object_safe = false;
        if let hir::FnRetTy::Return(ty) = fn_output {
            // Get the return type.
            if let hir::TyKind::Def(..) = ty.kind {
                let ty = AstConv::ast_ty_to_ty(fcx, ty);
                // Get the `impl Trait`'s `DefId`.
                if let ty::Opaque(def_id, _) = ty.kind {
                    let hir_id = fcx.tcx.hir().as_local_hir_id(def_id).unwrap();
                    // Get the `impl Trait`'s `Item` so that we can get its trait bounds and
                    // get the `Trait`'s `DefId`.
                    if let hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds, .. }) =
                        fcx.tcx.hir().expect_item(hir_id).kind
                    {
                        // Are all of this `impl Trait`'s traits object safe?
                        is_object_safe = bounds.iter().all(|bound| {
                            bound
                                .trait_ref()
                                .and_then(|t| t.trait_def_id())
                                .map_or(false, |def_id| {
                                    fcx.tcx.object_safety_violations(def_id).is_empty()
                                })
                        })
                    }
                }
            }
        };
        if has_impl {
            // NOTE(review): `&snippet[5..]` assumes the snippet starts with
            // the 5 bytes `impl ` — `has_impl` only checks the first
            // whitespace-separated token, so e.g. `impl\nTrait` would slice
            // mid-token; confirm this cannot occur for return-type spans.
            if is_object_safe {
                err.help(&format!(
                    "you can instead return a boxed trait object using `Box<dyn {}>`",
                    &snippet[5..]
                ));
            } else {
                err.help(&format!(
                    "if the trait `{}` were object safe, you could return a boxed trait object",
                    &snippet[5..]
                ));
            }
            err.note(trait_obj_msg);
        }
        err.help("alternatively, create a new `enum` with a variant for each returned type");
    }
fn is_return_ty_unsized(&self, fcx: &FnCtxt<'a, 'tcx>, blk_id: hir::HirId) -> bool {
if let Some((fn_decl, _)) = fcx.get_fn_decl(blk_id) {
if let hir::FnRetTy::Return(ty) = fn_decl.output {
let ty = AstConv::ast_ty_to_ty(fcx, ty);
if let ty::Dynamic(..) = ty.kind {
return true;
}
}
}
false
}
pub fn complete<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Ty<'tcx> {
if let Some(final_ty) = self.final_ty {
final_ty
} else {
// If we only had inputs that were of type `!` (or no
// inputs at all), then the final type is `!`.
assert_eq!(self.pushed, 0);
fcx.tcx.types.never
}
}
}
/// Something that can be converted into an expression to which we can
/// apply a coercion.
pub trait AsCoercionSite {
    /// Returns the expression that acts as this value's coercion site.
    fn as_coercion_site(&self) -> &hir::Expr<'_>;
}
// An expression is trivially its own coercion site.
impl AsCoercionSite for hir::Expr<'_> {
    fn as_coercion_site(&self) -> &hir::Expr<'_> {
        self
    }
}
// References delegate to the referent.
impl<'a, T> AsCoercionSite for &'a T
where
    T: AsCoercionSite,
{
    fn as_coercion_site(&self) -> &hir::Expr<'_> {
        (**self).as_coercion_site()
    }
}
// `!` has no values, so this impl can never actually be called; it exists
// so `!` can be used as a site type parameter.
impl AsCoercionSite for ! {
    fn as_coercion_site(&self) -> &hir::Expr<'_> {
        unreachable!()
    }
}
// A match arm's coercion site is its body expression.
impl AsCoercionSite for hir::Arm<'_> {
    fn as_coercion_site(&self) -> &hir::Expr<'_> {
        &self.body
    }
}
| 42.713902 | 135 | 0.531297 |
fcc125cda95531a0043f2f63c5972d985590dcd1 | 3,178 | // Copyright 2014-2017 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use] extern crate html5ever;
use std::io;
use std::default::Default;
use std::collections::HashMap;
use std::borrow::Cow;
use html5ever::{Attribute, QualName, ExpandedName};
use html5ever::parse_document;
use html5ever::tree_builder::{TreeSink, QuirksMode, NodeOrText, ElementFlags};
use html5ever::tendril::*;
/// Minimal `TreeSink` state: hands out numeric node ids and remembers
/// the qualified names of the ids that denote elements.
struct Sink {
    // Next id to hand out; `main` seeds it with 1 and `get_id` advances
    // it by 2, leaving `id + 1` free for template-contents handles.
    next_id: usize,
    // Qualified names for ids that denote elements.
    names: HashMap<usize, QualName>,
}
impl Sink {
    /// Returns a fresh node id. Ids advance in steps of two so that
    /// `id + 1` stays available as a template's contents handle (see
    /// `get_template_contents`).
    fn get_id(&mut self) -> usize {
        let current = self.next_id;
        self.next_id = current + 2;
        current
    }
}
impl TreeSink for Sink {
    // Nodes are represented purely by their numeric id.
    type Handle = usize;
    type Output = Self;
    fn finish(self) -> Self { self }
    // The document node is always id 0 (ids from `get_id` start at 1).
    fn get_document(&mut self) -> usize {
        0
    }
    // A template's contents handle is its own id plus one (the id space
    // is allocated in steps of two to keep that slot free).
    fn get_template_contents(&mut self, target: &usize) -> usize {
        if let Some(expanded_name!(html "template")) = self.names.get(&target).map(|n| n.expanded()) {
            target + 1
        } else {
            panic!("not a template element")
        }
    }
    fn same_node(&self, x: &usize, y: &usize) -> bool {
        x == y
    }
    fn elem_name(&self, target: &usize) -> ExpandedName {
        self.names.get(target).expect("not an element").expanded()
    }
    // Allocates an id and records the element's name; attributes and
    // flags are discarded by this no-op sink.
    fn create_element(&mut self, name: QualName, _: Vec<Attribute>, _: ElementFlags) -> usize {
        let id = self.get_id();
        self.names.insert(id, name);
        id
    }
    fn create_comment(&mut self, _text: StrTendril) -> usize {
        self.get_id()
    }
    #[allow(unused_variables)]
    fn create_pi(&mut self, target: StrTendril, value: StrTendril) -> usize {
        unimplemented!()
    }
    // The remaining tree-mutation callbacks are deliberate no-ops: this
    // sink only demonstrates the TreeSink interface and builds no tree.
    fn append_before_sibling(&mut self,
            _sibling: &usize,
            _new_node: NodeOrText<usize>) { }
    fn append_based_on_parent_node(&mut self,
            _element: &usize,
            _prev_element: &usize,
            _new_node: NodeOrText<usize>) { }
    fn parse_error(&mut self, _msg: Cow<'static, str>) { }
    fn set_quirks_mode(&mut self, _mode: QuirksMode) { }
    fn append(&mut self, _parent: &usize, _child: NodeOrText<usize>) { }
    fn append_doctype_to_document(&mut self, _: StrTendril, _: StrTendril, _: StrTendril) { }
    fn add_attrs_if_missing(&mut self, target: &usize, _attrs: Vec<Attribute>) {
        assert!(self.names.contains_key(&target), "not an element");
    }
    fn remove_from_parent(&mut self, _target: &usize) { }
    fn reparent_children(&mut self, _node: &usize, _new_parent: &usize) { }
    fn mark_script_already_started(&mut self, _node: &usize) { }
}
/// Parses an HTML document from stdin through the no-op `Sink`,
/// exercising the `TreeSink` callbacks without building a tree.
fn main() {
    // Document node is implicitly 0, so element ids start at 1.
    let sink = Sink {
        next_id: 1,
        names: HashMap::new(),
    };
    let stdin = io::stdin();
    // Decode stdin as UTF-8 and drive the parser to completion.
    parse_document(sink, Default::default())
        .from_utf8()
        .read_from(&mut stdin.lock())
        .unwrap();
}
| 29.425926 | 102 | 0.626809 |
e4b97b947bcaf1d9d57acc80ca83c6544580cb6a | 394 | use crate::Module;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Clone, Debug, Deserialize, Serialize)]
/// A package: a collection of modules keyed by name, ordered for
/// deterministic serialization.
pub struct Package {
    modules: BTreeMap<String, Module>,
}
impl Package {
pub fn new(modules: BTreeMap<String, Module>) -> Self {
Self { modules }
}
pub fn modules(&self) -> &BTreeMap<String, Module> {
&self.modules
}
}
| 20.736842 | 59 | 0.64467 |
d7c5a910618f278bc978dfb1e51becc2c2fd3f26 | 433 | #![allow(non_camel_case_types, dead_code)]
use asn1_codecs_derive::AperCodec;
/// ASN.1 OCTET STRING, size-constrained to 1..=32 bytes per the `#[asn]`
/// attribute (not size-extensible); APER codec derived.
#[derive(Debug, AperCodec)]
#[asn(
    type = "OCTET-STRING",
    sz_extensible = false,
    sz_lb = "1",
    sz_ub = "32"
)]
pub struct WLANName(Vec<u8>);
/// ASN.1 SEQUENCE OF `WLANName`, 1..=4 entries (not size-extensible).
#[derive(Debug, AperCodec)]
#[asn(type = "SEQUENCE-OF", sz_extensible = false, sz_lb = "1", sz_ub = "4")]
struct WLANMeasConfigNameList(Vec<WLANName>);
/// Smoke-test entry point: merely proves the derives above compile.
fn main() {
    eprintln!("SequenceOf");
}
| 20.619048 | 77 | 0.658199 |
2f8d5aa6621f63a11eb8d24660161509f102a8c3 | 1,188 | // <recommend-two>
use actix_web::{
error, get,
http::{header::ContentType, StatusCode},
App, HttpResponse, HttpServer,
};
use derive_more::{Display, Error};
/// Application-level errors surfaced to HTTP clients.
#[derive(Debug, Display, Error)]
enum UserError {
    /// Generic internal failure; the `Display` text is exactly what the
    /// client receives in the response body.
    #[display(fmt = "An internal error occurred. Please try again later.")]
    InternalError,
}
impl error::ResponseError for UserError {
    /// Maps each error variant to its HTTP status code.
    fn status_code(&self) -> StatusCode {
        match self {
            UserError::InternalError => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }

    /// Renders the error as an HTML response whose body is the Display text.
    fn error_response(&self) -> HttpResponse {
        let mut builder = HttpResponse::build(self.status_code());
        builder.insert_header(ContentType::html());
        builder.body(self.to_string())
    }
}
/// Root handler: runs the fallible operation, translating any failure into
/// the opaque `InternalError` so internals are not leaked to the client.
#[get("/")]
async fn index() -> Result<&'static str, UserError> {
    if do_thing_that_fails().is_err() {
        return Err(UserError::InternalError);
    }
    Ok("success!")
}
// </recommend-two>
/// Stand-in for a fallible operation; always fails with a generic I/O error.
fn do_thing_that_fails() -> Result<(), std::io::Error> {
    let failure = std::io::Error::new(std::io::ErrorKind::Other, "some error");
    Err(failure)
}
/// Boots the HTTP server on 127.0.0.1:8080 serving only the index route.
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let server = HttpServer::new(|| App::new().service(index));
    server.bind(("127.0.0.1", 8080))?.run().await
}
| 25.276596 | 75 | 0.611111 |
0814ac72ac63b8bc3a82b29dc8c22dbac82934db | 17,327 | use crate::config::Config;
use anyhow::Result;
use arbitrary::{Arbitrary, Unstructured};
use cranelift::codegen::ir::types::*;
use cranelift::codegen::ir::{
AbiParam, Block, ExternalName, Function, JumpTable, Opcode, Signature, Type, Value,
};
use cranelift::codegen::isa::CallConv;
use cranelift::frontend::{FunctionBuilder, FunctionBuilderContext, Variable};
use cranelift::prelude::{EntityRef, InstBuilder, IntCC, JumpTableData};
use std::ops::RangeInclusive;
type BlockSignature = Vec<Type>;
/// Inserts a zero-operand instruction (e.g. `nop`) into the current block.
fn insert_opcode_arity_0(
    _fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    _args: &'static [Type],
    _rets: &'static [Type],
) -> Result<()> {
    // Nullary instructions take no operands and produce no results, so no
    // variables are consumed or defined here.
    let ins = builder.ins();
    ins.NullAry(opcode, INVALID);
    Ok(())
}
/// Inserts a two-operand instruction, feeding it values read from the
/// variable pool and storing each result back into a variable of matching
/// type so later instructions can consume it.
fn insert_opcode_arity_2(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    opcode: Opcode,
    args: &'static [Type],
    rets: &'static [Type],
) -> Result<()> {
    // Operand selection order is unchanged: it consumes fuzz-input bytes.
    let lhs_var = fgen.get_variable_of_type(args[0])?;
    let lhs = builder.use_var(lhs_var);
    let rhs_var = fgen.get_variable_of_type(args[1])?;
    let rhs = builder.use_var(rhs_var);
    let typevar = rets[0];
    let (inst, dfg) = builder.ins().Binary(opcode, typevar, lhs, rhs);
    let results = dfg.inst_results(inst).to_vec();
    for (val, ty) in results.into_iter().zip(rets) {
        let var = fgen.get_variable_of_type(*ty)?;
        builder.def_var(var, val);
    }
    Ok(())
}
/// Signature shared by all opcode-insertion helpers: each takes the
/// generator, the builder, the opcode, and its argument/result types.
type OpcodeInserter = fn(
    fgen: &mut FunctionGenerator,
    builder: &mut FunctionBuilder,
    Opcode,
    &'static [Type],
    &'static [Type],
) -> Result<()>;
// TODO: Derive this from the `cranelift-meta` generator.
/// The opcodes the fuzzer may emit, each with its operand types, result
/// types, and the helper that inserts it.
///
/// NOTE: entries are picked by index from the raw fuzz input, so reordering
/// this table changes which function a given fuzz input generates.
const OPCODE_SIGNATURES: &'static [(
    Opcode,
    &'static [Type], // Args
    &'static [Type], // Rets
    OpcodeInserter,
)] = &[
    (Opcode::Nop, &[], &[], insert_opcode_arity_0),
    // Iadd
    (Opcode::Iadd, &[I8, I8], &[I8], insert_opcode_arity_2),
    (Opcode::Iadd, &[I16, I16], &[I16], insert_opcode_arity_2),
    (Opcode::Iadd, &[I32, I32], &[I32], insert_opcode_arity_2),
    (Opcode::Iadd, &[I64, I64], &[I64], insert_opcode_arity_2),
    // Isub
    (Opcode::Isub, &[I8, I8], &[I8], insert_opcode_arity_2),
    (Opcode::Isub, &[I16, I16], &[I16], insert_opcode_arity_2),
    (Opcode::Isub, &[I32, I32], &[I32], insert_opcode_arity_2),
    (Opcode::Isub, &[I64, I64], &[I64], insert_opcode_arity_2),
    // Imul
    (Opcode::Imul, &[I8, I8], &[I8], insert_opcode_arity_2),
    (Opcode::Imul, &[I16, I16], &[I16], insert_opcode_arity_2),
    (Opcode::Imul, &[I32, I32], &[I32], insert_opcode_arity_2),
    (Opcode::Imul, &[I64, I64], &[I64], insert_opcode_arity_2),
    // Udiv
    (Opcode::Udiv, &[I8, I8], &[I8], insert_opcode_arity_2),
    (Opcode::Udiv, &[I16, I16], &[I16], insert_opcode_arity_2),
    (Opcode::Udiv, &[I32, I32], &[I32], insert_opcode_arity_2),
    (Opcode::Udiv, &[I64, I64], &[I64], insert_opcode_arity_2),
    // Sdiv
    (Opcode::Sdiv, &[I8, I8], &[I8], insert_opcode_arity_2),
    (Opcode::Sdiv, &[I16, I16], &[I16], insert_opcode_arity_2),
    (Opcode::Sdiv, &[I32, I32], &[I32], insert_opcode_arity_2),
    (Opcode::Sdiv, &[I64, I64], &[I64], insert_opcode_arity_2),
];
/// Generates a random Cranelift `Function` from structured fuzz input.
pub struct FunctionGenerator<'r, 'data>
where
    'data: 'r,
{
    // Source of "randomness": structured bytes supplied by the fuzzer.
    u: &'r mut Unstructured<'data>,
    // Knobs bounding how large the generated function may grow.
    config: &'r Config,
    // Pool of variables (with their types) available to instructions.
    vars: Vec<(Type, Variable)>,
    // Every block in the function together with its parameter types.
    blocks: Vec<(Block, BlockSignature)>,
    // Jump tables created in the function preamble, used by `br_table`.
    jump_tables: Vec<JumpTable>,
}
impl<'r, 'data> FunctionGenerator<'r, 'data>
where
    'data: 'r,
{
    /// Creates a generator that draws randomness from `u`, bounded by `config`.
    pub fn new(u: &'r mut Unstructured<'data>, config: &'r Config) -> Self {
        Self {
            u,
            config,
            vars: vec![],
            blocks: vec![],
            jump_tables: vec![],
        }
    }
    /// Generates a random value for config `param`
    fn param(&mut self, param: &RangeInclusive<usize>) -> Result<usize> {
        Ok(self.u.int_in_range(param.clone())?)
    }
    /// Picks a calling convention (currently always SystemV).
    fn generate_callconv(&mut self) -> Result<CallConv> {
        // TODO: Generate random CallConvs per target
        Ok(CallConv::SystemV)
    }
    /// Picks a random integer comparison condition code.
    fn generate_intcc(&mut self) -> Result<IntCC> {
        Ok(*self.u.choose(
            &[
                IntCC::Equal,
                IntCC::NotEqual,
                IntCC::SignedLessThan,
                IntCC::SignedGreaterThanOrEqual,
                IntCC::SignedGreaterThan,
                IntCC::SignedLessThanOrEqual,
                IntCC::UnsignedLessThan,
                IntCC::UnsignedGreaterThanOrEqual,
                IntCC::UnsignedGreaterThan,
                IntCC::UnsignedLessThanOrEqual,
                IntCC::Overflow,
                IntCC::NotOverflow,
            ][..],
        )?)
    }
    /// Picks a random scalar type from the currently supported set.
    fn generate_type(&mut self) -> Result<Type> {
        // TODO: It would be nice if we could get these directly from cranelift
        let scalars = [
            // IFLAGS, FFLAGS,
            B1, // B8, B16, B32, B64, B128,
            I8, I16, I32, I64,
            // I128,
            // F32, F64,
            // R32, R64,
        ];
        // TODO: vector types
        let ty = self.u.choose(&scalars[..])?;
        Ok(*ty)
    }
    /// Generates a plain ABI parameter of a random type.
    fn generate_abi_param(&mut self) -> Result<AbiParam> {
        // TODO: Generate more advanced abi params (structs/purposes/extensions/etc...)
        let ty = self.generate_type()?;
        Ok(AbiParam::new(ty))
    }
    /// Generates a function signature with random parameter and return counts
    /// bounded by the config.
    fn generate_signature(&mut self) -> Result<Signature> {
        let callconv = self.generate_callconv()?;
        let mut sig = Signature::new(callconv);
        for _ in 0..self.param(&self.config.signature_params)? {
            sig.params.push(self.generate_abi_param()?);
        }
        for _ in 0..self.param(&self.config.signature_rets)? {
            sig.returns.push(self.generate_abi_param()?);
        }
        Ok(sig)
    }
    /// Creates a new var
    fn create_var(&mut self, builder: &mut FunctionBuilder, ty: Type) -> Result<Variable> {
        let id = self.vars.len();
        let var = Variable::new(id);
        builder.declare_var(var, ty);
        self.vars.push((ty, var));
        Ok(var)
    }
    /// All pool variables whose type is exactly `ty`.
    fn vars_of_type(&self, ty: Type) -> Vec<Variable> {
        self.vars
            .iter()
            .filter(|(var_ty, _)| *var_ty == ty)
            .map(|(_, v)| *v)
            .collect()
    }
    /// Get a variable of type `ty` from the current function
    fn get_variable_of_type(&mut self, ty: Type) -> Result<Variable> {
        let opts = self.vars_of_type(ty);
        let var = self.u.choose(&opts[..])?;
        Ok(*var)
    }
    /// Generates an instruction(`iconst`/`fconst`/etc...) to introduce a constant value
    fn generate_const(&mut self, builder: &mut FunctionBuilder, ty: Type) -> Result<Value> {
        Ok(match ty {
            ty if ty.is_int() => {
                let imm64 = match ty {
                    I8 => self.u.arbitrary::<i8>()? as i64,
                    I16 => self.u.arbitrary::<i16>()? as i64,
                    I32 => self.u.arbitrary::<i32>()? as i64,
                    I64 => self.u.arbitrary::<i64>()?,
                    _ => unreachable!(),
                };
                builder.ins().iconst(ty, imm64)
            }
            ty if ty.is_bool() => builder.ins().bconst(B1, bool::arbitrary(self.u)?),
            _ => unimplemented!(),
        })
    }
    /// Chooses a random block which can be targeted by a jump / branch.
    /// This means any block that is not the first block.
    ///
    /// For convenience we also generate values that match the block's signature
    fn generate_target_block(
        &mut self,
        builder: &mut FunctionBuilder,
    ) -> Result<(Block, Vec<Value>)> {
        let block_targets = &self.blocks[1..];
        let (block, signature) = self.u.choose(block_targets)?.clone();
        let args = self.generate_values_for_signature(builder, signature.into_iter())?;
        Ok((block, args))
    }
    /// Valid blocks for jump tables have to have no parameters in the signature, and must also
    /// not be the first block.
    fn generate_valid_jumptable_target_blocks(&mut self) -> Vec<Block> {
        self.blocks[1..]
            .iter()
            .filter(|(_, sig)| sig.len() == 0)
            .map(|(b, _)| *b)
            .collect()
    }
    /// Reads, for each type in `signature`, the current value of a randomly
    /// chosen variable of that type.
    fn generate_values_for_signature<I: Iterator<Item = Type>>(
        &mut self,
        builder: &mut FunctionBuilder,
        signature: I,
    ) -> Result<Vec<Value>> {
        signature
            .map(|ty| {
                let var = self.get_variable_of_type(ty)?;
                let val = builder.use_var(var);
                Ok(val)
            })
            .collect()
    }
    /// Emits a `return` whose operands match the function's return types.
    fn generate_return(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let types: Vec<Type> = {
            let rets = &builder.func.signature.returns;
            rets.iter().map(|p| p.value_type).collect()
        };
        let vals = self.generate_values_for_signature(builder, types.into_iter())?;
        builder.ins().return_(&vals[..]);
        Ok(())
    }
    /// Emits an unconditional jump to a random non-entry block.
    fn generate_jump(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let (block, args) = self.generate_target_block(builder)?;
        builder.ins().jump(block, &args[..]);
        Ok(())
    }
    /// Generates a br_table into a random block
    fn generate_br_table(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let _type = *self.u.choose(&[I8, I16, I32, I64][..])?;
        let var = self.get_variable_of_type(_type)?;
        let val = builder.use_var(var);
        let valid_blocks = self.generate_valid_jumptable_target_blocks();
        let default_block = *self.u.choose(&valid_blocks[..])?;
        let jt = *self.u.choose(&self.jump_tables[..])?;
        builder.ins().br_table(val, default_block, jt);
        Ok(())
    }
    /// Generates a brz/brnz into a random block
    fn generate_br(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let (block, args) = self.generate_target_block(builder)?;
        let condbr_types = [
            I8, I16, I32, I64, // TODO: I128
            B1,
        ];
        let _type = *self.u.choose(&condbr_types[..])?;
        let var = self.get_variable_of_type(_type)?;
        let val = builder.use_var(var);
        if bool::arbitrary(self.u)? {
            builder.ins().brz(val, block, &args[..]);
        } else {
            builder.ins().brnz(val, block, &args[..]);
        }
        // After brz/brnz we must generate a jump
        self.generate_jump(builder)?;
        Ok(())
    }
    /// Emits a `br_icmp` conditional branch on two random same-typed values,
    /// followed by the mandatory fallthrough jump.
    fn generate_bricmp(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let (block, args) = self.generate_target_block(builder)?;
        let cond = self.generate_intcc()?;
        let bricmp_types = [
            I8, I16, I32, I64, // TODO: I128
        ];
        let _type = *self.u.choose(&bricmp_types[..])?;
        let lhs_var = self.get_variable_of_type(_type)?;
        let lhs_val = builder.use_var(lhs_var);
        let rhs_var = self.get_variable_of_type(_type)?;
        let rhs_val = builder.use_var(rhs_var);
        builder
            .ins()
            .br_icmp(cond, lhs_val, rhs_val, block, &args[..]);
        // After bricmp's we must generate a jump
        self.generate_jump(builder)?;
        Ok(())
    }
    /// We always need to exit safely out of a block.
    /// This either means a jump into another block or a return.
    fn finalize_block(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let gen = self.u.choose(
            &[
                Self::generate_bricmp,
                Self::generate_br,
                Self::generate_br_table,
                Self::generate_jump,
                Self::generate_return,
            ][..],
        )?;
        gen(self, builder)
    }
    /// Fills the current block with random instructions
    fn generate_instructions(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        for _ in 0..self.param(&self.config.instructions_per_block)? {
            let (op, args, rets, inserter) = *self.u.choose(OPCODE_SIGNATURES)?;
            inserter(self, builder, op, args, rets)?;
        }
        Ok(())
    }
    /// Creates the function's jump tables (consumed later by `br_table`).
    fn generate_jumptables(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let valid_blocks = self.generate_valid_jumptable_target_blocks();
        for _ in 0..self.param(&self.config.jump_tables_per_function)? {
            let mut jt_data = JumpTableData::new();
            for _ in 0..self.param(&self.config.jump_table_entries)? {
                let block = *self.u.choose(&valid_blocks[..])?;
                jt_data.push_entry(block);
            }
            self.jump_tables.push(builder.create_jump_table(jt_data));
        }
        Ok(())
    }
    /// Creates a random amount of blocks in this function
    fn generate_blocks(
        &mut self,
        builder: &mut FunctionBuilder,
        sig: &Signature,
    ) -> Result<Vec<(Block, BlockSignature)>> {
        let extra_block_count = self.param(&self.config.blocks_per_function)?;
        // We must always have at least one block, so we generate the "extra" blocks and add 1 for
        // the entry block.
        let block_count = 1 + extra_block_count;
        let blocks = (0..block_count)
            .map(|i| {
                let block = builder.create_block();
                // The first block has to have the function signature, but for the rest of them we generate
                // a random signature;
                if i == 0 {
                    builder.append_block_params_for_function_params(block);
                    Ok((block, sig.params.iter().map(|a| a.value_type).collect()))
                } else {
                    let sig = self.generate_block_signature()?;
                    sig.iter().for_each(|ty| {
                        builder.append_block_param(block, *ty);
                    });
                    Ok((block, sig))
                }
            })
            .collect::<Result<Vec<_>>>()?;
        Ok(blocks)
    }
    /// Generates a random list of parameter types for a non-entry block.
    fn generate_block_signature(&mut self) -> Result<BlockSignature> {
        let param_count = self.param(&self.config.block_signature_params)?;
        let mut params = Vec::with_capacity(param_count);
        for _ in 0..param_count {
            params.push(self.generate_type()?);
        }
        Ok(params)
    }
    /// Seeds the variable pool: one variable per function parameter plus a
    /// set of fresh variables initialized with random constants.
    fn build_variable_pool(&mut self, builder: &mut FunctionBuilder) -> Result<()> {
        let block = builder.current_block().unwrap();
        let func_params = builder.func.signature.params.clone();
        // Define variables for the function signature
        for (i, param) in func_params.iter().enumerate() {
            let var = self.create_var(builder, param.value_type)?;
            let block_param = builder.block_params(block)[i];
            builder.def_var(var, block_param);
        }
        // Create a pool of vars that are going to be used in this function
        for _ in 0..self.param(&self.config.vars_per_function)? {
            let ty = self.generate_type()?;
            let var = self.create_var(builder, ty)?;
            let value = self.generate_const(builder, ty)?;
            builder.def_var(var, value);
        }
        Ok(())
    }
    /// We generate a function in multiple stages:
    ///
    /// * First we generate a random number of empty blocks
    /// * Then we generate a random pool of variables to be used throughout the function
    /// * We then visit each block and generate random instructions
    ///
    /// Because we generate all blocks and variables up front we already know everything that
    /// we need when generating instructions (i.e. jump targets / variables)
    pub fn generate(mut self) -> Result<Function> {
        let sig = self.generate_signature()?;
        let mut fn_builder_ctx = FunctionBuilderContext::new();
        let mut func = Function::with_name_signature(ExternalName::user(0, 0), sig.clone());
        let mut builder = FunctionBuilder::new(&mut func, &mut fn_builder_ctx);
        self.blocks = self.generate_blocks(&mut builder, &sig)?;
        // Function preamble
        self.generate_jumptables(&mut builder)?;
        // Main instruction generation loop
        for (i, (block, block_sig)) in self.blocks.clone().iter().enumerate() {
            let is_block0 = i == 0;
            builder.switch_to_block(*block);
            if is_block0 {
                // The first block is special because we must create variables both for the
                // block signature and for the variable pool. Additionally, we must also define
                // initial values for all variables that are not the function signature.
                self.build_variable_pool(&mut builder)?;
            } else {
                // Define variables for the block params
                for (i, ty) in block_sig.iter().enumerate() {
                    let var = self.get_variable_of_type(*ty)?;
                    let block_param = builder.block_params(*block)[i];
                    builder.def_var(var, block_param);
                }
            }
            // Generate block instructions
            self.generate_instructions(&mut builder)?;
            self.finalize_block(&mut builder)?;
        }
        builder.seal_all_blocks();
        builder.finalize();
        Ok(func)
    }
}
| 34.515936 | 107 | 0.568592 |
9118874bc8659437349d2bcb1bd131e71a5eac6f | 4,163 | // Copyright 2020, The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
mod v5;
mod v6;
use log::*;
use tari_storage::lmdb_store::{LMDBDatabase, LMDBError};
const LOG_TARGET: &str = "comms::peer_manager::migrations";
pub(super) const MIGRATION_VERSION_KEY: u64 = u64::MAX;
/// Runs any outstanding peer-database migrations, bringing the database up to
/// the latest version recorded under `MIGRATION_VERSION_KEY`.
///
/// A brand new (empty) database is stamped with the latest version directly.
/// Failures to write the version counter are logged but do not abort,
/// preserving the previous best-effort behaviour.
pub fn migrate(database: &LMDBDatabase) -> Result<(), LMDBError> {
    // Add migrations here in version order
    let migrations = vec![v5::Migration.boxed(), v6::Migration.boxed()];
    if migrations.is_empty() {
        return Ok(());
    }
    let latest_version = migrations.last().unwrap().get_version();
    // If the database is empty there is nothing to migrate, so set it to the latest version
    if database.len()? == 0 {
        debug!(target: LOG_TARGET, "New database does not require migration");
        if let Err(err) = database.insert(&MIGRATION_VERSION_KEY, &latest_version) {
            error!(
                target: LOG_TARGET,
                "Failed to update migration counter: {}. ** Database may be corrupt **", err
            );
        }
        return Ok(());
    }
    // A missing counter on a non-empty database means "never migrated" (version 0).
    let mut version = database.get::<_, u32>(&MIGRATION_VERSION_KEY)?.unwrap_or(0);
    if version == latest_version {
        debug!(
            target: LOG_TARGET,
            "Database at version {}. No migration required.", latest_version
        );
        return Ok(());
    }
    debug!(
        target: LOG_TARGET,
        "Migrating database from version {} to {}", version, latest_version
    );
    // Apply each migration in order. Bounding the loop by `latest_version`
    // fixes a bug in the previous implementation, whose unconditional `loop`
    // always ran one extra iteration after the final migration and logged a
    // spurious "Migration N not found" error on every successful run.
    while version < latest_version {
        version += 1;
        match migrations.iter().find(|m| m.get_version() == version) {
            Some(migration) => {
                migration.migrate(database)?;
                if let Err(err) = database.insert(&MIGRATION_VERSION_KEY, &version) {
                    error!(
                        target: LOG_TARGET,
                        "Failed to update migration counter: {}. ** Database may be corrupt **", err
                    );
                }
                debug!(target: LOG_TARGET, "Migration {} complete", version);
            },
            None => {
                // A gap in the migration sequence: report it and bail out
                // without failing hard, as before.
                error!(
                    target: LOG_TARGET,
                    "Migration {} not found. Unable to migrate peer db", version
                );
                return Ok(());
            },
        }
    }
    Ok(())
}
/// A single schema migration against a database of type `T`.
trait Migration<T> {
    /// Error type returned by `migrate`.
    type Error;
    /// The version this migration brings the database to (matched against the
    /// target version in `migrate()` above).
    fn get_version(&self) -> u32;
    /// Applies the migration to `db`.
    fn migrate(&self, db: &T) -> Result<(), Self::Error>;
}
/// Convenience extension for turning concrete migrations into trait objects.
trait MigrationExt<T>: Migration<T> {
    /// Boxes `self` so differently-typed migrations can live in one `Vec`.
    fn boxed(self) -> Box<dyn Migration<T, Error = Self::Error>>
    where Self: Sized + 'static {
        Box::new(self)
    }
}
impl<T, U> MigrationExt<T> for U where U: Migration<T> {}
| 37.845455 | 118 | 0.633437 |
9030da1e44b91ab3b82f6ea8debbda6f463cb1fd | 51 | pub use self::foreach::Foreach;
pub mod foreach;
| 10.2 | 31 | 0.72549 |
ac828dd88c77fe9ed2945a35e60efc1eb74464ef | 34 | pub mod controller;
pub mod view;
| 11.333333 | 19 | 0.764706 |
390c23fe032f1be3f787a8677812708c491cfd87 | 16,343 | // This file is part of Substrate.
// Copyright (C) 2020-2021 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! A manual sealing engine: the engine listens for rpc calls to seal blocks and create forks.
//! This is suitable for a testing environment.
use futures::prelude::*;
use prometheus_endpoint::Registry;
use sc_client_api::backend::{Backend as ClientBackend, Finalizer};
use sc_consensus::{
block_import::{BlockImport, BlockImportParams, ForkChoiceStrategy},
import_queue::{BasicQueue, BoxBlockImport, Verifier},
};
use sp_blockchain::HeaderBackend;
use sp_consensus::{CacheKeyId, Environment, Proposer, SelectChain};
use sp_inherents::CreateInherentDataProviders;
use sp_runtime::{traits::Block as BlockT, ConsensusEngineId};
use std::{marker::PhantomData, sync::Arc};
mod error;
mod finalize_block;
mod seal_block;
pub mod consensus;
pub mod rpc;
pub use self::{
consensus::ConsensusDataProvider,
error::Error,
finalize_block::{finalize_block, FinalizeBlockParams},
rpc::{CreatedBlock, EngineCommand},
seal_block::{seal_block, SealBlockParams, MAX_PROPOSAL_DURATION},
};
use sc_transaction_pool_api::TransactionPool;
use sp_api::{ProvideRuntimeApi, TransactionFor};
/// The `ConsensusEngineId` of Manual Seal.
pub const MANUAL_SEAL_ENGINE_ID: ConsensusEngineId = [b'm', b'a', b'n', b'l'];
/// The verifier for the manual seal engine; instantly finalizes.
struct ManualSealVerifier;
#[async_trait::async_trait]
impl<B: BlockT> Verifier<B> for ManualSealVerifier {
    /// Accepts every block unconditionally: manual seal performs no consensus
    /// checks, so verification only normalises the import params.
    async fn verify(
        &mut self,
        mut block: BlockImportParams<B, ()>,
    ) -> Result<(BlockImportParams<B, ()>, Option<Vec<(CacheKeyId, Vec<u8>)>>), String> {
        // Finality is driven by explicit engine commands, not by import.
        block.finalized = false;
        block.fork_choice = Some(ForkChoiceStrategy::LongestChain);
        Ok((block, None))
    }
}
/// Instantiate the import queue for the manual seal consensus engine.
pub fn import_queue<Block, Transaction>(
    block_import: BoxBlockImport<Block, Transaction>,
    spawner: &impl sp_core::traits::SpawnEssentialNamed,
    registry: Option<&Registry>,
) -> BasicQueue<Block, Transaction>
where
    Block: BlockT,
    Transaction: Send + Sync + 'static,
{
    // Manual seal needs no real verification, so the queue is built with the
    // no-op verifier and no justification importer (`None`).
    let verifier = ManualSealVerifier;
    BasicQueue::new(verifier, block_import, None, spawner, registry)
}
/// Params required to start the manual sealing authorship task.
pub struct ManualSealParams<B: BlockT, BI, E, C: ProvideRuntimeApi<B>, TP, SC, CS, CIDP> {
	/// Block import instance for, well, importing blocks.
	pub block_import: BI,
	/// The environment we are producing blocks for.
	pub env: E,
	/// Client instance
	pub client: Arc<C>,
	/// Shared reference to the transaction pool.
	pub pool: Arc<TP>,
	/// Stream<Item = EngineCommands>, Basically the receiving end of a channel for sending
	/// commands to the authorship task.
	pub commands_stream: CS,
	/// SelectChain strategy.
	pub select_chain: SC,
	/// Digest provider for inclusion in blocks.
	pub consensus_data_provider:
		Option<Box<dyn ConsensusDataProvider<B, Transaction = TransactionFor<C, B>>>>,
	/// Something that can create the inherent data providers.
	pub create_inherent_data_providers: CIDP,
}
/// Params required to start the instant sealing authorship task.
pub struct InstantSealParams<B: BlockT, BI, E, C: ProvideRuntimeApi<B>, TP, SC, CIDP> {
	/// Block import instance for, well, importing blocks.
	pub block_import: BI,
	/// The environment we are producing blocks for.
	pub env: E,
	/// Client instance
	pub client: Arc<C>,
	/// Shared reference to the transaction pool.
	pub pool: Arc<TP>,
	/// SelectChain strategy.
	pub select_chain: SC,
	/// Digest provider for inclusion in blocks.
	pub consensus_data_provider:
		Option<Box<dyn ConsensusDataProvider<B, Transaction = TransactionFor<C, B>>>>,
	/// Something that can create the inherent data providers.
	pub create_inherent_data_providers: CIDP,
}
/// Creates the background authorship task for the manual seal engine.
///
/// Drives `commands_stream` to completion: each `SealNewBlock` command
/// proposes (and optionally finalizes) a block; each `FinalizeBlock`
/// command finalizes an already-imported block.
pub async fn run_manual_seal<B, BI, CB, E, C, TP, SC, CS, CIDP>(
	ManualSealParams {
		mut block_import,
		mut env,
		client,
		pool,
		mut commands_stream,
		select_chain,
		consensus_data_provider,
		create_inherent_data_providers,
	}: ManualSealParams<B, BI, E, C, TP, SC, CS, CIDP>,
) where
	B: BlockT + 'static,
	BI: BlockImport<B, Error = sp_consensus::Error, Transaction = sp_api::TransactionFor<C, B>>
		+ Send
		+ Sync
		+ 'static,
	C: HeaderBackend<B> + Finalizer<B, CB> + ProvideRuntimeApi<B> + 'static,
	CB: ClientBackend<B> + 'static,
	E: Environment<B> + 'static,
	E::Proposer: Proposer<B, Transaction = TransactionFor<C, B>>,
	CS: Stream<Item = EngineCommand<<B as BlockT>::Hash>> + Unpin + 'static,
	SC: SelectChain<B> + 'static,
	TransactionFor<C, B>: 'static,
	TP: TransactionPool<Block = B>,
	CIDP: CreateInherentDataProviders<B, ()>,
{
	// The task ends when the command stream is exhausted.
	while let Some(command) = commands_stream.next().await {
		match command {
			EngineCommand::SealNewBlock { create_empty, finalize, parent_hash, sender } => {
				seal_block(SealBlockParams {
					sender,
					parent_hash,
					finalize,
					create_empty,
					env: &mut env,
					select_chain: &select_chain,
					block_import: &mut block_import,
					consensus_data_provider: consensus_data_provider.as_ref().map(|p| &**p),
					pool: pool.clone(),
					client: client.clone(),
					create_inherent_data_providers: &create_inherent_data_providers,
				})
				.await;
			},
			EngineCommand::FinalizeBlock { hash, sender, justification } => {
				// Tag any supplied justification with the manual seal engine id.
				let justification = justification.map(|j| (MANUAL_SEAL_ENGINE_ID, j));
				finalize_block(FinalizeBlockParams {
					hash,
					sender,
					justification,
					finalizer: client.clone(),
					_phantom: PhantomData,
				})
				.await
			},
		}
	}
}
/// runs the background authorship task for the instant seal engine.
/// instant-seal creates a new block for every transaction imported into
/// the transaction pool.
///
/// Implemented as a thin wrapper over `run_manual_seal`: pool import
/// notifications are mapped into non-finalizing `SealNewBlock` commands.
pub async fn run_instant_seal<B, BI, CB, E, C, TP, SC, CIDP>(
	InstantSealParams {
		block_import,
		env,
		client,
		pool,
		select_chain,
		consensus_data_provider,
		create_inherent_data_providers,
	}: InstantSealParams<B, BI, E, C, TP, SC, CIDP>,
) where
	B: BlockT + 'static,
	BI: BlockImport<B, Error = sp_consensus::Error, Transaction = sp_api::TransactionFor<C, B>>
		+ Send
		+ Sync
		+ 'static,
	C: HeaderBackend<B> + Finalizer<B, CB> + ProvideRuntimeApi<B> + 'static,
	CB: ClientBackend<B> + 'static,
	E: Environment<B> + 'static,
	E::Proposer: Proposer<B, Transaction = TransactionFor<C, B>>,
	SC: SelectChain<B> + 'static,
	TransactionFor<C, B>: 'static,
	TP: TransactionPool<Block = B>,
	CIDP: CreateInherentDataProviders<B, ()>,
{
	// instant-seal creates blocks as soon as transactions are imported
	// into the transaction pool.
	let commands_stream = pool.import_notification_stream().map(|_| EngineCommand::SealNewBlock {
		create_empty: false,
		finalize: false,
		parent_hash: None,
		sender: None,
	});
	run_manual_seal(ManualSealParams {
		block_import,
		env,
		client,
		pool,
		commands_stream,
		select_chain,
		consensus_data_provider,
		create_inherent_data_providers,
	})
	.await
}
#[cfg(test)]
mod tests {
use super::*;
use sc_basic_authorship::ProposerFactory;
use sc_client_api::BlockBackend;
use sc_consensus::ImportedAux;
use sc_transaction_pool::{BasicPool, Options, RevalidationType};
use sc_transaction_pool_api::{MaintainedTransactionPool, TransactionPool, TransactionSource};
use sp_runtime::generic::BlockId;
use substrate_test_runtime_client::{
AccountKeyring::*, DefaultTestClientBuilderExt, TestClientBuilder, TestClientBuilderExt,
};
use substrate_test_runtime_transaction_pool::{uxt, TestApi};
fn api() -> Arc<TestApi> {
Arc::new(TestApi::empty())
}
const SOURCE: TransactionSource = TransactionSource::External;
#[tokio::test]
async fn instant_seal() {
let builder = TestClientBuilder::new();
let (client, select_chain) = builder.build_with_longest_chain();
let client = Arc::new(client);
let spawner = sp_core::testing::TaskExecutor::new();
let pool = Arc::new(BasicPool::with_revalidation_type(
Options::default(),
true.into(),
api(),
None,
RevalidationType::Full,
spawner.clone(),
0,
));
let env = ProposerFactory::new(spawner.clone(), client.clone(), pool.clone(), None, None);
// this test checks that blocks are created as soon as transactions are imported into the
// pool.
let (sender, receiver) = futures::channel::oneshot::channel();
let mut sender = Arc::new(Some(sender));
let commands_stream =
pool.pool().validated_pool().import_notification_stream().map(move |_| {
// we're only going to submit one tx so this fn will only be called once.
let mut_sender = Arc::get_mut(&mut sender).unwrap();
let sender = std::mem::take(mut_sender);
EngineCommand::SealNewBlock {
create_empty: false,
finalize: true,
parent_hash: None,
sender,
}
});
let future = run_manual_seal(ManualSealParams {
block_import: client.clone(),
env,
client: client.clone(),
pool: pool.clone(),
commands_stream,
select_chain,
create_inherent_data_providers: |_, _| async { Ok(()) },
consensus_data_provider: None,
});
std::thread::spawn(|| {
let rt = tokio::runtime::Runtime::new().unwrap();
// spawn the background authorship task
rt.block_on(future);
});
// submit a transaction to pool.
let result = pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Alice, 0)).await;
// assert that it was successfully imported
assert!(result.is_ok());
// assert that the background task returns ok
let created_block = receiver.await.unwrap().unwrap();
assert_eq!(
created_block,
CreatedBlock {
hash: created_block.hash.clone(),
aux: ImportedAux {
header_only: false,
clear_justification_requests: false,
needs_justification: false,
bad_justification: false,
is_new_best: true,
}
}
);
// assert that there's a new block in the db.
assert!(client.header(&BlockId::Number(1)).unwrap().is_some())
}
#[tokio::test]
async fn manual_seal_and_finalization() {
let builder = TestClientBuilder::new();
let (client, select_chain) = builder.build_with_longest_chain();
let client = Arc::new(client);
let spawner = sp_core::testing::TaskExecutor::new();
let pool = Arc::new(BasicPool::with_revalidation_type(
Options::default(),
true.into(),
api(),
None,
RevalidationType::Full,
spawner.clone(),
0,
));
let env = ProposerFactory::new(spawner.clone(), client.clone(), pool.clone(), None, None);
// this test checks that blocks are created as soon as an engine command is sent over the
// stream.
let (mut sink, commands_stream) = futures::channel::mpsc::channel(1024);
let future = run_manual_seal(ManualSealParams {
block_import: client.clone(),
env,
client: client.clone(),
pool: pool.clone(),
commands_stream,
select_chain,
consensus_data_provider: None,
create_inherent_data_providers: |_, _| async { Ok(()) },
});
std::thread::spawn(|| {
let rt = tokio::runtime::Runtime::new().unwrap();
// spawn the background authorship task
rt.block_on(future);
});
// submit a transaction to pool.
let result = pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Alice, 0)).await;
// assert that it was successfully imported
assert!(result.is_ok());
let (tx, rx) = futures::channel::oneshot::channel();
sink.send(EngineCommand::SealNewBlock {
parent_hash: None,
sender: Some(tx),
create_empty: false,
finalize: false,
})
.await
.unwrap();
let created_block = rx.await.unwrap().unwrap();
// assert that the background task returns ok
assert_eq!(
created_block,
CreatedBlock {
hash: created_block.hash.clone(),
aux: ImportedAux {
header_only: false,
clear_justification_requests: false,
needs_justification: false,
bad_justification: false,
is_new_best: true,
}
}
);
// assert that there's a new block in the db.
let header = client.header(&BlockId::Number(1)).unwrap().unwrap();
let (tx, rx) = futures::channel::oneshot::channel();
sink.send(EngineCommand::FinalizeBlock {
sender: Some(tx),
hash: header.hash(),
justification: None,
})
.await
.unwrap();
// assert that the background task returns ok
assert_eq!(rx.await.unwrap().unwrap(), ());
}
// Regression test: the sealing engine must be able to build on an explicitly
// given parent hash — i.e. create forks — not only extend the current best chain.
#[tokio::test]
async fn manual_seal_fork_blocks() {
    let builder = TestClientBuilder::new();
    let (client, select_chain) = builder.build_with_longest_chain();
    let client = Arc::new(client);
    let pool_api = api();
    let spawner = sp_core::testing::TaskExecutor::new();
    let pool = Arc::new(BasicPool::with_revalidation_type(
        Options::default(),
        true.into(),
        pool_api.clone(),
        None,
        RevalidationType::Full,
        spawner.clone(),
        0,
    ));
    let env = ProposerFactory::new(spawner.clone(), client.clone(), pool.clone(), None, None);
    // this test checks that blocks are created as soon as an engine command is sent over the
    // stream.
    let (mut sink, commands_stream) = futures::channel::mpsc::channel(1024);
    let future = run_manual_seal(ManualSealParams {
        block_import: client.clone(),
        env,
        client: client.clone(),
        pool: pool.clone(),
        commands_stream,
        select_chain,
        consensus_data_provider: None,
        create_inherent_data_providers: |_, _| async { Ok(()) },
    });
    // Run the authorship task on its own runtime so the commands sent below
    // are processed concurrently with this test's assertions.
    std::thread::spawn(|| {
        let rt = tokio::runtime::Runtime::new().unwrap();
        // spawn the background authorship task
        rt.block_on(future);
    });
    // submit a transaction to pool.
    let result = pool.submit_one(&BlockId::Number(0), SOURCE, uxt(Alice, 0)).await;
    // assert that it was successfully imported
    assert!(result.is_ok());
    let (tx, rx) = futures::channel::oneshot::channel();
    sink.send(EngineCommand::SealNewBlock {
        parent_hash: None,
        sender: Some(tx),
        create_empty: false,
        finalize: false,
    })
    .await
    .unwrap();
    let created_block = rx.await.unwrap().unwrap();
    pool_api.increment_nonce(Alice.into());
    // assert that the background task returns ok
    assert_eq!(
        created_block,
        CreatedBlock {
            hash: created_block.hash.clone(),
            aux: ImportedAux {
                header_only: false,
                clear_justification_requests: false,
                needs_justification: false,
                bad_justification: false,
                is_new_best: true
            }
        }
    );
    let block = client.block(&BlockId::Number(1)).unwrap().unwrap().block;
    pool_api.add_block(block, true);
    assert!(pool.submit_one(&BlockId::Number(1), SOURCE, uxt(Alice, 1)).await.is_ok());
    let header = client.header(&BlockId::Number(1)).expect("db error").expect("imported above");
    // Notify the pool of the new best block so the next transaction becomes
    // ready for the following seal command.
    pool.maintain(sc_transaction_pool_api::ChainEvent::NewBestBlock {
        hash: header.hash(),
        tree_route: None,
    })
    .await;
    let (tx1, rx1) = futures::channel::oneshot::channel();
    assert!(sink
        .send(EngineCommand::SealNewBlock {
            parent_hash: Some(created_block.hash),
            sender: Some(tx1),
            create_empty: false,
            finalize: false,
        })
        .await
        .is_ok());
    assert_matches::assert_matches!(rx1.await.expect("should be no error receiving"), Ok(_));
    let block = client.block(&BlockId::Number(2)).unwrap().unwrap().block;
    pool_api.add_block(block, true);
    pool_api.increment_nonce(Alice.into());
    assert!(pool.submit_one(&BlockId::Number(1), SOURCE, uxt(Bob, 0)).await.is_ok());
    let (tx2, rx2) = futures::channel::oneshot::channel();
    // Seal a second block on top of the SAME parent — this creates the fork.
    assert!(sink
        .send(EngineCommand::SealNewBlock {
            parent_hash: Some(created_block.hash),
            sender: Some(tx2),
            create_empty: false,
            finalize: false,
        })
        .await
        .is_ok());
    let imported = rx2.await.unwrap().unwrap();
    // assert that fork block is in the db
    assert!(client.header(&BlockId::Hash(imported.hash)).unwrap().is_some())
}
}
| 31.248566 | 94 | 0.704216 |
9c2624210a95ed897b800cd8190ec061803d7ba3 | 2,509 | //! An example of using blocking funcion annotation.
//!
//! This example will create 8 "heavy computation" blocking futures and 8
//! non-blocking futures in a runtime with 4 core threads.
//! Each non-blocking future will print its id and return immediately.
//! Each blocking future will print its id on start, sleep for 1000 ms, print
//! its id again and return.
//!
//! Note how the non-blocking futures are executed before the blocking futures
//! finish their task.
#![deny(warnings, rust_2018_idioms)]
use std::thread;
use std::time::Duration;
use tokio;
use tokio::prelude::*;
use tokio::runtime::Builder;
use tokio_threadpool::blocking;
/// This future blocks its `poll` method for 1000 ms.
struct BlockingFuture {
    // Identifier printed in the log lines so interleaving is visible.
    value: i32,
}
impl Future for BlockingFuture {
    type Item = ();
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        println!("Blocking begin: {}!", self.value);
        // Try replacing this part with the commented code below.
        // `blocking` informs the threadpool that this closure may block,
        // so other futures can continue to make progress meanwhile.
        blocking(|| {
            println!("Blocking part annotated: {}!", self.value);
            thread::sleep(Duration::from_millis(1000));
            println!("Blocking done annotated: {}!", self.value);
        })
        .map_err(|err| panic!("Error in blocing block: {:?}", err))
        // println!("Blocking part annotated: {}!", self.value);
        // thread::sleep(Duration::from_millis(1000));
        // println!("Blocking done annotated: {}!", self.value);
        // Ok(Async::Ready(()))
    }
}
/// This future returns immediately.
struct NonBlockingFuture {
    // Identifier printed in the log line.
    value: i32,
}
impl Future for NonBlockingFuture {
    type Item = ();
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Completes on the very first poll — no blocking annotation needed.
        println!("Non-blocking done: {}!", self.value);
        Ok(Async::Ready(()))
    }
}
/// This future spawns child futures.
struct SpawningFuture;
impl Future for SpawningFuture {
    type Item = ();
    type Error = ();
    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        // Spawn the blocking futures first; the non-blocking ones spawned
        // below should still finish earlier thanks to the `blocking` annotation.
        for i in 0..8 {
            let blocking_future = BlockingFuture { value: i };
            tokio::spawn(blocking_future);
        }
        for i in 0..8 {
            let non_blocking_future = NonBlockingFuture { value: i };
            tokio::spawn(non_blocking_future);
        }
        Ok(Async::Ready(()))
    }
}
/// Entry point: build a runtime with 4 core threads and drive the spawning
/// future (and every future it spawns) to completion.
fn main() {
    let spawning_future = SpawningFuture;
    let runtime = Builder::new().core_threads(4).build().unwrap();
    runtime.block_on_all(spawning_future).unwrap();
}
| 28.511364 | 78 | 0.618972 |
f853ef1b3226948eac27fc9e2d64f1b0e6fc2b2c | 949 | use std::error::Error;
use std::io::{self, BufRead, BufReader};
/// Fuel required to launch a module of the given mass: mass divided by
/// three (rounded down) minus two, clamped at zero so small masses never
/// underflow.
fn fuel(mass: u32) -> u32 {
    let third = mass / 3;
    third.checked_sub(2).unwrap_or(0)
}

/// Total fuel for a module, also accounting for the mass of the fuel
/// itself: repeatedly feed each fuel chunk back through `fuel` until a
/// chunk rounds down to zero, then sum every positive chunk.
fn fuel_with_fuel_fuel(mass: u32) -> u32 {
    std::iter::successors(Some(fuel(mass)), |&chunk| Some(fuel(chunk)))
        .take_while(|&chunk| chunk > 0)
        .sum()
}
/// Reads one module mass per line from stdin and prints the total fuel
/// requirement including fuel-for-fuel (Advent of Code 2019, day 1 part 2).
fn main() -> Result<(), Box<dyn Error>> {
    let buffered = BufReader::new(io::stdin());
    // Accumulate in u64: many u32-sized masses could overflow a u32 sum.
    let mut sum = 0u64;
    for line in buffered.lines() {
        let mass = line?.trim().parse::<u32>()?;
        sum += fuel_with_fuel_fuel(mass) as u64;
    }
    println!("{}", sum);
    Ok(())
}
| 25.648649 | 75 | 0.622761 |
f906d39443d143a77be02d537f372dd81a6f511c | 604 | use cursive::theme::BaseColor::*;
use cursive::theme::Color::*;
use cursive::theme::PaletteColor::*;
use cursive::views::{BoxView, Canvas};
use cursive::Cursive;
/// Opens a cursive TUI with a shadowless dark background and a fixed-size
/// canvas that prints "hoge"; press `q` to quit.
pub fn run() {
    let mut siv = Cursive::default();
    siv.add_global_callback('q', |s| s.quit());
    // Tweak the current theme rather than building one from scratch.
    let mut theme = siv.current_theme().clone();
    theme.shadow = false;
    theme.palette[Background] = Dark(Black);
    siv.set_theme(theme);
    siv.add_layer(BoxView::with_fixed_size(
        (10, 20),
        Canvas::new(()).with_draw(|_, printer| {
            printer.print((1, 1), "hoge");
        }),
    ));
    // Blocks until the user quits.
    siv.run();
}
| 24.16 | 48 | 0.596026 |
d978c86261bede0b770c5d820ddce0369ea99bc3 | 6,559 | // Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Helper module for multisignature transactions.
use byteorder::{ByteOrder, LittleEndian, WriteBytesExt};
use exonum::{
crypto::{self, Hash, PublicKey},
merkledb::{
access::{Access, RawAccessMut},
BinaryKey, BinaryValue, Error as MerkledbError, ObjectHash, ProofMapIndex,
},
};
use exonum_derive::*;
use std::{
borrow::Cow,
collections::BTreeSet,
io::{Cursor, Write},
mem,
};
/// Wrapper over a `ProofMapIndex` representing a set of values with 0 or more
/// votes for every value.
///
/// Votes are represented as public keys of authors and no verification for
/// ownership is performed within this index.
#[derive(Debug, FromAccess)]
#[from_access(transparent)]
pub struct MultisigIndex<T: Access, V>
where
    V: BinaryKey + ObjectHash,
{
    // Maps a value to the set of public keys that voted for it.
    index: ProofMapIndex<T::Base, V, BinarySet<PublicKey>>,
}
impl<T, V> MultisigIndex<T, V>
where
    T: Access,
    V: BinaryKey + ObjectHash,
{
    /// Returns `true` if `author` has already voted for the value `id`.
    pub fn confirmed_by(&self, id: &V, author: &PublicKey) -> bool {
        // A missing entry means no confirmations at all; `map_or` expresses
        // this directly instead of the former `and_then(..).is_some()` dance.
        self.index
            .get(id)
            .map_or(false, |set| set.0.contains(author))
    }

    /// Returns the number of votes currently recorded for the value `id`
    /// (0 when the value has never been voted on).
    pub fn confirmations(&self, id: &V) -> usize {
        self.index.get(id).map_or(0, |confirms| confirms.0.len())
    }
}
impl<T, V> MultisigIndex<T, V>
where
    T: Access,
    T::Base: RawAccessMut,
    V: BinaryKey + ObjectHash,
{
    /// Records a vote of `author` for the value `id` and returns the total
    /// number of distinct confirmations after the insertion. Re-voting by the
    /// same key is a no-op for the count (the underlying set deduplicates).
    pub fn confirm(&mut self, id: &V, author: PublicKey) -> usize {
        let mut confirmations = self.index.get(id).unwrap_or_default();
        confirmations.0.insert(author);
        let len = confirmations.0.len();
        self.index.put(id, confirmations);
        len
    }
    /// Updates the stored confirmations to be an intersection of the set
    /// with current confirmations, and set of the actual validator keys.
    ///
    /// This method is intended to be called before comparing the amount of
    /// confirmations and amount of validators, so confirmations of nodes which
    /// are not validators anymore won't be taken into account.
    ///
    /// Returns the amount of confirmations in the updated set.
    fn intersect(&mut self, id: &V, validator_keys: &BTreeSet<PublicKey>) -> usize {
        let mut confirmations = self.index.get(id).unwrap_or_default();
        confirmations.0 = confirmations
            .0
            .intersection(validator_keys)
            .copied()
            .collect();
        let confirmations_amount = confirmations.0.len();
        // Persist the pruned set so stale votes are dropped from storage too.
        self.index.put(id, confirmations);
        confirmations_amount
    }
    /// Calculates the intersection of current confirmations and actual list of
    /// validators.
    ///
    /// Returns `true` if all validators confirmed the item, and `false` otherwise.
    ///
    /// This method updates the list of confirmation, leaving confirmations only from
    /// the actual validators.
    pub fn intersect_with_validators(
        &mut self,
        id: &V,
        validator_keys: impl IntoIterator<Item = PublicKey>,
    ) -> bool {
        let validator_keys: BTreeSet<PublicKey> = validator_keys.into_iter().collect();
        let validators_amount = validator_keys.len();
        self.intersect(id, &validator_keys) == validators_amount
    }
}
impl<T, V> ObjectHash for MultisigIndex<T, V>
where
    T: Access,
    V: BinaryKey + ObjectHash,
{
    // Delegates hashing to the underlying proof map.
    fn object_hash(&self) -> Hash {
        self.index.object_hash()
    }
}
/// A set of binary values.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct BinarySet<T: Ord>(pub BTreeSet<T>);
impl<T: Ord> BinarySet<T> {
    /// Creates an empty set.
    pub fn new() -> Self {
        Self(BTreeSet::default())
    }
}
impl<T: Ord> Default for BinarySet<T> {
    fn default() -> Self {
        Self::new()
    }
}
impl<T: Ord + BinaryValue> BinaryValue for BinarySet<T> {
    // Wire format: for each element, in the set's sorted order, a u64
    // little-endian byte length followed by the element's own serialization.
    fn to_bytes(&self) -> Vec<u8> {
        let mut buf = Cursor::new(Vec::new());
        for value in &self.0 {
            let bytes = value.to_bytes();
            buf.write_u64::<LittleEndian>(bytes.len() as u64).unwrap();
            buf.write_all(&bytes).unwrap();
        }
        buf.into_inner()
    }
    fn from_bytes(bytes: Cow<'_, [u8]>) -> anyhow::Result<Self> {
        let mut values = BTreeSet::new();
        // Read the sequence of the (byte size, value bytes) pairs and deserialize them.
        let mut reader = bytes.as_ref();
        while !reader.is_empty() {
            // Verify that buffer size is enough and read the bytes length of the value.
            if reader.len() < mem::size_of::<u64>() {
                return Err(MerkledbError::new("Insufficient buffer size").into());
            }
            let bytes_len = LittleEndian::read_u64(reader) as usize;
            reader = &reader[mem::size_of::<u64>()..];
            // Verify remaining size and read the value.
            if reader.len() < bytes_len {
                return Err(MerkledbError::new("Insufficient buffer size").into());
            }
            let value = T::from_bytes(Cow::Borrowed(&reader[0..bytes_len]))?;
            values.insert(value);
            reader = &reader[bytes_len..];
        }
        Ok(Self(values))
    }
}
impl<T: Ord + BinaryValue> ObjectHash for BinarySet<T> {
    // Hash of the canonical (sorted) serialization: equal sets always
    // produce equal hashes.
    fn object_hash(&self) -> Hash {
        crypto::hash(&self.to_bytes())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Round-trip check: serialize a two-element set and verify that
    /// deserialization restores an equal set.
    #[test]
    fn test_multisig_values_binary_value() {
        let mut set = BinarySet::default();
        let data = vec![
            b"abacaba1224634abcfdfdfca353".to_vec(),
            b"abacaba1224634abcfdfdfca353ee2224774".to_vec(),
        ];
        // Insertion order is irrelevant — the BTreeSet keeps elements sorted.
        set.0.insert(data[1].clone());
        set.0.insert(data[0].clone());
        assert_eq!(set.0.len(), 2);
        let bytes = set.clone().into_bytes();
        let set2 = BinarySet::from_bytes(bytes.into()).unwrap();
        assert_eq!(set, set2);
    }
}
| 30.793427 | 88 | 0.610002 |
e2a977bf651afa2ae50bac1963c2f12fe0099c70 | 4,244 | use crate::crawler::AtCoderFetcher;
use anyhow::Result;
use log::info;
use sql_client::simple_client::SimpleClient;
use sql_client::submission_client::SubmissionClient;
use std::{thread, time};
/// Crawls recent submissions for every known contest, page by page,
/// stopping per contest as soon as an already-stored submission is seen.
pub struct RecentCrawler<C, F> {
    db: C,
    fetcher: F,
}
impl<C, F> RecentCrawler<C, F>
where
    C: SubmissionClient + SimpleClient + Sync,
    F: AtCoderFetcher,
{
    /// Creates a crawler over the given database handle and fetcher.
    pub fn new(db: C, fetcher: F) -> Self {
        Self { db, fetcher }
    }
    /// Iterates all contests; for each, pages through submissions, upserting
    /// everything fetched, and stops when a page's oldest submission is
    /// already stored or the last page is reached.
    pub async fn crawl(&self) -> Result<()> {
        info!("Started");
        let contests = self.db.load_contests().await?;
        for contest in contests.into_iter() {
            for page in 1.. {
                info!("Crawling {}-{} ...", contest.id, page);
                let (submissions, max_page) =
                    self.fetcher.fetch_submissions(&contest.id, page).await;
                if submissions.is_empty() {
                    info!("There is no submission on {}-{}", contest.id, page);
                    break;
                }
                // The minimum id on the page is its oldest submission; if it
                // is already stored we can stop after persisting this page.
                let min_id = submissions.iter().map(|s| s.id).min().unwrap();
                let exists = self.db.count_stored_submissions(&[min_id]).await? != 0;
                self.db.update_submissions(&submissions).await?;
                // Throttle requests to avoid hammering the remote server.
                thread::sleep(time::Duration::from_millis(200));
                if exists {
                    info!("Finished crawling {}", contest.id);
                    break;
                }
                if page == max_page {
                    info!(
                        "Finished crawling {} since it's last page: {}",
                        contest.id, page
                    );
                    break;
                }
            }
        }
        info!("Finished");
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::crawler::utils::MockFetcher;
    use async_std::task::block_on;
    use async_trait::async_trait;
    use sql_client::models::{Contest, Problem, Submission};
    use sql_client::submission_client::SubmissionRequest;
    use std::ops::Range;
    /// Drives `RecentCrawler::crawl` against in-memory mocks: one contest,
    /// one page of two submissions, and a DB that already stores the oldest
    /// one — so the crawl must terminate after the first page.
    #[test]
    fn test_recent_crawler() {
        let fetcher = MockFetcher(|contest_id: &str, page: u32| {
            assert_eq!(contest_id, "contest");
            assert_eq!(page, 1);
            vec![
                Submission {
                    id: 0,
                    ..Default::default()
                },
                Submission {
                    id: 1,
                    ..Default::default()
                },
            ]
        });
        struct MockDB;
        #[async_trait]
        impl SubmissionClient for MockDB {
            async fn get_submissions<'a>(
                &self,
                request: SubmissionRequest<'a>,
            ) -> Result<Vec<Submission>> {
                match request {
                    SubmissionRequest::ByIds { ids } => {
                        // The crawler queries the page's oldest id (0);
                        // answering non-empty makes the crawl stop.
                        assert_eq!(ids, &[0]);
                        Ok(vec![Submission {
                            ..Default::default()
                        }])
                    }
                    _ => unimplemented!(),
                }
            }
            async fn update_submissions(&self, submissions: &[Submission]) -> Result<usize> {
                assert_eq!(submissions.len(), 2);
                Ok(2)
            }
            async fn get_user_submission_count(&self, _: &str, _: Range<i64>) -> Result<usize> {
                unimplemented!()
            }
        }
        #[async_trait]
        impl SimpleClient for MockDB {
            async fn insert_contests(&self, _: &[Contest]) -> Result<usize> {
                unimplemented!()
            }
            async fn insert_problems(&self, _: &[Problem]) -> Result<usize> {
                unimplemented!()
            }
            async fn load_problems(&self) -> Result<Vec<Problem>> {
                unimplemented!()
            }
            async fn load_contests(&self) -> Result<Vec<Contest>> {
                Ok(vec![Contest {
                    id: "contest".to_string(),
                    ..Default::default()
                }])
            }
        }
        let crawler = RecentCrawler::new(MockDB, fetcher);
        assert!(block_on(crawler.crawl()).is_ok());
    }
}
| 31.205882 | 96 | 0.462771 |
e48570bc33aa1680689125e9e78ba288f4337961 | 5,172 | //! # jsonschema
//!
//! A crate for performing fast JSON Schema validation. It is fast due to schema compilation into
//! a validation tree, which reduces runtime costs for working with schema parameters.
//!
//! Supports:
//! - JSON Schema drafts 4, 6, 7 (except some optional test cases);
//! - Loading remote documents via HTTP(S);
//!
//! ## Usage Examples:
//! A schema can be compiled with two main flavours:
//! * using default configurations
//! ```rust
//! # use jsonschema::{CompilationError, Draft, JSONSchema};
//! # use serde_json::json;
//! # fn foo() -> Result<(), CompilationError> {
//! # let schema = json!({"maxLength": 5});
//! let compiled_schema = JSONSchema::compile(&schema)?;
//! # Ok(())
//! # }
//! ```
//! * using custom configurations (such as define a Draft version)
//! ```rust
//! # use jsonschema::{CompilationError, Draft, JSONSchema};
//! # use serde_json::json;
//! # fn foo() -> Result<(), CompilationError> {
//! # let schema = json!({"maxLength": 5});
//! let compiled_schema = JSONSchema::options()
//! .with_draft(Draft::Draft7)
//! .compile(&schema)?;
//! # Ok(())
//! # }
//! ```
//!
//! ## Example (CLI tool to highlight print errors)
//! ```rust
//! use jsonschema::{CompilationError, Draft, JSONSchema};
//! use serde_json::json;
//!
//! fn main() -> Result<(), CompilationError> {
//! let schema = json!({"maxLength": 5});
//! let instance = json!("foo");
//! let compiled = JSONSchema::options()
//! .with_draft(Draft::Draft7)
//! .compile(&schema)?;
//! let result = compiled.validate(&instance);
//! if let Err(errors) = result {
//! for error in errors {
//! println!("Validation error: {}", error);
//! println!("Instance path: {:?}", error.instance_path);
//! }
//! }
//! Ok(())
//! }
//! ```
#![warn(
clippy::cast_possible_truncation,
clippy::doc_markdown,
clippy::explicit_iter_loop,
clippy::map_unwrap_or,
clippy::match_same_arms,
clippy::needless_borrow,
clippy::needless_pass_by_value,
clippy::print_stdout,
clippy::redundant_closure,
clippy::trivially_copy_pass_by_ref,
missing_debug_implementations,
missing_docs,
trivial_casts,
trivial_numeric_casts,
unused_extern_crates,
unused_import_braces,
unused_qualifications,
unreachable_pub,
variant_size_differences
)]
#![allow(clippy::unnecessary_wraps, clippy::upper_case_acronyms)]
#![cfg_attr(not(test), allow(clippy::integer_arithmetic, clippy::unwrap_used))]
mod compilation;
mod content_encoding;
mod content_media_type;
pub mod error;
mod keywords;
pub mod primitive_type;
mod resolver;
mod schemas;
mod validator;
pub use compilation::{options::CompilationOptions, JSONSchema};
pub use error::{CompilationError, ErrorIterator, ValidationError};
pub use schemas::Draft;
use serde_json::Value;
/// A shortcut for validating `instance` against `schema`. Draft version is detected automatically.
/// ```rust
/// use jsonschema::is_valid;
/// use serde_json::json;
///
/// let schema = json!({"maxLength": 5});
/// let instance = json!("foo");
/// assert!(is_valid(&schema, &instance));
/// ```
///
/// This function panics if an invalid schema is passed.
#[must_use]
#[inline]
pub fn is_valid(schema: &Value, instance: &Value) -> bool {
    // Compiles the schema on every call; when validating many instances
    // against one schema, compile once via `JSONSchema::compile` instead.
    let compiled = JSONSchema::compile(schema).expect("Invalid schema");
    compiled.is_valid(instance)
}
#[cfg(test)]
pub(crate) mod tests_util {
    //! Assertion helpers shared by keyword tests. Each helper exercises both
    //! the fast boolean path (`is_valid`) and the error-producing path
    //! (`validate`) so the two stay in agreement.
    use super::JSONSchema;
    use serde_json::Value;
    /// Asserts that `instance` fails validation against `schema`.
    pub(crate) fn is_not_valid(schema: &Value, instance: &Value) {
        let compiled = JSONSchema::compile(schema).unwrap();
        assert!(
            !compiled.is_valid(instance),
            "{} should not be valid (via is_valid)",
            instance
        );
        assert!(
            compiled.validate(instance).is_err(),
            "{} should not be valid (via validate)",
            instance
        );
    }
    /// Asserts that validating `instance` produces exactly `errors`
    /// (compared by their string representation, in order).
    pub(crate) fn expect_errors(schema: &Value, instance: &Value, errors: &[&str]) {
        assert_eq!(
            JSONSchema::compile(schema)
                .expect("Should be a valid schema")
                .validate(instance)
                .expect_err(format!("{} should not be valid", instance).as_str())
                .into_iter()
                .map(|e| e.to_string())
                .collect::<Vec<String>>(),
            errors
        )
    }
    /// Asserts that `instance` passes validation against `schema`.
    pub(crate) fn is_valid(schema: &Value, instance: &Value) {
        let compiled = JSONSchema::compile(schema).unwrap();
        assert!(
            compiled.is_valid(instance),
            "{} should be valid (via is_valid)",
            instance
        );
        assert!(
            compiled.validate(instance).is_ok(),
            "{} should be valid (via validate)",
            instance
        );
    }
}
#[cfg(test)]
mod tests {
    use super::is_valid;
    use serde_json::json;
    // Smoke test for the crate-level `is_valid` shortcut.
    #[test]
    fn test_is_valid() {
        let schema = json!({"minLength": 5});
        let valid = json!("foobar");
        let invalid = json!("foo");
        assert!(is_valid(&schema, &valid));
        assert!(!is_valid(&schema, &invalid));
    }
}
| 30.069767 | 99 | 0.606535 |
0e7e1683af56cdadcc7e4f77a22343fdea520b4f | 5,752 | use std::path::PathBuf;
use std::sync::Arc;
use anyhow::{anyhow, Context, Result};
use http_types::Url;
use crate::config::{ConfigOptsBuild, ConfigOptsClean, ConfigOptsProxy, ConfigOptsServe, ConfigOptsWatch};
/// Runtime config for the build system.
#[derive(Clone, Debug)]
pub struct RtcBuild {
    /// The index HTML file to drive the bundling process.
    pub target: PathBuf,
    /// The parent directory of the target index HTML file.
    pub target_parent: PathBuf,
    /// Build in release mode.
    pub release: bool,
    /// The public URL from which assets are to be served.
    pub public_url: String,
    /// The directory where final build artifacts are placed after a successful build.
    pub final_dist: PathBuf,
    /// The directory used to stage build artifacts during an active build.
    pub staging_dist: PathBuf,
}
impl RtcBuild {
    /// Construct a new instance.
    ///
    /// Resolves the target HTML file, derives the dist directories from it,
    /// and creates the final dist dir on disk if it does not exist yet.
    pub(super) fn new(opts: ConfigOptsBuild) -> Result<Self> {
        // Get the canonical path to the target HTML file.
        let pre_target = opts.target.clone().unwrap_or_else(|| "index.html".into());
        let target = pre_target
            .canonicalize()
            .with_context(|| format!("error getting canonical path to source HTML file {:?}", &pre_target))?;
        // Get the target HTML's parent dir, falling back to OS specific root, as that is the only
        // time where no parent could be determined.
        let target_parent = target
            .parent()
            .map(|path| path.to_owned())
            .unwrap_or_else(|| PathBuf::from(std::path::MAIN_SEPARATOR.to_string()));
        // Ensure the final dist dir exists and that we have a canonical path to the dir. Normally
        // we would want to avoid such an action at this layer, however to ensure that other layers
        // have a reliable FS path to work with, we make an exception here.
        let final_dist = opts.dist.unwrap_or_else(|| target_parent.join(super::DIST_DIR));
        if !final_dist.exists() {
            std::fs::create_dir(&final_dist).with_context(|| format!("error creating final dist directory {:?}", &final_dist))?;
        }
        let final_dist = final_dist.canonicalize().context("error taking canonical path to dist dir")?;
        let staging_dist = final_dist.join(super::STAGE_DIR);
        Ok(Self {
            target,
            target_parent,
            release: opts.release,
            staging_dist,
            final_dist,
            public_url: opts.public_url.unwrap_or_else(|| "/".into()),
        })
    }
}
/// Runtime config for the watch system.
#[derive(Clone, Debug)]
pub struct RtcWatch {
    /// Runtime config for the build system.
    pub build: Arc<RtcBuild>,
    /// Paths to watch, defaults to the build target parent directory.
    pub paths: Vec<PathBuf>,
    /// Paths to ignore.
    ///
    /// Always contains at least the final dist dir, so build output cannot
    /// retrigger the watcher.
    pub ignored_paths: Vec<PathBuf>,
}
impl RtcWatch {
    /// Construct a new instance.
    ///
    /// Canonicalizes all watch & ignore paths, defaults the watch set to the
    /// build target's parent dir, and always ignores the final dist dir so
    /// that build output cannot retrigger the watcher.
    pub(super) fn new(build_opts: ConfigOptsBuild, opts: ConfigOptsWatch) -> Result<Self> {
        let build = Arc::new(RtcBuild::new(build_opts)?);
        // Take the canonical path of each of the specified watch targets;
        // a missing option behaves like an empty list.
        let mut paths = opts
            .watch
            .unwrap_or_default()
            .into_iter()
            .map(|path| {
                path.canonicalize()
                    .map_err(|_| anyhow!("invalid watch path provided: {:?}", path))
            })
            .collect::<Result<Vec<PathBuf>>>()?;
        // If no watch paths were provided, then we default to the target HTML's parent dir.
        if paths.is_empty() {
            paths.push(build.target_parent.clone());
        }
        // Take the canonical path of each of the specified ignore targets.
        let mut ignored_paths = opts
            .ignore
            .unwrap_or_default()
            .into_iter()
            .map(|path| {
                path.canonicalize()
                    .map_err(|_| anyhow!("invalid ignore path provided: {:?}", path))
            })
            .collect::<Result<Vec<PathBuf>>>()?;
        // Ensure the final dist dir is always ignored.
        ignored_paths.push(build.final_dist.clone());
        Ok(Self { build, paths, ignored_paths })
    }
}
/// Runtime config for the serve system.
#[derive(Clone, Debug)]
pub struct RtcServe {
    /// Runtime config for the watch system.
    pub watch: Arc<RtcWatch>,
    /// The port to serve on.
    pub port: u16,
    /// Open a browser tab once the initial build is complete.
    pub open: bool,
    /// A URL to which requests will be proxied.
    pub proxy_backend: Option<Url>,
    /// The URI on which to accept requests which are to be rewritten and proxied to backend.
    pub proxy_rewrite: Option<String>,
    /// Any proxies configured to run along with the server.
    pub proxies: Option<Vec<ConfigOptsProxy>>,
}
impl RtcServe {
    /// Construct a new instance, building the nested watch config and
    /// defaulting the port to 8080 when unspecified.
    pub(super) fn new(
        build_opts: ConfigOptsBuild, watch_opts: ConfigOptsWatch, opts: ConfigOptsServe, proxies: Option<Vec<ConfigOptsProxy>>,
    ) -> Result<Self> {
        let watch = Arc::new(RtcWatch::new(build_opts, watch_opts)?);
        Ok(Self {
            watch,
            port: opts.port.unwrap_or(8080),
            open: opts.open,
            proxy_backend: opts.proxy_backend,
            proxy_rewrite: opts.proxy_rewrite,
            proxies,
        })
    }
}
/// Runtime config for the clean system.
#[derive(Clone, Debug)]
pub struct RtcClean {
    /// The output dir for all final assets.
    pub dist: PathBuf,
    /// Optionally perform a cargo clean.
    pub cargo: bool,
}
impl RtcClean {
    /// Construct a new instance, defaulting the dist dir when unspecified.
    pub(super) fn new(opts: ConfigOptsClean) -> Self {
        Self {
            dist: opts.dist.unwrap_or_else(|| super::DIST_DIR.into()),
            cargo: opts.cargo,
        }
    }
}
fe8c543991fcad279859f4955c45ebf82e23c468 | 1,245 | use smartcore::{
linalg::naive::dense_matrix::DenseMatrix, linear::lasso::Lasso,
model_selection::cross_validate, model_selection::CrossValidationResult,
};
use crate::{Algorithm, Settings};
/// Marker type wiring the smartcore Lasso regressor into the generic
/// `ModelWrapper` interface.
pub(crate) struct LassoRegressorWrapper {}
impl super::ModelWrapper for LassoRegressorWrapper {
    /// Cross-validates a Lasso model using the folds and metric from
    /// `settings`, returning the CV result tagged with `Algorithm::Lasso`.
    fn cv(
        x: &DenseMatrix<f32>,
        y: &Vec<f32>,
        settings: &Settings,
    ) -> (CrossValidationResult<f32>, Algorithm) {
        (
            cross_validate(
                Lasso::fit,
                x,
                y,
                settings.lasso_settings.as_ref().unwrap().clone(),
                settings.get_kfolds(),
                settings.get_metric(),
            )
            .unwrap(),
            Algorithm::Lasso,
        )
    }
    /// Fits on the full data set and returns the model serialized with
    /// bincode; callers store/transport the raw bytes.
    fn train(x: &DenseMatrix<f32>, y: &Vec<f32>, settings: &Settings) -> Vec<u8> {
        bincode::serialize(
            &Lasso::fit(x, y, settings.lasso_settings.as_ref().unwrap().clone()).unwrap(),
        )
        .unwrap()
    }
    /// Deserializes a model produced by `train` and predicts on `x`.
    fn predict(x: &DenseMatrix<f32>, final_model: &Vec<u8>, _settings: &Settings) -> Vec<f32> {
        let model: Lasso<f32, DenseMatrix<f32>> = bincode::deserialize(&*final_model).unwrap();
        model.predict(x).unwrap()
    }
}
| 29.642857 | 95 | 0.562249 |
e6e5063a04c739e4ea927e9e991bf6887103cbb7 | 26,184 | use self::proto::{event_wrapper::Event as EventProto, metric::Value as MetricProto, Log};
use bytes::Bytes;
use chrono::{DateTime, SecondsFormat, TimeZone, Utc};
use getset::{Getters, Setters};
use lazy_static::lazy_static;
use metric::{MetricKind, MetricValue};
use once_cell::sync::OnceCell;
use serde::{Deserialize, Serialize, Serializer};
use serde_json::Value as JsonValue;
use std::{collections::BTreeMap, iter::FromIterator};
use string_cache::DefaultAtom as Atom;
pub mod discriminant;
pub mod merge;
pub mod merge_state;
pub mod metric;
mod util;
pub use metric::{Metric, StatisticKind};
pub(crate) use util::log::PathComponent;
pub(crate) use util::log::PathIter;
pub mod proto {
include!(concat!(env!("OUT_DIR"), "/event.proto.rs"));
}
// Global, set-once override for the log schema; `log_schema()` falls back to
// the compile-time default below when this cell is unset.
pub static LOG_SCHEMA: OnceCell<LogSchema> = OnceCell::new();
pub const PARTIAL_STR: &str = "_partial"; // TODO: clean up the _STR suffix after we get rid of atoms
lazy_static! {
    pub static ref PARTIAL: Atom = Atom::from(PARTIAL_STR);
    // NOTE(review): these literals duplicate the `LogSchema::default_*`
    // functions below — keep the two in sync.
    static ref LOG_SCHEMA_DEFAULT: LogSchema = LogSchema {
        message_key: Atom::from("message"),
        timestamp_key: Atom::from("timestamp"),
        host_key: Atom::from("host"),
        source_type_key: Atom::from("source_type"),
    };
}
/// A single unit of data flowing through the topology: either a log or a metric.
#[derive(PartialEq, Debug, Clone)]
pub enum Event {
    Log(LogEvent),
    Metric(Metric),
}
/// A structured log event: a tree of values addressed by string keys.
#[derive(PartialEq, Debug, Clone, Default)]
pub struct LogEvent {
    fields: BTreeMap<String, Value>,
}
impl Event {
    /// Creates a log event with no fields.
    pub fn new_empty_log() -> Self {
        Event::Log(LogEvent::default())
    }
    /// Borrows the inner log.
    ///
    /// # Panics
    /// Panics if the event is a metric.
    pub fn as_log(&self) -> &LogEvent {
        match self {
            Event::Log(log) => log,
            _ => panic!("Failed type coercion, {:?} is not a log event", self),
        }
    }
    /// Mutably borrows the inner log; panics if the event is a metric.
    pub fn as_mut_log(&mut self) -> &mut LogEvent {
        match self {
            Event::Log(log) => log,
            _ => panic!("Failed type coercion, {:?} is not a log event", self),
        }
    }
    /// Consumes the event and returns the inner log; panics if it is a metric.
    pub fn into_log(self) -> LogEvent {
        match self {
            Event::Log(log) => log,
            _ => panic!("Failed type coercion, {:?} is not a log event", self),
        }
    }
    /// Borrows the inner metric; panics if the event is a log.
    pub fn as_metric(&self) -> &Metric {
        match self {
            Event::Metric(metric) => metric,
            _ => panic!("Failed type coercion, {:?} is not a metric", self),
        }
    }
    /// Mutably borrows the inner metric; panics if the event is a log.
    pub fn as_mut_metric(&mut self) -> &mut Metric {
        match self {
            Event::Metric(metric) => metric,
            _ => panic!("Failed type coercion, {:?} is not a metric", self),
        }
    }
    /// Consumes the event and returns the inner metric; panics if it is a log.
    pub fn into_metric(self) -> Metric {
        match self {
            Event::Metric(metric) => metric,
            _ => panic!("Failed type coercion, {:?} is not a metric", self),
        }
    }
}
impl LogEvent {
    /// Looks up a field by key via the path-aware `util::log` helpers.
    pub fn get(&self, key: &Atom) -> Option<&Value> {
        util::log::get(&self.fields, key)
    }
    /// Looks up a top-level field only — `key` is used as a literal map key.
    pub fn get_flat(&self, key: impl AsRef<str>) -> Option<&Value> {
        self.fields.get(key.as_ref())
    }
    /// Mutable variant of `get`.
    pub fn get_mut(&mut self, key: &Atom) -> Option<&mut Value> {
        util::log::get_mut(&mut self.fields, key)
    }
    /// Returns `true` if the key resolves to an existing field.
    pub fn contains(&self, key: &Atom) -> bool {
        util::log::contains(&self.fields, key)
    }
    /// Inserts a value at the given key, returning any previous value.
    pub fn insert<K, V>(&mut self, key: K, value: V) -> Option<Value>
    where
        K: AsRef<str>,
        V: Into<Value>,
    {
        util::log::insert(&mut self.fields, key.as_ref(), value.into())
    }
    /// Inserts at an already-parsed path, avoiding key-string re-parsing.
    pub fn insert_path<V>(&mut self, key: Vec<PathComponent>, value: V) -> Option<Value>
    where
        V: Into<Value>,
    {
        util::log::insert_path(&mut self.fields, key, value.into())
    }
    /// Inserts a top-level field, treating `key` as a literal map key.
    pub fn insert_flat<K, V>(&mut self, key: K, value: V)
    where
        K: Into<String>,
        V: Into<Value>,
    {
        self.fields.insert(key.into(), value.into());
    }
    /// Inserts only if the key is not already present.
    pub fn try_insert<V>(&mut self, key: &Atom, value: V)
    where
        V: Into<Value>,
    {
        if !self.contains(key) {
            self.insert(key.clone(), value);
        }
    }
    /// Removes a field, returning its value; never prunes emptied parents.
    pub fn remove(&mut self, key: &Atom) -> Option<Value> {
        util::log::remove(&mut self.fields, &key, false)
    }
    /// Like `remove`, but optionally prunes parent nodes left empty.
    pub fn remove_prune(&mut self, key: &Atom, prune: bool) -> Option<Value> {
        util::log::remove(&mut self.fields, &key, prune)
    }
    /// Iterates all keys as produced by the `util::log::keys` helper.
    pub fn keys<'a>(&'a self) -> impl Iterator<Item = String> + 'a {
        util::log::keys(&self.fields)
    }
    /// Iterates all `(key, value)` pairs as produced by `util::log::all_fields`.
    pub fn all_fields(&self) -> impl Iterator<Item = (String, &Value)> + Serialize {
        util::log::all_fields(&self.fields)
    }
    /// Returns `true` when the event has no fields at all.
    pub fn is_empty(&self) -> bool {
        self.fields.is_empty()
    }
}
impl std::ops::Index<&Atom> for LogEvent {
    type Output = Value;
    /// Indexing panics on a missing key — use `get` for a fallible lookup.
    fn index(&self, key: &Atom) -> &Value {
        self.get(key).expect("Key is not found")
    }
}
// Bulk insertion of key/value pairs; later duplicates overwrite earlier ones.
impl<K: Into<Atom>, V: Into<Value>> Extend<(K, V)> for LogEvent {
    fn extend<I: IntoIterator<Item = (K, V)>>(&mut self, iter: I) {
        for (k, v) in iter {
            self.insert(k.into(), v.into());
        }
    }
}
// Allow converting any kind of appropriate key/value iterator directly into a LogEvent.
impl<K: Into<Atom>, V: Into<Value>> FromIterator<(K, V)> for LogEvent {
    fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
        let mut log_event = LogEvent::default();
        log_event.extend(iter);
        log_event
    }
}
/// Converts event into an iterator over top-level key/value pairs.
impl IntoIterator for LogEvent {
    type Item = (String, Value);
    type IntoIter = std::collections::btree_map::IntoIter<String, Value>;
    fn into_iter(self) -> Self::IntoIter {
        self.fields.into_iter()
    }
}
impl Serialize for LogEvent {
    // Serializes as a map of the top-level fields.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.collect_map(self.fields.iter())
    }
}
/// Returns the active log schema: the globally configured one if it was set,
/// otherwise the built-in default.
pub fn log_schema() -> &'static LogSchema {
    LOG_SCHEMA.get().unwrap_or(&LOG_SCHEMA_DEFAULT)
}
/// Names of the well-known log fields (message/timestamp/host/source_type);
/// each field falls back to its `default_*` value during deserialization.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Getters, Setters)]
#[serde(default)]
pub struct LogSchema {
    #[serde(default = "LogSchema::default_message_key")]
    #[getset(get = "pub", set = "pub(crate)")]
    message_key: Atom,
    #[serde(default = "LogSchema::default_timestamp_key")]
    #[getset(get = "pub", set = "pub(crate)")]
    timestamp_key: Atom,
    #[serde(default = "LogSchema::default_host_key")]
    #[getset(get = "pub", set = "pub(crate)")]
    host_key: Atom,
    #[serde(default = "LogSchema::default_source_type_key")]
    #[getset(get = "pub", set = "pub(crate)")]
    source_type_key: Atom,
}
impl Default for LogSchema {
fn default() -> Self {
LogSchema {
message_key: Atom::from("message"),
timestamp_key: Atom::from("timestamp"),
host_key: Atom::from("host"),
source_type_key: Atom::from("source_type"),
}
}
}
impl LogSchema {
    // Per-field default values referenced by the `#[serde(default = ...)]`
    // attributes on the struct above.
    fn default_message_key() -> Atom {
        Atom::from("message")
    }
    fn default_timestamp_key() -> Atom {
        Atom::from("timestamp")
    }
    fn default_host_key() -> Atom {
        Atom::from("host")
    }
    fn default_source_type_key() -> Atom {
        Atom::from("source_type")
    }
}
/// A dynamically-typed log field value.
#[derive(PartialEq, Debug, Clone)]
pub enum Value {
    Bytes(Bytes),
    Integer(i64),
    Float(f64),
    Boolean(bool),
    Timestamp(DateTime<Utc>),
    Map(BTreeMap<String, Value>),
    Array(Vec<Value>),
    Null,
}
impl Serialize for Value {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match &self {
            Value::Integer(i) => serializer.serialize_i64(*i),
            Value::Float(f) => serializer.serialize_f64(*f),
            Value::Boolean(b) => serializer.serialize_bool(*b),
            // Bytes and timestamps are both rendered via their string
            // representation (`to_string_lossy`).
            Value::Bytes(_) | Value::Timestamp(_) => {
                serializer.serialize_str(&self.to_string_lossy())
            }
            Value::Map(m) => serializer.collect_map(m),
            Value::Array(a) => serializer.collect_seq(a),
            Value::Null => serializer.serialize_none(),
        }
    }
}
// Infallible conversions into `Value`. All byte-like and string-like inputs
// land in `Value::Bytes`; `f32` widens losslessly to `f64`.
impl From<Bytes> for Value {
    fn from(bytes: Bytes) -> Self {
        Value::Bytes(bytes)
    }
}
impl From<Vec<u8>> for Value {
    fn from(bytes: Vec<u8>) -> Self {
        Value::Bytes(bytes.into())
    }
}
impl From<&[u8]> for Value {
    fn from(bytes: &[u8]) -> Self {
        Value::Bytes(Vec::from(bytes).into())
    }
}
impl From<String> for Value {
    fn from(string: String) -> Self {
        Value::Bytes(string.into())
    }
}
impl From<&String> for Value {
    fn from(string: &String) -> Self {
        string.as_str().into()
    }
}
impl From<&str> for Value {
    fn from(s: &str) -> Self {
        Value::Bytes(Vec::from(s.as_bytes()).into())
    }
}
impl From<DateTime<Utc>> for Value {
    fn from(timestamp: DateTime<Utc>) -> Self {
        Value::Timestamp(timestamp)
    }
}
impl From<f32> for Value {
    fn from(value: f32) -> Self {
        Value::Float(f64::from(value))
    }
}
impl From<f64> for Value {
    fn from(value: f64) -> Self {
        Value::Float(value)
    }
}
impl From<BTreeMap<String, Value>> for Value {
    fn from(value: BTreeMap<String, Value>) -> Self {
        Value::Map(value)
    }
}
impl From<Vec<Value>> for Value {
    fn from(value: Vec<Value>) -> Self {
        Value::Array(value)
    }
}
// Generates `From<$t> for Value` for the signed integer types below, widening
// each to `i64` with an `as` cast.
macro_rules! impl_valuekind_from_integer {
    ($t:ty) => {
        impl From<$t> for Value {
            fn from(value: $t) -> Self {
                Value::Integer(value as i64)
            }
        }
    };
}
impl_valuekind_from_integer!(i64);
impl_valuekind_from_integer!(i32);
impl_valuekind_from_integer!(i16);
impl_valuekind_from_integer!(i8);
impl_valuekind_from_integer!(isize);
impl From<bool> for Value {
    fn from(value: bool) -> Self {
        Value::Boolean(value)
    }
}
impl From<JsonValue> for Value {
    /// Maps every JSON node onto the closest `Value` variant. Numbers prefer
    /// `i64`, then `f64`, and fall back to their textual representation.
    fn from(json_value: JsonValue) -> Self {
        match json_value {
            JsonValue::Null => Value::Null,
            JsonValue::Bool(flag) => Value::Boolean(flag),
            JsonValue::Number(number) => match (number.as_i64(), number.as_f64()) {
                (Some(integer), _) => Value::Integer(integer),
                (None, Some(float)) => Value::Float(float),
                (None, None) => Value::Bytes(number.to_string().into()),
            },
            JsonValue::String(text) => Value::Bytes(Bytes::from(text)),
            JsonValue::Array(items) => {
                Value::Array(items.into_iter().map(Value::from).collect())
            }
            JsonValue::Object(entries) => {
                let fields = entries
                    .into_iter()
                    .map(|(key, value)| (key, Value::from(value)))
                    .collect();
                Value::Map(fields)
            }
        }
    }
}
impl Value {
    // TODO: return Cow
    /// Renders the value as human-readable text. Invalid UTF-8 in `Bytes` is
    /// replaced (hence "lossy"); maps and arrays are JSON-encoded.
    pub fn to_string_lossy(&self) -> String {
        match self {
            Value::Bytes(bytes) => String::from_utf8_lossy(&bytes).into_owned(),
            Value::Timestamp(timestamp) => timestamp_to_string(timestamp),
            Value::Integer(num) => format!("{}", num),
            Value::Float(num) => format!("{}", num),
            Value::Boolean(b) => format!("{}", b),
            Value::Map(map) => serde_json::to_string(map).expect("Cannot serialize map"),
            Value::Array(arr) => serde_json::to_string(arr).expect("Cannot serialize array"),
            Value::Null => "<null>".to_string(),
        }
    }
    /// Returns the value's byte representation. Allocates for every variant
    /// except `Bytes` (whose clone is a cheap refcount bump).
    pub fn as_bytes(&self) -> Bytes {
        match self {
            Value::Bytes(bytes) => bytes.clone(), // cloning a Bytes is cheap
            Value::Timestamp(timestamp) => Bytes::from(timestamp_to_string(timestamp)),
            Value::Integer(num) => Bytes::from(format!("{}", num)),
            Value::Float(num) => Bytes::from(format!("{}", num)),
            Value::Boolean(b) => Bytes::from(format!("{}", b)),
            Value::Map(map) => Bytes::from(serde_json::to_vec(map).expect("Cannot serialize map")),
            Value::Array(arr) => {
                Bytes::from(serde_json::to_vec(arr).expect("Cannot serialize array"))
            }
            Value::Null => Bytes::from("<null>"),
        }
    }
    /// Consuming variant of [`Value::as_bytes`].
    pub fn into_bytes(self) -> Bytes {
        self.as_bytes()
    }
    /// Borrows the inner timestamp, or `None` for any other variant.
    pub fn as_timestamp(&self) -> Option<&DateTime<Utc>> {
        match &self {
            Value::Timestamp(ts) => Some(ts),
            _ => None,
        }
    }
}
/// Formats a timestamp as RFC 3339 with a `Z` suffix and automatic
/// sub-second precision (`SecondsFormat::AutoSi`).
fn timestamp_to_string(timestamp: &DateTime<Utc>) -> String {
    timestamp.to_rfc3339_opts(SecondsFormat::AutoSi, true)
}
/// Decodes a protobuf map into `Value::Map`.
///
/// Returns `None` as soon as any nested value fails to decode (see
/// [`decode_value`]), making the conversion all-or-nothing.
fn decode_map(fields: BTreeMap<String, proto::Value>) -> Option<Value> {
    let mut accum: BTreeMap<String, Value> = BTreeMap::new();
    for (key, value) in fields {
        // `?` propagates a failed decode; replaces the manual match/return.
        accum.insert(key, decode_value(value)?);
    }
    Some(Value::Map(accum))
}
fn decode_array(items: Vec<proto::Value>) -> Option<Value> {
let mut accum = Vec::with_capacity(items.len());
for value in items {
match decode_value(value) {
Some(value) => accum.push(value),
None => return None,
}
}
Some(Value::Array(accum))
}
/// Decodes a single protobuf value into an in-memory `Value`.
///
/// Returns `None` (after logging an error) when the oneof `kind` field is
/// unset, which indicates an unknown/newer wire variant.
fn decode_value(input: proto::Value) -> Option<Value> {
    match input.kind {
        Some(proto::value::Kind::RawBytes(data)) => Some(Value::Bytes(data.into())),
        Some(proto::value::Kind::Timestamp(ts)) => Some(Value::Timestamp(
            chrono::Utc.timestamp(ts.seconds, ts.nanos as u32),
        )),
        Some(proto::value::Kind::Integer(value)) => Some(Value::Integer(value)),
        Some(proto::value::Kind::Float(value)) => Some(Value::Float(value)),
        Some(proto::value::Kind::Boolean(value)) => Some(Value::Boolean(value)),
        Some(proto::value::Kind::Map(map)) => decode_map(map.fields),
        Some(proto::value::Kind::Array(array)) => decode_array(array.items),
        Some(proto::value::Kind::Null(_)) => Some(Value::Null),
        None => {
            error!("encoded event contains unknown value kind");
            None
        }
    }
}
/// Decodes a protobuf event wrapper into an in-memory `Event`.
impl From<proto::EventWrapper> for Event {
    fn from(proto: proto::EventWrapper) -> Self {
        // NOTE(review): assumes the wrapper always carries an event; `unwrap`
        // panics on an empty oneof — confirm encoders never emit one.
        let event = proto.event.unwrap();
        match event {
            EventProto::Log(proto) => {
                // Entries whose value fails to decode are silently dropped
                // (`filter_map`), unlike the nested map/array decoders which
                // fail wholesale.
                let fields = proto
                    .fields
                    .into_iter()
                    .filter_map(|(k, v)| decode_value(v).map(|value| (k, value)))
                    .collect::<BTreeMap<_, _>>();
                Event::Log(LogEvent { fields })
            }
            EventProto::Metric(proto) => {
                let kind = match proto.kind() {
                    proto::metric::Kind::Incremental => MetricKind::Incremental,
                    proto::metric::Kind::Absolute => MetricKind::Absolute,
                };
                let name = proto.name;
                let timestamp = proto
                    .timestamp
                    .map(|ts| chrono::Utc.timestamp(ts.seconds, ts.nanos as u32));
                // An empty tag map on the wire becomes `None` in memory.
                let tags = if !proto.tags.is_empty() {
                    Some(proto.tags)
                } else {
                    None
                };
                let value = match proto.value.unwrap() {
                    MetricProto::Counter(counter) => MetricValue::Counter {
                        value: counter.value,
                    },
                    MetricProto::Gauge(gauge) => MetricValue::Gauge { value: gauge.value },
                    MetricProto::Set(set) => MetricValue::Set {
                        values: set.values.into_iter().collect(),
                    },
                    MetricProto::Distribution(dist) => MetricValue::Distribution {
                        statistic: match dist.statistic() {
                            proto::distribution::StatisticKind::Histogram => {
                                StatisticKind::Histogram
                            }
                            proto::distribution::StatisticKind::Summary => StatisticKind::Summary,
                        },
                        values: dist.values,
                        sample_rates: dist.sample_rates,
                    },
                    MetricProto::AggregatedHistogram(hist) => MetricValue::AggregatedHistogram {
                        buckets: hist.buckets,
                        counts: hist.counts,
                        count: hist.count,
                        sum: hist.sum,
                    },
                    MetricProto::AggregatedSummary(summary) => MetricValue::AggregatedSummary {
                        quantiles: summary.quantiles,
                        values: summary.values,
                        count: summary.count,
                        sum: summary.sum,
                    },
                };
                Event::Metric(Metric {
                    name,
                    timestamp,
                    tags,
                    kind,
                    value,
                })
            }
        }
    }
}
/// Encodes an in-memory `Value` into its protobuf representation.
fn encode_value(value: Value) -> proto::Value {
    // Every variant maps to exactly one wire kind, so build the kind first
    // and wrap it once at the end.
    let kind = match value {
        Value::Bytes(bytes) => proto::value::Kind::RawBytes(bytes.to_vec()),
        Value::Timestamp(ts) => proto::value::Kind::Timestamp(prost_types::Timestamp {
            seconds: ts.timestamp(),
            nanos: ts.timestamp_subsec_nanos() as i32,
        }),
        Value::Integer(n) => proto::value::Kind::Integer(n),
        Value::Float(n) => proto::value::Kind::Float(n),
        Value::Boolean(flag) => proto::value::Kind::Boolean(flag),
        Value::Map(fields) => proto::value::Kind::Map(encode_map(fields)),
        Value::Array(items) => proto::value::Kind::Array(encode_array(items)),
        Value::Null => proto::value::Kind::Null(proto::ValueNull::NullValue as i32),
    };
    proto::Value { kind: Some(kind) }
}
/// Encodes every entry of a `Value` map into its protobuf counterpart.
fn encode_map(fields: BTreeMap<String, Value>) -> proto::ValueMap {
    proto::ValueMap {
        fields: fields
            .into_iter()
            .map(|(key, value)| (key, encode_value(value)))
            .collect(),
    }
}
/// Encodes every element of a `Value` array into its protobuf counterpart.
fn encode_array(items: Vec<Value>) -> proto::ValueArray {
    proto::ValueArray {
        items: items.into_iter().map(encode_value).collect(),
    }
}
/// Encodes an in-memory `Event` into its protobuf wrapper (inverse of the
/// `From<proto::EventWrapper>` decoder above).
impl From<Event> for proto::EventWrapper {
    fn from(event: Event) -> Self {
        match event {
            Event::Log(LogEvent { fields }) => {
                let fields = fields
                    .into_iter()
                    .map(|(k, v)| (k, encode_value(v)))
                    .collect::<BTreeMap<_, _>>();
                let event = EventProto::Log(Log { fields });
                proto::EventWrapper { event: Some(event) }
            }
            Event::Metric(Metric {
                name,
                timestamp,
                tags,
                kind,
                value,
            }) => {
                let timestamp = timestamp.map(|ts| prost_types::Timestamp {
                    seconds: ts.timestamp(),
                    nanos: ts.timestamp_subsec_nanos() as i32,
                });
                // `None` tags become an empty map on the wire (the decoder
                // turns an empty map back into `None`).
                let tags = tags.unwrap_or_default();
                let kind = match kind {
                    MetricKind::Incremental => proto::metric::Kind::Incremental,
                    MetricKind::Absolute => proto::metric::Kind::Absolute,
                }
                .into();
                let metric = match value {
                    MetricValue::Counter { value } => {
                        MetricProto::Counter(proto::Counter { value })
                    }
                    MetricValue::Gauge { value } => MetricProto::Gauge(proto::Gauge { value }),
                    MetricValue::Set { values } => MetricProto::Set(proto::Set {
                        values: values.into_iter().collect(),
                    }),
                    MetricValue::Distribution {
                        values,
                        sample_rates,
                        statistic,
                    } => MetricProto::Distribution(proto::Distribution {
                        values,
                        sample_rates,
                        statistic: match statistic {
                            StatisticKind::Histogram => {
                                proto::distribution::StatisticKind::Histogram
                            }
                            StatisticKind::Summary => proto::distribution::StatisticKind::Summary,
                        }
                        .into(),
                    }),
                    MetricValue::AggregatedHistogram {
                        buckets,
                        counts,
                        count,
                        sum,
                    } => MetricProto::AggregatedHistogram(proto::AggregatedHistogram {
                        buckets,
                        counts,
                        count,
                        sum,
                    }),
                    MetricValue::AggregatedSummary {
                        quantiles,
                        values,
                        count,
                        sum,
                    } => MetricProto::AggregatedSummary(proto::AggregatedSummary {
                        quantiles,
                        values,
                        count,
                        sum,
                    }),
                };
                let event = EventProto::Metric(proto::Metric {
                    name,
                    timestamp,
                    tags,
                    kind,
                    value: Some(metric),
                });
                proto::EventWrapper { event: Some(event) }
            }
        }
    }
}
// TODO: should probably get rid of this
/// Extracts only the message field of a log event as raw bytes.
impl From<Event> for Vec<u8> {
    fn from(event: Event) -> Vec<u8> {
        event
            .into_log()
            // NOTE(review): panics when the event lacks a message field —
            // confirm every caller guarantees its presence.
            .remove(&log_schema().message_key())
            .unwrap()
            .as_bytes()
            .to_vec()
    }
}
/// Builds a log event from a raw message, stamping it with the current time
/// under the schema's message and timestamp keys.
impl From<Bytes> for Event {
    fn from(message: Bytes) -> Self {
        let mut event = Event::Log(LogEvent {
            fields: BTreeMap::new(),
        });
        event
            .as_mut_log()
            .insert(log_schema().message_key().clone(), message);
        event
            .as_mut_log()
            .insert(log_schema().timestamp_key().clone(), Utc::now());
        event
    }
}
// Convenience conversions into `Event`. String inputs funnel through the
// `From<Bytes>` impl above, which also attaches a timestamp.
impl From<&str> for Event {
    fn from(line: &str) -> Self {
        line.to_owned().into()
    }
}
impl From<String> for Event {
    fn from(line: String) -> Self {
        Bytes::from(line).into()
    }
}
impl From<LogEvent> for Event {
    fn from(log: LogEvent) -> Self {
        Event::Log(log)
    }
}
impl From<Metric> for Event {
    fn from(metric: Metric) -> Self {
        Event::Metric(metric)
    }
}
#[cfg(test)]
mod test {
    use super::{Atom, Event, LogSchema, Value};
    use regex::Regex;
    use std::collections::HashSet;
    // All fields — including the auto-inserted timestamp — round-trip
    // through JSON, and the timestamp renders as RFC 3339.
    #[test]
    fn serialization() {
        let mut event = Event::from("raw log line");
        event.as_mut_log().insert("foo", "bar");
        event.as_mut_log().insert("bar", "baz");
        let expected_all = serde_json::json!({
            "message": "raw log line",
            "foo": "bar",
            "bar": "baz",
            "timestamp": event.as_log().get(&super::log_schema().timestamp_key()),
        });
        let actual_all = serde_json::to_value(event.as_log().all_fields()).unwrap();
        assert_eq!(expected_all, actual_all);
        let rfc3339_re = Regex::new(r"\A\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z\z").unwrap();
        assert!(rfc3339_re.is_match(actual_all.pointer("/timestamp").unwrap().as_str().unwrap()));
    }
    // Each `Value` variant serializes to the matching native JSON type.
    #[test]
    fn type_serialization() {
        use serde_json::json;
        let mut event = Event::from("hello world");
        event.as_mut_log().insert("int", 4);
        event.as_mut_log().insert("float", 5.5);
        event.as_mut_log().insert("bool", true);
        event.as_mut_log().insert("string", "thisisastring");
        let map = serde_json::to_value(event.as_log().all_fields()).unwrap();
        assert_eq!(map["float"], json!(5.5));
        assert_eq!(map["int"], json!(4));
        assert_eq!(map["bool"], json!(true));
        assert_eq!(map["string"], json!("thisisastring"));
    }
    // `all_fields` yields every inserted key/value pair (order ignored here).
    #[test]
    fn event_iteration() {
        let mut event = Event::new_empty_log();
        event
            .as_mut_log()
            .insert("Ke$ha", "It's going down, I'm yelling timber");
        event
            .as_mut_log()
            .insert("Pitbull", "The bigger they are, the harder they fall");
        let all = event
            .as_log()
            .all_fields()
            .map(|(k, v)| (k, v.to_string_lossy()))
            .collect::<HashSet<_>>();
        assert_eq!(
            all,
            vec![
                (
                    String::from("Ke$ha"),
                    "It's going down, I'm yelling timber".to_string()
                ),
                (
                    String::from("Pitbull"),
                    "The bigger they are, the harder they fall".to_string()
                ),
            ]
            .into_iter()
            .collect::<HashSet<_>>()
        );
    }
    // Iteration is sorted by key, not by insertion order.
    #[test]
    fn event_iteration_order() {
        let mut event = Event::new_empty_log();
        let log = event.as_mut_log();
        log.insert(&Atom::from("lZDfzKIL"), Value::from("tOVrjveM"));
        log.insert(&Atom::from("o9amkaRY"), Value::from("pGsfG7Nr"));
        log.insert(&Atom::from("YRjhxXcg"), Value::from("nw8iM5Jr"));
        let collected: Vec<_> = log.all_fields().collect();
        assert_eq!(
            collected,
            vec![
                (String::from("YRjhxXcg"), &Value::from("nw8iM5Jr")),
                (String::from("lZDfzKIL"), &Value::from("tOVrjveM")),
                (String::from("o9amkaRY"), &Value::from("pGsfG7Nr")),
            ]
        );
    }
    // A config that sets only some keys still deserializes; the rest use the
    // serde `default_*` callbacks.
    #[test]
    fn partial_log_schema() {
        let toml = r#"
            message_key = "message"
            timestamp_key = "timestamp"
            "#;
        let _ = toml::from_str::<LogSchema>(toml).unwrap();
    }
}
| 30.660422 | 101 | 0.513558 |
c129bcd15de6ef29966263133725c4e3ff4e12d1 | 10,607 | use crate::closure;
#[cfg(any(feature = "js-sys", test))]
use js_sys::Array;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
#[wasm_bindgen]
extern "C" {
    /// Interface provided by Dragula to interact with active drag-and-drop system
    ///
    /// ### Example:
    #[cfg_attr(feature = "js-sys", doc = "```no_run")]
    #[cfg_attr(not(feature = "js-sys"), doc = "```no_compile")]
    /// use dragula::*;
    ///
    /// let doc = web_sys::window().unwrap().document().unwrap();
    /// let element_1 = doc.get_element_by_id("drag-container-1").unwrap();
    /// let element_2 = doc.get_element_by_id("drag-container-2").unwrap();
    ///
    /// let mut drake = dragula(&[element_1]);
    ///
    /// drake.add_container(element_2);
    ///
    /// ```
    #[wasm_bindgen(js_name = Drake)]
    pub type Drake;
    /// This property will be `true` whenever an element is being dragged.
    #[wasm_bindgen(method, getter)]
    pub fn dragging(this: &Drake) -> bool;
    /// Gracefully end the drag event as if using **the last position marked by
    /// the preview shadow** as the drop target. The proper `cancel` or `drop`
    /// event will be fired, depending on whether the item was dropped back
    /// where it was originally lifted from _(which is essentially a no-op
    /// that's treated as a `cancel` event)_.
    #[wasm_bindgen(method)]
    pub fn end(this: &Drake);
    /// If an element managed by `Drake` is currently being dragged, this method
    /// will gracefully remove it from the DOM.
    #[wasm_bindgen(method)]
    pub fn remove(this: &Drake);
    /// Removes all drag and drop events used by `dragula` to manage drag and
    /// drop between the `containers`. If `destroy` is called while an element
    /// is being dragged, the drag will be effectively cancelled.
    #[wasm_bindgen(method)]
    pub fn destroy(this: &Drake);
    // Raw JS `drake.on(eventType, listener)`; wrapped by the typed `on_*`
    // methods in the `impl Drake` block below.
    #[wasm_bindgen(method)]
    fn on(this: &Drake, event_type: &str, listener: JsValue);
    /// If an element managed by `Drake` is currently being dragged, this method
    /// will gracefully cancel the drag action.
    ///
    /// Note that **a _"cancellation"_ will result in a `cancel` event** only in
    /// the following scenarios.
    ///
    /// - `revert_on_spill` is `true`
    /// - Drop target _(as previewed by the feedback shadow)_ is the source
    ///   container **and** the item is dropped in the same position where it
    ///   was originally dragged from
    #[wasm_bindgen(method)]
    pub fn cancel(this: &Drake);
    /// If an element managed by `Drake` is currently being dragged, this method
    /// will gracefully cancel the drag action. If `true` is passed to this
    /// function, it will effectively produce the same result as if
    /// `revert_on_spill` is true.
    ///
    /// - `revert_on_spill` is `true`
    /// - Drop target _(as previewed by the feedback shadow)_ is the source
    ///   container **and** the item is dropped in the same position where it
    ///   was originally dragged from
    #[wasm_bindgen(method, js_name = cancel)]
    pub fn cancel_with_revert(this: &Drake, revert: bool);
    // Raw accessors for the JS `containers` property; the typed wrappers
    // convert to/from Rust collections.
    #[wasm_bindgen(method, getter = containers)]
    fn containers_getter_impl(this: &Drake) -> JsValue;
    #[wasm_bindgen(method, setter = containers)]
    fn containers_setter_impl(this: &Drake, val: Box<[JsValue]>);
    // Raw `drake.start(item)` / `drake.canMove(item)` taking untyped JS values.
    #[wasm_bindgen(method, js_name = start)]
    fn start_impl(this: &Drake, item: JsValue);
    #[wasm_bindgen(method, js_name = canMove)]
    fn can_move_impl(this: &Drake, item: JsValue) -> bool;
}
impl Drake {
    /// Gets the active containers currently allowing dragging
    ///
    /// Requires that feature `js-sys` be turned on (it is on by default)
    #[cfg(any(feature = "js-sys", test))]
    pub fn containers(&self) -> Vec<JsValue> {
        let containers = self.containers_getter_impl();
        let containers = Array::from(&containers);
        containers.to_vec()
    }
    /// Sets the list of active containers for dragging. This overrides the
    /// list that is currently there.
    pub fn set_containers<T>(&mut self, objs: &[T])
    where
        T: JsCast + Clone,
    {
        // No per-element clone needed: `JsValue::from` only requires a
        // reference, the same conversion `start`/`can_move` rely on.
        let obj_array = objs.iter().map(|o| JsValue::from(o)).collect();
        self.containers_setter_impl(obj_array);
    }
    /// Adds to the list of active containers for dragging
    ///
    /// Requires that feature `js-sys` be turned on (it is on by default)
    #[cfg(feature = "js-sys")]
    pub fn add_container<T>(&mut self, obj: T)
    where
        T: JsCast,
    {
        let mut containers = self.containers();
        let container_to_add = JsValue::from(&obj);
        containers.push(container_to_add);
        self.set_containers(&containers);
    }
    /// Enter drag mode **without a shadow**. This function is most useful when
    /// providing complementary keyboard shortcuts to an existing drag and drop
    /// solution. Even though a shadow won't be created at first, the user will
    /// get one as soon as they click on `item` and start dragging it around.
    /// Note that if they click and drag something else, `end` will be called
    /// before picking up the new item.
    pub fn start<T>(&mut self, item: &T)
    where
        T: JsCast,
    {
        let item = JsValue::from(item);
        self.start_impl(item);
    }
    /// Returns whether the `Drake` instance can accept drags for a DOM element
    /// `item`. This function returns `true` when all the conditions outlined
    /// below are met, and `false` otherwise.
    ///
    /// - `item` is a child of one of the specified containers for `Drake`
    /// - `item` passes the pertinent [`invalid`](crate::Options::invalid) checks
    /// - `item` passes a `moves` check
    pub fn can_move<T>(&self, item: &T) -> bool
    where
        T: JsCast,
    {
        let item = JsValue::from(item);
        self.can_move_impl(item)
    }
    /// Sets callback for `drag` event.
    /// Callback will be passed arguments `(el, source)`
    /// The `drag` event implies that
    /// `el` was lifted from `source`.
    pub fn on_drag<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue),
    {
        const EVENT_NAME: &str = "drag";
        let listener = closure::to_js_2(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `dragend` event.
    /// Callback will be passed argument `(el)`
    /// The `dragend` event implies that
    /// dragging event for `el` ended with either `cancel`, `remove`, or `drop`.
    pub fn on_dragend<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue),
    {
        const EVENT_NAME: &str = "dragend";
        let listener = closure::to_js_1(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `drop` event.
    /// Callback will be passed arguments `(el, target, source, sibling)`
    /// The `drop` event implies that
    /// `el` was dropped into `target` before a `sibling` element, and
    /// originally came from `source`.
    pub fn on_drop<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "drop";
        let listener = closure::to_js_4(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `cancel` event.
    /// Callback will be passed argument `(el, container, source)`
    /// The `cancel` event implies that
    /// `el` was being dragged but it got nowhere and went back into
    /// `container`, its last stable parent; `el` originally came from `source`.
    pub fn on_cancel<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "cancel";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `remove` event.
    /// Callback will be passed argument `(el, container, source)`
    /// The `remove` event implies that
    /// `el` was being dragged but it got nowhere and it was removed from the
    /// DOM. Its last stable parent was `container`, and originally came from
    /// `source`.
    pub fn on_remove<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "remove";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `shadow` event.
    /// Callback will be passed argument `(el, container, source)`
    /// The `shadow` event implies that
    /// `el`, _the visual aid shadow_, was moved into `container`. May trigger
    /// many times as the position of `el` changes, even within the same
    /// `container`; `el` originally came from `source`.
    pub fn on_shadow<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "shadow";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `over` event.
    /// Callback will be passed argument `(el, container, source)`
    /// The `over` event implies that
    /// `el` is over `container`, and originally came from `source`.
    pub fn on_over<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "over";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `out` event.
    /// Callback will be passed argument `(el, container, source)`
    /// The `out` event implies that
    /// `el` was dragged out of `container` or dropped, and originally came from
    /// `source`.
    pub fn on_out<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "out";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
    /// Sets callback for `cloned` event.
    /// Callback will be passed argument `(clone, original, type)`
    /// The `cloned` event implies that
    /// DOM element `original` was cloned as `clone`, of `type` _(`'mirror'` or
    /// `'copy'`)_. Fired for mirror images and when `copy: true`.
    pub fn on_cloned<F: 'static>(&mut self, listener: F)
    where
        F: FnMut(JsValue, JsValue, JsValue),
    {
        const EVENT_NAME: &str = "cloned";
        let listener = closure::to_js_3(listener);
        self.on(EVENT_NAME, listener);
    }
}
#[cfg(test)]
pub mod test;
| 35.006601 | 82 | 0.623362 |
5056e7ffddc6b16bf9ea79b1c5c80fc6b33607b9 | 900 | use std::sync::Arc;
use futures_mpsc_lossy;
use ctx;
mod control;
pub mod event;
pub mod metrics;
pub mod sensor;
pub mod tap;
pub use self::control::{Control, MakeControl};
pub use self::event::Event;
pub use self::sensor::Sensors;
/// Creates proxy-specific runtime telemetry.
///
/// Wires a lossy event channel of size `capacity` between the two halves:
/// [`Sensors`] hide the details of how telemetry is recorded, but expose
/// proxy utilities that support telemetry; [`Control`] drives processing of
/// all telemetry events for tapping as well as metrics aggregation.
///
/// [`Sensors`]: struct.Sensors.html
/// [`Control`]: struct.Control.html
pub fn new(
    process: &Arc<ctx::Process>,
    capacity: usize,
) -> (Sensors, MakeControl) {
    let (event_tx, event_rx) = futures_mpsc_lossy::channel(capacity);
    let sensors = Sensors::new(event_tx);
    let make_control = MakeControl::new(event_rx, process);
    (sensors, make_control)
}
| 23.076923 | 88 | 0.673333 |
d59fdf47904f7939109376f398623796c0b66516 | 91,014 | //! Defines how the compiler represents types internally.
//!
//! Two important entities in this module are:
//!
//! - [`rustc_middle::ty::Ty`], used to represent the semantics of a type.
//! - [`rustc_middle::ty::TyCtxt`], the central data structure in the compiler.
//!
//! For more information, see ["The `ty` module: representing types"] in the rustc-dev-guide.
//!
//! ["The `ty` module: representing types"]: https://rustc-dev-guide.rust-lang.org/ty.html
pub use self::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeVisitor};
pub use self::AssocItemContainer::*;
pub use self::BorrowKind::*;
pub use self::IntVarValue::*;
pub use self::Variance::*;
pub use adt::*;
pub use assoc::*;
pub use generics::*;
use rustc_data_structures::fingerprint::Fingerprint;
pub use vtable::*;
use crate::metadata::ModChild;
use crate::middle::privacy::AccessLevels;
use crate::mir::{Body, GeneratorLayout};
use crate::traits::{self, Reveal};
use crate::ty;
use crate::ty::fast_reject::SimplifiedType;
use crate::ty::subst::{GenericArg, InternalSubsts, Subst, SubstsRef};
use crate::ty::util::Discr;
use rustc_ast as ast;
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalDefIdMap, CRATE_DEF_ID};
use rustc_hir::Node;
use rustc_macros::HashStable;
use rustc_query_system::ich::StableHashingContext;
use rustc_session::cstore::CrateStoreDyn;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
use rustc_target::abi::Align;
use std::fmt::Debug;
use std::hash::Hash;
use std::ops::ControlFlow;
use std::{fmt, str};
pub use crate::ty::diagnostics::*;
pub use rustc_type_ir::InferTy::*;
pub use rustc_type_ir::*;
pub use self::binding::BindingMode;
pub use self::binding::BindingMode::*;
pub use self::closure::{
is_ancestor_or_same_capture, place_to_string_for_capture, BorrowKind, CaptureInfo,
CapturedPlace, ClosureKind, MinCaptureInformationMap, MinCaptureList,
RootVariableMinCaptureList, UpvarCapture, UpvarCaptureMap, UpvarId, UpvarListMap, UpvarPath,
CAPTURE_STRUCT_LOCAL,
};
pub use self::consts::{
Const, ConstInt, ConstKind, ConstS, InferConst, ScalarInt, Unevaluated, ValTree,
};
pub use self::context::{
tls, CanonicalUserType, CanonicalUserTypeAnnotation, CanonicalUserTypeAnnotations,
CtxtInterners, DelaySpanBugEmitted, FreeRegionInfo, GeneratorDiagnosticData,
GeneratorInteriorTypeCause, GlobalCtxt, Lift, OnDiskCache, TyCtxt, TypeckResults, UserType,
UserTypeAnnotationIndex,
};
pub use self::instance::{Instance, InstanceDef};
pub use self::list::List;
pub use self::sty::BoundRegionKind::*;
pub use self::sty::RegionKind::*;
pub use self::sty::TyKind::*;
pub use self::sty::{
Binder, BoundRegion, BoundRegionKind, BoundTy, BoundTyKind, BoundVar, BoundVariableKind,
CanonicalPolyFnSig, ClosureSubsts, ClosureSubstsParts, ConstVid, EarlyBoundRegion,
ExistentialPredicate, ExistentialProjection, ExistentialTraitRef, FnSig, FreeRegion, GenSig,
GeneratorSubsts, GeneratorSubstsParts, InlineConstSubsts, InlineConstSubstsParts, ParamConst,
ParamTy, PolyExistentialProjection, PolyExistentialTraitRef, PolyFnSig, PolyGenSig,
PolyTraitRef, ProjectionTy, Region, RegionKind, RegionVid, TraitRef, TyKind, TypeAndMut,
UpvarSubsts, VarianceDiagInfo,
};
pub use self::trait_def::TraitDef;
pub mod _match;
pub mod adjustment;
pub mod binding;
pub mod cast;
pub mod codec;
pub mod error;
pub mod fast_reject;
pub mod flags;
pub mod fold;
pub mod inhabitedness;
pub mod layout;
pub mod normalize_erasing_regions;
pub mod print;
pub mod query;
pub mod relate;
pub mod subst;
pub mod trait_def;
pub mod util;
pub mod vtable;
pub mod walk;
mod adt;
mod assoc;
mod closure;
mod consts;
mod context;
mod diagnostics;
mod erase_regions;
mod generics;
mod impls_ty;
mod instance;
mod list;
mod structural_impls;
mod sty;
// Data types
/// Tool identifiers registered for the crate (see
/// `ResolverOutputs::registered_tools`).
pub type RegisteredTools = FxHashSet<Ident>;
/// Data produced by name resolution and handed off to the rest of the
/// compiler through `TyCtxt`.
#[derive(Debug)]
pub struct ResolverOutputs {
    pub definitions: rustc_hir::definitions::Definitions,
    pub cstore: Box<CrateStoreDyn>,
    /// Resolved visibility of each local item.
    pub visibilities: FxHashMap<LocalDefId, Visibility>,
    pub access_levels: AccessLevels,
    pub extern_crate_map: FxHashMap<LocalDefId, CrateNum>,
    pub maybe_unused_trait_imports: FxHashSet<LocalDefId>,
    pub maybe_unused_extern_crates: Vec<(LocalDefId, Span)>,
    pub reexport_map: FxHashMap<LocalDefId, Vec<ModChild>>,
    pub glob_map: FxHashMap<LocalDefId, FxHashSet<Symbol>>,
    /// Extern prelude entries. The value is `true` if the entry was introduced
    /// via `extern crate` item and not `--extern` option or compiler built-in.
    pub extern_prelude: FxHashMap<Symbol, bool>,
    /// Resolution of the program entrypoint, when one exists.
    pub main_def: Option<MainDefinition>,
    pub trait_impls: FxIndexMap<DefId, Vec<LocalDefId>>,
    /// A list of proc macro LocalDefIds, written out in the order in which
    /// they are declared in the static array generated by proc_macro_harness.
    pub proc_macros: Vec<LocalDefId>,
    /// Mapping from ident span to path span for paths that don't exist as written, but that
    /// exist under `std`. For example, wrote `str::from_utf8` instead of `std::str::from_utf8`.
    pub confused_type_with_std_module: FxHashMap<Span, Span>,
    pub registered_tools: RegisteredTools,
}
/// Resolution of the program entrypoint (`main`).
#[derive(Clone, Copy, Debug)]
pub struct MainDefinition {
    pub res: Res<ast::NodeId>,
    /// Whether the entrypoint was brought into scope by an import rather
    /// than defined in place.
    pub is_import: bool,
    pub span: Span,
}
impl MainDefinition {
    /// Returns the `DefId` when the entrypoint resolved to a plain `fn`,
    /// and `None` for any other resolution.
    pub fn opt_fn_def_id(self) -> Option<DefId> {
        match self.res {
            Res::Def(DefKind::Fn, def_id) => Some(def_id),
            _ => None,
        }
    }
}
/// The "header" of an impl is everything outside the body: a Self type, a trait
/// ref (in the case of a trait impl), and a set of predicates (from the
/// bounds / where-clauses).
#[derive(Clone, Debug, TypeFoldable)]
pub struct ImplHeader<'tcx> {
    pub impl_def_id: DefId,
    pub self_ty: Ty<'tcx>,
    /// `None` for inherent impls, `Some` for trait impls.
    pub trait_ref: Option<TraitRef<'tcx>>,
    pub predicates: Vec<Predicate<'tcx>>,
}
/// What an `impl` block implements: a trait (with its full trait ref) or an
/// inherent impl on a type.
#[derive(Copy, Clone, Debug, TypeFoldable)]
pub enum ImplSubject<'tcx> {
    /// `impl Trait for Type`.
    Trait(TraitRef<'tcx>),
    /// `impl Type`.
    Inherent(Ty<'tcx>),
}
/// Polarity of a trait impl: positive, negative, or a reservation.
#[derive(
    Copy,
    Clone,
    PartialEq,
    Eq,
    Hash,
    TyEncodable,
    TyDecodable,
    HashStable,
    Debug,
    TypeFoldable
)]
pub enum ImplPolarity {
    /// `impl Trait for Type`
    Positive,
    /// `impl !Trait for Type`
    Negative,
    /// `#[rustc_reservation_impl] impl Trait for Type`
    ///
    /// This is a "stability hack", not a real Rust feature.
    /// See #64631 for details.
    Reservation,
}
impl ImplPolarity {
    /// Flips polarity by turning `Positive` into `Negative` and `Negative` into `Positive`.
    pub fn flip(&self) -> Option<ImplPolarity> {
        let flipped = match self {
            ImplPolarity::Positive => ImplPolarity::Negative,
            ImplPolarity::Negative => ImplPolarity::Positive,
            // A reservation impl has no meaningful opposite.
            ImplPolarity::Reservation => return None,
        };
        Some(flipped)
    }
}
impl fmt::Display for ImplPolarity {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render the polarity as the lowercase word used in diagnostics.
        let text = match self {
            Self::Positive => "positive",
            Self::Negative => "negative",
            Self::Reservation => "reservation",
        };
        f.write_str(text)
    }
}
/// The effective visibility of an item, after resolution.
#[derive(Clone, Debug, PartialEq, Eq, Copy, Hash, TyEncodable, TyDecodable, HashStable)]
pub enum Visibility {
    /// Visible everywhere (including in other crates).
    Public,
    /// Visible only in the given crate-local module.
    Restricted(DefId),
    /// Not visible anywhere in the local crate. This is the visibility of private external items.
    Invisible,
}
/// Constness attached to a trait bound.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable)]
pub enum BoundConstness {
    /// `T: Trait`
    NotConst,
    /// `T: ~const Trait`
    ///
    /// Requires resolving to const only when we are in a const context.
    ConstIfConst,
}
impl BoundConstness {
    /// Reduce `self` and `constness` to two possible combined states instead of four.
    pub fn and(&mut self, constness: hir::Constness) -> hir::Constness {
        match (constness, self) {
            // A `~const` bound in a const context stays const; `self` is left
            // untouched.
            (hir::Constness::Const, BoundConstness::ConstIfConst) => hir::Constness::Const,
            // Every other combination collapses to "not const" on both sides:
            // `self` is mutated in place.
            (_, this) => {
                *this = BoundConstness::NotConst;
                hir::Constness::NotConst
            }
        }
    }
}
impl fmt::Display for BoundConstness {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Human-readable form used in diagnostics.
        f.write_str(match self {
            Self::NotConst => "normal",
            Self::ConstIfConst => "`~const`",
        })
    }
}
/// Pair of capture tuples recorded for closure-size profiling, comparing the
/// captured types before and after the `capture_disjoint_fields` feature.
#[derive(
    Clone,
    Debug,
    PartialEq,
    Eq,
    Copy,
    Hash,
    TyEncodable,
    TyDecodable,
    HashStable,
    TypeFoldable
)]
pub struct ClosureSizeProfileData<'tcx> {
    /// Tuple containing the types of closure captures before the feature `capture_disjoint_fields`
    pub before_feature_tys: Ty<'tcx>,
    /// Tuple containing the types of closure captures after the feature `capture_disjoint_fields`
    pub after_feature_tys: Ty<'tcx>,
}
/// Navigation over the parent/child tree of `DefId`s.
pub trait DefIdTree: Copy {
    /// Returns the parent of `id`, or `None` when `id` has no parent
    /// (i.e. it is a crate root).
    fn parent(self, id: DefId) -> Option<DefId>;
    /// Like [`Self::parent`], but for local ids; the parent is converted
    /// back to a `LocalDefId` via `expect_local`.
    #[inline]
    fn local_parent(self, id: LocalDefId) -> Option<LocalDefId> {
        Some(self.parent(id.to_def_id())?.expect_local())
    }
    /// Walks `parent` links upward from `descendant` until it reaches
    /// `ancestor` or runs off the root. Note `x.is_descendant_of(x)` holds.
    fn is_descendant_of(self, mut descendant: DefId, ancestor: DefId) -> bool {
        // Ids from different crates can never be related.
        if descendant.krate != ancestor.krate {
            return false;
        }
        while descendant != ancestor {
            match self.parent(descendant) {
                Some(parent) => descendant = parent,
                None => return false,
            }
        }
        true
    }
}
impl<'tcx> DefIdTree for TyCtxt<'tcx> {
    /// The parent is read out of the def key; the crate component of the
    /// `DefId` is carried over unchanged.
    fn parent(self, id: DefId) -> Option<DefId> {
        let index = self.def_key(id).parent?;
        Some(DefId { index, ..id })
    }
}
impl Visibility {
    /// Lowers a HIR visibility node into a resolved `Visibility`.
    pub fn from_hir(visibility: &hir::Visibility<'_>, id: hir::HirId, tcx: TyCtxt<'_>) -> Self {
        match visibility.node {
            hir::VisibilityKind::Public => Visibility::Public,
            hir::VisibilityKind::Crate(_) => Visibility::Restricted(CRATE_DEF_ID.to_def_id()),
            hir::VisibilityKind::Restricted { ref path, .. } => {
                if let Res::Err = path.res {
                    // If there is no resolution, `resolve` will have already reported an
                    // error, so assume that the visibility is public to avoid reporting
                    // more privacy errors.
                    Visibility::Public
                } else {
                    Visibility::Restricted(path.res.def_id())
                }
            }
            hir::VisibilityKind::Inherited => {
                Visibility::Restricted(tcx.parent_module(id).to_def_id())
            }
        }
    }

    /// Returns `true` if an item with this visibility is accessible from the given block.
    pub fn is_accessible_from<T: DefIdTree>(self, module: DefId, tree: T) -> bool {
        match self {
            // Public items are visible everywhere.
            Visibility::Public => true,
            // Private items from other crates are visible nowhere.
            Visibility::Invisible => false,
            // Restricted items are visible in an arbitrary local module,
            // provided the restriction lives in the same crate.
            Visibility::Restricted(restriction) => {
                restriction.krate == module.krate && tree.is_descendant_of(module, restriction)
            }
        }
    }

    /// Returns `true` if this visibility is at least as accessible as the given visibility
    pub fn is_at_least<T: DefIdTree>(self, vis: Visibility, tree: T) -> bool {
        match vis {
            Visibility::Public => self == Visibility::Public,
            Visibility::Invisible => true,
            Visibility::Restricted(module) => self.is_accessible_from(module, tree),
        }
    }

    /// Returns `true` if this item is visible anywhere in the local crate.
    pub fn is_visible_locally(self) -> bool {
        match self {
            Visibility::Invisible => false,
            Visibility::Restricted(def_id) => def_id.is_local(),
            Visibility::Public => true,
        }
    }

    /// Returns `true` if this is `Visibility::Public`.
    pub fn is_public(self) -> bool {
        self == Visibility::Public
    }
}
/// The crate variances map is computed during typeck and contains the
/// variance of every item in the local crate. You should not use it
/// directly, because to do so will make your pass dependent on the
/// HIR of every item in the local crate. Instead, use
/// `tcx.variances_of()` to get the variance for a *particular*
/// item.
#[derive(HashStable, Debug)]
pub struct CrateVariancesMap<'tcx> {
    /// For each item with generics, maps to a vector of the variance
    /// of its generics. If an item has no generics, it will have no
    /// entry. Keyed by the item's `DefId`.
    pub variances: FxHashMap<DefId, &'tcx [ty::Variance]>,
}
// Contains information needed to resolve types and (in the future) look up
// the types of AST nodes.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct CReaderCacheKey {
    // Crate component of the key. NOTE(review): presumably `None` denotes the
    // local crate — confirm against the metadata decoder that builds these.
    pub cnum: Option<CrateNum>,
    // Position component of the key within that crate's data.
    pub pos: usize,
}
/// Represents a type.
///
/// IMPORTANT:
/// - This is a very "dumb" struct (with no derives and no `impls`).
/// - Values of this type are always interned and thus unique, and are stored
///   as an `Interned<TyS>`.
/// - `Ty` (which contains a reference to a `Interned<TyS>`) or `Interned<TyS>`
///   should be used everywhere instead of `TyS`. In particular, `Ty` has most
///   of the relevant methods.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[allow(rustc::usage_of_ty_tykind)]
crate struct TyS<'tcx> {
    /// This field shouldn't be used directly and may be removed in the future.
    /// Use `Ty::kind()` instead.
    kind: TyKind<'tcx>,
    /// This field provides fast access to information that is also contained
    /// in `kind`.
    ///
    /// This field shouldn't be used directly and may be removed in the future.
    /// Use `Ty::flags()` instead.
    flags: TypeFlags,
    /// This field provides fast access to information that is also contained
    /// in `kind`.
    ///
    /// This is a kind of confusing thing: it stores the smallest
    /// binder such that
    ///
    /// (a) the binder itself captures nothing but
    /// (b) all the late-bound things within the type are captured
    ///     by some sub-binder.
    ///
    /// So, for a type without any late-bound things, like `u32`, this
    /// will be *innermost*, because that is the innermost binder that
    /// captures nothing. But for a type `&'D u32`, where `'D` is a
    /// late-bound region with De Bruijn index `D`, this would be `D + 1`
    /// -- the binder itself does not capture `D`, but `D` is captured
    /// by an inner binder.
    ///
    /// We call this concept an "exclusive" binder `D` because all
    /// De Bruijn indices within the type are contained within `0..D`
    /// (exclusive).
    outer_exclusive_binder: ty::DebruijnIndex,
}
// `TyS` is used a lot. Make sure it doesn't unintentionally get bigger.
// (These asserts only fire on 64-bit x86; other targets may have other sizes.)
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(TyS<'_>, 40);
// We are actually storing a stable hash cache next to the type, so let's
// also check the full size
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(WithStableHash<TyS<'_>>, 56);
/// Use this rather than `TyS`, whenever possible.
/// A `Ty` is a single interned pointer, so it is cheap to copy and compare.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[rustc_diagnostic_item = "Ty"]
#[rustc_pass_by_value]
pub struct Ty<'tcx>(Interned<'tcx, WithStableHash<TyS<'tcx>>>);
// Statics only used for internal testing.
// A pre-built `bool` type that bypasses interning; its stable hash is a dummy.
pub static BOOL_TY: Ty<'static> = Ty(Interned::new_unchecked(&WithStableHash {
    internee: BOOL_TYS,
    stable_hash: Fingerprint::ZERO,
}));
const BOOL_TYS: TyS<'static> = TyS {
    kind: ty::Bool,
    flags: TypeFlags::empty(),
    outer_exclusive_binder: DebruijnIndex::from_usize(0),
};
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TyS<'tcx> {
    /// Only `kind` is hashed; the destructuring pattern below is exhaustive so
    /// that adding a new field to `TyS` forces this impl to be revisited.
    #[inline]
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        let TyS {
            kind,
            // The other fields just provide fast access to information that is
            // also contained in `kind`, so no need to hash them.
            flags: _,
            outer_exclusive_binder: _,
        } = self;
        kind.hash_stable(hcx, hasher)
    }
}
impl ty::EarlyBoundRegion {
    /// Does this early bound region have a name? Early bound regions normally
    /// always have names except when using anonymous lifetimes (`'_`).
    pub fn has_name(&self) -> bool {
        let is_anonymous = self.name == kw::UnderscoreLifetime;
        !is_anonymous
    }
}
/// Represents a predicate.
///
/// See comments on `TyS`, which apply here too (albeit for
/// `PredicateS`/`Predicate` rather than `TyS`/`Ty`).
#[derive(Debug)]
crate struct PredicateS<'tcx> {
    kind: Binder<'tcx, PredicateKind<'tcx>>,
    /// Fast access to information also contained in `kind`.
    flags: TypeFlags,
    /// See the comment for the corresponding field of [TyS].
    outer_exclusive_binder: ty::DebruijnIndex,
}
// This type is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
static_assert_size!(PredicateS<'_>, 56);
/// Use this rather than `PredicateS`, whenever possible.
/// A `Predicate` is a single interned pointer: cheap to copy and compare.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[rustc_pass_by_value]
pub struct Predicate<'tcx>(Interned<'tcx, PredicateS<'tcx>>)
impl<'tcx> Predicate<'tcx> {
    /// Gets the inner `Binder<'tcx, PredicateKind<'tcx>>`.
    #[inline]
    pub fn kind(self) -> Binder<'tcx, PredicateKind<'tcx>> {
        self.0.kind
    }

    /// Fast access to the predicate's precomputed type flags.
    #[inline(always)]
    pub fn flags(self) -> TypeFlags {
        self.0.flags
    }

    /// See the comment for the corresponding field of [TyS].
    #[inline(always)]
    pub fn outer_exclusive_binder(self) -> DebruijnIndex {
        self.0.outer_exclusive_binder
    }

    /// Flips the polarity of a Predicate.
    ///
    /// Given `T: Trait` predicate it returns `T: !Trait` and given `T: !Trait` returns `T: Trait`.
    /// Returns `None` for non-trait predicates or when the polarity cannot be flipped.
    pub fn flip_polarity(self, tcx: TyCtxt<'tcx>) -> Option<Predicate<'tcx>> {
        let flipped = self.kind().map_bound(|kind| match kind {
            PredicateKind::Trait(TraitPredicate { trait_ref, constness, polarity }) => {
                polarity.flip().map(|polarity| {
                    PredicateKind::Trait(TraitPredicate { trait_ref, constness, polarity })
                })
            }
            _ => None,
        });
        Some(tcx.mk_predicate(flipped.transpose()?))
    }
}
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for Predicate<'tcx> {
    /// Only `kind` is hashed; the exhaustive destructuring below ensures any
    /// newly added `PredicateS` field forces this impl to be reconsidered.
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        let PredicateS {
            ref kind,
            // The other fields just provide fast access to information that is
            // also contained in `kind`, so no need to hash them.
            flags: _,
            outer_exclusive_binder: _,
        } = self.0.0;
        kind.hash_stable(hcx, hasher);
    }
}
/// The kind (and payload) of a [`Predicate`].
#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub enum PredicateKind<'tcx> {
    /// Corresponds to `where Foo: Bar<A, B, C>`. `Foo` here would be
    /// the `Self` type of the trait reference and `A`, `B`, and `C`
    /// would be the type parameters.
    Trait(TraitPredicate<'tcx>),
    /// `where 'a: 'b`
    RegionOutlives(RegionOutlivesPredicate<'tcx>),
    /// `where T: 'a`
    TypeOutlives(TypeOutlivesPredicate<'tcx>),
    /// `where <T as TraitRef>::Name == X`, approximately.
    /// See the `ProjectionPredicate` struct for details.
    Projection(ProjectionPredicate<'tcx>),
    /// No syntax: `T` well-formed.
    WellFormed(GenericArg<'tcx>),
    /// Trait must be object-safe.
    ObjectSafe(DefId),
    /// No direct syntax. May be thought of as `where T: FnFoo<...>`
    /// for some substitutions `...` and `T` being a closure type.
    /// Satisfied (or refuted) once we know the closure's kind.
    ClosureKind(DefId, SubstsRef<'tcx>, ClosureKind),
    /// `T1 <: T2`
    ///
    /// This obligation is created most often when we have two
    /// unresolved type variables and hence don't have enough
    /// information to process the subtyping obligation yet.
    Subtype(SubtypePredicate<'tcx>),
    /// `T1` coerced to `T2`
    ///
    /// Like a subtyping obligation, this is created most often
    /// when we have two unresolved type variables and hence
    /// don't have enough information to process the coercion
    /// obligation yet. At the moment, we actually process coercions
    /// very much like subtyping and don't handle the full coercion
    /// logic.
    Coerce(CoercePredicate<'tcx>),
    /// Constant initializer must evaluate successfully.
    ConstEvaluatable(ty::Unevaluated<'tcx, ()>),
    /// Constants must be equal. The first component is the const that is expected.
    ConstEquate(Const<'tcx>, Const<'tcx>),
    /// Represents a type found in the environment that we can use for implied bounds.
    ///
    /// Only used for Chalk.
    TypeWellFormedFromEnv(Ty<'tcx>),
}
/// The crate outlives map is computed during typeck and contains the
/// outlives of every item in the local crate. You should not use it
/// directly, because to do so will make your pass dependent on the
/// HIR of every item in the local crate. Instead, use
/// `tcx.inferred_outlives_of()` to get the outlives for a *particular*
/// item.
#[derive(HashStable, Debug)]
pub struct CratePredicatesMap<'tcx> {
    /// For each struct with outlive bounds, maps to a vector of the
    /// predicate of its outlive bounds. If an item has no outlives
    /// bounds, it will have no entry. Each predicate is paired with
    /// the span it originated from.
    pub predicates: FxHashMap<DefId, &'tcx [(Predicate<'tcx>, Span)]>,
}
impl<'tcx> Predicate<'tcx> {
    /// Performs a substitution suitable for going from a
    /// poly-trait-ref to supertraits that must hold if that
    /// poly-trait-ref holds. This is slightly different from a normal
    /// substitution in terms of what happens with bound regions. See
    /// lengthy comment below for details. In short: the bound vars of
    /// `trait_ref` and of `self` are concatenated into a single binder.
    pub fn subst_supertrait(
        self,
        tcx: TyCtxt<'tcx>,
        trait_ref: &ty::PolyTraitRef<'tcx>,
    ) -> Predicate<'tcx> {
        // The interaction between HRTB and supertraits is not entirely
        // obvious. Let me walk you (and myself) through an example.
        //
        // Let's start with an easy case. Consider two traits:
        //
        //     trait Foo<'a>: Bar<'a,'a> { }
        //     trait Bar<'b,'c> { }
        //
        // Now, if we have a trait reference `for<'x> T: Foo<'x>`, then
        // we can deduce that `for<'x> T: Bar<'x,'x>`. Basically, if we
        // knew that `Foo<'x>` (for any 'x) then we also know that
        // `Bar<'x,'x>` (for any 'x). This more-or-less falls out from
        // normal substitution.
        //
        // In terms of why this is sound, the idea is that whenever there
        // is an impl of `T:Foo<'a>`, it must show that `T:Bar<'a,'a>`
        // holds. So if there is an impl of `T:Foo<'a>` that applies to
        // all `'a`, then we must know that `T:Bar<'a,'a>` holds for all
        // `'a`.
        //
        // Another example to be careful of is this:
        //
        //     trait Foo1<'a>: for<'b> Bar1<'a,'b> { }
        //     trait Bar1<'b,'c> { }
        //
        // Here, if we have `for<'x> T: Foo1<'x>`, then what do we know?
        // The answer is that we know `for<'x,'b> T: Bar1<'x,'b>`. The
        // reason is similar to the previous example: any impl of
        // `T:Foo1<'x>` must show that `for<'b> T: Bar1<'x, 'b>`. So
        // basically we would want to collapse the bound lifetimes from
        // the input (`trait_ref`) and the supertraits.
        //
        // To achieve this in practice is fairly straightforward. Let's
        // consider the more complicated scenario:
        //
        // - We start out with `for<'x> T: Foo1<'x>`. In this case, `'x`
        //   has a De Bruijn index of 1. We want to produce `for<'x,'b> T: Bar1<'x,'b>`,
        //   where both `'x` and `'b` would have a DB index of 1.
        //   The substitution from the input trait-ref is therefore going to be
        //   `'a => 'x` (where `'x` has a DB index of 1).
        // - The supertrait-ref is `for<'b> Bar1<'a,'b>`, where `'a` is an
        //   early-bound parameter and `'b' is a late-bound parameter with a
        //   DB index of 1.
        // - If we replace `'a` with `'x` from the input, it too will have
        //   a DB index of 1, and thus we'll have `for<'x,'b> Bar1<'x,'b>`
        //   just as we wanted.
        //
        // There is only one catch. If we just apply the substitution `'a
        // => 'x` to `for<'b> Bar1<'a,'b>`, the substitution code will
        // adjust the DB index because we substituting into a binder (it
        // tries to be so smart...) resulting in `for<'x> for<'b>
        // Bar1<'x,'b>` (we have no syntax for this, so use your
        // imagination). Basically the 'x will have DB index of 2 and 'b
        // will have DB index of 1. Not quite what we want. So we apply
        // the substitution to the *contents* of the trait reference,
        // rather than the trait reference itself (put another way, the
        // substitution code expects equal binding levels in the values
        // from the substitution and the value being substituted into, and
        // this trick achieves that).
        // Working through the second example:
        // trait_ref: for<'x> T: Foo1<'^0.0>; substs: [T, '^0.0]
        // predicate: for<'b> Self: Bar1<'a, '^0.0>; substs: [Self, 'a, '^0.0]
        // We want to end up with:
        //     for<'x, 'b> T: Bar1<'^0.0, '^0.1>
        // To do this:
        // 1) We must shift all bound vars in predicate by the length
        //    of trait ref's bound vars. So, we would end up with predicate like
        //    Self: Bar1<'a, '^0.1>
        // 2) We can then apply the trait substs to this, ending up with
        //    T: Bar1<'^0.0, '^0.1>
        // 3) Finally, to create the final bound vars, we concatenate the bound
        //    vars of the trait ref with those of the predicate:
        //    ['x, 'b]
        let bound_pred = self.kind();
        let pred_bound_vars = bound_pred.bound_vars();
        let trait_bound_vars = trait_ref.bound_vars();
        // 1) Self: Bar1<'a, '^0.0> -> Self: Bar1<'a, '^0.1>
        let shifted_pred =
            tcx.shift_bound_var_indices(trait_bound_vars.len(), bound_pred.skip_binder());
        // 2) Self: Bar1<'a, '^0.1> -> T: Bar1<'^0.0, '^0.1>
        let new = shifted_pred.subst(tcx, trait_ref.skip_binder().substs);
        // 3) ['x] + ['b] -> ['x, 'b]
        let bound_vars =
            tcx.mk_bound_variable_kinds(trait_bound_vars.iter().chain(pred_bound_vars));
        tcx.reuse_or_mk_predicate(self, ty::Binder::bind_with_vars(new, bound_vars))
    }
}
/// A trait predicate: the trait reference together with its constness and polarity.
#[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct TraitPredicate<'tcx> {
    pub trait_ref: TraitRef<'tcx>,
    /// Whether this is a `~const` bound; see [`BoundConstness`].
    pub constness: BoundConstness,
    /// If polarity is Positive: we are proving that the trait is implemented.
    ///
    /// If polarity is Negative: we are proving that a negative impl of this trait
    /// exists. (Note that coherence also checks whether negative impls of supertraits
    /// exist via a series of predicates.)
    ///
    /// If polarity is Reserved: that's a bug.
    pub polarity: ImplPolarity,
}
pub type PolyTraitPredicate<'tcx> = ty::Binder<'tcx, TraitPredicate<'tcx>>;
impl<'tcx> TraitPredicate<'tcx> {
    /// Combines this predicate's constness into `param_env` (see
    /// `BoundConstness::and`), with a special case for `Drop`.
    pub fn remap_constness(&mut self, tcx: TyCtxt<'tcx>, param_env: &mut ParamEnv<'tcx>) {
        if unlikely!(Some(self.trait_ref.def_id) == tcx.lang_items().drop_trait()) {
            // remap without changing constness of this predicate.
            // this is because `T: ~const Drop` has a different meaning to `T: Drop`
            // FIXME(fee1-dead): remove this logic after beta bump
            param_env.remap_constness_with(self.constness)
        } else {
            *param_env = param_env.with_constness(self.constness.and(param_env.constness()))
        }
    }
    /// Remap the constness of this predicate before emitting it for diagnostics.
    pub fn remap_constness_diag(&mut self, param_env: ParamEnv<'tcx>) {
        // This differs from `remap_constness` in that callers want to print this
        // predicate in case of selection errors. `T: ~const Drop` bounds cannot
        // end up here when the param_env is not const, because such bounds are
        // always satisfied in non-const contexts.
        if let hir::Constness::NotConst = param_env.constness() {
            self.constness = ty::BoundConstness::NotConst;
        }
    }
    /// The `DefId` of the trait being referenced.
    pub fn def_id(self) -> DefId {
        self.trait_ref.def_id
    }
    /// The `Self` type of the trait reference.
    pub fn self_ty(self) -> Ty<'tcx> {
        self.trait_ref.self_ty()
    }
    /// Whether this is a `~const` bound.
    #[inline]
    pub fn is_const_if_const(self) -> bool {
        self.constness == BoundConstness::ConstIfConst
    }
}
impl<'tcx> PolyTraitPredicate<'tcx> {
    /// The `DefId` of the trait being referenced.
    pub fn def_id(self) -> DefId {
        // Ok to skip binder since trait `DefId` does not care about regions.
        self.skip_binder().def_id()
    }

    /// The `Self` type of the trait reference, still inside the binder.
    pub fn self_ty(self) -> ty::Binder<'tcx, Ty<'tcx>> {
        self.map_bound(|pred| pred.self_ty())
    }

    /// Remap the constness of this predicate before emitting it for diagnostics.
    pub fn remap_constness_diag(&mut self, param_env: ParamEnv<'tcx>) {
        *self = self.map_bound(|mut pred| {
            pred.remap_constness_diag(param_env);
            pred
        });
    }

    /// Whether this is a `~const` bound.
    #[inline]
    pub fn is_const_if_const(self) -> bool {
        self.skip_binder().is_const_if_const()
    }
}
/// An outlives relation `A: B` between two entities (regions and/or types).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct OutlivesPredicate<A, B>(pub A, pub B); // `A: B`
pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate<ty::Region<'tcx>, ty::Region<'tcx>>;
pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate<Ty<'tcx>, ty::Region<'tcx>>;
pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder<'tcx, RegionOutlivesPredicate<'tcx>>;
pub type PolyTypeOutlivesPredicate<'tcx> = ty::Binder<'tcx, TypeOutlivesPredicate<'tcx>>;
/// Encodes that `a` must be a subtype of `b`. The `a_is_expected` flag indicates
/// whether the `a` type is the type that we should label as "expected" when
/// presenting user diagnostics.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct SubtypePredicate<'tcx> {
    /// `true` if `a` should be labelled "expected" in diagnostics.
    pub a_is_expected: bool,
    pub a: Ty<'tcx>,
    pub b: Ty<'tcx>,
}
pub type PolySubtypePredicate<'tcx> = ty::Binder<'tcx, SubtypePredicate<'tcx>>;
/// Encodes that we have to coerce *from* the `a` type to the `b` type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct CoercePredicate<'tcx> {
    /// Source type of the coercion.
    pub a: Ty<'tcx>,
    /// Target type of the coercion.
    pub b: Ty<'tcx>,
}
pub type PolyCoercePredicate<'tcx> = ty::Binder<'tcx, CoercePredicate<'tcx>>;
/// Either a type or a constant — the two things a projection can equate to.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub enum Term<'tcx> {
    Ty(Ty<'tcx>),
    Const(Const<'tcx>),
}
impl<'tcx> From<Ty<'tcx>> for Term<'tcx> {
fn from(ty: Ty<'tcx>) -> Self {
Term::Ty(ty)
}
}
impl<'tcx> From<Const<'tcx>> for Term<'tcx> {
fn from(c: Const<'tcx>) -> Self {
Term::Const(c)
}
}
impl<'tcx> Term<'tcx> {
    /// Returns the type, or `None` if this term is a constant.
    pub fn ty(&self) -> Option<Ty<'tcx>> {
        match self {
            Term::Ty(ty) => Some(*ty),
            Term::Const(_) => None,
        }
    }
    /// Returns the constant, or `None` if this term is a type.
    pub fn ct(&self) -> Option<Const<'tcx>> {
        match self {
            Term::Const(c) => Some(*c),
            Term::Ty(_) => None,
        }
    }
}
/// This kind of predicate has no *direct* correspondent in the
/// syntax, but it roughly corresponds to the syntactic forms:
///
/// 1. `T: TraitRef<..., Item = Type>`
/// 2. `<T as TraitRef<...>>::Item == Type` (NYI)
///
/// In particular, form #1 is "desugared" to the combination of a
/// normal trait predicate (`T: TraitRef<...>`) and one of these
/// predicates. Form #2 is a broader form in that it also permits
/// equality between arbitrary types. Processing an instance of
/// Form #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS.
#[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable, TypeFoldable)]
pub struct ProjectionPredicate<'tcx> {
    /// The projected associated type (`<T as TraitRef<...>>::Item`).
    pub projection_ty: ProjectionTy<'tcx>,
    /// The term (type or const) the projection must equal.
    pub term: Term<'tcx>,
}
pub type PolyProjectionPredicate<'tcx> = Binder<'tcx, ProjectionPredicate<'tcx>>;
impl<'tcx> PolyProjectionPredicate<'tcx> {
    /// Returns the `DefId` of the trait of the associated item being projected.
    #[inline]
    pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId {
        self.skip_binder().projection_ty.trait_def_id(tcx)
    }

    /// Get the [PolyTraitRef] required for this projection to be well formed.
    /// Note that for generic associated types the predicates of the associated
    /// type also need to be checked.
    #[inline]
    pub fn required_poly_trait_ref(&self, tcx: TyCtxt<'tcx>) -> PolyTraitRef<'tcx> {
        // Unlike with `TraitRef::to_poly_trait_ref()`, the inner trait ref is
        // permitted to have escaping regions: both `self` and the return value
        // carry a `Binder`, so the number of binding levels is preserved.
        self.map_bound(|pred| pred.projection_ty.trait_ref(tcx))
    }

    /// The right-hand side of the projection, still inside the binder.
    pub fn term(&self) -> Binder<'tcx, Term<'tcx>> {
        self.map_bound(|pred| pred.term)
    }

    /// The `DefId` of the `TraitItem` for the associated type.
    ///
    /// Note that this is not the `DefId` of the `TraitRef` containing this
    /// associated type, which is in `tcx.associated_item(projection_def_id()).container`.
    pub fn projection_def_id(&self) -> DefId {
        // Ok to skip binder since trait `DefId` does not care about regions.
        self.skip_binder().projection_ty.item_def_id
    }
}
/// Conversion into a [`PolyTraitRef`].
pub trait ToPolyTraitRef<'tcx> {
    fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>;
}
impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> {
    /// Keeps only the trait ref, dropping constness and polarity.
    fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> {
        self.map_bound_ref(|pred| pred.trait_ref)
    }
}
/// Conversion into an interned [`Predicate`].
pub trait ToPredicate<'tcx> {
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx>;
}
impl<'tcx> ToPredicate<'tcx> for Binder<'tcx, PredicateKind<'tcx>> {
    /// A bound predicate kind is interned directly.
    #[inline(always)]
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
        tcx.mk_predicate(self)
    }
}
impl<'tcx> ToPredicate<'tcx> for PolyTraitPredicate<'tcx> {
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
        tcx.mk_predicate(self.map_bound(PredicateKind::Trait))
    }
}
impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
        tcx.mk_predicate(self.map_bound(PredicateKind::RegionOutlives))
    }
}
impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
        tcx.mk_predicate(self.map_bound(PredicateKind::TypeOutlives))
    }
}
impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
    fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
        tcx.mk_predicate(self.map_bound(PredicateKind::Projection))
    }
}
impl<'tcx> Predicate<'tcx> {
    /// Returns the trait predicate (rebound) if this is a `Trait` predicate,
    /// `None` otherwise. The match is deliberately exhaustive so that adding a
    /// new `PredicateKind` variant forces this method to be revisited.
    pub fn to_opt_poly_trait_pred(self) -> Option<PolyTraitPredicate<'tcx>> {
        let predicate = self.kind();
        match predicate.skip_binder() {
            PredicateKind::Trait(t) => Some(predicate.rebind(t)),
            PredicateKind::Projection(..)
            | PredicateKind::Subtype(..)
            | PredicateKind::Coerce(..)
            | PredicateKind::RegionOutlives(..)
            | PredicateKind::WellFormed(..)
            | PredicateKind::ObjectSafe(..)
            | PredicateKind::ClosureKind(..)
            | PredicateKind::TypeOutlives(..)
            | PredicateKind::ConstEvaluatable(..)
            | PredicateKind::ConstEquate(..)
            | PredicateKind::TypeWellFormedFromEnv(..) => None,
        }
    }
    /// Returns the `T: 'a` predicate (rebound) if this is a `TypeOutlives`
    /// predicate, `None` otherwise. Exhaustive for the same reason as above.
    pub fn to_opt_type_outlives(self) -> Option<PolyTypeOutlivesPredicate<'tcx>> {
        let predicate = self.kind();
        match predicate.skip_binder() {
            PredicateKind::TypeOutlives(data) => Some(predicate.rebind(data)),
            PredicateKind::Trait(..)
            | PredicateKind::Projection(..)
            | PredicateKind::Subtype(..)
            | PredicateKind::Coerce(..)
            | PredicateKind::RegionOutlives(..)
            | PredicateKind::WellFormed(..)
            | PredicateKind::ObjectSafe(..)
            | PredicateKind::ClosureKind(..)
            | PredicateKind::ConstEvaluatable(..)
            | PredicateKind::ConstEquate(..)
            | PredicateKind::TypeWellFormedFromEnv(..) => None,
        }
    }
}
/// Represents the bounds declared on a particular set of type
/// parameters. Should eventually be generalized into a flag list of
/// where-clauses. You can obtain an `InstantiatedPredicates` list from a
/// `GenericPredicates` by using the `instantiate` method. Note that this method
/// reflects an important semantic invariant of `InstantiatedPredicates`: while
/// the `GenericPredicates` are expressed in terms of the bound type
/// parameters of the impl/trait/whatever, an `InstantiatedPredicates` instance
/// represented a set of bounds for some particular instantiation,
/// meaning that the generic parameters have been substituted with
/// their values.
///
/// Example:
///
///     struct Foo<T, U: Bar<T>> { ... }
///
/// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like
/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
/// like `Foo<isize,usize>`, then the `InstantiatedPredicates` would be `[[],
/// [usize:Bar<isize>]]`.
#[derive(Clone, Debug, TypeFoldable)]
pub struct InstantiatedPredicates<'tcx> {
    /// The substituted predicates, parallel to `spans`.
    pub predicates: Vec<Predicate<'tcx>>,
    /// Source span of each predicate, parallel to `predicates`.
    pub spans: Vec<Span>,
}
impl<'tcx> InstantiatedPredicates<'tcx> {
    /// An `InstantiatedPredicates` with no predicates at all.
    pub fn empty() -> InstantiatedPredicates<'tcx> {
        InstantiatedPredicates { predicates: Vec::new(), spans: Vec::new() }
    }

    /// Returns `true` if no predicates are present.
    pub fn is_empty(&self) -> bool {
        self.predicates.is_empty()
    }
}
/// Identifies a particular instantiation of an opaque type: its definition
/// together with the substitutions applied to it.
#[derive(
    Copy,
    Clone,
    Debug,
    PartialEq,
    Eq,
    HashStable,
    TyEncodable,
    TyDecodable,
    TypeFoldable,
    Lift
)]
pub struct OpaqueTypeKey<'tcx> {
    pub def_id: DefId,
    pub substs: SubstsRef<'tcx>,
}
/// The hidden (concrete) type recorded for one defining use of an opaque type.
#[derive(Copy, Clone, Debug, TypeFoldable, HashStable, TyEncodable, TyDecodable)]
pub struct OpaqueHiddenType<'tcx> {
    /// The span of this particular definition of the opaque type. So
    /// for example:
    ///
    /// ```ignore (incomplete snippet)
    /// type Foo = impl Baz;
    /// fn bar() -> Foo {
    /// //          ^^^ This is the span we are looking for!
    /// }
    /// ```
    ///
    /// In cases where the fn returns `(impl Trait, impl Trait)` or
    /// other such combinations, the result is currently
    /// over-approximated, but better than nothing.
    pub span: Span,
    /// The type variable that represents the value of the opaque type
    /// that we require. In other words, after we compile this function,
    /// we will be created a constraint like:
    ///
    ///     Foo<'a, T> = ?C
    ///
    /// where `?C` is the value of this type variable. =) It may
    /// naturally refer to the type and lifetime parameters in scope
    /// in this function, though ultimately it should only reference
    /// those that are arguments to `Foo` in the constraint above. (In
    /// other words, `?C` should not include `'b`, even though it's a
    /// lifetime parameter on `foo`.)
    pub ty: Ty<'tcx>,
}
impl<'tcx> OpaqueHiddenType<'tcx> {
    /// Emits an error saying that `other` supplies a concrete type for the
    /// opaque type that differs from the one `self` recorded earlier.
    pub fn report_mismatch(&self, other: &Self, tcx: TyCtxt<'tcx>) {
        // Found different concrete types for the opaque type.
        let mut diag = tcx.sess.struct_span_err(
            other.span,
            "concrete type differs from previous defining opaque type use",
        );
        diag.span_label(other.span, format!("expected `{}`, got `{}`", self.ty, other.ty));
        if self.span == other.span {
            diag.span_label(
                self.span,
                "this expression supplies two conflicting concrete types for the same opaque type",
            );
        } else {
            diag.span_note(self.span, "previous use here");
        }
        diag.emit();
    }
}
rustc_index::newtype_index! {
    /// "Universes" are used during type- and trait-checking in the
    /// presence of `for<..>` binders to control what sets of names are
    /// visible. Universes are arranged into a tree: the root universe
    /// contains names that are always visible. Each child then adds a new
    /// set of names that are visible, in addition to those of its parent.
    /// We say that the child universe "extends" the parent universe with
    /// new names.
    ///
    /// To make this more concrete, consider this program:
    ///
    /// ```
    /// struct Foo { }
    /// fn bar<T>(x: T) {
    ///     let y: for<'a> fn(&'a u8, Foo) = ...;
    /// }
    /// ```
    ///
    /// The struct name `Foo` is in the root universe U0. But the type
    /// parameter `T`, introduced on `bar`, is in an extended universe U1
    /// -- i.e., within `bar`, we can name both `T` and `Foo`, but outside
    /// of `bar`, we cannot name `T`. Then, within the type of `y`, the
    /// region `'a` is in a universe U2 that extends U1, because we can
    /// name it inside the fn type but not outside.
    ///
    /// Universes are used to do type- and trait-checking around these
    /// "forall" binders (also called **universal quantification**). The
    /// idea is that when, in the body of `bar`, we refer to `T` as a
    /// type, we aren't referring to any type in particular, but rather a
    /// kind of "fresh" type that is distinct from all other types we have
    /// actually declared. This is called a **placeholder** type, and we
    /// use universes to talk about this. In other words, a type name in
    /// universe 0 always corresponds to some "ground" type that the user
    /// declared, but a type name in a non-zero universe is a placeholder
    /// type -- an idealized representative of "types in general" that we
    /// use for checking generic functions. See also `impl UniverseIndex`
    /// below for the ordering operations defined on universes.
    pub struct UniverseIndex {
        derive [HashStable]
        DEBUG_FORMAT = "U{}",
    }
}
impl UniverseIndex {
    /// The root universe: names visible everywhere.
    pub const ROOT: UniverseIndex = UniverseIndex::from_u32(0);

    /// Returns the "next" universe index in order -- this new index
    /// is considered to extend all previous universes. This
    /// corresponds to entering a `forall` quantifier. So, for
    /// example, suppose we have this type in universe `U`:
    ///
    /// ```
    /// for<'a> fn(&'a u32)
    /// ```
    ///
    /// Once we "enter" into this `for<'a>` quantifier, we are in a
    /// new universe that extends `U` -- in this new universe, we can
    /// name the region `'a`, but that region was not nameable from
    /// `U` because it was not in scope there.
    pub fn next_universe(self) -> UniverseIndex {
        let next = self.private.checked_add(1).unwrap();
        UniverseIndex::from_u32(next)
    }

    /// Returns `true` if `self` can name a name from `other` -- in other words,
    /// if the set of names in `self` is a superset of those in
    /// `other` (`self >= other`).
    pub fn can_name(self, other: UniverseIndex) -> bool {
        self.private >= other.private
    }

    /// Returns `true` if `self` cannot name some names from `other` -- in other
    /// words, if the set of names in `self` is a strict subset of
    /// those in `other` (`self < other`).
    pub fn cannot_name(self, other: UniverseIndex) -> bool {
        !self.can_name(other)
    }
}
/// The "placeholder index" fully defines a placeholder region, type, or const. Placeholders are
/// identified by both a universe, as well as a name residing within that universe. Distinct bound
/// regions/types/consts within the same universe simply have an unknown relationship to one
/// another.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable, PartialOrd, Ord)]
pub struct Placeholder<T> {
    /// The universe the placeholder lives in.
    pub universe: UniverseIndex,
    /// The name distinguishing this placeholder within its universe.
    pub name: T,
}
impl<'a, T> HashStable<StableHashingContext<'a>> for Placeholder<T>
where
    T: HashStable<StableHashingContext<'a>>,
{
    /// Hashes the universe first, then the name; both fields participate.
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        self.universe.hash_stable(hcx, hasher);
        self.name.hash_stable(hcx, hasher);
    }
}
pub type PlaceholderRegion = Placeholder<BoundRegionKind>;
pub type PlaceholderType = Placeholder<BoundVar>;
/// A bound const variable together with its type.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable)]
#[derive(TyEncodable, TyDecodable, PartialOrd, Ord)]
pub struct BoundConst<'tcx> {
    pub var: BoundVar,
    pub ty: Ty<'tcx>,
}
pub type PlaceholderConst<'tcx> = Placeholder<BoundConst<'tcx>>;
/// A `DefId` which, in case it is a const argument, is potentially bundled with
/// the `DefId` of the generic parameter it instantiates.
///
/// This is used to avoid calls to `type_of` for const arguments during typeck
/// which cause cycle errors.
///
/// ```rust
/// struct A;
/// impl A {
///     fn foo<const N: usize>(&self) -> [u8; N] { [0; N] }
///     //           ^ const parameter
/// }
/// struct B;
/// impl B {
///     fn foo<const M: u8>(&self) -> usize { 42 }
///     //           ^ const parameter
/// }
///
/// fn main() {
///     let a = A;
///     let _b = a.foo::<{ 3 + 7 }>();
///     //               ^^^^^^^^^ const argument
/// }
/// ```
///
/// Let's look at the call `a.foo::<{ 3 + 7 }>()` here. We do not know
/// which `foo` is used until we know the type of `a`.
///
/// We only know the type of `a` once we are inside of `typeck(main)`.
/// We also end up normalizing the type of `_b` during `typeck(main)` which
/// requires us to evaluate the const argument.
///
/// To evaluate that const argument we need to know its type,
/// which we would get using `type_of(const_arg)`. This requires us to
/// resolve `foo` as it can be either `usize` or `u8` in this example.
/// However, resolving `foo` once again requires `typeck(main)` to get the type of `a`,
/// which results in a cycle.
///
/// In short we must not call `type_of(const_arg)` during `typeck(main)`.
///
/// When first creating the `ty::Const` of the const argument inside of `typeck` we have
/// already resolved `foo` so we know which const parameter this argument instantiates.
/// This means that we also know the expected result of `type_of(const_arg)` even if we
/// aren't allowed to call that query: it is equal to `type_of(const_param)` which is
/// trivial to compute.
///
/// If we now want to use that constant in a place which potentially needs its type
/// we also pass the type of its `const_param`. This is the point of `WithOptConstParam`,
/// except that instead of a `Ty` we bundle the `DefId` of the const parameter.
/// Meaning that we need to use `type_of(const_param_did)` if `const_param_did` is `Some`
/// to get the type of `did`.
#[derive(Copy, Clone, Debug, TypeFoldable, Lift, TyEncodable, TyDecodable)]
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[derive(Hash, HashStable)]
pub struct WithOptConstParam<T> {
    /// The `DefId` of the definition itself.
    pub did: T,
    /// The `DefId` of the corresponding generic parameter in case `did` is
    /// a const argument.
    ///
    /// Note that even if `did` is a const argument, this may still be `None`.
    /// All queries taking `WithOptConstParam` start by calling `tcx.opt_const_param_of(def.did)`
    /// to potentially update `param_did` in the case it is `None`.
    pub const_param_did: Option<DefId>,
}
impl<T> WithOptConstParam<T> {
    /// Creates a new `WithOptConstParam` setting `const_param_did` to `None`.
    #[inline(always)]
    pub fn unknown(did: T) -> WithOptConstParam<T> {
        let const_param_did = None;
        WithOptConstParam { did, const_param_did }
    }
}
impl WithOptConstParam<LocalDefId> {
    /// Returns `Some((did, param_did))` if `def_id` is a const argument,
    /// `None` otherwise.
    #[inline(always)]
    pub fn try_lookup(did: LocalDefId, tcx: TyCtxt<'_>) -> Option<(LocalDefId, DefId)> {
        let param_did = tcx.opt_const_param_of(did)?;
        Some((did, param_did))
    }
    /// In case `self` is unknown but `self.did` is a const argument, this returns
    /// a `WithOptConstParam` with the correct `const_param_did`.
    #[inline(always)]
    pub fn try_upgrade(self, tcx: TyCtxt<'_>) -> Option<WithOptConstParam<LocalDefId>> {
        if self.const_param_did.is_some() {
            // Already upgraded; nothing to do.
            return None;
        }
        match tcx.opt_const_param_of(self.did) {
            const_param_did @ Some(_) => Some(WithOptConstParam { did: self.did, const_param_did }),
            None => None,
        }
    }
    /// Widens `did` into a cross-crate `DefId`, keeping `const_param_did`.
    pub fn to_global(self) -> WithOptConstParam<DefId> {
        WithOptConstParam { did: self.did.to_def_id(), const_param_did: self.const_param_did }
    }
    /// The `DefId` whose `type_of` gives this definition's type: the generic
    /// parameter's id for const arguments, otherwise `did` itself.
    pub fn def_id_for_type_of(self) -> DefId {
        match self.const_param_did {
            Some(did) => did,
            None => self.did.to_def_id(),
        }
    }
}
impl WithOptConstParam<DefId> {
    /// Narrows `did` to a `LocalDefId`, preserving `const_param_did`.
    pub fn as_local(self) -> Option<WithOptConstParam<LocalDefId>> {
        let did = self.did.as_local()?;
        Some(WithOptConstParam { did, const_param_did: self.const_param_did })
    }
    /// Returns `(did, param_did)` when this is a local const argument.
    pub fn as_const_arg(self) -> Option<(LocalDefId, DefId)> {
        let param_did = self.const_param_did?;
        let did = self.did.as_local()?;
        Some((did, param_did))
    }
    /// Whether `did` belongs to the crate being compiled.
    pub fn is_local(self) -> bool {
        self.did.is_local()
    }
    /// The `DefId` whose `type_of` gives this definition's type.
    pub fn def_id_for_type_of(self) -> DefId {
        match self.const_param_did {
            Some(param_did) => param_did,
            None => self.did,
        }
    }
}
/// When type checking, we use the `ParamEnv` to track
/// details about the set of where-clauses that are in scope at this
/// particular point.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct ParamEnv<'tcx> {
    /// This packs both caller bounds and the reveal enum into one pointer.
    ///
    /// Caller bounds are `Obligation`s that the caller must satisfy. This is
    /// basically the set of bounds on the in-scope type parameters, translated
    /// into `Obligation`s, and elaborated and normalized.
    ///
    /// Use the `caller_bounds()` method to access.
    ///
    /// Typically, this is `Reveal::UserFacing`, but during codegen we
    /// want `Reveal::All`.
    ///
    /// Note: This is packed, use the reveal() method to access it.
    /// The tag bits hold a `ParamTag` (reveal mode + constness).
    packed: CopyTaggedPtr<&'tcx List<Predicate<'tcx>>, ParamTag, true>,
}
/// The tag packed into the low bits of `ParamEnv::packed`; see the
/// `Tag` impl below for the 2-bit encoding.
#[derive(Copy, Clone)]
struct ParamTag {
    reveal: traits::Reveal,
    constness: hir::Constness,
}
// SAFETY: `into_usize` and `from_usize` must be exact inverses, and every
// encoded value must fit in `BITS` bits. The encoding uses bit 0 for
// `Reveal::All` and bit 1 for `Constness::Const`.
unsafe impl rustc_data_structures::tagged_ptr::Tag for ParamTag {
    const BITS: usize = 2;
    #[inline]
    fn into_usize(self) -> usize {
        match self {
            Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::NotConst } => 0,
            Self { reveal: traits::Reveal::All, constness: hir::Constness::NotConst } => 1,
            Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::Const } => 2,
            Self { reveal: traits::Reveal::All, constness: hir::Constness::Const } => 3,
        }
    }
    #[inline]
    unsafe fn from_usize(ptr: usize) -> Self {
        match ptr {
            0 => Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::NotConst },
            1 => Self { reveal: traits::Reveal::All, constness: hir::Constness::NotConst },
            2 => Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::Const },
            3 => Self { reveal: traits::Reveal::All, constness: hir::Constness::Const },
            // SAFETY: the caller guarantees `ptr` was produced by `into_usize`,
            // so only the four values above are possible.
            _ => std::hint::unreachable_unchecked(),
        }
    }
}
impl<'tcx> fmt::Debug for ParamEnv<'tcx> {
    /// Formats the unpacked components (bounds, reveal, constness) as a struct.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("ParamEnv");
        dbg.field("caller_bounds", &self.caller_bounds());
        dbg.field("reveal", &self.reveal());
        dbg.field("constness", &self.constness());
        dbg.finish()
    }
}
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ParamEnv<'tcx> {
    /// Hashes the unpacked components in a fixed order: bounds, reveal,
    /// constness.
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        let bounds = self.caller_bounds();
        let reveal = self.reveal();
        let constness = self.constness();
        bounds.hash_stable(hcx, hasher);
        reveal.hash_stable(hcx, hasher);
        constness.hash_stable(hcx, hasher);
    }
}
impl<'tcx> TypeFoldable<'tcx> for ParamEnv<'tcx> {
    /// Folds the unpacked components (bounds, then reveal, then constness,
    /// matching the visit order below) and repacks them into a new env.
    fn try_super_fold_with<F: ty::fold::FallibleTypeFolder<'tcx>>(
        self,
        folder: &mut F,
    ) -> Result<Self, F::Error> {
        let caller_bounds = self.caller_bounds().try_fold_with(folder)?;
        let reveal = self.reveal().try_fold_with(folder)?;
        let constness = self.constness().try_fold_with(folder)?;
        Ok(ParamEnv::new(caller_bounds, reveal, constness))
    }
    fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> ControlFlow<V::BreakTy> {
        self.caller_bounds().visit_with(visitor)?;
        self.reveal().visit_with(visitor)?;
        self.constness().visit_with(visitor)
    }
}
impl<'tcx> ParamEnv<'tcx> {
    /// Construct a trait environment suitable for contexts where
    /// there are no where-clauses in scope. Hidden types (like `impl
    /// Trait`) are left hidden, so this is suitable for ordinary
    /// type-checking.
    #[inline]
    pub fn empty() -> Self {
        Self::new(List::empty(), Reveal::UserFacing, hir::Constness::NotConst)
    }
    /// The where-clauses the caller must satisfy, read from the packed pointer.
    #[inline]
    pub fn caller_bounds(self) -> &'tcx List<Predicate<'tcx>> {
        self.packed.pointer()
    }
    /// The `Reveal` mode, read from the packed tag.
    #[inline]
    pub fn reveal(self) -> traits::Reveal {
        self.packed.tag().reveal
    }
    /// The constness, read from the packed tag.
    #[inline]
    pub fn constness(self) -> hir::Constness {
        self.packed.tag().constness
    }
    /// Whether this environment is a const context.
    #[inline]
    pub fn is_const(self) -> bool {
        self.packed.tag().constness == hir::Constness::Const
    }
    /// Construct a trait environment with no where-clauses in scope
    /// where the values of all `impl Trait` and other hidden types
    /// are revealed. This is suitable for monomorphized, post-typeck
    /// environments like codegen or doing optimizations.
    ///
    /// N.B., if you want to have predicates in scope, use `ParamEnv::new`,
    /// or invoke `param_env.with_reveal_all()`.
    #[inline]
    pub fn reveal_all() -> Self {
        Self::new(List::empty(), Reveal::All, hir::Constness::NotConst)
    }
    /// Construct a trait environment with the given set of predicates.
    #[inline]
    pub fn new(
        caller_bounds: &'tcx List<Predicate<'tcx>>,
        reveal: Reveal,
        constness: hir::Constness,
    ) -> Self {
        ty::ParamEnv { packed: CopyTaggedPtr::new(caller_bounds, ParamTag { reveal, constness }) }
    }
    /// Returns this environment with `Reveal::UserFacing`, keeping the caller
    /// bounds and constness unchanged.
    pub fn with_user_facing(mut self) -> Self {
        self.packed.set_tag(ParamTag { reveal: Reveal::UserFacing, ..self.packed.tag() });
        self
    }
    /// Returns this environment with the given constness, keeping the caller
    /// bounds and reveal mode unchanged.
    #[inline]
    pub fn with_constness(mut self, constness: hir::Constness) -> Self {
        self.packed.set_tag(ParamTag { constness, ..self.packed.tag() });
        self
    }
    /// Returns this environment marked as a const context.
    #[inline]
    pub fn with_const(mut self) -> Self {
        self.packed.set_tag(ParamTag { constness: hir::Constness::Const, ..self.packed.tag() });
        self
    }
    /// Returns this environment marked as a non-const context.
    #[inline]
    pub fn without_const(mut self) -> Self {
        self.packed.set_tag(ParamTag { constness: hir::Constness::NotConst, ..self.packed.tag() });
        self
    }
    /// Combines `constness` with this environment's constness in place.
    #[inline]
    pub fn remap_constness_with(&mut self, mut constness: ty::BoundConstness) {
        *self = self.with_constness(constness.and(self.constness()))
    }
    /// Returns a new parameter environment with the same clauses, but
    /// which "reveals" the true results of projections in all cases
    /// (even for associated types that are specializable). This is
    /// the desired behavior during codegen and certain other special
    /// contexts; normally though we want to use `Reveal::UserFacing`,
    /// which is the default.
    /// All opaque types in the caller_bounds of the `ParamEnv`
    /// will be normalized to their underlying types.
    /// See PR #65989 and issue #65918 for more details
    pub fn with_reveal_all_normalized(self, tcx: TyCtxt<'tcx>) -> Self {
        // Already revealing everything: normalization has happened, so this
        // is a no-op.
        if self.packed.tag().reveal == traits::Reveal::All {
            return self;
        }
        ParamEnv::new(
            tcx.normalize_opaque_types(self.caller_bounds()),
            Reveal::All,
            self.constness(),
        )
    }
    /// Returns this same environment but with no caller bounds.
    #[inline]
    pub fn without_caller_bounds(self) -> Self {
        Self::new(List::empty(), self.reveal(), self.constness())
    }
    /// Creates a suitable environment in which to perform trait
    /// queries on the given value. When type-checking, this is simply
    /// the pair of the environment plus value. But when reveal is set to
    /// All, then if `value` does not reference any type parameters, we will
    /// pair it with the empty environment. This improves caching and is generally
    /// invisible.
    ///
    /// N.B., we preserve the environment when type-checking because it
    /// is possible for the user to have wacky where-clauses like
    /// `where Box<u32>: Copy`, which are clearly never
    /// satisfiable. We generally want to behave as if they were true,
    /// although the surrounding function is never reachable.
    pub fn and<T: TypeFoldable<'tcx>>(self, value: T) -> ParamEnvAnd<'tcx, T> {
        match self.reveal() {
            Reveal::UserFacing => ParamEnvAnd { param_env: self, value },
            Reveal::All => {
                if value.is_global() {
                    ParamEnvAnd { param_env: self.without_caller_bounds(), value }
                } else {
                    ParamEnvAnd { param_env: self, value }
                }
            }
        }
    }
}
// FIXME(ecstaticmorse): Audit all occurrences of `without_const().to_predicate(tcx)` to ensure that
// the constness of trait bounds is being propagated correctly.
impl<'tcx> PolyTraitRef<'tcx> {
    /// Wraps this trait ref into a `TraitPredicate` with the given constness
    /// and positive polarity, mapping under the binder.
    #[inline]
    pub fn with_constness(self, constness: BoundConstness) -> PolyTraitPredicate<'tcx> {
        self.map_bound(|trait_ref| ty::TraitPredicate {
            trait_ref,
            constness,
            polarity: ty::ImplPolarity::Positive,
        })
    }
    /// Shorthand for `with_constness(BoundConstness::NotConst)`.
    #[inline]
    pub fn without_const(self) -> PolyTraitPredicate<'tcx> {
        self.with_constness(BoundConstness::NotConst)
    }
}
/// A `ParamEnv` paired with the value a trait query is asked about.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TypeFoldable)]
pub struct ParamEnvAnd<'tcx, T> {
    pub param_env: ParamEnv<'tcx>,
    pub value: T,
}
impl<'tcx, T> ParamEnvAnd<'tcx, T> {
pub fn into_parts(self) -> (ParamEnv<'tcx>, T) {
(self.param_env, self.value)
}
#[inline]
pub fn without_const(mut self) -> Self {
self.param_env = self.param_env.without_const();
self
}
}
impl<'a, 'tcx, T> HashStable<StableHashingContext<'a>> for ParamEnvAnd<'tcx, T>
where
    T: HashStable<StableHashingContext<'a>>,
{
    /// Hashes the environment, then the value.
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        self.param_env.hash_stable(hcx, hasher);
        self.value.hash_stable(hcx, hasher);
    }
}
/// The resolved `Drop` implementation of a type.
#[derive(Copy, Clone, Debug, HashStable)]
pub struct Destructor {
    /// The `DefId` of the destructor method
    pub did: DefId,
    /// The constness of the destructor method
    pub constness: hir::Constness,
}
bitflags! {
    /// Flags attached to a `VariantDef`.
    #[derive(HashStable, TyEncodable, TyDecodable)]
    pub struct VariantFlags: u32 {
        const NO_VARIANT_FLAGS        = 0;
        /// Indicates whether the field list of this variant is `#[non_exhaustive]`.
        const IS_FIELD_LIST_NON_EXHAUSTIVE = 1 << 0;
        /// Indicates whether this variant was obtained as part of recovering from
        /// a syntactic error. May be incomplete or bogus.
        const IS_RECOVERED = 1 << 1;
    }
}
/// Definition of a variant -- a struct's fields or an enum variant.
#[derive(Debug, HashStable, TyEncodable, TyDecodable)]
pub struct VariantDef {
    /// `DefId` that identifies the variant itself.
    /// If this variant belongs to a struct or union, then this is a copy of its `DefId`.
    pub def_id: DefId,
    /// `DefId` that identifies the variant's constructor.
    /// If this variant is a struct variant, then this is `None`.
    pub ctor_def_id: Option<DefId>,
    /// Variant or struct name.
    pub name: Symbol,
    /// Discriminant of this variant.
    pub discr: VariantDiscr,
    /// Fields of this variant.
    pub fields: Vec<FieldDef>,
    /// Type of constructor of variant.
    pub ctor_kind: CtorKind,
    /// Flags of the variant (e.g. is field list non-exhaustive)?
    flags: VariantFlags,
}
impl VariantDef {
    /// Creates a new `VariantDef`.
    ///
    /// `variant_did` is the `DefId` that identifies the enum variant (if this `VariantDef`
    /// represents an enum variant).
    ///
    /// `ctor_did` is the `DefId` that identifies the constructor of unit or
    /// tuple-variants/structs. If this is a `struct`-variant then this should be `None`.
    ///
    /// `parent_did` is the `DefId` of the `AdtDef` representing the enum or struct that
    /// owns this variant. It is used for checking if a struct has `#[non_exhaustive]` w/out having
    /// to go through the redirect of checking the ctor's attributes - but compiling a small crate
    /// requires loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any
    /// built-in trait), and we do not want to load attributes twice.
    ///
    /// If someone speeds up attribute loading to not be a performance concern, they can
    /// remove this hack and use the constructor `DefId` everywhere.
    pub fn new(
        name: Symbol,
        variant_did: Option<DefId>,
        ctor_def_id: Option<DefId>,
        discr: VariantDiscr,
        fields: Vec<FieldDef>,
        ctor_kind: CtorKind,
        adt_kind: AdtKind,
        parent_did: DefId,
        recovered: bool,
        is_field_list_non_exhaustive: bool,
    ) -> Self {
        debug!(
            "VariantDef::new(name = {:?}, variant_did = {:?}, ctor_def_id = {:?}, discr = {:?},
             fields = {:?}, ctor_kind = {:?}, adt_kind = {:?}, parent_did = {:?})",
            name, variant_did, ctor_def_id, discr, fields, ctor_kind, adt_kind, parent_did,
        );
        let mut flags = VariantFlags::NO_VARIANT_FLAGS;
        if is_field_list_non_exhaustive {
            flags |= VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE;
        }
        if recovered {
            flags |= VariantFlags::IS_RECOVERED;
        }
        VariantDef {
            // Structs and unions share their parent's `DefId`.
            def_id: variant_did.unwrap_or(parent_did),
            ctor_def_id,
            name,
            discr,
            fields,
            ctor_kind,
            flags,
        }
    }
    /// Is this field list non-exhaustive?
    #[inline]
    pub fn is_field_list_non_exhaustive(&self) -> bool {
        self.flags.intersects(VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE)
    }
    /// Was this variant obtained as part of recovering from a syntactic error?
    #[inline]
    pub fn is_recovered(&self) -> bool {
        self.flags.intersects(VariantFlags::IS_RECOVERED)
    }
    /// Computes the `Ident` of this variant by looking up the `Span`
    pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident {
        Ident::new(self.name, tcx.def_ident_span(self.def_id).unwrap())
    }
}
/// How an enum variant's discriminant value is determined.
#[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)]
pub enum VariantDiscr {
    /// Explicit value for this variant, i.e., `X = 123`.
    /// The `DefId` corresponds to the embedded constant.
    Explicit(DefId),
    /// The previous variant's discriminant plus one.
    /// For efficiency reasons, the distance from the
    /// last `Explicit` discriminant is being stored,
    /// or `0` for the first variant, if it has none.
    Relative(u32),
}
/// Definition of a single field of a struct, union, or enum variant.
#[derive(Debug, HashStable, TyEncodable, TyDecodable)]
pub struct FieldDef {
    /// `DefId` identifying the field itself.
    pub did: DefId,
    /// The field's name.
    pub name: Symbol,
    /// The field's visibility.
    pub vis: Visibility,
}
bitflags! {
    /// Flag component of `ReprOptions`, derived from `#[repr(..)]` attributes.
    #[derive(TyEncodable, TyDecodable, Default, HashStable)]
    pub struct ReprFlags: u8 {
        const IS_C               = 1 << 0;
        const IS_SIMD            = 1 << 1;
        const IS_TRANSPARENT     = 1 << 2;
        // Internal only for now. If true, don't reorder fields.
        const IS_LINEAR          = 1 << 3;
        // If true, don't expose any niche to type's context.
        const HIDE_NICHE         = 1 << 4;
        // If true, the type's layout can be randomized using
        // the seed stored in `ReprOptions.layout_seed`
        const RANDOMIZE_LAYOUT   = 1 << 5;
        // Any of these flags being set prevent field reordering optimisation.
        const IS_UNOPTIMISABLE   = ReprFlags::IS_C.bits
                                 | ReprFlags::IS_SIMD.bits
                                 | ReprFlags::IS_LINEAR.bits;
    }
}
/// Represents the repr options provided by the user.
#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Default, HashStable)]
pub struct ReprOptions {
    /// Explicit discriminant/integer type from `#[repr(<int>)]`, if any.
    pub int: Option<attr::IntType>,
    /// Minimum alignment from `#[repr(align(N))]`, if any.
    pub align: Option<Align>,
    /// Maximum packing from `#[repr(packed(N))]`, if any.
    pub pack: Option<Align>,
    pub flags: ReprFlags,
    /// The seed to be used for randomizing a type's layout
    ///
    /// Note: This could technically be a `[u8; 16]` (a `u128`) which would
    /// be the "most accurate" hash as it'd encompass the item and crate
    /// hash without loss, but it does pay the price of being larger.
    /// Everything's a tradeoff, a `u64` seed should be sufficient for our
    /// purposes (primarily `-Z randomize-layout`)
    pub field_shuffle_seed: u64,
}
impl ReprOptions {
    /// Computes the `ReprOptions` for item `did` from its `#[repr(..)]`
    /// attributes and the session's layout-randomization settings.
    pub fn new(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
        let mut flags = ReprFlags::empty();
        let mut size = None;
        let mut max_align: Option<Align> = None;
        let mut min_pack: Option<Align> = None;
        // Generate a deterministically-derived seed from the item's path hash
        // to allow for cross-crate compilation to actually work
        let mut field_shuffle_seed = tcx.def_path_hash(did).0.to_smaller_hash();
        // If the user defined a custom seed for layout randomization, xor the item's
        // path hash with the user defined seed; this allows determinism while
        // still allowing users to further randomize layout generation for e.g. fuzzing
        if let Some(user_seed) = tcx.sess.opts.debugging_opts.layout_seed {
            field_shuffle_seed ^= user_seed;
        }
        for attr in tcx.get_attrs(did).iter() {
            for r in attr::find_repr_attrs(&tcx.sess, attr) {
                flags.insert(match r {
                    attr::ReprC => ReprFlags::IS_C,
                    attr::ReprPacked(pack) => {
                        // `repr(packed(N))` may appear multiple times; keep the smallest.
                        let pack = Align::from_bytes(pack as u64).unwrap();
                        min_pack = Some(if let Some(min_pack) = min_pack {
                            min_pack.min(pack)
                        } else {
                            pack
                        });
                        ReprFlags::empty()
                    }
                    attr::ReprTransparent => ReprFlags::IS_TRANSPARENT,
                    attr::ReprNoNiche => ReprFlags::HIDE_NICHE,
                    attr::ReprSimd => ReprFlags::IS_SIMD,
                    attr::ReprInt(i) => {
                        size = Some(i);
                        ReprFlags::empty()
                    }
                    attr::ReprAlign(align) => {
                        // `repr(align(N))` may appear multiple times; keep the largest.
                        max_align = max_align.max(Some(Align::from_bytes(align as u64).unwrap()));
                        ReprFlags::empty()
                    }
                });
            }
        }
        // If `-Z randomize-layout` was enabled for the type definition then we can
        // consider performing layout randomization
        if tcx.sess.opts.debugging_opts.randomize_layout {
            flags.insert(ReprFlags::RANDOMIZE_LAYOUT);
        }
        // This is here instead of layout because the choice must make it into metadata.
        if !tcx.consider_optimizing(|| format!("Reorder fields of {:?}", tcx.def_path_str(did))) {
            flags.insert(ReprFlags::IS_LINEAR);
        }
        Self { int: size, align: max_align, pack: min_pack, flags, field_shuffle_seed }
    }
    /// Whether `#[repr(simd)]` was specified.
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }
    /// Whether `#[repr(C)]` was specified.
    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }
    /// Whether `#[repr(packed)]`/`#[repr(packed(N))]` was specified.
    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }
    /// Whether `#[repr(transparent)]` was specified.
    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }
    /// Whether field reordering is disabled (internal-only flag).
    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }
    /// Whether the type hides its niche from the enclosing context.
    #[inline]
    pub fn hide_niche(&self) -> bool {
        self.flags.contains(ReprFlags::HIDE_NICHE)
    }
    /// Returns the discriminant type, given these `repr` options.
    /// This must only be called on enums!
    pub fn discr_type(&self) -> attr::IntType {
        self.int.unwrap_or(attr::SignedInt(ast::IntTy::Isize))
    }
    /// Returns `true` if this `#[repr()]` should inhibit "smart enum
    /// layout" optimizations, such as representing `Foo<&T>` as a
    /// single pointer.
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }
    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
    /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
    pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
        if let Some(pack) = self.pack {
            if pack.bytes() == 1 {
                return true;
            }
        }
        self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
    }
    /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
    /// was enabled for its declaration crate
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering_opt()
            && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }
    /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
    pub fn inhibit_union_abi_opt(&self) -> bool {
        self.c()
    }
}
impl<'tcx> FieldDef {
    /// Returns the type of this field. The resulting type is not normalized. The `subst` is
    /// typically obtained via the second field of [`TyKind::Adt`].
    pub fn ty(&self, tcx: TyCtxt<'tcx>, subst: SubstsRef<'tcx>) -> Ty<'tcx> {
        let unsubstituted = tcx.type_of(self.did);
        unsubstituted.subst(tcx, subst)
    }
    /// Computes the `Ident` of this field by looking up its `Span`.
    pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident {
        let span = tcx.def_ident_span(self.did).unwrap();
        Ident::new(self.name, span)
    }
}
/// The attributes attached to an item, as stored in the crate metadata or HIR.
pub type Attributes<'tcx> = &'tcx [ast::Attribute];
/// Classifies why two impls are permitted to overlap.
#[derive(Debug, PartialEq, Eq)]
pub enum ImplOverlapKind {
    /// These impls are always allowed to overlap.
    Permitted {
        /// Whether or not the impl is permitted due to the trait being a `#[marker]` trait
        marker: bool,
    },
    /// These impls are allowed to overlap, but that raises
    /// an issue #33140 future-compatibility warning.
    ///
    /// Some background: in Rust 1.0, the trait-object types `Send + Sync` (today's
    /// `dyn Send + Sync`) and `Sync + Send` (now `dyn Sync + Send`) were different.
    ///
    /// The widely-used version 0.1.0 of the crate `traitobject` had accidentally relied
    /// on that difference, making what reduces to the following set of impls:
    ///
    /// ```
    /// trait Trait {}
    /// impl Trait for dyn Send + Sync {}
    /// impl Trait for dyn Sync + Send {}
    /// ```
    ///
    /// Obviously, once we made these types be identical, that code causes a coherence
    /// error and a fairly big headache for us. However, luckily for us, the trait
    /// `Trait` used in this case is basically a marker trait, and therefore having
    /// overlapping impls for it is sound.
    ///
    /// To handle this, we basically regard the trait as a marker trait, with an additional
    /// future-compatibility warning. To avoid accidentally "stabilizing" this feature,
    /// it has the following restrictions:
    ///
    /// 1. The trait must indeed be a marker-like trait (i.e., no items), and must be
    /// positive impls.
    /// 2. The trait-ref of both impls must be equal.
    /// 3. The trait-ref of both impls must be a trait object type consisting only of
    /// marker traits.
    /// 4. Neither of the impls can have any where-clauses.
    ///
    /// Once `traitobject` 0.1.0 is no longer an active concern, this hack can be removed.
    Issue33140,
}
impl<'tcx> TyCtxt<'tcx> {
    /// Returns the typeck results for `body`, looked up via the body's owner.
    pub fn typeck_body(self, body: hir::BodyId) -> &'tcx TypeckResults<'tcx> {
        self.typeck(self.hir().body_owner_def_id(body))
    }
    /// Iterates over the trait's methods that have a default (provided) body.
    pub fn provided_trait_methods(self, id: DefId) -> impl 'tcx + Iterator<Item = &'tcx AssocItem> {
        self.associated_items(id)
            .in_definition_order()
            .filter(|item| item.kind == AssocKind::Fn && item.defaultness.has_value())
    }
    /// Returns the name of `def_id`, if it has one. Constructors borrow their
    /// parent's name; opaque types only have a name in local HIR.
    fn opt_item_name(self, def_id: DefId) -> Option<Symbol> {
        if let Some(cnum) = def_id.as_crate_root() {
            Some(self.crate_name(cnum))
        } else {
            let def_key = self.def_key(def_id);
            match def_key.disambiguated_data.data {
                // The name of a constructor is that of its parent.
                rustc_hir::definitions::DefPathData::Ctor => self
                    .opt_item_name(DefId { krate: def_id.krate, index: def_key.parent.unwrap() }),
                // The name of opaque types only exists in HIR.
                rustc_hir::definitions::DefPathData::ImplTrait
                    if let Some(def_id) = def_id.as_local() =>
                    self.hir().opt_name(self.hir().local_def_id_to_hir_id(def_id)),
                _ => def_key.get_opt_name(),
            }
        }
    }
    /// Look up the name of a definition across crates. This does not look at HIR.
    ///
    /// When possible, this function should be used for cross-crate lookups over
    /// [`opt_item_name`] to avoid invalidating the incremental cache. If you
    /// need to handle items without a name, or HIR items that will not be
    /// serialized cross-crate, or if you need the span of the item, use
    /// [`opt_item_name`] instead.
    ///
    /// [`opt_item_name`]: Self::opt_item_name
    pub fn item_name(self, id: DefId) -> Symbol {
        // Look at cross-crate items first to avoid invalidating the incremental cache
        // unless we have to.
        self.opt_item_name(id).unwrap_or_else(|| {
            bug!("item_name: no name for {:?}", self.def_path(id));
        })
    }
    /// Look up the name and span of a definition.
    ///
    /// See [`item_name`][Self::item_name] for more information.
    pub fn opt_item_ident(self, def_id: DefId) -> Option<Ident> {
        let def = self.opt_item_name(def_id)?;
        let span = def_id
            .as_local()
            .and_then(|id| self.def_ident_span(id))
            .unwrap_or(rustc_span::DUMMY_SP);
        Some(Ident::new(def, span))
    }
    /// Returns the `AssocItem` for `def_id` if it is an associated const,
    /// function, or type.
    pub fn opt_associated_item(self, def_id: DefId) -> Option<&'tcx AssocItem> {
        if let DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy = self.def_kind(def_id) {
            Some(self.associated_item(def_id))
        } else {
            None
        }
    }
    /// Returns the index typeck recorded for the field expression `hir_id`;
    /// panics if no index was recorded.
    pub fn field_index(self, hir_id: hir::HirId, typeck_results: &TypeckResults<'_>) -> usize {
        typeck_results.field_indices().get(hir_id).cloned().expect("no index for a field")
    }
    /// Finds the position of the field named `ident` in `variant`, comparing
    /// names hygienically.
    pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option<usize> {
        variant
            .fields
            .iter()
            .position(|field| self.hygienic_eq(ident, field.ident(self), variant.def_id))
    }
    /// Returns `Some` if the two impls are allowed to overlap: `Permitted`
    /// when a reservation impl is involved, an impl references an error, or
    /// both are positive impls of a marker-like trait; `Issue33140` for the
    /// future-compat hack described on that variant. Returns `None` when
    /// overlap is an error.
    pub fn impls_are_allowed_to_overlap(
        self,
        def_id1: DefId,
        def_id2: DefId,
    ) -> Option<ImplOverlapKind> {
        // If either trait impl references an error, they're allowed to overlap,
        // as one of them essentially doesn't exist.
        if self.impl_trait_ref(def_id1).map_or(false, |tr| tr.references_error())
            || self.impl_trait_ref(def_id2).map_or(false, |tr| tr.references_error())
        {
            return Some(ImplOverlapKind::Permitted { marker: false });
        }
        match (self.impl_polarity(def_id1), self.impl_polarity(def_id2)) {
            (ImplPolarity::Reservation, _) | (_, ImplPolarity::Reservation) => {
                // `#[rustc_reservation_impl]` impls don't overlap with anything
                debug!(
                    "impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (reservations)",
                    def_id1, def_id2
                );
                return Some(ImplOverlapKind::Permitted { marker: false });
            }
            (ImplPolarity::Positive, ImplPolarity::Negative)
            | (ImplPolarity::Negative, ImplPolarity::Positive) => {
                // `impl AutoTrait for Type` + `impl !AutoTrait for Type`
                debug!(
                    "impls_are_allowed_to_overlap({:?}, {:?}) - None (differing polarities)",
                    def_id1, def_id2
                );
                return None;
            }
            (ImplPolarity::Positive, ImplPolarity::Positive)
            | (ImplPolarity::Negative, ImplPolarity::Negative) => {}
        };
        // Positive impls of a `#[marker]` trait may overlap freely.
        let is_marker_overlap = {
            let is_marker_impl = |def_id: DefId| -> bool {
                let trait_ref = self.impl_trait_ref(def_id);
                trait_ref.map_or(false, |tr| self.trait_def(tr.def_id).is_marker)
            };
            is_marker_impl(def_id1) && is_marker_impl(def_id2)
        };
        if is_marker_overlap {
            debug!(
                "impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (marker overlap)",
                def_id1, def_id2
            );
            Some(ImplOverlapKind::Permitted { marker: true })
        } else {
            // Check for the issue #33140 hack: both impls target the same
            // (pre-1.0-ambiguous) trait-object self type.
            if let Some(self_ty1) = self.issue33140_self_ty(def_id1) {
                if let Some(self_ty2) = self.issue33140_self_ty(def_id2) {
                    if self_ty1 == self_ty2 {
                        debug!(
                            "impls_are_allowed_to_overlap({:?}, {:?}) - issue #33140 HACK",
                            def_id1, def_id2
                        );
                        return Some(ImplOverlapKind::Issue33140);
                    } else {
                        debug!(
                            "impls_are_allowed_to_overlap({:?}, {:?}) - found {:?} != {:?}",
                            def_id1, def_id2, self_ty1, self_ty2
                        );
                    }
                }
            }
            debug!("impls_are_allowed_to_overlap({:?}, {:?}) = None", def_id1, def_id2);
            None
        }
    }
    /// Returns `ty::VariantDef` if `res` refers to a struct,
    /// or variant or their constructors, panics otherwise.
    pub fn expect_variant_res(self, res: Res) -> &'tcx VariantDef {
        match res {
            Res::Def(DefKind::Variant, did) => {
                // The variant's parent is the enum's `DefId`.
                let enum_did = self.parent(did).unwrap();
                self.adt_def(enum_did).variant_with_id(did)
            }
            Res::Def(DefKind::Struct | DefKind::Union, did) => self.adt_def(did).non_enum_variant(),
            Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_did) => {
                // Walk up twice: ctor -> variant -> enum.
                let variant_did = self.parent(variant_ctor_did).unwrap();
                let enum_did = self.parent(variant_did).unwrap();
                self.adt_def(enum_did).variant_with_ctor_id(variant_ctor_did)
            }
            Res::Def(DefKind::Ctor(CtorOf::Struct, ..), ctor_did) => {
                let struct_did = self.parent(ctor_did).expect("struct ctor has no parent");
                self.adt_def(struct_did).non_enum_variant()
            }
            _ => bug!("expect_variant_res used with unexpected res {:?}", res),
        }
    }
    /// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair.
    pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> {
        match instance {
            ty::InstanceDef::Item(def) => match self.def_kind(def.did) {
                // Const-evaluated definitions get the CTFE-suitable MIR.
                DefKind::Const
                | DefKind::Static(..)
                | DefKind::AssocConst
                | DefKind::Ctor(..)
                | DefKind::AnonConst
                | DefKind::InlineConst => self.mir_for_ctfe_opt_const_arg(def),
                // If the caller wants `mir_for_ctfe` of a function they should not be using
                // `instance_mir`, so we'll assume const fn also wants the optimized version.
                _ => {
                    assert_eq!(def.const_param_did, None);
                    self.optimized_mir(def.did)
                }
            },
            // All shim-like instances have their MIR synthesized by `mir_shims`.
            ty::InstanceDef::VtableShim(..)
            | ty::InstanceDef::ReifyShim(..)
            | ty::InstanceDef::Intrinsic(..)
            | ty::InstanceDef::FnPtrShim(..)
            | ty::InstanceDef::Virtual(..)
            | ty::InstanceDef::ClosureOnceShim { .. }
            | ty::InstanceDef::DropGlue(..)
            | ty::InstanceDef::CloneShim(..) => self.mir_shims(instance),
        }
    }
/// Gets the attributes of a definition.
pub fn get_attrs(self, did: DefId) -> Attributes<'tcx> {
if let Some(did) = did.as_local() {
self.hir().attrs(self.hir().local_def_id_to_hir_id(did))
} else {
self.item_attrs(did)
}
}
/// Determines whether an item is annotated with an attribute.
pub fn has_attr(self, did: DefId, attr: Symbol) -> bool {
self.sess.contains_name(&self.get_attrs(did), attr)
}
/// Returns `true` if this is an `auto trait`.
pub fn trait_is_auto(self, trait_def_id: DefId) -> bool {
self.trait_def(trait_def_id).has_auto_impl
}
/// Returns layout of a generator. Layout might be unavailable if the
/// generator is tainted by errors.
pub fn generator_layout(self, def_id: DefId) -> Option<&'tcx GeneratorLayout<'tcx>> {
self.optimized_mir(def_id).generator_layout()
}
/// Given the `DefId` of an impl, returns the `DefId` of the trait it implements.
/// If it implements no trait, returns `None`.
pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> {
self.impl_trait_ref(def_id).map(|tr| tr.def_id)
}
/// If the given `DefId` describes a method belonging to an impl, returns the
/// `DefId` of the impl that the method belongs to; otherwise, returns `None`.
pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> {
self.opt_associated_item(def_id).and_then(|trait_item| match trait_item.container {
TraitContainer(_) => None,
ImplContainer(def_id) => Some(def_id),
})
}
    /// If the given `DefId` belongs to a trait that was automatically derived, returns `true`.
    pub fn is_builtin_derive(self, def_id: DefId) -> bool {
        // Checks for the `#[automatically_derived]` marker attribute.
        self.has_attr(def_id, sym::automatically_derived)
    }
/// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err`
/// with the name of the crate containing the impl.
pub fn span_of_impl(self, impl_did: DefId) -> Result<Span, Symbol> {
if let Some(impl_did) = impl_did.as_local() {
Ok(self.def_span(impl_did))
} else {
Err(self.crate_name(impl_did.krate))
}
}
    /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with
    /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed
    /// definition's parent/scope to perform comparison.
    pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
        // We could use `Ident::eq` here, but we deliberately don't. The name
        // comparison fails frequently, and we want to avoid the expensive
        // `normalize_to_macros_2_0()` calls required for the span comparison whenever possible.
        // The `&&` below short-circuits: the cheap symbol comparison runs first.
        use_name.name == def_name.name
            && use_name
                .span
                .ctxt()
                .hygienic_eq(def_name.span.ctxt(), self.expn_that_defined(def_parent_def_id))
    }
    /// Adjusts `ident`'s span hygiene (macros-2.0 normalization) relative to
    /// the expansion that defined `scope`, returning the adjusted ident.
    pub fn adjust_ident(self, mut ident: Ident, scope: DefId) -> Ident {
        ident.span.normalize_to_macros_2_0_and_adjust(self.expn_that_defined(scope));
        ident
    }
    /// Like `adjust_ident`, but also computes the scope in which the adjusted
    /// name should be resolved, falling back to `block`'s parent module when
    /// the adjustment does not yield an expansion with a parent module.
    pub fn adjust_ident_and_get_scope(
        self,
        mut ident: Ident,
        scope: DefId,
        block: hir::HirId,
    ) -> (Ident, DefId) {
        let scope = ident
            .span
            .normalize_to_macros_2_0_and_adjust(self.expn_that_defined(scope))
            .and_then(|actual_expansion| actual_expansion.expn_data().parent_module)
            .unwrap_or_else(|| self.parent_module(block).to_def_id());
        (ident, scope)
    }
    /// Returns `true` if the trait identified by `key` has no object-safety violations.
    pub fn is_object_safe(self, key: DefId) -> bool {
        self.object_safety_violations(key).is_empty()
    }
    /// Returns `true` if `def_id` refers to a fn-like item (free fn, associated
    /// fn, or constructor) whose constness is `Const`.
    #[inline]
    pub fn is_const_fn_raw(self, def_id: DefId) -> bool {
        matches!(self.def_kind(def_id), DefKind::Fn | DefKind::AssocFn | DefKind::Ctor(..))
            && self.impl_constness(def_id) == hir::Constness::Const
    }
}
/// Yields the parent function's `LocalDefId` if `def_id` is an `impl Trait` definition.
pub fn is_impl_trait_defn(tcx: TyCtxt<'_>, def_id: DefId) -> Option<LocalDefId> {
    // Only local definitions have HIR available to inspect.
    let def_id = def_id.as_local()?;
    if let Node::Item(item) = tcx.hir().get_by_def_id(def_id) {
        if let hir::ItemKind::OpaqueTy(ref opaque_ty) = item.kind {
            return match opaque_ty.origin {
                hir::OpaqueTyOrigin::FnReturn(parent) | hir::OpaqueTyOrigin::AsyncFn(parent) => {
                    Some(parent)
                }
                // `type Alias = impl Trait;` has no parent function.
                hir::OpaqueTyOrigin::TyAlias => None,
            };
        }
    }
    None
}
/// Converts an AST signed-integer type to its `ty` counterpart.
pub fn int_ty(ity: ast::IntTy) -> IntTy {
    match ity {
        ast::IntTy::Isize => IntTy::Isize,
        ast::IntTy::I8 => IntTy::I8,
        ast::IntTy::I16 => IntTy::I16,
        ast::IntTy::I32 => IntTy::I32,
        ast::IntTy::I64 => IntTy::I64,
        ast::IntTy::I128 => IntTy::I128,
    }
}
/// Converts an AST unsigned-integer type to its `ty` counterpart.
pub fn uint_ty(uty: ast::UintTy) -> UintTy {
    match uty {
        ast::UintTy::Usize => UintTy::Usize,
        ast::UintTy::U8 => UintTy::U8,
        ast::UintTy::U16 => UintTy::U16,
        ast::UintTy::U32 => UintTy::U32,
        ast::UintTy::U64 => UintTy::U64,
        ast::UintTy::U128 => UintTy::U128,
    }
}
/// Converts an AST float type to its `ty` counterpart.
pub fn float_ty(fty: ast::FloatTy) -> FloatTy {
    match fty {
        ast::FloatTy::F32 => FloatTy::F32,
        ast::FloatTy::F64 => FloatTy::F64,
    }
}
/// Converts a `ty` signed-integer type back to its AST counterpart.
pub fn ast_int_ty(ity: IntTy) -> ast::IntTy {
    match ity {
        IntTy::Isize => ast::IntTy::Isize,
        IntTy::I8 => ast::IntTy::I8,
        IntTy::I16 => ast::IntTy::I16,
        IntTy::I32 => ast::IntTy::I32,
        IntTy::I64 => ast::IntTy::I64,
        IntTy::I128 => ast::IntTy::I128,
    }
}
/// Converts a `ty` unsigned-integer type back to its AST counterpart.
pub fn ast_uint_ty(uty: UintTy) -> ast::UintTy {
    match uty {
        UintTy::Usize => ast::UintTy::Usize,
        UintTy::U8 => ast::UintTy::U8,
        UintTy::U16 => ast::UintTy::U16,
        UintTy::U32 => ast::UintTy::U32,
        UintTy::U64 => ast::UintTy::U64,
        UintTy::U128 => ast::UintTy::U128,
    }
}
/// Installs this module's query providers into the global provider table.
pub fn provide(providers: &mut ty::query::Providers) {
    closure::provide(providers);
    context::provide(providers);
    erase_regions::provide(providers);
    layout::provide(providers);
    util::provide(providers);
    print::provide(providers);
    super::util::bug::provide(providers);
    super::middle::provide(providers);
    // Struct-update syntax keeps every provider registered above while
    // overriding only the queries listed here.
    *providers = ty::query::Providers {
        trait_impls_of: trait_def::trait_impls_of_provider,
        incoherent_impls: trait_def::incoherent_impls_provider,
        type_uninhabited_from: inhabitedness::type_uninhabited_from,
        const_param_default: consts::const_param_default,
        vtable_allocation: vtable::vtable_allocation_provider,
        ..*providers
    };
}
/// A map for the local crate mapping each type to a vector of its
/// inherent impls. This is not meant to be used outside of coherence;
/// rather, you should request the vector for a specific type via
/// `tcx.inherent_impls(def_id)` so as to minimize your dependencies
/// (constructing this map requires touching the entire crate).
#[derive(Clone, Debug, Default, HashStable)]
pub struct CrateInherentImpls {
    // Inherent impls keyed by the local type they apply to.
    pub inherent_impls: LocalDefIdMap<Vec<DefId>>,
    // Impls keyed by a simplified form of their self type.
    pub incoherent_impls: FxHashMap<SimplifiedType, Vec<LocalDefId>>,
}
/// A symbol name, borrowed from arena-allocated bytes (see `SymbolName::new`).
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, HashStable)]
pub struct SymbolName<'tcx> {
    /// `&str` gives a consistent ordering, which ensures reproducible builds.
    pub name: &'tcx str,
}
impl<'tcx> SymbolName<'tcx> {
    /// Copies `name` into the `tcx` arena and wraps it as a `SymbolName`.
    pub fn new(tcx: TyCtxt<'tcx>, name: &str) -> SymbolName<'tcx> {
        SymbolName {
            // SAFETY: the bytes are copied verbatim from a `&str`, so they are
            // guaranteed to be valid UTF-8.
            name: unsafe { str::from_utf8_unchecked(tcx.arena.alloc_slice(name.as_bytes())) },
        }
    }
}
impl<'tcx> fmt::Display for SymbolName<'tcx> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegates to the inner `&str`, preserving any formatter flags.
        fmt::Display::fmt(&self.name, fmt)
    }
}
impl<'tcx> fmt::Debug for SymbolName<'tcx> {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug output matches `Display`: the raw name, without quotes.
        fmt::Display::fmt(&self.name, fmt)
    }
}
/// Flags recording how an inference variable (`?T`) was encountered in
/// obligations during type inference.
#[derive(Debug, Default, Copy, Clone)]
pub struct FoundRelationships {
    /// This is true if we identified that this Ty (`?T`) is found in a `?T: Foo`
    /// obligation, where:
    ///
    /// * `Foo` is not `Sized`
    /// * `(): Foo` may be satisfied
    pub self_in_trait: bool,
    /// This is true if we identified that this Ty (`?T`) is found in a `<_ as
    /// _>::AssocType = ?T`
    pub output: bool,
}
| 37.624638 | 100 | 0.622509 |
9b2e1fe83c1cc2015d00d527e1e4244c71b1c83f | 2,331 | // run-pass
#![allow(warnings)]
#![feature(in_band_lifetimes)]
// `'x` is declared in-band: its first use in the signature introduces it.
fn foo(x: &'x u8) -> &'x u8 { x }
// Mixes an in-band lifetime with an elided one in the same signature.
fn foo2(x: &'a u8, y: &u8) -> &'a u8 { x }
// The in-band lifetime on `foo2` must be late-bound: it coerces to a
// higher-ranked fn pointer.
fn check_in_band_can_be_late_bound() {
    let _: for<'x> fn(&'x u8, &u8) -> &'x u8 = foo2;
}
// Unit struct hosting an inherent method for the impl below.
struct ForInherentNoParams;
// In-band lifetime used inside an inherent impl with no impl-level params.
impl ForInherentNoParams {
    fn foo(x: &'a u32, y: &u32) -> &'a u32 { x }
}
// Tuple struct with an explicitly declared lifetime parameter.
struct X<'a>(&'a u8);
// Conventional impl with an explicitly bound `'a`, for contrast with the
// in-band `impl X<'b>` below.
impl<'a> X<'a> {
    fn inner(&self) -> &'a u8 {
        self.0
    }
    fn same_lifetime_as_parameter(&mut self, x: &'a u8) {
        self.0 = x;
    }
}
// `'b` is introduced in-band on the impl header itself.
impl X<'b> {
    fn inner_2(&self) -> &'b u8 {
        self.0
    }
    // A method may still bind its own explicit `'a` alongside the impl's
    // in-band `'b`.
    fn reference_already_introduced_in_band_from_method_with_explicit_binders<'a>(
        &'b self, x: &'a u32
    ) {}
}
// Generic tuple struct; instantiated below with an in-band lifetime.
struct Y<T>(T);
// In-band `'a` appearing inside a type argument of the impl's self type.
impl Y<&'a u8> {
    fn inner(&self) -> &'a u8 {
        self.0
    }
}
// Trait using both its declared `'a` and in-band `'b` across method forms.
trait MyTrait<'a> {
    fn my_lifetime(&self) -> &'a u8;
    fn any_lifetime() -> &'b u8;
    fn borrowed_lifetime(&'b self) -> &'b u8;
    fn default_impl(&self, x: &'b u32, y: &u32) -> &'b u32 { x }
    fn in_band_def_explicit_impl(&self, x: &'b u8);
}
// Trait impl with an in-band `'a` on the header; the last method rebinds
// `'b` explicitly even though the trait declared it in-band.
impl MyTrait<'a> for Y<&'a u8> {
    fn my_lifetime(&self) -> &'a u8 { self.0 }
    fn any_lifetime() -> &'b u8 { &0 }
    fn borrowed_lifetime(&'b self) -> &'b u8 { &*self.0 }
    fn in_band_def_explicit_impl<'b>(&self, x: &'b u8) {}
}
// Higher-ranked bound written in a `where` clause still compiles.
fn test_hrtb_defined_lifetime_where<F>(_: F) where for<'a> F: Fn(&'a u8) {}
// Same higher-ranked bound, written as a poly trait ref.
fn test_hrtb_defined_lifetime_polytraitref<F>(_: F) where F: for<'a> Fn(&'a u8) {}
// An in-band lifetime may be referenced from a local's type annotation.
fn reference_in_band_from_locals(x: &'test u32) -> &'test u32 {
    let y: &'test u32 = x;
    y
}
// In-band lifetime inside a generic bound.
fn in_generics_in_band<T: MyTrait<'a>>(x: &T) {}
// In-band lifetime inside a `where` clause bound.
fn where_clause_in_band<T>(x: &T) where T: MyTrait<'a> {}
// In-band lifetime inside an argument-position `impl Trait`.
fn impl_trait_in_band(x: &impl MyTrait<'a>) {}
// Tests around using in-band lifetimes within existential traits.
// Marker trait used by the return-position `impl Trait` tests below.
trait FunkyTrait<'a> { }
// Blanket impl so any type satisfies `FunkyTrait<'a>`.
impl<'a, T> FunkyTrait<'a> for T { }
// In-band `'a` used as an outlives bound on return-position `impl Trait`.
fn ret_pos_impl_trait_in_band_outlives(x: &'a u32) -> impl ::std::fmt::Debug + 'a {
    x
}
// In-band `'a` used as a trait type argument on return-position `impl Trait`.
fn ret_pos_impl_trait_in_band_param(x: &'a u32) -> impl FunkyTrait<'a> {
    x
}
// `'static` trait argument combined with an in-band `'a` outlives bound.
fn ret_pos_impl_trait_in_band_param_static(x: &'a u32) -> impl FunkyTrait<'static> + 'a {
    x
}
// The same in-band `'a` used both as a trait argument and an outlives bound.
fn ret_pos_impl_trait_in_band_param_outlives(x: &'a u32) -> impl FunkyTrait<'a> + 'a {
    x
}
// Higher-ranked trait bound on the return type, with an in-band `'a` outlives.
fn ret_pos_impl_trait_in_band_higher_ranked(x: &'a u32) -> impl for<'b> FunkyTrait<'b> + 'a {
    x
}
// Compile-only test: nothing to execute.
fn main() {}
| 24.030928 | 93 | 0.593737 |
f8f56466b487a9c322bd7107cae73cca3ccba45e | 12,963 | use crate::parser;
use std::fmt;
// Types
// =====
// Term
// ----
/// A term of the surface language (see `Display` for the concrete syntax).
#[derive(Clone, Debug)]
pub enum Term {
  Var { name: String },
  // `dup a b = expr; body` — duplication binding.
  Dup { nam0: String, nam1: String, expr: BTerm, body: BTerm },
  Let { name: String, expr: BTerm, body: BTerm },
  // `λx body` (also written `@x body`).
  Lam { name: String, body: BTerm },
  // `(func argm)` application.
  App { func: BTerm, argm: BTerm },
  // `(Name arg*)` constructor application.
  Ctr { name: String, args: Vec<BTerm> },
  U32 { numb: u32 },
  // `(op val0 val1)` binary numeric operation.
  Op2 { oper: Oper, val0: BTerm, val1: BTerm },
}
/// A heap-allocated term; recursive `Term` variants use this to stay finite-sized.
pub type BTerm = Box<Term>;
/// Binary numeric operators; their surface symbols are defined in `Display`.
#[derive(Clone, Copy, Debug)]
pub enum Oper {
  Add,
  Sub,
  Mul,
  Div,
  Mod,
  And,
  Or,
  Xor,
  Shl,
  Shr,
  Ltn,
  Lte,
  Eql,
  Gte,
  Gtn,
  Neq,
}
// Rule
// ----
/// A rewrite rule, printed as `lhs = rhs`.
#[derive(Clone, Debug)]
pub struct Rule {
  pub lhs: BTerm,
  pub rhs: BTerm,
}
// File
// ----
/// A parsed file: a flat list of rewrite rules.
pub struct File {
  pub rules: Vec<Rule>,
}
// Stringifier
// ===========
// Term
// ----
impl fmt::Display for Oper {
  /// Writes the operator's surface-syntax symbol.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    let symbol = match self {
      Self::Add => "+",
      Self::Sub => "-",
      Self::Mul => "*",
      Self::Div => "/",
      Self::Mod => "%",
      Self::And => "&",
      Self::Or => "|",
      Self::Xor => "^",
      Self::Shl => "<<",
      Self::Shr => ">>",
      Self::Ltn => "<",
      Self::Lte => "<=",
      Self::Eql => "==",
      Self::Gte => ">=",
      Self::Gtn => ">",
      Self::Neq => "!=",
    };
    f.write_str(symbol)
  }
}
impl fmt::Display for Term {
  // WARN: recursive — could overflow the stack on pathologically deep terms.
  // NOTE: worst-case O(N^2) because nested terms are re-formatted; a rope (or
  // a single output buffer threaded through) would make this linear.
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    // Renders a `(StrCons c rest)` / `(StrNil)` chain as a quoted string
    // literal, or returns `None` when the term is not a well-formed string so
    // the caller falls back to plain constructor syntax.
    fn str_sugar(term: &Term) -> Option<String> {
      fn go(term: &Term, text: &mut String) -> Option<()> {
        if let Term::Ctr { name, args } = term {
          if name == "StrCons" && args.len() == 2 {
            // Fix: a `StrCons` whose head is not a `U32` is not a string.
            // Previously this fell through and returned `Some(())`, so e.g.
            // `(StrCons x rest)` displayed as a truncated string literal.
            return if let Term::U32 { numb } = *args[0] {
              // Invalid code points abort the sugar via `?`.
              text.push(std::char::from_u32(numb)?);
              go(&args[1], text)
            } else {
              None
            };
          }
          if name == "StrNil" && args.is_empty() {
            return Some(());
          }
        }
        None
      }
      let mut result = String::new();
      result.push('"');
      go(term, &mut result)?;
      result.push('"');
      Some(result)
    }
    match self {
      Self::Var { name } => write!(f, "{}", name),
      Self::Dup { nam0, nam1, expr, body } => {
        write!(f, "dup {} {} = {}; {}", nam0, nam1, expr, body)
      }
      Self::Let { name, expr, body } => write!(f, "let {} = {}; {}", name, expr, body),
      Self::Lam { name, body } => write!(f, "λ{} {}", name, body),
      Self::App { func, argm } => write!(f, "({} {})", func, argm),
      Self::Ctr { name, args } => {
        // Prefer string sugar when the constructor encodes a string.
        if let Some(term) = str_sugar(self) {
          write!(f, "{}", term)
        } else {
          write!(f, "({}{})", name, args.iter().map(|x| format!(" {}", x)).collect::<String>())
        }
      }
      Self::U32 { numb } => write!(f, "{}", numb),
      Self::Op2 { oper, val0, val1 } => write!(f, "({} {} {})", oper, val0, val1),
    }
  }
}
// Rule
// ----
impl fmt::Display for Rule {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{} = {}", self.lhs, self.rhs)
}
}
// File
// ----
impl fmt::Display for File {
  /// Prints one rule per line with no trailing newline (same output as
  /// joining the rules with `"\n"`).
  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
    for (index, rule) in self.rules.iter().enumerate() {
      if index > 0 {
        writeln!(f)?;
      }
      write!(f, "{}", rule)?;
    }
    Ok(())
  }
}
// Parser
// ======
/// Parses `let <name> = <expr>; <body>`, guarded on the `let ` keyword.
pub fn parse_let(state: parser::State) -> parser::Answer<Option<BTerm>> {
  return parser::guard(
    parser::text_parser("let "),
    Box::new(|state| {
      let (state, _) = parser::consume("let ", state)?;
      let (state, name) = parser::name1(state)?;
      let (state, _) = parser::consume("=", state)?;
      let (state, expr) = parse_term(state)?;
      let (state, _) = parser::text(";", state)?;
      let (state, body) = parse_term(state)?;
      Ok((state, Box::new(Term::Let { name, expr, body })))
    }),
    state,
  );
}
/// Parses `dup <nam0> <nam1> = <expr>; <body>`, guarded on the `dup ` keyword.
pub fn parse_dup(state: parser::State) -> parser::Answer<Option<BTerm>> {
  return parser::guard(
    parser::text_parser("dup "),
    Box::new(|state| {
      let (state, _) = parser::consume("dup ", state)?;
      let (state, nam0) = parser::name1(state)?;
      let (state, nam1) = parser::name1(state)?;
      let (state, _) = parser::consume("=", state)?;
      let (state, expr) = parse_term(state)?;
      let (state, _) = parser::text(";", state)?;
      let (state, body) = parse_term(state)?;
      Ok((state, Box::new(Term::Dup { nam0, nam1, expr, body })))
    }),
    state,
  );
}
/// Parses a lambda written with the `λ` sigil: `λ<name> <body>`.
pub fn parse_lam(state: parser::State) -> parser::Answer<Option<BTerm>> {
  return parser::guard(
    parser::text_parser("λ"),
    Box::new(|state| {
      let (state, _) = parser::text("λ", state)?;
      let (state, name) = parser::name(state)?;
      let (state, body) = parse_term(state)?;
      Ok((state, Box::new(Term::Lam { name, body })))
    }),
    state,
  );
}
// TODO: move this to parse_lam to avoid duplicated code
/// Parses a lambda written with the ASCII `@` sigil: `@<name> <body>`.
pub fn parse_lam_ugly(state: parser::State) -> parser::Answer<Option<BTerm>> {
  return parser::guard(
    parser::text_parser("@"),
    Box::new(|state| {
      let (state, _) = parser::text("@", state)?;
      let (state, name) = parser::name(state)?;
      let (state, body) = parse_term(state)?;
      Ok((state, Box::new(Term::Lam { name, body })))
    }),
    state,
  );
}
/// Parses a parenthesized application list `(f a b …)`, folding the items
/// left to right into nested `App` nodes.
pub fn parse_app(state: parser::State) -> parser::Answer<Option<BTerm>> {
  parser::guard(
    parser::text_parser("("),
    Box::new(|state| {
      parser::list(
        parser::text_parser("("),
        parser::text_parser(""),
        parser::text_parser(")"),
        Box::new(parse_term),
        Box::new(|args| {
          // Left-fold into applications; the empty list `()` parses as `0`.
          args
            .into_iter()
            .reduce(|func, argm| Box::new(Term::App { func, argm }))
            .unwrap_or_else(|| Box::new(Term::U32 { numb: 0 }))
        }),
        state,
      )
    }),
    state,
  )
}
/// Parses a constructor `(Name arg*)`; the guard requires an uppercase or `.`
/// head so plain applications fall through to `parse_app`.
pub fn parse_ctr(state: parser::State) -> parser::Answer<Option<BTerm>> {
  parser::guard(
    Box::new(|state| {
      let (state, _) = parser::text("(", state)?;
      let (state, head) = parser::get_char(state)?;
      Ok((state, ('A'..='Z').contains(&head) || head == '.'))
    }),
    Box::new(|state| {
      let (state, open) = parser::text("(", state)?;
      let (state, name) = parser::name1(state)?;
      // Only consume args up to `)` when the opening paren was present.
      let (state, args) = if open {
        parser::until(parser::text_parser(")"), Box::new(parse_term), state)?
      } else {
        (state, Vec::new())
      };
      Ok((state, Box::new(Term::Ctr { name, args })))
    }),
    state,
  )
}
/// Parses an unsigned 32-bit integer literal.
pub fn parse_u32(state: parser::State) -> parser::Answer<Option<BTerm>> {
  parser::guard(
    Box::new(|state| {
      let (state, head) = parser::get_char(state)?;
      Ok((state, ('0'..='9').contains(&head)))
    }),
    Box::new(|state| {
      let (state, numb) = parser::name1(state)?;
      if numb.is_empty() {
        return Ok((state, Box::new(Term::U32 { numb: 0 })));
      }
      // Fix: `name1` may return a token that is not a valid `u32` (e.g.
      // `123abc`, or a value above `u32::MAX`); the previous `.unwrap()`
      // panicked on such input instead of reporting a parse error.
      match numb.parse::<u32>() {
        Ok(numb) => Ok((state, Box::new(Term::U32 { numb }))),
        Err(_) => parser::expected("u32 literal", numb.len(), state),
      }
    }),
    state,
  )
}
pub fn parse_op2(state: parser::State) -> parser::Answer<Option<BTerm>> {
fn is_op_char(chr: char) -> bool {
false
|| chr == '+'
|| chr == '-'
|| chr == '*'
|| chr == '/'
|| chr == '%'
|| chr == '&'
|| chr == '|'
|| chr == '^'
|| chr == '<'
|| chr == '>'
|| chr == '='
|| chr == '!'
}
fn parse_oper(state: parser::State) -> parser::Answer<Oper> {
fn op<'a>(symbol: &'static str, oper: Oper) -> parser::Parser<'a, Option<Oper>> {
Box::new(move |state| {
let (state, done) = parser::text(symbol, state)?;
Ok((state, if done { Some(oper) } else { None }))
})
}
parser::grammar(
"Oper",
&[
op("+", Oper::Add),
op("-", Oper::Sub),
op("*", Oper::Mul),
op("/", Oper::Div),
op("%", Oper::Mod),
op("&", Oper::And),
op("|", Oper::Or),
op("^", Oper::Xor),
op("<<", Oper::Shl),
op(">>", Oper::Shr),
op("<", Oper::Ltn),
op("<=", Oper::Lte),
op("==", Oper::Eql),
op(">=", Oper::Gte),
op(">", Oper::Gtn),
op("!=", Oper::Neq),
],
state,
)
}
parser::guard(
Box::new(|state| {
let (state, open) = parser::text("(", state)?;
let (state, head) = parser::get_char(state)?;
Ok((state, open && is_op_char(head)))
}),
Box::new(|state| {
let (state, _) = parser::text("(", state)?;
let (state, oper) = parse_oper(state)?;
let (state, val0) = parse_term(state)?;
let (state, val1) = parse_term(state)?;
let (state, _) = parser::text(")", state)?;
Ok((state, Box::new(Term::Op2 { oper, val0, val1 })))
}),
state,
)
}
/// Parses a variable name; the guard requires a lowercase or `_` head so
/// constructors (uppercase) are not captured.
pub fn parse_var(state: parser::State) -> parser::Answer<Option<BTerm>> {
  parser::guard(
    Box::new(|state| {
      let (state, head) = parser::get_char(state)?;
      Ok((state, ('a'..='z').contains(&head) || head == '_'))
    }),
    Box::new(|state| {
      let (state, name) = parser::name(state)?;
      Ok((state, Box::new(Term::Var { name })))
    }),
    state,
  )
}
// TODO: parse escape sequences
/// Parses a double-quoted string literal into a `StrCons`/`StrNil` chain.
pub fn parse_str_sugar(state: parser::State) -> parser::Answer<Option<BTerm>> {
  parser::guard(
    Box::new(|state| {
      let (state, head) = parser::get_char(state)?;
      Ok((state, head == '"'))
    }),
    Box::new(|state| {
      let (state, _head) = parser::text("\"", state)?;
      let mut chars: Vec<char> = Vec::new();
      let mut state = state;
      loop {
        let (new_state, next) = parser::get_char(state)?;
        // NOTE(review): '\0' (end of input) also terminates the literal, so
        // an unterminated string is silently accepted — confirm intended.
        if next == '"' || next == '\0' {
          state = new_state;
          break;
        } else {
          chars.push(next);
          state = new_state;
        }
      }
      let empty = Term::Ctr { name: "StrNil".to_string(), args: Vec::new() };
      // Fold right-to-left so the list is built head-first.
      let list = Box::new(chars.iter().rfold(empty, |t, h| Term::Ctr {
        name: "StrCons".to_string(),
        args: vec![Box::new(Term::U32 { numb: *h as u32 }), Box::new(t)],
      }));
      Ok((state, list))
    }),
    state,
  )
}
// TODO: parse escape sequences
//pub fn parse_lst_sugar(state: parser::State) -> parser::Answer<Option<BTerm>> {
//parser::guard(
//Box::new(|state| {
//let (state, head) = parser::get_char(state)?;
//Ok((state, head == '['))
//}),
//Box::new(|state| {
//let (state, _head) = parser::text("[", state)?;
//let mut elems: Vec<Box<Term>> = Vec::new();
//let mut state = state;
//loop {
//let (new_state, done) = parser::text("]")?;
//if done {
//state = new_state;
//break;
//} else {
//let elem = parse_term(state)?;
//elems.push(Box::new(elem));
//state = new_state;
//}
//}
//let empty = Term::Ctr { name: "Nil".to_string(), args: Vec::new() };
//let list = Box::new(chars.iter().rfold(empty, |t, h| Term::Ctr {
//name: "Cons".to_string(),
//args: vec![h, Box::new(t)],
//}));
//Ok((state, list))
//}),
//state,
//)
//}
/// Parses any term by trying each alternative in order; guards keep the
/// paren-headed parsers (ctr/op2/app) mutually exclusive, and the trailing
/// `Ok((state, None))` lets `grammar` report a failure.
pub fn parse_term(state: parser::State) -> parser::Answer<BTerm> {
  parser::grammar(
    "Term",
    &[
      Box::new(parse_let),
      Box::new(parse_dup),
      Box::new(parse_lam),
      Box::new(parse_lam_ugly),
      Box::new(parse_ctr),
      Box::new(parse_op2),
      Box::new(parse_app),
      Box::new(parse_u32),
      Box::new(parse_str_sugar),
      Box::new(parse_var),
      Box::new(|state| Ok((state, None))),
    ],
    state,
  )
}
/// Parses a single rewrite rule: `<lhs> = <rhs>`.
pub fn parse_rule(state: parser::State) -> parser::Answer<Option<Rule>> {
  parser::guard(
    // Empty guard: a rule may start with any term.
    parser::text_parser(""),
    Box::new(|state| {
      let (state, lhs) = parse_term(state)?;
      let (state, _) = parser::consume("=", state)?;
      let (state, rhs) = parse_term(state)?;
      Ok((state, Rule { lhs, rhs }))
    }),
    state,
  )
}
/// Parses an entire file: a sequence of rules up to end of input.
pub fn parse_file(state: parser::State) -> parser::Answer<File> {
  let mut rules = Vec::new();
  let mut state = state;
  loop {
    let (new_state, done) = parser::done(state)?;
    if done {
      break;
    }
    let (new_state, rule) = parse_rule(new_state)?;
    if let Some(rule) = rule {
      rules.push(rule);
    } else {
      // NOTE: reports the error at `state` (before this iteration's `done`
      // check), not at `new_state`.
      return parser::expected("definition", 1, state);
    }
    state = new_state;
  }
  Ok((state, File { rules }))
}
/// Convenience wrapper: parses a single term from `code`.
pub fn read_term(code: &str) -> Box<Term> {
  parser::read(Box::new(parse_term), code)
}
/// Convenience wrapper: parses a whole file from `code`.
pub fn read_file(code: &str) -> File {
  parser::read(Box::new(parse_file), code)
}
/// Convenience wrapper: parses a single rule from `code`.
#[allow(dead_code)]
pub fn read_rule(code: &str) -> Option<Rule> {
  parser::read(Box::new(parse_rule), code)
}
| 26.187879 | 102 | 0.495179 |
4beeb251c84dd8d0bb6537656ecea1a2fc0de09c | 11,428 | use crate::get_error;
use crate::surface::SurfaceRef;
use crate::video;
use crate::EventPump;
use std::mem::transmute;
use crate::sys;
mod relative;
pub use self::relative::RelativeMouseState;
/// The predefined system cursors, mirroring SDL's `SDL_SystemCursor` values.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
#[repr(u32)]
pub enum SystemCursor {
    Arrow = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_ARROW as u32,
    IBeam = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_IBEAM as u32,
    Wait = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_WAIT as u32,
    Crosshair = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_CROSSHAIR as u32,
    WaitArrow = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_WAITARROW as u32,
    SizeNWSE = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_SIZENWSE as u32,
    SizeNESW = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_SIZENESW as u32,
    SizeWE = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_SIZEWE as u32,
    SizeNS = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_SIZENS as u32,
    SizeAll = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_SIZEALL as u32,
    No = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_NO as u32,
    Hand = sys::SDL_SystemCursor::SDL_SYSTEM_CURSOR_HAND as u32,
}
/// A mouse cursor; the raw SDL cursor is freed when this value is dropped.
pub struct Cursor {
    raw: *mut sys::SDL_Cursor
}
impl Drop for Cursor {
    #[inline]
    fn drop(&mut self) {
        // Releases the underlying SDL cursor resource.
        unsafe { sys::SDL_FreeCursor(self.raw) };
    }
}
impl Cursor {
pub fn new(data: &[u8], mask: &[u8], width: i32, height: i32, hot_x: i32, hot_y: i32) -> Result<Cursor, String> {
unsafe {
let raw = sys::SDL_CreateCursor(data.as_ptr(),
mask.as_ptr(),
width as i32, height as i32,
hot_x as i32, hot_y as i32);
if raw.is_null() {
Err(get_error())
} else {
Ok(Cursor{ raw: raw })
}
}
}
// TODO: figure out how to pass Surface in here correctly
pub fn from_surface<S: AsRef<SurfaceRef>>(surface: S, hot_x: i32, hot_y: i32) -> Result<Cursor, String> {
unsafe {
let raw = sys::SDL_CreateColorCursor(surface.as_ref().raw(), hot_x, hot_y);
if raw.is_null() {
Err(get_error())
} else {
Ok(Cursor{ raw: raw })
}
}
}
pub fn from_system(cursor: SystemCursor) -> Result<Cursor, String> {
unsafe {
let raw = sys::SDL_CreateSystemCursor(transmute(cursor as u32));
if raw.is_null() {
Err(get_error())
} else {
Ok(Cursor{ raw: raw })
}
}
}
pub fn set(&self) {
unsafe { sys::SDL_SetCursor(self.raw); }
}
}
/// Scroll direction reported with wheel events; `Unknown` preserves any
/// value SDL reports beyond the two documented ones.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum MouseWheelDirection {
    Normal,
    Flipped,
    Unknown(u32),
}
// 0 and 1 are not fixed values in the SDL source code. This value is defined as an enum which is then cast to a Uint32.
// The enum in C is defined as such:
/**
* \brief Scroll direction types for the Scroll event
*/
//typedef enum
//{
// SDL_MOUSEWHEEL_NORMAL, /**< The scroll direction is normal */
// SDL_MOUSEWHEEL_FLIPPED /**< The scroll direction is flipped / natural */
//} SDL_MouseWheelDirection;
// Since no value is given in the enum definition these values are auto assigned by the C compiler starting at 0.
// Normally I would prefer to use the enum rather than hard code what it is implied to represent however
// the mouse wheel direction value could be described equally as well by a bool, so I don't think changes
// to this enum in the C source code are going to be a problem.
impl MouseWheelDirection {
#[inline]
pub fn from_ll(direction: u32) -> MouseWheelDirection {
match direction {
0 => MouseWheelDirection::Normal,
1 => MouseWheelDirection::Flipped,
_ => MouseWheelDirection::Unknown(direction),
}
}
#[inline]
pub fn to_ll(&self) -> u32 {
match *self {
MouseWheelDirection::Normal => 0,
MouseWheelDirection::Flipped => 1,
MouseWheelDirection::Unknown(direction) => direction,
}
}
}
/// Mouse buttons, using SDL's 1-based `SDL_BUTTON_*` numbering; `Unknown` (0)
/// covers values outside that range.
#[repr(u8)]
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub enum MouseButton {
    Unknown = 0,
    Left = sys::SDL_BUTTON_LEFT as u8,
    Middle = sys::SDL_BUTTON_MIDDLE as u8,
    Right = sys::SDL_BUTTON_RIGHT as u8,
    X1 = sys::SDL_BUTTON_X1 as u8,
    X2 = sys::SDL_BUTTON_X2 as u8,
}
impl MouseButton {
    /// Converts a raw SDL button index into a `MouseButton`, mapping
    /// unrecognized values to `Unknown`.
    #[inline]
    pub fn from_ll(button: u8) -> MouseButton {
        match button as u32 {
            sys::SDL_BUTTON_LEFT => MouseButton::Left,
            sys::SDL_BUTTON_MIDDLE => MouseButton::Middle,
            sys::SDL_BUTTON_RIGHT => MouseButton::Right,
            sys::SDL_BUTTON_X1 => MouseButton::X1,
            sys::SDL_BUTTON_X2 => MouseButton::X2,
            _ => MouseButton::Unknown,
        }
    }
}
/// A snapshot of the mouse: a button bitmask plus a cursor position.
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct MouseState {
    // Bitmask of pressed buttons, as returned by `SDL_GetMouseState`.
    mouse_state: u32,
    x: i32,
    y: i32
}
impl MouseState {
pub fn new(_e: &EventPump) -> MouseState {
let mut x = 0;
let mut y = 0;
let mouse_state: u32 = unsafe {
sys::SDL_GetMouseState(&mut x, &mut y)
};
MouseState {
mouse_state: mouse_state,
x: x as i32,
y: y as i32
}
}
pub fn from_sdl_state(state: u32) -> MouseState {
MouseState { mouse_state : state, x: 0, y: 0 }
}
pub fn to_sdl_state(&self) -> u32 {
self.mouse_state
}
fn button_mask(&self, button: u32) -> u32 {
1 << (button - 1)
}
/// Returns true if the left mouse button is pressed.
///
/// # Example
/// ```no_run
/// use sdl2::mouse::MouseButton;
///
/// fn is_a_pressed(e: &sdl2::EventPump) -> bool {
/// e.mouse_state().left()
/// }
/// ```
pub fn left(&self) -> bool { (self.mouse_state & self.button_mask(sys::SDL_BUTTON_LEFT)) != 0 }
/// Tests if the middle mouse button was pressed.
pub fn middle(&self) -> bool { (self.mouse_state & self.button_mask(sys::SDL_BUTTON_MIDDLE)) != 0 }
/// Tests if the right mouse button was pressed.
pub fn right(&self) -> bool { (self.mouse_state & self.button_mask(sys::SDL_BUTTON_RIGHT)) != 0 }
/// Tests if the X1 mouse button was pressed.
pub fn x1(&self) -> bool { (self.mouse_state & self.button_mask(sys::SDL_BUTTON_X1)) != 0 }
/// Tests if the X2 mouse button was pressed.
pub fn x2(&self) -> bool { (self.mouse_state & self.button_mask(sys::SDL_BUTTON_X2)) != 0 }
/// Returns the x coordinate of the state
pub fn x(&self) -> i32 { self.x }
/// Returns the y coordinate of the state
pub fn y(&self) -> i32 { self.y }
/// Returns true if the mouse button is pressed.
///
/// # Example
/// ```no_run
/// use sdl2::mouse::MouseButton;
///
/// fn is_left_pressed(e: &sdl2::EventPump) -> bool {
/// e.mouse_state().is_mouse_button_pressed(MouseButton::Left)
/// }
/// ```
pub fn is_mouse_button_pressed(&self, mouse_button: MouseButton) -> bool {
let mask = 1 << ((mouse_button as u32)-1);
self.mouse_state & mask != 0
}
/// Returns an iterator all mouse buttons with a boolean indicating if the scancode is pressed.
///
/// # Example
/// ```no_run
/// use sdl2::mouse::MouseButton;
/// use std::collections::HashMap;
///
/// fn mouse_button_set(e: &sdl2::EventPump) -> HashMap<MouseButton, bool> {
/// e.mouse_state().mouse_buttons().collect()
/// }
///
/// fn find_first_pressed(e: &sdl2::EventPump) -> bool {
/// for (key,value) in mouse_button_set(e) {
/// return value != false
/// }
/// false
/// }
///
/// ```
pub fn mouse_buttons(&self) -> MouseButtonIterator {
MouseButtonIterator {
cur_button: 1,
mouse_state: &self.mouse_state
}
}
/// Returns an iterator of pressed mouse buttons.
///
/// # Example
/// ```no_run
/// use sdl2::mouse::MouseButton;
/// use std::collections::HashSet;
///
/// fn pressed_mouse_button_set(e: &sdl2::EventPump) -> HashSet<MouseButton> {
/// e.mouse_state().pressed_mouse_buttons().collect()
/// }
///
/// fn newly_pressed(old: &HashSet<MouseButton>, new: &HashSet<MouseButton>) -> HashSet<MouseButton> {
/// new - old
/// // sugar for: new.difference(old).collect()
/// }
/// ```
pub fn pressed_mouse_buttons(&self) -> PressedMouseButtonIterator {
PressedMouseButtonIterator {
iter: self.mouse_buttons()
}
}
}
/// Iterates over every mouse button, yielding `(button, is_pressed)` pairs.
pub struct MouseButtonIterator<'a> {
    // Next 1-based SDL button index to report.
    cur_button: u8,
    mouse_state: &'a u32
}
impl<'a> Iterator for MouseButtonIterator<'a> {
    type Item = (MouseButton, bool);

    fn next(&mut self) -> Option<(MouseButton, bool)> {
        // Walk buttons 1..=X2; each button's pressed bit is `1 << (index - 1)`.
        if self.cur_button < MouseButton::X2 as u8 + 1 {
            let mouse_button = self.cur_button;
            let mask = 1 << ((self.cur_button as u32)-1);
            let pressed = self.mouse_state & mask != 0;
            self.cur_button += 1;
            Some((MouseButton::from_ll(mouse_button), pressed))
        } else {
            None
        }
    }
}
/// Filters `MouseButtonIterator` down to the buttons that are pressed.
pub struct PressedMouseButtonIterator<'a> {
    iter: MouseButtonIterator<'a>
}
impl<'a> Iterator for PressedMouseButtonIterator<'a> {
    type Item = MouseButton;

    fn next(&mut self) -> Option<MouseButton> {
        // Skip released buttons, yielding only the pressed ones.
        self.iter.find(|&(_, pressed)| pressed).map(|(button, _)| button)
    }
}
impl crate::Sdl {
    /// Accessor for the mouse utility functions; holding `MouseUtil` keeps
    /// the SDL context alive via the shared drop guard.
    #[inline]
    pub fn mouse(&self) -> MouseUtil {
        MouseUtil {
            _sdldrop: self.sdldrop()
        }
    }
}
/// Mouse utility functions. Access with `Sdl::mouse()`.
///
/// ```no_run
/// let sdl_context = sdl2::init().unwrap();
///
/// // Hide the cursor
/// sdl_context.mouse().show_cursor(false);
/// ```
pub struct MouseUtil {
    // Shared drop guard that keeps SDL initialized while this handle exists.
    _sdldrop: ::std::rc::Rc<crate::SdlDrop>
}
impl MouseUtil {
    /// Gets the id of the window which currently has mouse focus.
    pub fn focused_window_id(&self) -> Option<u32> {
        let raw = unsafe { sys::SDL_GetMouseFocus() };
        if raw.is_null() {
            // No window has mouse focus.
            None
        } else {
            let id = unsafe { sys::SDL_GetWindowID(raw) };
            Some(id)
        }
    }

    /// Moves the mouse cursor to (`x`, `y`) within the given window.
    pub fn warp_mouse_in_window(&self, window: &video::Window, x: i32, y: i32) {
        unsafe { sys::SDL_WarpMouseInWindow(window.raw(), x, y); }
    }

    /// Enables or disables SDL's relative mouse mode.
    pub fn set_relative_mouse_mode(&self, on: bool) {
        let on = if on { sys::SDL_bool::SDL_TRUE } else { sys::SDL_bool::SDL_FALSE };
        unsafe { sys::SDL_SetRelativeMouseMode(on); }
    }

    /// Returns whether relative mouse mode is currently enabled.
    pub fn relative_mouse_mode(&self) -> bool {
        unsafe { sys::SDL_GetRelativeMouseMode() == sys::SDL_bool::SDL_TRUE }
    }

    /// Returns whether the cursor is currently shown; passes `SDL_QUERY` so
    /// the visibility state is read without being changed.
    pub fn is_cursor_showing(&self) -> bool {
        unsafe { sys::SDL_ShowCursor(crate::sys::SDL_QUERY) == 1 }
    }

    /// Shows (`true`) or hides (`false`) the cursor.
    pub fn show_cursor(&self, show: bool) {
        unsafe { sys::SDL_ShowCursor(show as i32); }
    }

    /// Enables or disables mouse capture.
    pub fn capture(&self, enable: bool) {
        let enable = if enable { sys::SDL_bool::SDL_TRUE } else { sys::SDL_bool::SDL_FALSE };
        unsafe { sys::SDL_CaptureMouse(enable); }
    }
}
| 30.393617 | 121 | 0.585754 |
c1b0f15ab572c41f3ffdc64377b3103e3041e7ff | 971 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use glib::object::Cast;
use glib::translate::*;
use gtk_sys;
use std::fmt;
use IMContext;
glib_wrapper! {
pub struct IMContextSimple(Object<gtk_sys::GtkIMContextSimple, gtk_sys::GtkIMContextSimpleClass>) @extends IMContext;
match fn {
get_type => || gtk_sys::gtk_im_context_simple_get_type(),
}
}
impl IMContextSimple {
    /// Creates a new simple input method context.
    /// (This file is gir-generated; comments here may be lost on regeneration.)
    pub fn new() -> IMContextSimple {
        assert_initialized_main_thread!();
        // `from_glib_full` takes ownership of the newly created GObject.
        unsafe { IMContext::from_glib_full(gtk_sys::gtk_im_context_simple_new()).unsafe_cast() }
    }
}
impl Default for IMContextSimple {
    // Defaults to a freshly created context.
    fn default() -> Self {
        Self::new()
    }
}
pub const NONE_IM_CONTEXT_SIMPLE: Option<&IMContextSimple> = None;
impl fmt::Display for IMContextSimple {
    // Displays the fixed type name (gir-generated convention).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "IMContextSimple")
    }
}
| 24.897436 | 121 | 0.678682 |
e68bddd0ad584c0a69e78e8d804b410d9f88fda2 | 2,907 | use regex_automata::{Regex, RegexBuilder};
use smol_str::SmolStr;
use crate::{ctrl::Error, ServerState};
lazy_static::lazy_static! {
    // NOTE(review): the pattern is empty, so this anchored regex only matches
    // the empty string — it cannot validate URLs as its name suggests, and it
    // is not referenced in this chunk. Looks like a placeholder; confirm the
    // intended pattern before relying on it.
    static ref VALID_URL: Regex = RegexBuilder::new()
        .minimize(true)
        .anchored(true)
        .build("")
        .unwrap();
}
/// Response serialization format requested by the client (`format=` query).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum OEmbedFormat {
    Json,
    XML,
}
impl Default for OEmbedFormat {
fn default() -> Self {
OEmbedFormat::Json
}
}
/// Incoming oEmbed request parameters; every field is optional.
#[derive(Debug, Default, Clone, Deserialize)]
pub struct OEmbedRequest {
    /// URL of the resource to embed.
    #[serde(default)]
    pub url: Option<SmolStr>,
    /// Maximum embed width requested by the consumer.
    #[serde(default)]
    pub maxwidth: Option<u32>,
    /// Maximum embed height requested by the consumer.
    #[serde(default)]
    pub maxheight: Option<u32>,
    /// Desired response format; defaults to JSON.
    #[serde(default)]
    pub format: OEmbedFormat,
}
/// oEmbed resource type, serialized as the lowercase `type` tag.
// NOTE(review): the name is misspelled ("OEmebed" vs "OEmbed"); renaming
// would break the public API, so it is kept as-is.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "lowercase", tag = "type")]
pub enum OEmebedType {
    Photo { url: SmolStr, width: u32, height: u32 },
    Video { html: SmolStr, width: u32, height: u32 },
    Link,
    Rich { html: SmolStr, width: u32, height: u32 },
}
/// oEmbed response payload; the type-specific fields are flattened in.
#[derive(Debug, Clone, Serialize)]
pub struct OEmbedResponse {
    #[serde(flatten)]
    pub ty: OEmebedType,
    /// oEmbed protocol version; always "1.0" (see `Default`).
    pub version: &'static str,
    pub provider_name: &'static str,
    pub provider_url: &'static str,
    /// Suggested consumer cache lifetime, in seconds.
    pub cache_age: u32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<SmolStr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub author_name: Option<SmolStr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub author_url: Option<SmolStr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thumbnail_url: Option<SmolStr>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thumbnail_width: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub thumbnail_height: Option<u32>,
}
impl Default for OEmbedResponse {
    /// A bare `Link`-typed response with the fixed Lantern provider info.
    fn default() -> Self {
        OEmbedResponse {
            ty: OEmebedType::Link,
            version: "1.0",
            provider_name: "Lantern",
            provider_url: "https://lantern.chat",
            // 24 hours, in seconds.
            cache_age: 60 * 60 * 24,
            title: None,
            author_name: None,
            author_url: None,
            thumbnail_url: None,
            thumbnail_width: None,
            thumbnail_height: None,
        }
    }
}
/// Builds the oEmbed response. Currently a placeholder: the request is
/// ignored and a fixed Lantern preview photo is returned.
pub async fn process_oembed(_state: ServerState, _req: &OEmbedRequest) -> Result<OEmbedResponse, Error> {
    // Dimensions of the static preview image.
    const WIDTH: u32 = 1920;
    const HEIGHT: u32 = 955;
    let preview = SmolStr::new("https://lantern.chat/static/assets/preview.png");

    let mut response = OEmbedResponse::default();
    response.ty = OEmebedType::Photo {
        url: preview.clone(),
        height: HEIGHT,
        width: WIDTH,
    };
    response.title = Some(SmolStr::new_inline("Lantern Chat"));
    response.thumbnail_url = Some(preview);
    response.thumbnail_height = Some(HEIGHT);
    response.thumbnail_width = Some(WIDTH);

    Ok(response)
}
| 26.427273 | 105 | 0.614723 |
d60c920c3eeb724cc90722b85ce876d86ad99180 | 1,477 | mod cache;
mod cache_arena;
mod impls;
#[cfg(test)]
mod test;
use smallvec::SmallVec;
type SmallVec8<T> = SmallVec<[T; 8]>;
pub type CacheArena = cache_arena::CacheArena<Hash256>;
pub use crate::cache::TreeHashCache;
pub use crate::impls::int_log;
use ethereum_types::H256 as Hash256;
use tree_hash::TreeHash;
#[derive(Debug, PartialEq, Clone)]
pub enum Error {
/// Attempting to provide more than 2^depth leaves to a Merkle tree is disallowed.
TooManyLeaves,
/// Shrinking a Merkle tree cache by providing it with less leaves than it currently has is
/// disallowed (for simplicity).
CannotShrink,
/// Cache is inconsistent with the list of dirty indices provided.
CacheInconsistent,
CacheArenaError(cache_arena::Error),
/// Unable to find left index in Merkle tree.
MissingLeftIdx(usize),
}
impl From<cache_arena::Error> for Error {
fn from(e: cache_arena::Error) -> Error {
Error::CacheArenaError(e)
}
}
/// Trait for types which can make use of a cache to accelerate calculation of their tree hash root.
pub trait CachedTreeHash<Cache>: TreeHash {
/// Create a new cache appropriate for use with values of this type.
fn new_tree_hash_cache(&self, arena: &mut CacheArena) -> Cache;
/// Update the cache and use it to compute the tree hash root for `self`.
fn recalculate_tree_hash_root(
&self,
arena: &mut CacheArena,
cache: &mut Cache,
) -> Result<Hash256, Error>;
}
| 30.770833 | 100 | 0.700745 |
6423d3ca341546e2cadb0366817da404c5a5b2a6 | 4,996 | extern crate gluon;
use gluon::{new_vm, Compiler};
#[test]
fn dont_stack_overflow_on_let_bindings() {
let text = r#"
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in
let _ = 1
in 1
"#;
let vm = new_vm();
Compiler::new()
.load_script_async(&vm, "", text)
.sync_or_error()
.unwrap();
}
| 6.760487 | 42 | 0.540432 |
61dc559c82260602d55737a81a94770bd80bd635 | 3,253 | #![cfg_attr(not(feature = "std"), no_std)]
/// Edit this file to define custom logic or remove it if it is not needed.
/// Learn more about FRAME and the core library of Substrate FRAME pallets:
/// https://substrate.dev/docs/en/knowledgebase/runtime/frame
pub use pallet::*;
#[cfg(test)]
mod mock;
#[cfg(test)]
mod tests;
#[frame_support::pallet]
pub mod pallet {
use frame_support::{
dispatch::DispatchResultWithPostInfo,
pallet_prelude::*
};
use frame_system::pallet_prelude::*;
use sp_std::vec::Vec;
#[pallet::config]
pub trait Config: frame_system::Config {
/// Because this pallet emits events, it depends on the runtime's definition of an event.
type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;
#[pallet::constant]
type VecLimit: Get<u8>;
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
pub struct Pallet<T>(_);
#[pallet::storage]
#[pallet::getter(fn proofs)]
pub type Proofs<T: Config> = StorageMap<
_,
Blake2_128Concat,
Vec<u8>,
(T::AccountId, T::BlockNumber)
>;
#[pallet::event]
#[pallet::metadata(T::AccountId = "AccountId")]
#[pallet::generate_deposit(pub(super) fn deposit_event)]
pub enum Event<T:Config> {
ClaimCreated(T::AccountId, Vec<u8>),
ClaimRevoked(T::AccountId, Vec<u8>),
ClaimMutated(T::AccountId, Vec<u8>, T::AccountId)
}
#[pallet::error]
pub enum Error<T> {
ProofAlreadyExist,
ClaimNotExisit,
NotClaimOwner,
BadMetadata
}
#[pallet::hooks]
impl<T:Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
}
#[pallet::call]
impl<T:Config> Pallet<T> {
#[pallet::weight(0)]
pub fn create_claim(origin: OriginFor<T>, claim: Vec<u8>) -> DispatchResultWithPostInfo {
ensure!(claim.len() < T::VecLimit::get() as usize, Error::<T>::BadMetadata);
let sender = ensure_signed(origin)?;
ensure!(!Proofs::<T>::contains_key(&claim), Error::<T>::ProofAlreadyExist);
Proofs::<T>::insert(&claim, (sender.clone(), frame_system::Pallet::<T>::block_number()));
Self::deposit_event(Event::ClaimCreated(sender, claim));
Ok(().into())
}
#[pallet::weight(0)]
pub fn revoke_claim(origin: OriginFor<T>, claim: Vec<u8>) -> DispatchResultWithPostInfo {
ensure!(claim.len() < T::VecLimit::get() as usize, Error::<T>::BadMetadata);
let sender = ensure_signed(origin)?;
let (owner, _) = Proofs::<T>::get(&claim).ok_or(Error::<T>::ClaimNotExisit)?;
ensure!(sender == owner, Error::<T>::NotClaimOwner);
Proofs::<T>::remove(&claim);
Self::deposit_event(Event::ClaimRevoked(sender, claim));
Ok(().into())
}
#[pallet::weight(0)]
pub fn transfer_claim(origin: OriginFor<T>, claim: Vec<u8>, reciever: T::AccountId) -> DispatchResultWithPostInfo {
ensure!(claim.len() < T::VecLimit::get() as usize, Error::<T>::BadMetadata);
let sender = ensure_signed(origin)?;
//let reciever = T::AccountId::decode(&mut dest.as_bytes()).unwrap_or_default();
let (owner, _) = Proofs::<T>::get(&claim).ok_or(Error::<T>::ClaimNotExisit)?;
ensure!(sender == owner, Error::<T>::NotClaimOwner);
Proofs::<T>::insert(&claim, (reciever.clone(), frame_system::Pallet::<T>::block_number()));
Self::deposit_event(Event::ClaimMutated(sender, claim, reciever));
Ok(().into())
}
}
}
| 30.401869 | 117 | 0.671995 |
ab41ba0b47a0052034281b970c17c0748d27abde | 2,959 | use crate::config::injection::Injector;
use crate::config::{is_router, uses_remote_metastore, Config};
use crate::metastore::MetaStore;
use crate::sql::SqlService;
use crate::CubeError;
use std::convert::Infallible;
use std::net::SocketAddr;
use std::sync::Arc;
use warp::http::StatusCode;
use warp::Filter;
pub fn serve_status_probes(c: &Config) {
let addr = match c.config_obj().status_bind_address() {
Some(a) => a.clone(),
None => return,
};
let p = match RouterProbes::try_new(c) {
Some(p) => p,
None => return,
};
let pc = p.clone();
let l = warp::path!("livez").and_then(move || {
let pc = pc.clone();
async move { status_probe_reply("liveness", pc.is_live().await) }
});
let r = warp::path!("readyz").and_then(move || {
let p = p.clone();
async move { status_probe_reply("readiness", p.is_ready().await) }
});
let addr: SocketAddr = addr.parse().expect("cannot parse status probe address");
match warp::serve(l.or(r)).try_bind_ephemeral(addr) {
Ok((addr, f)) => {
log::info!("Serving status probes at {}", addr);
tokio::spawn(f);
}
Err(e) => {
log::error!("Failed to serve status probes at {}: {}", addr, e);
}
}
}
pub fn status_probe_reply(probe: &str, r: Result<(), CubeError>) -> Result<StatusCode, Infallible> {
match r {
Ok(()) => Ok(StatusCode::OK),
Err(e) => {
log::warn!("{} probe failed: {}", probe, e.display_with_backtrace());
Ok(StatusCode::INTERNAL_SERVER_ERROR)
}
}
}
#[derive(Clone)]
struct RouterProbes {
services: Arc<Injector>,
}
impl RouterProbes {
pub fn try_new(config: &Config) -> Option<RouterProbes> {
if !is_router(config.config_obj().as_ref()) {
return None;
}
Some(RouterProbes {
services: config.injector(),
})
}
pub async fn is_live(&self) -> Result<(), CubeError> {
if let Some(s) = self
.services
.try_get_service_typed::<dyn SqlService>()
.await
{
s.exec_query("SELECT 1").await?;
}
Ok(())
}
pub async fn is_ready(&self) -> Result<(), CubeError> {
if uses_remote_metastore(&self.services).await {
return Ok(());
}
let m = match self.services.try_get_service_typed::<dyn MetaStore>().await {
None => return Err(CubeError::internal("metastore is not ready".to_string())),
Some(m) => m,
};
// Check metastore is not stalled.
m.get_schemas().await?;
// It is tempting to check worker connectivity on the router, but we cannot do this now.
// Workers connect to the router for warmup, so router must be ready before workers are up.
// TODO: warmup explicitly with router request instead?
Ok(())
}
}
| 30.505155 | 100 | 0.568435 |
fe498e67196cc51aea36e1ae63e3acc6bd29d964 | 2,739 | use super::*;
use crate::{
query_ast::*,
query_graph::{Node, NodeRef, QueryGraph, QueryGraphDependency},
ParsedInputValue,
};
use prisma_models::{ModelRef, RelationFieldRef};
use std::{convert::TryInto, sync::Arc};
pub fn connect_nested_set(
graph: &mut QueryGraph,
parent_node: NodeRef,
parent_relation_field: &RelationFieldRef,
value: ParsedInputValue,
child_model: &ModelRef,
) -> QueryGraphBuilderResult<()> {
let mut finders = Vec::new();
for value in utils::coerce_vec(value) {
let record_finder = extract_record_finder(value, &child_model)?;
finders.push(record_finder);
}
let child_read_query = utils::read_ids_infallible(&child_model, finders);
let child_node = graph.create_node(child_read_query);
graph.create_edge(&parent_node, &child_node, QueryGraphDependency::ExecutionOrder)?;
// connect::connect_records_node(graph, &parent_node, &child_node, &parent_relation_field, None, None)?;
let set = WriteQuery::SetRecords(SetRecords {
parent: None,
wheres: vec![],
relation_field: Arc::clone(&parent_relation_field),
});
let set_node = graph.create_node(Query::Write(set));
// Edge from parent to set.
graph.create_edge(
&parent_node,
&set_node,
QueryGraphDependency::ParentIds(Box::new(|mut child_node, mut parent_ids| {
let len = parent_ids.len();
if len == 0 {
Err(QueryGraphBuilderError::AssertionError(format!(
"Required exactly one parent ID to be present for disconnect query, found none."
)))
} else if len > 1 {
Err(QueryGraphBuilderError::AssertionError(format!(
"Required exactly one parent ID to be present for disconnect query, found {}.",
len
)))
} else {
if let Node::Query(Query::Write(WriteQuery::SetRecords(ref mut x))) = child_node {
let parent_id = parent_ids.pop().unwrap();
x.parent = Some(parent_id.try_into()?);
}
Ok(child_node)
}
})),
)?;
// Edge from child to set.
graph.create_edge(
&child_node,
&set_node,
QueryGraphDependency::ParentIds(Box::new(|mut child_node, parent_ids| {
if let Node::Query(Query::Write(WriteQuery::SetRecords(ref mut x))) = child_node {
x.wheres = parent_ids
.iter()
.map(|x| x.try_into().expect("Prisma Value was not a GraphqlId"))
.collect();
}
Ok(child_node)
})),
)?;
Ok(())
}
| 33.814815 | 108 | 0.585615 |
4b5bf778709affa6f145ec77ab754564d5678a76 | 9,751 | // Copyright (C) 2021 Scott Lamb <[email protected]>
// SPDX-License-Identifier: MIT OR Apache-2.0
//! nom-based parser which is a fairly straightforward translation of the ABNF
//! from [RFC 7235](https://datatracker.ietf.org/doc/html/rfc7235):
//!
//! * Some functional differences are noted in [`http_auth::parser::ChallengeParser`].
//! * We alter the `challenge` definition to avoid ambiguities when placing it
//! into `1#challenge`. You can see this effect by adjusting `test_2level_list1`
//! to not use the `_inner` form.
use log::trace;
use nom::branch::alt;
use nom::bytes::complete::is_a;
use nom::character::complete::{char, satisfy};
use nom::combinator::{all_consuming, consumed, map, opt, value};
use nom::multi::{fold_many0, many0_count, many1, many1_count, separated_list0, separated_list1};
use nom::sequence::{delimited, pair, preceded, separated_pair, tuple};
use http_auth::{ChallengeRef, ParamValue};
/// Parses optional whitespace as in [RFC 7230 section 3.2.3](https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.3).
///
/// ```text
/// OWS = *( SP / HTAB )
/// ; optional whitespace
/// ```
use nom::character::complete::space0 as ows;
/// Parses "bad" whitespace as in [RFC 7230 section 3.2.3](https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.3).
///
/// This is functionally identical to `ows`.
use nom::character::complete::space0 as bws;
/// Parses a token as in RFC 7230 section 3.2.6.
///
/// ```text
/// token = 1*tchar
///
/// tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
/// / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
/// / DIGIT / ALPHA
/// ; any VCHAR, except delimiters
/// ```
fn token(input: &str) -> nom::IResult<&str, &str> {
trace!("token attempt on {:?}", input);
is_a("!#$%&'*+-.^_`|~0123456789abcdefghijklmnopqrstuvxwyzABCDEFGHIJKLMNOPQRSTUVWXYZ")(input)
}
/// Parses `quoted-string` as in [RFC 7230 section 3.2.6](https://datatracker.ietf.org/doc/html/rfc7230#section-3.2.6).
///
/// ```text
/// quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
/// qdtext = HTAB / SP /%x21 / %x23-5B / %x5D-7E / obs-text
/// obs-text = %x80-FF
/// quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
/// VCHAR = %x21-7E
/// ; visible (printing) characters
/// ```
fn quoted_string(input: &str) -> nom::IResult<&str, ParamValue> {
trace!("quoted_string attempt on {:?}", input);
let is_qdtext = |c| match c {
'\t' | ' ' | '\x21' | '\x23'..='\x5B' | '\x5D'..='\x7E' => true,
_ => false,
};
let is_escapable = |c| match c {
'\t' | ' ' | '\x21'..='\x7E' => true,
_ => false,
};
delimited(
char('"'),
map(
consumed(fold_many0(
alt((
value(0, many1(satisfy(is_qdtext))),
value(1, pair(char('\\'), satisfy(is_escapable))),
)),
|| 0,
|acc: usize, item: usize| acc + item,
)),
|(raw, escapes)| ParamValue::new(escapes, raw),
),
char('"'),
)(input)
}
/// Parses `auth-param` as in [RFC 7235 section
/// 2.1](https://datatracker.ietf.org/doc/html/rfc7235#section-2.1).
///
/// ```text
/// auth-param = token BWS "=" BWS ( token / quoted-string )
/// ```
fn auth_param(input: &str) -> nom::IResult<&str, (&str, ParamValue)> {
trace!("auth_param attempt on {:?}", input);
separated_pair(
token,
tuple((bws, char('='), bws)),
alt((map(token, |raw| ParamValue::new(0, raw)), quoted_string)),
)(input)
}
/// Parses `1#element` as defined in
/// [RFC 7230 section 7](https://datatracker.ietf.org/doc/html/rfc7230#section-7).
///
/// > A recipient MUST accept lists that satisfy the following syntax:
/// > ```text
/// > 1#element => *( "," OWS ) element *( OWS "," [ OWS element ] )
/// > ```
fn list1_relaxed<'i, O, F>(f: F) -> impl FnMut(&'i str) -> nom::IResult<&'i str, Vec<O>>
where
F: nom::Parser<&'i str, O, nom::error::Error<&'i str>>,
{
delimited(
// *( "," OWS )
many0_count(pair(char(','), ows)),
list1_relaxed_inner(f),
// *( OWS "," )
many0_count(pair(ows, char(','))),
)
}
/// Parses `1#element` minus the leading and trailing portions.
fn list1_relaxed_inner<'i, O, F>(f: F) -> impl FnMut(&'i str) -> nom::IResult<&'i str, Vec<O>>
where
F: nom::Parser<&'i str, O, nom::error::Error<&'i str>>,
{
// element *( OWS 1*( "," OWS ) element )
separated_list1(pair(ows, many1_count(pair(char(','), ows))), f)
}
/// Parses `#element` as defined in [RFC 7230 section 7](https://datatracker.ietf.org/doc/html/rfc7230#section-7).
///
/// > A recipient MUST accept lists that satisfy the following syntax:
/// > ```text
/// > #element => [ ( "," / element ) *( OWS "," [ OWS element ] ) ]
/// > ```
#[cfg(test)]
fn list0_relaxed<'i, O, F>(f: F) -> impl FnMut(&'i str) -> nom::IResult<&'i str, Vec<O>>
where
F: nom::Parser<&'i str, O, nom::error::Error<&'i str>>,
{
delimited(
// *( "," OWS )
many0_count(pair(char(','), ows)),
list0_relaxed_inner(f),
// *( OWS "," )
many0_count(pair(ows, char(','))),
)
}
/// Parses `1#element` minus the leading and trailing portions.
///
/// This is used in the `challenge` definition; it avoids ambiguities with
/// the outer list1.
fn list0_relaxed_inner<'i, O, F>(f: F) -> impl FnMut(&'i str) -> nom::IResult<&'i str, Vec<O>>
where
F: nom::Parser<&'i str, O, nom::error::Error<&'i str>>,
{
// [ element *( OWS 1*( "," OWS ) element ) ]
separated_list0(pair(ows, many1_count(pair(char(','), ows))), f)
}
/// Parses a challenge as in [RFC 7235].
///
/// Section 2.1 defines this rule as follows:
/// ```text
/// auth-scheme = token
/// challenge = auth-scheme [ 1*SP ( token68 / #auth-param ) ]
/// ```
///
/// Although in practice this is ambiguous when placed into a `1#challenge`,
/// which we resolve by using `list0_relaxed_inner` rather than `list0_relaxed`.
fn challenge(input: &str) -> nom::IResult<&str, ChallengeRef> {
trace!("challenge attempt on {:?}", input);
map(
tuple((
token,
opt(preceded(char(' '), list0_relaxed_inner(auth_param))),
)),
|(scheme, opt_params)| ChallengeRef {
scheme,
params: opt_params.unwrap_or_default(),
},
)(input)
}
/// Appends the challenges described by `value` into `challenges`.
///
/// This can be used to parse `Proxy-Authenticate` and/or `WWW-Authenticate` header values.
///
/// ```text
/// Proxy-Authenticate = *( "," OWS ) challenge *( OWS "," [ OWS
/// challenge ] )
///
/// WWW-Authenticate = *( "," OWS ) challenge *( OWS "," [ OWS challenge
/// ] )
/// ```
pub fn challenges(input: &str) -> nom::IResult<&str, Vec<ChallengeRef>> {
all_consuming(list1_relaxed(challenge))(input)
}
#[cfg(test)]
mod tests {
use nom::bytes::complete::tag;
use nom::error::{Error, ErrorKind};
use nom::Err;
use super::*;
#[test]
fn test_quoted_string() {
assert_eq!(
quoted_string(&r#""foo""#),
Ok(("", ParamValue::new(0, "foo")))
);
assert_eq!(
quoted_string(&r#""foo bar""#),
Ok(("", ParamValue::new(0, "foo bar")))
);
assert_eq!(
quoted_string(&r#""foo \" bar""#),
Ok(("", ParamValue::new(1, r#"foo \" bar"#))),
);
assert_eq!(quoted_string(r#""""#), Ok(("", ParamValue::new(0, ""))));
}
#[test]
fn test_challenges() {
assert_eq!(
challenges(r#"Scheme foo="blah \" blah""#),
Ok((
"",
vec![ChallengeRef {
scheme: "Scheme",
params: vec![("foo", ParamValue::new(1, "blah \\\" blah"),)],
}]
))
);
}
#[test]
fn test_list1() {
assert_eq!(
list1_relaxed(token)("foo,bar"),
Ok(("", vec!["foo", "bar"]))
);
assert_eq!(
list1_relaxed(token)("foo ,bar"),
Ok(("", vec!["foo", "bar"]))
);
assert_eq!(
list1_relaxed(token)("foo ,bar, charlie "),
Ok((" ", vec!["foo", "bar", "charlie"]))
);
assert_eq!(
list1_relaxed(token)(""),
Err(Err::Error(Error::new("", ErrorKind::IsA)))
);
assert_eq!(
list1_relaxed(token)(","),
Err(Err::Error(Error::new("", ErrorKind::IsA)))
);
assert_eq!(
list1_relaxed(token)(", ,"),
Err(Err::Error(Error::new("", ErrorKind::IsA)))
);
}
#[test]
fn test_2level_list1() {
let foo_bar_list = preceded(tag("foo "), list1_relaxed_inner(tag("bar")));
assert_eq!(
list1_relaxed(foo_bar_list)(", foo bar,bar, foo bar,"),
Ok(("", vec![vec!["bar", "bar"], vec!["bar"]]))
);
}
#[test]
fn test_list0() {
assert_eq!(
list0_relaxed(token)("foo,bar"),
Ok(("", vec!["foo", "bar"]))
);
assert_eq!(
list0_relaxed(token)("foo ,bar"),
Ok(("", vec!["foo", "bar"]))
);
assert_eq!(
list0_relaxed(token)("foo ,bar, charlie "),
Ok((" ", vec!["foo", "bar", "charlie"]))
);
assert_eq!(list0_relaxed(token)(""), Ok(("", vec![])));
assert_eq!(list0_relaxed(token)(","), Ok(("", vec![])));
assert_eq!(list0_relaxed(token)(", ,"), Ok(("", vec![])));
}
}
| 32.83165 | 123 | 0.524356 |
0eceafc3707d4de739bc277dcb9a3595a57d90a8 | 55,751 | // Generated by swizzlegen. Do not edit.
use super::Vec4Swizzles;
use crate::{UVec2, UVec3, UVec4, XY, XYZ, XYZW};
impl Vec4Swizzles for UVec4 {
type Vec2 = UVec2;
type Vec3 = UVec3;
#[inline]
fn xxxx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.x,
w: self.x,
})
}
#[inline]
fn xxxy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.x,
w: self.y,
})
}
#[inline]
fn xxxz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.x,
w: self.z,
})
}
#[inline]
fn xxxw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.x,
w: self.w,
})
}
#[inline]
fn xxyx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.y,
w: self.x,
})
}
#[inline]
fn xxyy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.y,
w: self.y,
})
}
#[inline]
fn xxyz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.y,
w: self.z,
})
}
#[inline]
fn xxyw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.y,
w: self.w,
})
}
#[inline]
fn xxzx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.z,
w: self.x,
})
}
#[inline]
fn xxzy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.z,
w: self.y,
})
}
#[inline]
fn xxzz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.z,
w: self.z,
})
}
#[inline]
fn xxzw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.z,
w: self.w,
})
}
#[inline]
fn xxwx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.w,
w: self.x,
})
}
#[inline]
fn xxwy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.w,
w: self.y,
})
}
#[inline]
fn xxwz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.w,
w: self.z,
})
}
#[inline]
fn xxww(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.x,
z: self.w,
w: self.w,
})
}
#[inline]
fn xyxx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.x,
w: self.x,
})
}
#[inline]
fn xyxy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.x,
w: self.y,
})
}
#[inline]
fn xyxz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.x,
w: self.z,
})
}
#[inline]
fn xyxw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.x,
w: self.w,
})
}
#[inline]
fn xyyx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.y,
w: self.x,
})
}
#[inline]
fn xyyy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.y,
w: self.y,
})
}
#[inline]
fn xyyz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.y,
w: self.z,
})
}
#[inline]
fn xyyw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.y,
w: self.w,
})
}
#[inline]
fn xyzx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.z,
w: self.x,
})
}
#[inline]
fn xyzy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.z,
w: self.y,
})
}
#[inline]
fn xyzz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.z,
w: self.z,
})
}
#[inline]
fn xywx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.w,
w: self.x,
})
}
#[inline]
fn xywy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.w,
w: self.y,
})
}
#[inline]
fn xywz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.w,
w: self.z,
})
}
#[inline]
fn xyww(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.y,
z: self.w,
w: self.w,
})
}
#[inline]
fn xzxx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.x,
w: self.x,
})
}
#[inline]
fn xzxy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.x,
w: self.y,
})
}
#[inline]
fn xzxz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.x,
w: self.z,
})
}
#[inline]
fn xzxw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.x,
w: self.w,
})
}
#[inline]
fn xzyx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.y,
w: self.x,
})
}
#[inline]
fn xzyy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.y,
w: self.y,
})
}
#[inline]
fn xzyz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.y,
w: self.z,
})
}
#[inline]
fn xzyw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.y,
w: self.w,
})
}
#[inline]
fn xzzx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.z,
w: self.x,
})
}
#[inline]
fn xzzy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.z,
w: self.y,
})
}
#[inline]
fn xzzz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.z,
w: self.z,
})
}
#[inline]
fn xzzw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.z,
w: self.w,
})
}
#[inline]
fn xzwx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.w,
w: self.x,
})
}
#[inline]
fn xzwy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.w,
w: self.y,
})
}
#[inline]
fn xzwz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.w,
w: self.z,
})
}
#[inline]
fn xzww(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.z,
z: self.w,
w: self.w,
})
}
#[inline]
fn xwxx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.x,
w: self.x,
})
}
#[inline]
fn xwxy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.x,
w: self.y,
})
}
#[inline]
fn xwxz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.x,
w: self.z,
})
}
#[inline]
fn xwxw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.x,
w: self.w,
})
}
#[inline]
fn xwyx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.y,
w: self.x,
})
}
#[inline]
fn xwyy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.y,
w: self.y,
})
}
#[inline]
fn xwyz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.y,
w: self.z,
})
}
#[inline]
fn xwyw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.y,
w: self.w,
})
}
#[inline]
fn xwzx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.z,
w: self.x,
})
}
#[inline]
fn xwzy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.z,
w: self.y,
})
}
#[inline]
fn xwzz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.z,
w: self.z,
})
}
#[inline]
fn xwzw(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.z,
w: self.w,
})
}
#[inline]
fn xwwx(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.w,
w: self.x,
})
}
#[inline]
fn xwwy(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.w,
w: self.y,
})
}
#[inline]
fn xwwz(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.w,
w: self.z,
})
}
#[inline]
fn xwww(self) -> UVec4 {
UVec4(XYZW {
x: self.x,
y: self.w,
z: self.w,
w: self.w,
})
}
#[inline]
fn yxxx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.x,
w: self.x,
})
}
#[inline]
fn yxxy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.x,
w: self.y,
})
}
#[inline]
fn yxxz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.x,
w: self.z,
})
}
#[inline]
fn yxxw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.x,
w: self.w,
})
}
#[inline]
fn yxyx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.y,
w: self.x,
})
}
#[inline]
fn yxyy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.y,
w: self.y,
})
}
#[inline]
fn yxyz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.y,
w: self.z,
})
}
#[inline]
fn yxyw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.y,
w: self.w,
})
}
#[inline]
fn yxzx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.z,
w: self.x,
})
}
#[inline]
fn yxzy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.z,
w: self.y,
})
}
#[inline]
fn yxzz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.z,
w: self.z,
})
}
#[inline]
fn yxzw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.z,
w: self.w,
})
}
#[inline]
fn yxwx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.w,
w: self.x,
})
}
#[inline]
fn yxwy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.w,
w: self.y,
})
}
#[inline]
fn yxwz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.w,
w: self.z,
})
}
#[inline]
fn yxww(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.x,
z: self.w,
w: self.w,
})
}
#[inline]
fn yyxx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.x,
w: self.x,
})
}
#[inline]
fn yyxy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.x,
w: self.y,
})
}
#[inline]
fn yyxz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.x,
w: self.z,
})
}
#[inline]
fn yyxw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.x,
w: self.w,
})
}
#[inline]
fn yyyx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.y,
w: self.x,
})
}
#[inline]
fn yyyy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.y,
w: self.y,
})
}
#[inline]
fn yyyz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.y,
w: self.z,
})
}
#[inline]
fn yyyw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.y,
w: self.w,
})
}
#[inline]
fn yyzx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.z,
w: self.x,
})
}
#[inline]
fn yyzy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.z,
w: self.y,
})
}
#[inline]
fn yyzz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.z,
w: self.z,
})
}
#[inline]
fn yyzw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.z,
w: self.w,
})
}
#[inline]
fn yywx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.w,
w: self.x,
})
}
#[inline]
fn yywy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.w,
w: self.y,
})
}
#[inline]
fn yywz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.w,
w: self.z,
})
}
#[inline]
fn yyww(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.y,
z: self.w,
w: self.w,
})
}
#[inline]
fn yzxx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.x,
w: self.x,
})
}
#[inline]
fn yzxy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.x,
w: self.y,
})
}
#[inline]
fn yzxz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.x,
w: self.z,
})
}
#[inline]
fn yzxw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.x,
w: self.w,
})
}
#[inline]
fn yzyx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.y,
w: self.x,
})
}
#[inline]
fn yzyy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.y,
w: self.y,
})
}
#[inline]
fn yzyz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.y,
w: self.z,
})
}
#[inline]
fn yzyw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.y,
w: self.w,
})
}
#[inline]
fn yzzx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.z,
w: self.x,
})
}
#[inline]
fn yzzy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.z,
w: self.y,
})
}
#[inline]
fn yzzz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.z,
w: self.z,
})
}
#[inline]
fn yzzw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.z,
w: self.w,
})
}
#[inline]
fn yzwx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.w,
w: self.x,
})
}
#[inline]
fn yzwy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.w,
w: self.y,
})
}
#[inline]
fn yzwz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.w,
w: self.z,
})
}
#[inline]
fn yzww(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.z,
z: self.w,
w: self.w,
})
}
#[inline]
fn ywxx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.x,
w: self.x,
})
}
#[inline]
fn ywxy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.x,
w: self.y,
})
}
#[inline]
fn ywxz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.x,
w: self.z,
})
}
#[inline]
fn ywxw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.x,
w: self.w,
})
}
#[inline]
fn ywyx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.y,
w: self.x,
})
}
#[inline]
fn ywyy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.y,
w: self.y,
})
}
#[inline]
fn ywyz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.y,
w: self.z,
})
}
#[inline]
fn ywyw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.y,
w: self.w,
})
}
#[inline]
fn ywzx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.z,
w: self.x,
})
}
#[inline]
fn ywzy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.z,
w: self.y,
})
}
#[inline]
fn ywzz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.z,
w: self.z,
})
}
#[inline]
fn ywzw(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.z,
w: self.w,
})
}
#[inline]
fn ywwx(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.w,
w: self.x,
})
}
#[inline]
fn ywwy(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.w,
w: self.y,
})
}
#[inline]
fn ywwz(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.w,
w: self.z,
})
}
#[inline]
fn ywww(self) -> UVec4 {
UVec4(XYZW {
x: self.y,
y: self.w,
z: self.w,
w: self.w,
})
}
#[inline]
fn zxxx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.x,
w: self.x,
})
}
#[inline]
fn zxxy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.x,
w: self.y,
})
}
#[inline]
fn zxxz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.x,
w: self.z,
})
}
#[inline]
fn zxxw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.x,
w: self.w,
})
}
#[inline]
fn zxyx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.y,
w: self.x,
})
}
#[inline]
fn zxyy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.y,
w: self.y,
})
}
#[inline]
fn zxyz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.y,
w: self.z,
})
}
#[inline]
fn zxyw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.y,
w: self.w,
})
}
#[inline]
fn zxzx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.z,
w: self.x,
})
}
#[inline]
fn zxzy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.z,
w: self.y,
})
}
#[inline]
fn zxzz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.z,
w: self.z,
})
}
#[inline]
fn zxzw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.z,
w: self.w,
})
}
#[inline]
fn zxwx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.w,
w: self.x,
})
}
#[inline]
fn zxwy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.w,
w: self.y,
})
}
#[inline]
fn zxwz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.w,
w: self.z,
})
}
#[inline]
fn zxww(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.x,
z: self.w,
w: self.w,
})
}
#[inline]
fn zyxx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.x,
w: self.x,
})
}
#[inline]
fn zyxy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.x,
w: self.y,
})
}
#[inline]
fn zyxz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.x,
w: self.z,
})
}
#[inline]
fn zyxw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.x,
w: self.w,
})
}
#[inline]
fn zyyx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.y,
w: self.x,
})
}
#[inline]
fn zyyy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.y,
w: self.y,
})
}
#[inline]
fn zyyz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.y,
w: self.z,
})
}
#[inline]
fn zyyw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.y,
w: self.w,
})
}
#[inline]
fn zyzx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.z,
w: self.x,
})
}
#[inline]
fn zyzy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.z,
w: self.y,
})
}
#[inline]
fn zyzz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.z,
w: self.z,
})
}
#[inline]
fn zyzw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.z,
w: self.w,
})
}
#[inline]
fn zywx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.w,
w: self.x,
})
}
#[inline]
fn zywy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.w,
w: self.y,
})
}
#[inline]
fn zywz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.w,
w: self.z,
})
}
#[inline]
fn zyww(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.y,
z: self.w,
w: self.w,
})
}
#[inline]
fn zzxx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.x,
w: self.x,
})
}
#[inline]
fn zzxy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.x,
w: self.y,
})
}
#[inline]
fn zzxz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.x,
w: self.z,
})
}
#[inline]
fn zzxw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.x,
w: self.w,
})
}
#[inline]
fn zzyx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.y,
w: self.x,
})
}
#[inline]
fn zzyy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.y,
w: self.y,
})
}
#[inline]
fn zzyz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.y,
w: self.z,
})
}
#[inline]
fn zzyw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.y,
w: self.w,
})
}
#[inline]
fn zzzx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.z,
w: self.x,
})
}
#[inline]
fn zzzy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.z,
w: self.y,
})
}
#[inline]
fn zzzz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.z,
w: self.z,
})
}
#[inline]
fn zzzw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.z,
w: self.w,
})
}
#[inline]
fn zzwx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.w,
w: self.x,
})
}
#[inline]
fn zzwy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.w,
w: self.y,
})
}
#[inline]
fn zzwz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.w,
w: self.z,
})
}
#[inline]
fn zzww(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.z,
z: self.w,
w: self.w,
})
}
#[inline]
fn zwxx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.x,
w: self.x,
})
}
#[inline]
fn zwxy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.x,
w: self.y,
})
}
#[inline]
fn zwxz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.x,
w: self.z,
})
}
#[inline]
fn zwxw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.x,
w: self.w,
})
}
#[inline]
fn zwyx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.y,
w: self.x,
})
}
#[inline]
fn zwyy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.y,
w: self.y,
})
}
#[inline]
fn zwyz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.y,
w: self.z,
})
}
#[inline]
fn zwyw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.y,
w: self.w,
})
}
#[inline]
fn zwzx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.z,
w: self.x,
})
}
#[inline]
fn zwzy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.z,
w: self.y,
})
}
#[inline]
fn zwzz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.z,
w: self.z,
})
}
#[inline]
fn zwzw(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.z,
w: self.w,
})
}
#[inline]
fn zwwx(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.w,
w: self.x,
})
}
#[inline]
fn zwwy(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.w,
w: self.y,
})
}
#[inline]
fn zwwz(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.w,
w: self.z,
})
}
#[inline]
fn zwww(self) -> UVec4 {
UVec4(XYZW {
x: self.z,
y: self.w,
z: self.w,
w: self.w,
})
}
#[inline]
fn wxxx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.x,
w: self.x,
})
}
#[inline]
fn wxxy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.x,
w: self.y,
})
}
#[inline]
fn wxxz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.x,
w: self.z,
})
}
#[inline]
fn wxxw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.x,
w: self.w,
})
}
#[inline]
fn wxyx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.y,
w: self.x,
})
}
#[inline]
fn wxyy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.y,
w: self.y,
})
}
#[inline]
fn wxyz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.y,
w: self.z,
})
}
#[inline]
fn wxyw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.y,
w: self.w,
})
}
#[inline]
fn wxzx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.z,
w: self.x,
})
}
#[inline]
fn wxzy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.z,
w: self.y,
})
}
#[inline]
fn wxzz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.z,
w: self.z,
})
}
#[inline]
fn wxzw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.z,
w: self.w,
})
}
#[inline]
fn wxwx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.w,
w: self.x,
})
}
#[inline]
fn wxwy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.w,
w: self.y,
})
}
#[inline]
fn wxwz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.w,
w: self.z,
})
}
#[inline]
fn wxww(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.x,
z: self.w,
w: self.w,
})
}
#[inline]
fn wyxx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.x,
w: self.x,
})
}
#[inline]
fn wyxy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.x,
w: self.y,
})
}
#[inline]
fn wyxz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.x,
w: self.z,
})
}
#[inline]
fn wyxw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.x,
w: self.w,
})
}
#[inline]
fn wyyx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.y,
w: self.x,
})
}
#[inline]
fn wyyy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.y,
w: self.y,
})
}
#[inline]
fn wyyz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.y,
w: self.z,
})
}
#[inline]
fn wyyw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.y,
w: self.w,
})
}
#[inline]
fn wyzx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.z,
w: self.x,
})
}
#[inline]
fn wyzy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.z,
w: self.y,
})
}
#[inline]
fn wyzz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.z,
w: self.z,
})
}
#[inline]
fn wyzw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.z,
w: self.w,
})
}
#[inline]
fn wywx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.w,
w: self.x,
})
}
#[inline]
fn wywy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.w,
w: self.y,
})
}
#[inline]
fn wywz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.w,
w: self.z,
})
}
#[inline]
fn wyww(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.y,
z: self.w,
w: self.w,
})
}
#[inline]
fn wzxx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.x,
w: self.x,
})
}
#[inline]
fn wzxy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.x,
w: self.y,
})
}
#[inline]
fn wzxz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.x,
w: self.z,
})
}
#[inline]
fn wzxw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.x,
w: self.w,
})
}
#[inline]
fn wzyx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.y,
w: self.x,
})
}
#[inline]
fn wzyy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.y,
w: self.y,
})
}
#[inline]
fn wzyz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.y,
w: self.z,
})
}
#[inline]
fn wzyw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.y,
w: self.w,
})
}
#[inline]
fn wzzx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.z,
w: self.x,
})
}
#[inline]
fn wzzy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.z,
w: self.y,
})
}
#[inline]
fn wzzz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.z,
w: self.z,
})
}
#[inline]
fn wzzw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.z,
w: self.w,
})
}
#[inline]
fn wzwx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.w,
w: self.x,
})
}
#[inline]
fn wzwy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.w,
w: self.y,
})
}
#[inline]
fn wzwz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.w,
w: self.z,
})
}
#[inline]
fn wzww(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.z,
z: self.w,
w: self.w,
})
}
#[inline]
fn wwxx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.x,
w: self.x,
})
}
#[inline]
fn wwxy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.x,
w: self.y,
})
}
#[inline]
fn wwxz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.x,
w: self.z,
})
}
#[inline]
fn wwxw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.x,
w: self.w,
})
}
#[inline]
fn wwyx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.y,
w: self.x,
})
}
#[inline]
fn wwyy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.y,
w: self.y,
})
}
#[inline]
fn wwyz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.y,
w: self.z,
})
}
#[inline]
fn wwyw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.y,
w: self.w,
})
}
#[inline]
fn wwzx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.z,
w: self.x,
})
}
#[inline]
fn wwzy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.z,
w: self.y,
})
}
#[inline]
fn wwzz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.z,
w: self.z,
})
}
#[inline]
fn wwzw(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.z,
w: self.w,
})
}
#[inline]
fn wwwx(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.w,
w: self.x,
})
}
#[inline]
fn wwwy(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.w,
w: self.y,
})
}
#[inline]
fn wwwz(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.w,
w: self.z,
})
}
#[inline]
fn wwww(self) -> UVec4 {
UVec4(XYZW {
x: self.w,
y: self.w,
z: self.w,
w: self.w,
})
}
#[inline]
fn xxx(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.x,
z: self.x,
})
}
#[inline]
fn xxy(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.x,
z: self.y,
})
}
#[inline]
fn xxz(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.x,
z: self.z,
})
}
#[inline]
fn xxw(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.x,
z: self.w,
})
}
#[inline]
fn xyx(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.y,
z: self.x,
})
}
#[inline]
fn xyy(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.y,
z: self.y,
})
}
#[inline]
fn xyz(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.y,
z: self.z,
})
}
#[inline]
fn xyw(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.y,
z: self.w,
})
}
#[inline]
fn xzx(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.z,
z: self.x,
})
}
#[inline]
fn xzy(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.z,
z: self.y,
})
}
#[inline]
fn xzz(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.z,
z: self.z,
})
}
#[inline]
fn xzw(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.z,
z: self.w,
})
}
#[inline]
fn xwx(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.w,
z: self.x,
})
}
#[inline]
fn xwy(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.w,
z: self.y,
})
}
#[inline]
fn xwz(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.w,
z: self.z,
})
}
#[inline]
fn xww(self) -> UVec3 {
UVec3(XYZ {
x: self.x,
y: self.w,
z: self.w,
})
}
#[inline]
fn yxx(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.x,
z: self.x,
})
}
#[inline]
fn yxy(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.x,
z: self.y,
})
}
#[inline]
fn yxz(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.x,
z: self.z,
})
}
#[inline]
fn yxw(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.x,
z: self.w,
})
}
#[inline]
fn yyx(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.y,
z: self.x,
})
}
#[inline]
fn yyy(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.y,
z: self.y,
})
}
#[inline]
fn yyz(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.y,
z: self.z,
})
}
#[inline]
fn yyw(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.y,
z: self.w,
})
}
#[inline]
fn yzx(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.z,
z: self.x,
})
}
#[inline]
fn yzy(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.z,
z: self.y,
})
}
#[inline]
fn yzz(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.z,
z: self.z,
})
}
#[inline]
fn yzw(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.z,
z: self.w,
})
}
#[inline]
fn ywx(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.w,
z: self.x,
})
}
#[inline]
fn ywy(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.w,
z: self.y,
})
}
#[inline]
fn ywz(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.w,
z: self.z,
})
}
#[inline]
fn yww(self) -> UVec3 {
UVec3(XYZ {
x: self.y,
y: self.w,
z: self.w,
})
}
#[inline]
fn zxx(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.x,
z: self.x,
})
}
#[inline]
fn zxy(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.x,
z: self.y,
})
}
#[inline]
fn zxz(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.x,
z: self.z,
})
}
#[inline]
fn zxw(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.x,
z: self.w,
})
}
#[inline]
fn zyx(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.y,
z: self.x,
})
}
#[inline]
fn zyy(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.y,
z: self.y,
})
}
#[inline]
fn zyz(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.y,
z: self.z,
})
}
#[inline]
fn zyw(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.y,
z: self.w,
})
}
#[inline]
fn zzx(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.z,
z: self.x,
})
}
#[inline]
fn zzy(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.z,
z: self.y,
})
}
#[inline]
fn zzz(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.z,
z: self.z,
})
}
#[inline]
fn zzw(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.z,
z: self.w,
})
}
#[inline]
fn zwx(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.w,
z: self.x,
})
}
#[inline]
fn zwy(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.w,
z: self.y,
})
}
#[inline]
fn zwz(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.w,
z: self.z,
})
}
#[inline]
fn zww(self) -> UVec3 {
UVec3(XYZ {
x: self.z,
y: self.w,
z: self.w,
})
}
#[inline]
fn wxx(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.x,
z: self.x,
})
}
#[inline]
fn wxy(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.x,
z: self.y,
})
}
#[inline]
fn wxz(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.x,
z: self.z,
})
}
#[inline]
fn wxw(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.x,
z: self.w,
})
}
#[inline]
fn wyx(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.y,
z: self.x,
})
}
#[inline]
fn wyy(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.y,
z: self.y,
})
}
#[inline]
fn wyz(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.y,
z: self.z,
})
}
#[inline]
fn wyw(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.y,
z: self.w,
})
}
#[inline]
fn wzx(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.z,
z: self.x,
})
}
#[inline]
fn wzy(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.z,
z: self.y,
})
}
#[inline]
fn wzz(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.z,
z: self.z,
})
}
#[inline]
fn wzw(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.z,
z: self.w,
})
}
#[inline]
fn wwx(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.w,
z: self.x,
})
}
#[inline]
fn wwy(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.w,
z: self.y,
})
}
#[inline]
fn wwz(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.w,
z: self.z,
})
}
#[inline]
fn www(self) -> UVec3 {
UVec3(XYZ {
x: self.w,
y: self.w,
z: self.w,
})
}
#[inline]
fn xx(self) -> UVec2 {
UVec2(XY {
x: self.x,
y: self.x,
})
}
#[inline]
fn xy(self) -> UVec2 {
UVec2(XY {
x: self.x,
y: self.y,
})
}
#[inline]
fn xz(self) -> UVec2 {
UVec2(XY {
x: self.x,
y: self.z,
})
}
#[inline]
fn xw(self) -> UVec2 {
UVec2(XY {
x: self.x,
y: self.w,
})
}
#[inline]
fn yx(self) -> UVec2 {
UVec2(XY {
x: self.y,
y: self.x,
})
}
#[inline]
fn yy(self) -> UVec2 {
UVec2(XY {
x: self.y,
y: self.y,
})
}
#[inline]
fn yz(self) -> UVec2 {
UVec2(XY {
x: self.y,
y: self.z,
})
}
#[inline]
fn yw(self) -> UVec2 {
UVec2(XY {
x: self.y,
y: self.w,
})
}
#[inline]
fn zx(self) -> UVec2 {
UVec2(XY {
x: self.z,
y: self.x,
})
}
#[inline]
fn zy(self) -> UVec2 {
UVec2(XY {
x: self.z,
y: self.y,
})
}
#[inline]
fn zz(self) -> UVec2 {
UVec2(XY {
x: self.z,
y: self.z,
})
}
#[inline]
fn zw(self) -> UVec2 {
UVec2(XY {
x: self.z,
y: self.w,
})
}
#[inline]
fn wx(self) -> UVec2 {
UVec2(XY {
x: self.w,
y: self.x,
})
}
#[inline]
fn wy(self) -> UVec2 {
UVec2(XY {
x: self.w,
y: self.y,
})
}
#[inline]
fn wz(self) -> UVec2 {
UVec2(XY {
x: self.w,
y: self.z,
})
}
#[inline]
fn ww(self) -> UVec2 {
UVec2(XY {
x: self.w,
y: self.w,
})
}
}
| 19.027645 | 48 | 0.313071 |
907b4a0106d3e566605363c7ff4587ac7b415b2e | 21,981 | #![doc = "generated by AutoRust 0.1.0"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
/// One page of a paged collection of [`Event`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Events {
    /// The events in this page of results.
    pub value: Vec<Event>,
    /// URL of the next page of results; omitted from JSON when `None`.
    #[serde(rename = "nextLink", skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A health event (service issue, planned maintenance, advisory, or RCA).
///
/// The common `Resource` fields (defined elsewhere in this crate) are flattened
/// into this object's JSON representation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Event {
    #[serde(flatten)]
    pub resource: Resource,
    /// Event payload; omitted from JSON when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<event::Properties>,
}
/// Types nested under [`Event`].
pub mod event {
    use super::*;
    /// Detailed properties of a health event. All fields are optional on the
    /// wire; `Option`/empty-`Vec` fields are skipped during serialization.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "eventType", skip_serializing_if = "Option::is_none")]
        pub event_type: Option<properties::EventType>,
        #[serde(rename = "eventSource", skip_serializing_if = "Option::is_none")]
        pub event_source: Option<properties::EventSource>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub status: Option<properties::Status>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub title: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub summary: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub header: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub level: Option<properties::Level>,
        #[serde(rename = "eventLevel", skip_serializing_if = "Option::is_none")]
        pub event_level: Option<properties::EventLevel>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub article: Option<properties::Article>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        pub links: Vec<Link>,
        // Timestamps below are kept as raw strings, not parsed date types.
        #[serde(rename = "impactStartTime", skip_serializing_if = "Option::is_none")]
        pub impact_start_time: Option<String>,
        #[serde(rename = "impactMitigationTime", skip_serializing_if = "Option::is_none")]
        pub impact_mitigation_time: Option<String>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        pub impact: Vec<Impact>,
        #[serde(rename = "recommendedActions", skip_serializing_if = "Option::is_none")]
        pub recommended_actions: Option<properties::RecommendedActions>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        pub faqs: Vec<Faq>,
        #[serde(rename = "isHIR", skip_serializing_if = "Option::is_none")]
        pub is_hir: Option<bool>,
        #[serde(rename = "enableMicrosoftSupport", skip_serializing_if = "Option::is_none")]
        pub enable_microsoft_support: Option<bool>,
        #[serde(rename = "enableChatWithUs", skip_serializing_if = "Option::is_none")]
        pub enable_chat_with_us: Option<bool>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub priority: Option<i64>,
        #[serde(rename = "lastUpdateTime", skip_serializing_if = "Option::is_none")]
        pub last_update_time: Option<String>,
        #[serde(rename = "hirStage", skip_serializing_if = "Option::is_none")]
        pub hir_stage: Option<String>,
    }
    /// Enumerations and helper types used by [`Properties`].
    pub mod properties {
        use super::*;
        /// Kind of health event. Variants serialize under their Rust names,
        /// except `Rca`, which is renamed to `"RCA"` on the wire.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum EventType {
            ServiceIssue,
            PlannedMaintenance,
            HealthAdvisory,
            #[serde(rename = "RCA")]
            Rca,
        }
        /// Which health subsystem reported the event.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum EventSource {
            ResourceHealth,
            ServiceHealth,
        }
        /// Current lifecycle state of the event.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum Status {
            Active,
            Resolved,
        }
        /// Event severity (two-level scale).
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum Level {
            Critical,
            Warning,
        }
        /// Event severity (three-level scale, adds `Informational`).
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum EventLevel {
            Critical,
            Warning,
            Informational,
        }
        /// Long-form article content attached to an event.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct Article {
            #[serde(rename = "articleContent", skip_serializing_if = "Option::is_none")]
            pub article_content: Option<String>,
        }
        /// Recommended actions for an event. `actions` entries are kept as
        /// raw JSON values (shape not fixed by this schema).
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct RecommendedActions {
            #[serde(skip_serializing_if = "Option::is_none")]
            pub message: Option<String>,
            #[serde(skip_serializing_if = "Vec::is_empty")]
            pub actions: Vec<serde_json::Value>,
            #[serde(rename = "localeCode", skip_serializing_if = "Option::is_none")]
            pub locale_code: Option<String>,
        }
    }
}
/// A UI link (button or hyperlink) attached to an event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Link {
    // `type` is a Rust keyword, hence the trailing underscore + rename.
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<link::Type>,
    #[serde(rename = "displayText", skip_serializing_if = "Option::is_none")]
    pub display_text: Option<link::DisplayText>,
    #[serde(rename = "extensionName", skip_serializing_if = "Option::is_none")]
    pub extension_name: Option<String>,
    #[serde(rename = "bladeName", skip_serializing_if = "Option::is_none")]
    pub blade_name: Option<String>,
    /// Free-form link parameters; shape not fixed by this schema.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub parameters: Option<serde_json::Value>,
}
/// Types nested under [`Link`].
pub mod link {
    use super::*;
    /// Presentation style of the link.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Type {
        Button,
        Hyperlink,
    }
    /// Display text with an optional localized variant.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct DisplayText {
        #[serde(skip_serializing_if = "Option::is_none")]
        pub value: Option<String>,
        #[serde(rename = "localizedValue", skip_serializing_if = "Option::is_none")]
        pub localized_value: Option<String>,
    }
}
/// A frequently-asked-question entry attached to an event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Faq {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub question: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub answer: Option<String>,
    /// Locale of the question/answer text.
    #[serde(rename = "localeCode", skip_serializing_if = "Option::is_none")]
    pub locale_code: Option<String>,
}
/// Impact of an event on one service, broken down by region.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Impact {
    #[serde(rename = "impactedService", skip_serializing_if = "Option::is_none")]
    pub impacted_service: Option<String>,
    #[serde(rename = "impactedRegions", skip_serializing_if = "Vec::is_empty")]
    pub impacted_regions: Vec<ImpactedServiceRegion>,
}
/// Per-region impact details for an impacted service.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImpactedServiceRegion {
    #[serde(rename = "impactedRegion", skip_serializing_if = "Option::is_none")]
    pub impacted_region: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<impacted_service_region::Status>,
    #[serde(rename = "impactedSubscriptions", skip_serializing_if = "Vec::is_empty")]
    pub impacted_subscriptions: Vec<String>,
    // Timestamp kept as a raw string, not a parsed date type.
    #[serde(rename = "lastUpdateTime", skip_serializing_if = "Option::is_none")]
    pub last_update_time: Option<String>,
    /// Chronological update entries for this region.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub updates: Vec<Update>,
}
/// Types nested under [`ImpactedServiceRegion`].
pub mod impacted_service_region {
    use super::*;
    /// Impact state for the region.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        Active,
        Resolved,
    }
}
/// A single timestamped update entry in an event's history.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Update {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub summary: Option<String>,
    #[serde(rename = "updateDateTime", skip_serializing_if = "Option::is_none")]
    pub update_date_time: Option<String>,
}
/// One page of a paged collection of [`ImpactedResourceStatus`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImpactedResourceListResult {
    pub value: Vec<ImpactedResourceStatus>,
    /// URL of the next page of results; omitted from JSON when `None`.
    #[serde(rename = "nextLink", skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Health status of a resource impacted by an event.
///
/// The common `Resource` fields (defined elsewhere in this crate) are flattened
/// into this object's JSON representation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImpactedResourceStatus {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<impacted_resource_status::Properties>,
}
/// Types nested under [`ImpactedResourceStatus`].
pub mod impacted_resource_status {
    use super::*;
    /// Status details of an impacted resource.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "availabilityState", skip_serializing_if = "Option::is_none")]
        pub availability_state: Option<properties::AvailabilityState>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub title: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub summary: Option<String>,
        #[serde(rename = "reasonType", skip_serializing_if = "Option::is_none")]
        pub reason_type: Option<properties::ReasonType>,
        // Field name ("occuredTime") is misspelled on the wire; the rename
        // must match the service contract, so it is kept as-is.
        #[serde(rename = "occuredTime", skip_serializing_if = "Option::is_none")]
        pub occured_time: Option<String>,
    }
    /// Enumerations used by [`Properties`].
    pub mod properties {
        use super::*;
        /// Availability of the impacted resource.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum AvailabilityState {
            Available,
            Unavailable,
            Degraded,
            Unknown,
        }
        /// Cause classification for the unavailability.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum ReasonType {
            Unplanned,
            Planned,
            UserInitiated,
        }
    }
}
/// One page of a paged collection of [`AvailabilityStatus`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilityStatusListResult {
    pub value: Vec<AvailabilityStatus>,
    /// URL of the next page of results; omitted from JSON when `None`.
    #[serde(rename = "nextLink", skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// Availability status of a resource. Unlike [`Event`], the common resource
/// fields (`id`, `name`, `type`, `location`) are declared inline here rather
/// than flattened from `Resource`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilityStatus {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    // `type` is a Rust keyword, hence the trailing underscore + rename.
    #[serde(rename = "type", skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub location: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<availability_status::Properties>,
}
/// Nested property types for [`AvailabilityStatus`].
pub mod availability_status {
    use super::*;
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Properties {
        #[serde(rename = "availabilityState", skip_serializing_if = "Option::is_none")]
        pub availability_state: Option<properties::AvailabilityState>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub summary: Option<String>,
        #[serde(rename = "detailedStatus", skip_serializing_if = "Option::is_none")]
        pub detailed_status: Option<String>,
        /// Free-form reason string (unlike the typed `ReasonType` enum used elsewhere).
        #[serde(rename = "reasonType", skip_serializing_if = "Option::is_none")]
        pub reason_type: Option<String>,
        #[serde(rename = "rootCauseAttributionTime", skip_serializing_if = "Option::is_none")]
        pub root_cause_attribution_time: Option<String>,
        #[serde(rename = "healthEventType", skip_serializing_if = "Option::is_none")]
        pub health_event_type: Option<String>,
        #[serde(rename = "healthEventCause", skip_serializing_if = "Option::is_none")]
        pub health_event_cause: Option<String>,
        #[serde(rename = "healthEventCategory", skip_serializing_if = "Option::is_none")]
        pub health_event_category: Option<String>,
        #[serde(rename = "healthEventId", skip_serializing_if = "Option::is_none")]
        pub health_event_id: Option<String>,
        #[serde(rename = "resolutionETA", skip_serializing_if = "Option::is_none")]
        pub resolution_eta: Option<String>,
        /// NOTE(review): "occured" (sic) matches the wire field name.
        #[serde(rename = "occuredTime", skip_serializing_if = "Option::is_none")]
        pub occured_time: Option<String>,
        #[serde(rename = "reasonChronicity", skip_serializing_if = "Option::is_none")]
        pub reason_chronicity: Option<properties::ReasonChronicity>,
        #[serde(rename = "reportedTime", skip_serializing_if = "Option::is_none")]
        pub reported_time: Option<String>,
        #[serde(rename = "recentlyResolved", skip_serializing_if = "Option::is_none")]
        pub recently_resolved: Option<properties::RecentlyResolved>,
        /// NOTE(review): `skip_serializing_if = "Vec::is_empty"` without a
        /// matching `#[serde(default)]` means deserialization fails when the
        /// service omits this field entirely — verify against real payloads.
        #[serde(rename = "recommendedActions", skip_serializing_if = "Vec::is_empty")]
        pub recommended_actions: Vec<RecommendedAction>,
        /// Same `Vec::is_empty`-without-`default` caveat as `recommended_actions`.
        #[serde(rename = "serviceImpactingEvents", skip_serializing_if = "Vec::is_empty")]
        pub service_impacting_events: Vec<ServiceImpactingEvent>,
    }
    pub mod properties {
        use super::*;
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum AvailabilityState {
            Available,
            Unavailable,
            Degraded,
            Unknown,
        }
        /// Whether the outage is transient or persistent.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub enum ReasonChronicity {
            Transient,
            Persistent,
        }
        /// Details of a recently resolved unavailability window.
        #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
        pub struct RecentlyResolved {
            /// NOTE(review): spelled "occurred" here but "occured" elsewhere —
            /// both match their respective wire names; leave as-is.
            #[serde(rename = "unavailableOccurredTime", skip_serializing_if = "Option::is_none")]
            pub unavailable_occurred_time: Option<String>,
            #[serde(rename = "resolvedTime", skip_serializing_if = "Option::is_none")]
            pub resolved_time: Option<String>,
            #[serde(rename = "unavailabilitySummary", skip_serializing_if = "Option::is_none")]
            pub unavailability_summary: Option<String>,
        }
    }
}
/// A remediation action suggested to the user.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct RecommendedAction {
    /// Human-readable description of the action.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub action: Option<String>,
    #[serde(rename = "actionUrl", skip_serializing_if = "Option::is_none")]
    pub action_url: Option<String>,
    /// Display text for `action_url`.
    #[serde(rename = "actionUrlText", skip_serializing_if = "Option::is_none")]
    pub action_url_text: Option<String>,
}
/// A platform event that impacted the service's availability.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ServiceImpactingEvent {
    #[serde(rename = "eventStartTime", skip_serializing_if = "Option::is_none")]
    pub event_start_time: Option<String>,
    #[serde(rename = "eventStatusLastModifiedTime", skip_serializing_if = "Option::is_none")]
    pub event_status_last_modified_time: Option<String>,
    #[serde(rename = "correlationId", skip_serializing_if = "Option::is_none")]
    pub correlation_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub status: Option<service_impacting_event::Status>,
    #[serde(rename = "incidentProperties", skip_serializing_if = "Option::is_none")]
    pub incident_properties: Option<service_impacting_event::IncidentProperties>,
}
/// Nested types for [`ServiceImpactingEvent`].
pub mod service_impacting_event {
    use super::*;
    /// Free-form status wrapper (single `value` string on the wire).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Status {
        #[serde(skip_serializing_if = "Option::is_none")]
        pub value: Option<String>,
    }
    /// Properties of the underlying incident.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct IncidentProperties {
        #[serde(skip_serializing_if = "Option::is_none")]
        pub title: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub service: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub region: Option<String>,
        #[serde(rename = "incidentType", skip_serializing_if = "Option::is_none")]
        pub incident_type: Option<String>,
    }
}
/// Banner text shown for an emerging issue.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatusBanner {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    /// Cloud environment the banner applies to.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cloud: Option<String>,
    #[serde(rename = "lastModifiedTime", skip_serializing_if = "Option::is_none")]
    pub last_modified_time: Option<String>,
}
/// A region affected by an emerging issue.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ImpactedRegion {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
}
/// A service impacted by an emerging issue, plus the regions hit.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EmergingIssueImpact {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    /// NOTE(review): `Vec::is_empty` skip without `#[serde(default)]` —
    /// deserialization fails if the field is absent; verify against payloads.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub regions: Vec<ImpactedRegion>,
}
/// An active (or recently active) emerging-issue event.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct StatusActiveEvent {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    /// Identifier used to correlate updates about the same event.
    #[serde(rename = "trackingId", skip_serializing_if = "Option::is_none")]
    pub tracking_id: Option<String>,
    #[serde(rename = "startTime", skip_serializing_if = "Option::is_none")]
    pub start_time: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cloud: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub severity: Option<status_active_event::Severity>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stage: Option<status_active_event::Stage>,
    /// Whether the event has been published externally.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub published: Option<bool>,
    #[serde(rename = "lastModifiedTime", skip_serializing_if = "Option::is_none")]
    pub last_modified_time: Option<String>,
    /// NOTE(review): `Vec::is_empty` skip without `#[serde(default)]` —
    /// deserialization fails if the field is absent; verify against payloads.
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub impacts: Vec<EmergingIssueImpact>,
}
/// Nested enums for [`StatusActiveEvent`].
pub mod status_active_event {
    use super::*;
    /// Severity level of the event.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Severity {
        Information,
        Warning,
        Error,
    }
    /// Lifecycle stage of the event.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Stage {
        Active,
        Resolve,
        Archived,
    }
}
/// Result of a GET on an emerging-issue resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EmergingIssuesGetResult {
    /// Common resource envelope, inlined on the wire via `#[serde(flatten)]`.
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<EmergingIssue>,
}
/// The payload of an emerging issue: banners plus active events.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EmergingIssue {
    #[serde(rename = "refreshTimestamp", skip_serializing_if = "Option::is_none")]
    pub refresh_timestamp: Option<String>,
    /// NOTE(review): `Vec::is_empty` skip without `#[serde(default)]` on this
    /// and the next field — deserialization fails if they are absent.
    #[serde(rename = "statusBanners", skip_serializing_if = "Vec::is_empty")]
    pub status_banners: Vec<StatusBanner>,
    #[serde(rename = "statusActiveEvents", skip_serializing_if = "Vec::is_empty")]
    pub status_active_events: Vec<StatusActiveEvent>,
}
/// Paged list of [`EmergingIssuesGetResult`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct EmergingIssueListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<EmergingIssuesGetResult>,
    #[serde(rename = "nextLink", skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// List of REST operations supported by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
    /// Required on the wire — no `skip`/`default`.
    pub value: Vec<Operation>,
}
/// A single REST operation exposed by the provider.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Operation {
    /// Fully qualified operation name, e.g. `Provider/resource/verb`
    /// (presumed from convention — confirm against the service).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
/// Nested types for [`Operation`].
pub mod operation {
    use super::*;
    /// Localized, human-readable description of an operation.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub struct Display {
        #[serde(skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
}
/// Error payload returned by the service.
///
/// All fields use unconditional `#[serde(skip_serializing)]`: they are
/// populated on deserialization only and are never written back out.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct ErrorResponse {
    #[serde(skip_serializing)]
    pub code: Option<String>,
    #[serde(skip_serializing)]
    pub message: Option<String>,
    #[serde(skip_serializing)]
    pub details: Option<String>,
}
/// Paged list of [`MetadataEntity`] values.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetadataEntityListResult {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<MetadataEntity>,
    #[serde(rename = "nextLink", skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
/// A metadata entity resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetadataEntity {
    /// Common resource envelope, inlined on the wire via `#[serde(flatten)]`.
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub properties: Option<MetadataEntityProperties>,
}
/// Properties of a [`MetadataEntity`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetadataEntityProperties {
    #[serde(rename = "displayName", skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    /// NOTE(review): `Vec::is_empty` skip without `#[serde(default)]` on the
    /// three list fields below — deserialization fails if any is absent.
    #[serde(rename = "dependsOn", skip_serializing_if = "Vec::is_empty")]
    pub depends_on: Vec<String>,
    #[serde(rename = "applicableScenarios", skip_serializing_if = "Vec::is_empty")]
    pub applicable_scenarios: Vec<String>,
    #[serde(rename = "supportedValues", skip_serializing_if = "Vec::is_empty")]
    pub supported_values: Vec<MetadataSupportedValueDetail>,
}
/// One allowed value for a metadata entity.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MetadataSupportedValueDetail {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(rename = "displayName", skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
}
/// Common resource envelope (`id`, `name`, `type`) shared by the models above.
///
/// Fields are read-only on the wire: `#[serde(skip_serializing)]` means they
/// are filled in on deserialization but never serialized back.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct Resource {
    #[serde(skip_serializing)]
    pub id: Option<String>,
    #[serde(skip_serializing)]
    pub name: Option<String>,
    /// Serialized as `type` (a Rust reserved word, hence `type_`).
    #[serde(rename = "type", skip_serializing)]
    pub type_: Option<String>,
}
| 42.764591 | 97 | 0.670488 |
b9a8e0804da2f573bb049eba14b90059dbf83145 | 1,239 | //! `CreateAccount` subcommand
#![allow(clippy::never_loop)]
use std::path::PathBuf;
use std::process::exit;
use abscissa_core::{Command, Options, Runnable};
use anyhow::Error;
use libra_types::transaction::SignedTransaction;
use ol_types::config::TxType;
use crate::{entrypoint, submit_tx::{TxParams, maybe_submit, tx_params_wrapper}};
/// `Demo` subcommand: submits a no-op demo transaction.
/// (Original comment said `CreateAccount` — a copy-paste error; this struct
/// drives `demo_tx` below, not account creation.)
#[derive(Command, Debug, Default, Options)]
pub struct DemoCmd {}
impl Runnable for DemoCmd {
    /// CLI entry point: reads the shared entrypoint args, builds "cheap"
    /// transaction parameters, then submits (or just saves, when `no_send`
    /// is set) the demo transaction.
    ///
    /// On submission failure the error is printed and the process exits
    /// with status 1. Note the `.unwrap()` on `tx_params_wrapper` panics if
    /// parameters cannot be built — presumably acceptable for a CLI, but a
    /// friendlier error message could be considered.
    fn run(&self) {
        let entry_args = entrypoint::get_args();
        let tx_params = tx_params_wrapper(TxType::Cheap).unwrap();
        match demo_tx(
            &tx_params,
            entry_args.no_send,
            entry_args.save_path
        ) {
            Ok(r) => {
                // Success: print the signed transaction / submission result.
                println!("{:?}", &r);
            },
            Err(e) => {
                println!("ERROR: could not submit demo transaction, message: \n{:?}", &e);
                exit(1);
            },
        }
    }
}
/// a no-op tx to test transctions
pub fn demo_tx(tx_params: &TxParams, no_send: bool, save_path: Option<PathBuf>) -> Result<SignedTransaction, Error>{
maybe_submit(
transaction_builder::encode_demo_e2e_script(42),
&tx_params,
no_send,
save_path
)
} | 26.361702 | 116 | 0.612591 |
18858f9f33f10b35e4b5eb2c08090a1fcdcdecbe | 2,108 | mod generated_some_struct;
#[cfg(test)]
mod test {
use std::collections::HashMap;
use std::fs::{create_dir, remove_dir_all, File};
use std::io::{Read, Write};
use std::process::Command;
#[test]
fn test_equals() {
env_logger::init();
dotenv::dotenv().unwrap();
let file = std::env::current_dir().unwrap().join("src");
let envs: HashMap<_, _> = std::env::vars().collect();
let output = Command::new("cargo")
.envs(envs)
.arg("expand")
.arg("generated_some_struct")
.output()
.unwrap();
if output.stdout.is_empty() {
panic!("{:#?}", output);
}
let rs = "generated_some_struct.rs";
let temp_dir = file.join("temp");
let _ = remove_dir_all(&temp_dir);
create_dir(&temp_dir).unwrap();
let mut f = File::create(temp_dir.join(rs)).unwrap();
f.write(&output.stdout).unwrap();
let mut source_str = String::new();
File::open(file.join("gen").join(rs))
.unwrap()
.read_to_string(&mut source_str)
.unwrap();
let mut target_str = String::new();
File::open(temp_dir.join(rs))
.unwrap()
.read_to_string(&mut target_str)
.unwrap();
// Remove weird auto indenting when a file is in the module system
let replaced = |s: &str| {
s.replace("\n", "")
.replace("\t", "")
.replace(" ", "")
.trim()
// Needed because else still newlines are shown
.split_whitespace()
.into_iter()
.collect::<Vec<_>>()
.join("")
};
let target_replaced = replaced(&target_str);
let replaced = replaced(&source_str);
if replaced == target_replaced {
// Ok, equal
} else {
panic!(
"Not equal, target: \n{}\n, replaced: \n{}",
target_replaced, replaced
);
}
}
}
| 27.736842 | 74 | 0.487666 |
eb0a72825a93491f95dd0a73b14eeeeb6ad67b8b | 2,003 | #![allow(unused_imports)]
use super::*;
use wasm_bindgen::prelude::*;
// Generated web-sys-style binding: declares the JS dictionary type. The
// `extends = ::js_sys::Object` attribute below makes it a plain JS object
// wrapper; `js_name` maps it to the DOM's `ConstrainDOMStringParameters`.
#[wasm_bindgen]
extern "C" {
    # [ wasm_bindgen ( extends = :: js_sys :: Object , js_name = ConstrainDOMStringParameters ) ]
    #[derive(Debug, Clone, PartialEq, Eq)]
    #[doc = "The `ConstrainDomStringParameters` dictionary."]
    #[doc = ""]
    #[doc = "*This API requires the following crate features to be activated: `ConstrainDomStringParameters`*"]
    pub type ConstrainDomStringParameters;
}
impl ConstrainDomStringParameters {
#[doc = "Construct a new `ConstrainDomStringParameters`."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `ConstrainDomStringParameters`*"]
pub fn new() -> Self {
#[allow(unused_mut)]
let mut ret: Self = ::wasm_bindgen::JsCast::unchecked_into(::js_sys::Object::new());
ret
}
#[doc = "Change the `exact` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `ConstrainDomStringParameters`*"]
pub fn exact(&mut self, val: &::wasm_bindgen::JsValue) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("exact"), &JsValue::from(val));
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
#[doc = "Change the `ideal` field of this object."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `ConstrainDomStringParameters`*"]
pub fn ideal(&mut self, val: &::wasm_bindgen::JsValue) -> &mut Self {
use wasm_bindgen::JsValue;
let r = ::js_sys::Reflect::set(self.as_ref(), &JsValue::from("ideal"), &JsValue::from(val));
debug_assert!(
r.is_ok(),
"setting properties should never fail on our dictionary objects"
);
let _ = r;
self
}
}
| 40.877551 | 111 | 0.619071 |
de43d1e5531b2d809f9d752d9c353effdfb0c4f1 | 47,338 | use super::*;
use crate::{
bb,
pac::{self, DMA1, DMA2, RCC},
serial::{Rx, Tx},
};
use core::ops::Deref;
pub(crate) mod sealed {
/// Converts value to bits for setting a register value.
pub trait Bits<T> {
/// Returns the bit value.
fn bits(self) -> T;
}
pub trait Sealed {}
}
use sealed::{Bits, Sealed};
/// Trait for DMA streams types.
pub trait Stream: Sealed {
/// Number of the register stream.
const NUMBER: usize;
/// Clear all interrupts for the DMA stream.
fn clear_interrupts(&mut self);
/// Clear transfer complete interrupt (tcif) for the DMA stream.
fn clear_transfer_complete_interrupt(&mut self);
/// Clear half transfer interrupt (htif) for the DMA stream.
fn clear_half_transfer_interrupt(&mut self);
/// Clear transfer error interrupt (teif) for the DMA stream.
fn clear_transfer_error_interrupt(&mut self);
/// Clear direct mode error interrupt (dmeif) for the DMA stream.
fn clear_direct_mode_error_interrupt(&mut self);
/// Clear fifo error interrupt (feif) for the DMA stream.
fn clear_fifo_error_interrupt(&mut self);
/// Get transfer complete flag.
fn get_transfer_complete_flag() -> bool;
/// Get half transfer flag.
fn get_half_transfer_flag() -> bool;
/// Set the peripheral address (par) for the DMA stream.
fn set_peripheral_address(&mut self, value: u32);
/// Set the memory address (m0ar) for the DMA stream.
fn set_memory_address(&mut self, value: u32);
/// Get the memory address (m0ar) for the DMA stream.
fn get_memory_address(&self) -> u32;
/// Set the double buffer address (m1ar) for the DMA stream.
fn set_memory_double_buffer_address(&mut self, value: u32);
/// Get the double buffer address (m1ar) for the DMA stream.
fn get_memory_double_buffer_address(&self) -> u32;
/// Set the number of transfers (ndt) for the DMA stream.
fn set_number_of_transfers(&mut self, value: u16);
/// Get the number of transfers (ndt) for the DMA stream.
fn get_number_of_transfers() -> u16;
/// Enable the DMA stream.
///
/// # Safety
///
/// The user must ensure that all registers are properly configured.
unsafe fn enable(&mut self);
/// Returns the state of the DMA stream.
fn is_enabled() -> bool;
/// Disable the DMA stream.
///
/// Disabling the stream during an on-going transfer needs to be performed in a certain way to
/// prevent problems if the stream is to be re-enabled shortly after, because of that, this
/// method will also clear all the stream's interrupt flags if the stream is active.
fn disable(&mut self);
/// Set the channel for the (chsel) the DMA stream.
fn set_channel<C: Channel>(&mut self, channel: C);
/// Set the priority (pl) the DMA stream.
fn set_priority(&mut self, priority: config::Priority);
/// Set the memory size (msize) for the DMA stream.
///
/// # Safety
/// This must have the same alignment of the buffer used in the transfer.
/// Valid values:
/// * 0 -> byte
/// * 1 -> half word
/// * 2 -> word
unsafe fn set_memory_size(&mut self, size: u8);
/// Set the peripheral memory size (psize) for the DMA stream.
///
/// # Safety
/// This must have the same alignment of the peripheral data used in the transfer.
/// Valid values:
/// * 0 -> byte
/// * 1 -> half word
/// * 2 -> word
unsafe fn set_peripheral_size(&mut self, size: u8);
/// Enable/disable memory increment (minc) for the DMA stream.
fn set_memory_increment(&mut self, increment: bool);
/// Enable/disable peripheral increment (pinc) for the DMA stream.
fn set_peripheral_increment(&mut self, increment: bool);
/// Set the direction (dir) of the DMA stream.
fn set_direction<D: Direction>(&mut self, direction: D);
/// Convenience method to configure the 4 common interrupts for the DMA stream.
fn set_interrupts_enable(
&mut self,
transfer_complete: bool,
half_transfer: bool,
transfer_error: bool,
direct_mode_error: bool,
);
/// Convenience method to get the value of the 4 common interrupts for the DMA stream.
/// The order of the returns are: `transfer_complete`, `half_transfer`, `transfer_error` and
/// `direct_mode_error`.
fn get_interrupts_enable() -> (bool, bool, bool, bool);
/// Enable/disable the transfer complete interrupt (tcie) of the DMA stream.
fn set_transfer_complete_interrupt_enable(&mut self, transfer_complete_interrupt: bool);
/// Enable/disable the half transfer interrupt (htie) of the DMA stream.
fn set_half_transfer_interrupt_enable(&mut self, half_transfer_interrupt: bool);
/// Enable/disable the transfer error interrupt (teie) of the DMA stream.
fn set_transfer_error_interrupt_enable(&mut self, transfer_error_interrupt: bool);
/// Enable/disable the direct mode error interrupt (dmeie) of the DMA stream.
fn set_direct_mode_error_interrupt_enable(&mut self, direct_mode_error_interrupt: bool);
/// Enable/disable the fifo error interrupt (feie) of the DMA stream.
fn set_fifo_error_interrupt_enable(&mut self, fifo_error_interrupt: bool);
/// Enable/disable the double buffer (dbm) of the DMA stream.
fn set_double_buffer(&mut self, double_buffer: bool);
/// Set the fifo threshold (fcr.fth) of the DMA stream.
fn set_fifo_threshold(&mut self, fifo_threshold: config::FifoThreshold);
/// Enable/disable the fifo (dmdis) of the DMA stream.
fn set_fifo_enable(&mut self, fifo_enable: bool);
/// Set memory burst mode (mburst) of the DMA stream.
fn set_memory_burst(&mut self, memory_burst: config::BurstMode);
/// Set peripheral burst mode (pburst) of the DMA stream.
fn set_peripheral_burst(&mut self, peripheral_burst: config::BurstMode);
/// Get the current fifo level (fs) of the DMA stream.
fn fifo_level() -> FifoLevel;
/// Get which buffer is currently in use by the DMA.
fn current_buffer() -> CurrentBuffer;
}
/// DMA direction (marker types convertible to their register bit pattern
/// via the `Bits<u8>` supertrait).
pub trait Direction: Bits<u8> {
    /// Creates a new instance of the type.
    fn new() -> Self;
    /// Returns the `DmaDirection` of the type.
    fn direction() -> DmaDirection;
}
/// Get an address and memory size the DMA can use.
///
/// # Safety
///
/// Both the memory size and the address must be correct for the specific peripheral and for the
/// DMA.
pub unsafe trait PeriAddress<T> {
    /// Memory size of the peripheral.
    // NOTE(review): the commented-out `= T` default is left in place —
    // associated-type defaults are unstable, so implementors (see the
    // `address!` macro) set `MemSize` explicitly instead.
    type MemSize;// = T;
    /// Returns the address to be used by the DMA stream.
    fn address(&self) -> u32;
}
// Convenience macro for implementing addresses on peripherals.
// For each `(Peripheral, register, Size)` tuple it emits an unsafe
// `PeriAddress<Size>` impl whose `address()` is the raw address of the named
// data register and whose `MemSize` is `Size`.
macro_rules! address {
    ($(($peripheral:ty, $register:ident, $size: ty)),+ $(,)*) => {
        $(
            unsafe impl PeriAddress<$size> for $peripheral {
                #[inline(always)]
                fn address(&self) -> u32 {
                    // Address of the peripheral's data register.
                    &self.$register as *const _ as u32
                }
                type MemSize = $size;
            }
        )+
    };
}
// Seal the two DMA peripherals so only they can implement the traits above.
impl Sealed for DMA1 {}
impl Sealed for DMA2 {}
// On parts with a single DMA-register layout variant, the canonical
// RegisterBlock comes from DMA1; on the rest it comes from DMA2.
#[cfg(not(any(
    feature = "stm32f411",
    feature = "stm32f413",
    feature = "stm32f423",
    feature = "stm32f410"
)))]
/// Type alias to a DMA RegisterBlock.
pub type DMARegisterBlock = pac::dma2::RegisterBlock;
#[cfg(any(
    feature = "stm32f411",
    feature = "stm32f413",
    feature = "stm32f423",
    feature = "stm32f410"
))]
/// Type alias to a DMA RegisterBlock.
pub type DMARegisterBlock = pac::dma1::RegisterBlock;
/// Trait that represents an instance of a DMA peripheral.
pub trait Instance: Deref<Target = DMARegisterBlock> + Sealed {
    /// Gives a pointer to the RegisterBlock.
    fn ptr() -> *const DMARegisterBlock;
}
impl Instance for DMA1 {
    #[inline(always)]
    fn ptr() -> *const DMARegisterBlock {
        DMA1::ptr()
    }
}
impl Instance for DMA2 {
    #[inline(always)]
    fn ptr() -> *const DMARegisterBlock {
        DMA2::ptr()
    }
}
/// Trait for peripheral's clock enabling.
pub trait RccEnable: Sealed {
    /// Enable the peripheral's clock in the RCC.
    fn rcc_enable(&self);
}
impl RccEnable for pac::DMA1 {
    fn rcc_enable(&self) {
        unsafe {
            //NOTE(unsafe) this reference will only be used for atomic writes with no side effects
            let rcc = &(*RCC::ptr());
            // Enable the DMA1 clock, then pulse its reset line — bit 21 of
            // AHB1ENR / AHB1RSTR. (The previous "timer peripheral" comment
            // was a copy-paste error; this code only touches DMA bits.)
            bb::set(&rcc.ahb1enr, 21);
            bb::set(&rcc.ahb1rstr, 21);
            bb::clear(&rcc.ahb1rstr, 21);
        }
    }
}
impl RccEnable for pac::DMA2 {
    fn rcc_enable(&self) {
        unsafe {
            //NOTE(unsafe) this reference will only be used for atomic writes with no side effects
            let rcc = &(*RCC::ptr());
            // Enable the DMA2 clock, then pulse its reset line — bit 22 of
            // AHB1ENR / AHB1RSTR. (The previous "timer peripheral" comment
            // was a copy-paste error; this code only touches DMA bits.)
            bb::set(&rcc.ahb1enr, 22);
            bb::set(&rcc.ahb1rstr, 22);
            bb::clear(&rcc.ahb1rstr, 22);
        }
    }
}
macro_rules! tim_channels {
($($name:ident),+ $(,)*) => {
$(
/// Wrapper type that indicates which register of the contained timer to use for DMA.
pub struct $name<T> (T);
impl<T> Deref for $name<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
&self.0
}
}
)+
};
}
/// A channel that can be configured on a DMA stream.
pub trait Channel: Bits<u8> {
/// Returns a new instance of the type.
fn new() -> Self;
}
/// Trait to mark a set of Stream, Channel, Peripheral and Direction as correct together.
///
/// Implemented in bulk by the `dma_map!` tables below, which mirror the
/// device's DMA request mapping.
///
/// # Safety
///
/// Memory corruption might occur if this trait is implemented for an invalid combination.
pub unsafe trait DMASet {}
// Instantiate the register wrappers referenced by the mapping tables below.
tim_channels!(CCR1, CCR2, CCR3, CCR4, DMAR, ARR);
/// Marks each `(Stream, Channel, Peripheral, Direction)` tuple as a valid DMA
/// request mapping by implementing the unsafe `DMASet` marker trait for it.
/// Entries are transcribed from the device reference manual's DMA request
/// mapping tables; an incorrect entry would allow an invalid transfer setup.
macro_rules! dma_map {
    ($(($Stream:ty, $channel:ty, $Peripheral:ty, $dir:ty)),+ $(,)*) => {
        $(
            unsafe impl DMASet for ($Stream, $channel, $Peripheral, $dir) {}
        )+
    };
}
#[cfg(any(
feature = "stm32f401",
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream0<DMA1>, Channel2, CCR1<pac::TIM4>, MemoryToPeripheral), //TIM4_CH1
(Stream0<DMA1>, Channel2, CCR1<pac::TIM4>, PeripheralToMemory), //TIM4_CH1
(Stream2<DMA1>, Channel5, CCR4<pac::TIM3>, MemoryToPeripheral), //TIM3_CH4
(Stream2<DMA1>, Channel5, CCR4<pac::TIM3>, PeripheralToMemory), //TIM3_CH4
(Stream2<DMA1>, Channel5, DMAR<pac::TIM3>, MemoryToPeripheral), //TIM3_UP
(Stream2<DMA1>, Channel5, DMAR<pac::TIM3>, PeripheralToMemory), //TIM3_UP
(Stream3<DMA1>, Channel2, CCR2<pac::TIM4>, MemoryToPeripheral), //TIM4_CH2
(Stream3<DMA1>, Channel2, CCR2<pac::TIM4>, PeripheralToMemory), //TIM4_CH2
(Stream4<DMA1>, Channel5, CCR1<pac::TIM3>, MemoryToPeripheral), //TIM3_CH1
(Stream4<DMA1>, Channel5, CCR1<pac::TIM3>, PeripheralToMemory), //TIM3_CH1
(Stream4<DMA1>, Channel5, DMAR<pac::TIM3>, MemoryToPeripheral), //TIM3_TRIG
(Stream4<DMA1>, Channel5, DMAR<pac::TIM3>, PeripheralToMemory), //TIM3_TRIG
(Stream5<DMA1>, Channel3, CCR1<pac::TIM2>, MemoryToPeripheral), //TIM2_CH1
(Stream5<DMA1>, Channel3, CCR1<pac::TIM2>, PeripheralToMemory), //TIM2_CH1
(Stream5<DMA1>, Channel5, CCR2<pac::TIM3>, MemoryToPeripheral), //TIM3_CH2
(Stream5<DMA1>, Channel5, CCR2<pac::TIM3>, PeripheralToMemory), //TIM3_CH2
(Stream6<DMA1>, Channel2, DMAR<pac::TIM4>, MemoryToPeripheral), //TIM4_UP
(Stream6<DMA1>, Channel2, DMAR<pac::TIM4>, PeripheralToMemory), //TIM4_UP
(Stream6<DMA1>, Channel3, CCR2<pac::TIM2>, MemoryToPeripheral), //TIM2_CH2
(Stream6<DMA1>, Channel3, CCR2<pac::TIM2>, PeripheralToMemory), //TIM2_CH2
(Stream6<DMA1>, Channel3, CCR4<pac::TIM2>, MemoryToPeripheral), //TIM2_CH4
(Stream6<DMA1>, Channel3, CCR4<pac::TIM2>, PeripheralToMemory), //TIM2_CH4
(Stream7<DMA1>, Channel2, CCR3<pac::TIM4>, MemoryToPeripheral), //TIM4_CH3
(Stream7<DMA1>, Channel2, CCR3<pac::TIM4>, PeripheralToMemory), //TIM4_CH3
(Stream7<DMA1>, Channel5, CCR3<pac::TIM3>, MemoryToPeripheral), //TIM3_CH3
(Stream7<DMA1>, Channel5, CCR3<pac::TIM3>, PeripheralToMemory), //TIM3_CH3
(Stream0<DMA1>, Channel0, pac::SPI3, PeripheralToMemory), //SPI3_RX
(Stream2<DMA1>, Channel0, pac::SPI3, PeripheralToMemory), //SPI3_RX
(Stream4<DMA1>, Channel3, pac::I2C3, MemoryToPeripheral), //I2C3_TX
(Stream5<DMA1>, Channel0, pac::SPI3, MemoryToPeripheral), //SPI3_TX
(Stream7<DMA1>, Channel0, pac::SPI3, MemoryToPeripheral), //SPI3_TX
);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(CCR1<pac::TIM4>, ccr1, u16),
(CCR4<pac::TIM3>, ccr4, u16),
(CCR1<pac::TIM2>, ccr1, u16),
(CCR1<pac::TIM3>, ccr1, u16),
(CCR2<pac::TIM2>, ccr2, u16),
(CCR2<pac::TIM3>, ccr2, u16),
(CCR2<pac::TIM4>, ccr2, u16),
(CCR3<pac::TIM3>, ccr3, u16),
(CCR3<pac::TIM4>, ccr3, u16),
(CCR4<pac::TIM2>, ccr4, u16),
(DMAR<pac::TIM3>, dmar, u16),
(DMAR<pac::TIM4>, dmar, u16),
(pac::SPI3, dr, u8),
(pac::SPI3, dr, u16),
(pac::I2C3, dr, u8),
);
// SDIO request mappings (the F410/F446 have no SDIO; F446 uses SDMMC below).
#[cfg(not(any(feature = "stm32f410", feature = "stm32f446")))]
dma_map!(
    (Stream3<DMA2>, Channel4, pac::SDIO, MemoryToPeripheral), //SDIO
    (Stream3<DMA2>, Channel4, pac::SDIO, PeripheralToMemory), //SDIO
    (Stream6<DMA2>, Channel4, pac::SDIO, MemoryToPeripheral), //SDIO
    (Stream6<DMA2>, Channel4, pac::SDIO, PeripheralToMemory), //SDIO
);
#[cfg(not(any(feature = "stm32f410", feature = "stm32f446")))]
address!((pac::SDIO, fifo, u32),);
#[cfg(feature = "stm32f446")]
dma_map!(
(Stream3<DMA2>, Channel4, pac::SDMMC, MemoryToPeripheral), //SDMMC
(Stream3<DMA2>, Channel4, pac::SDMMC, PeripheralToMemory), //SDMMC
(Stream6<DMA2>, Channel4, pac::SDMMC, MemoryToPeripheral), //SDMMC
(Stream6<DMA2>, Channel4, pac::SDMMC, PeripheralToMemory), //SDMMC
);
#[cfg(feature = "stm32f446")]
address!((pac::SDMMC, sdmmc_fifor, u32),);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream0<DMA1>, Channel6, CCR3<pac::TIM5>, MemoryToPeripheral), //TIM5_CH3
(Stream0<DMA1>, Channel6, CCR3<pac::TIM5>, PeripheralToMemory), //TIM5_CH3
(Stream0<DMA1>, Channel6, DMAR<pac::TIM5>, MemoryToPeripheral), //TIM5_UP
(Stream0<DMA1>, Channel6, DMAR<pac::TIM5>, PeripheralToMemory), //TIM5_UP
(Stream1<DMA1>, Channel6, CCR4<pac::TIM5>, MemoryToPeripheral), //TIM5_CH4
(Stream1<DMA1>, Channel6, CCR4<pac::TIM5>, PeripheralToMemory), //TIM5_CH4
(Stream1<DMA1>, Channel6, DMAR<pac::TIM5>, MemoryToPeripheral), //TIM5_TRIG
(Stream1<DMA1>, Channel6, DMAR<pac::TIM5>, PeripheralToMemory), //TIM5_TRIG
(Stream2<DMA1>, Channel6, CCR1<pac::TIM5>, MemoryToPeripheral), //TIM5_CH1
(Stream2<DMA1>, Channel6, CCR1<pac::TIM5>, PeripheralToMemory), //TIM5_CH1
(Stream3<DMA1>, Channel6, CCR4<pac::TIM5>, MemoryToPeripheral), //TIM5_CH4
(Stream3<DMA1>, Channel6, CCR4<pac::TIM5>, PeripheralToMemory), //TIM5_CH4
(Stream3<DMA1>, Channel6, DMAR<pac::TIM5>, MemoryToPeripheral), //TIM5_TRIG
(Stream3<DMA1>, Channel6, DMAR<pac::TIM5>, PeripheralToMemory), //TIM5_TRIG
(Stream4<DMA1>, Channel6, CCR2<pac::TIM5>, MemoryToPeripheral), //TIM5_CH2
(Stream4<DMA1>, Channel6, CCR2<pac::TIM5>, PeripheralToMemory), //TIM5_CH2
(Stream6<DMA1>, Channel6, DMAR<pac::TIM5>, MemoryToPeripheral), //TIM5_UP
(Stream6<DMA1>, Channel6, DMAR<pac::TIM5>, PeripheralToMemory), //TIM5_UP
(Stream0<DMA2>, Channel6, DMAR<pac::TIM1>, MemoryToPeripheral), //TIM1_TRIG
(Stream0<DMA2>, Channel6, DMAR<pac::TIM1>, PeripheralToMemory), //TIM1_TRIG
(Stream1<DMA2>, Channel6, CCR1<pac::TIM1>, MemoryToPeripheral), //TIM1_CH1
(Stream1<DMA2>, Channel6, CCR1<pac::TIM1>, PeripheralToMemory), //TIM1_CH1
(Stream2<DMA2>, Channel6, CCR2<pac::TIM1>, MemoryToPeripheral), //TIM1_CH2
(Stream2<DMA2>, Channel6, CCR2<pac::TIM1>, PeripheralToMemory), //TIM1_CH2
(Stream3<DMA2>, Channel6, CCR1<pac::TIM1>, MemoryToPeripheral), //TIM1_CH1
(Stream3<DMA2>, Channel6, CCR1<pac::TIM1>, PeripheralToMemory), //TIM1_CH1
(Stream4<DMA2>, Channel6, CCR4<pac::TIM1>, MemoryToPeripheral), //TIM1_CH4
(Stream4<DMA2>, Channel6, CCR4<pac::TIM1>, PeripheralToMemory), //TIM1_CH4
(Stream4<DMA2>, Channel6, DMAR<pac::TIM1>, MemoryToPeripheral), //TIM1_TRIG/COM
(Stream4<DMA2>, Channel6, DMAR<pac::TIM1>, PeripheralToMemory), //TIM1_TRIG/COM
(Stream5<DMA2>, Channel6, DMAR<pac::TIM1>, MemoryToPeripheral), //TIM1_UP
(Stream5<DMA2>, Channel6, DMAR<pac::TIM1>, PeripheralToMemory), //TIM1_UP
(Stream6<DMA2>, Channel0, CCR1<pac::TIM1>, MemoryToPeripheral), //TIM1_CH1
(Stream6<DMA2>, Channel0, CCR1<pac::TIM1>, PeripheralToMemory), //TIM1_CH1
(Stream6<DMA2>, Channel0, CCR2<pac::TIM1>, MemoryToPeripheral), //TIM1_CH2
(Stream6<DMA2>, Channel0, CCR2<pac::TIM1>, PeripheralToMemory), //TIM1_CH2
(Stream6<DMA2>, Channel0, CCR3<pac::TIM1>, MemoryToPeripheral), //TIM1_CH3
(Stream6<DMA2>, Channel0, CCR3<pac::TIM1>, PeripheralToMemory), //TIM1_CH3
(Stream6<DMA2>, Channel6, CCR3<pac::TIM1>, MemoryToPeripheral), //TIM1_CH3
(Stream6<DMA2>, Channel6, CCR3<pac::TIM1>, PeripheralToMemory), //TIM1_CH3
(Stream0<DMA1>, Channel1, pac::I2C1, PeripheralToMemory), //I2C1_RX
(Stream2<DMA1>, Channel7, pac::I2C2, PeripheralToMemory), //I2C2_RX
(Stream3<DMA1>, Channel0, pac::SPI2, PeripheralToMemory), //SPI2_RX
(Stream3<DMA1>, Channel7, pac::I2C2, PeripheralToMemory), //I2C2_RX
(Stream4<DMA1>, Channel0, pac::SPI2, MemoryToPeripheral), //SPI2_TX
(Stream5<DMA1>, Channel1, pac::I2C1, PeripheralToMemory), //I2C1_RX
(Stream5<DMA1>, Channel4, pac::USART2, PeripheralToMemory), //USART2_RX
(Stream5<DMA1>, Channel4, Rx<pac::USART2>, PeripheralToMemory), //USART2_RX
(Stream6<DMA1>, Channel4, pac::USART2, MemoryToPeripheral), //USART2_TX
(Stream6<DMA1>, Channel4, Tx<pac::USART2>, MemoryToPeripheral), //USART2_TX
(Stream7<DMA1>, Channel7, pac::I2C2, MemoryToPeripheral), //I2C2_TX
(Stream0<DMA2>, Channel0, pac::ADC1, PeripheralToMemory), //ADC1
(Stream0<DMA2>, Channel3, pac::SPI1, PeripheralToMemory), //SPI1_RX
(Stream1<DMA2>, Channel5, pac::USART6, PeripheralToMemory), //USART6_RX
(Stream1<DMA2>, Channel5, Rx<pac::USART6>, PeripheralToMemory), //USART6_RX
(Stream2<DMA2>, Channel3, pac::SPI1, PeripheralToMemory), //SPI1_RX
(Stream2<DMA2>, Channel4, pac::USART1, PeripheralToMemory), //USART1_RX
(Stream2<DMA2>, Channel4, Rx<pac::USART1>, PeripheralToMemory), //USART1_RX
(Stream2<DMA2>, Channel5, pac::USART6, PeripheralToMemory), //USART6_RX
(Stream2<DMA2>, Channel5, Rx<pac::USART6>, PeripheralToMemory), //USART6_RX
(Stream4<DMA2>, Channel0, pac::ADC1, PeripheralToMemory), //ADC1
(Stream5<DMA2>, Channel4, pac::USART1, PeripheralToMemory), //USART1_RX
(Stream5<DMA2>, Channel4, Rx<pac::USART1>, PeripheralToMemory), //USART1_RX
(Stream6<DMA2>, Channel5, pac::USART6, MemoryToPeripheral), //USART6_TX
(Stream6<DMA2>, Channel5, Tx<pac::USART6>, MemoryToPeripheral), //USART6_TX
(Stream7<DMA2>, Channel4, pac::USART1, MemoryToPeripheral), //USART1_TX
(Stream7<DMA2>, Channel4, Tx<pac::USART1>, MemoryToPeripheral), //USART1_TX
(Stream7<DMA2>, Channel5, pac::USART6, MemoryToPeripheral), //USART6_TX
(Stream7<DMA2>, Channel5, Tx<pac::USART6>, MemoryToPeripheral), //USART6_TX
(
Stream0<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream1<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream2<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream3<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream4<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream5<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream6<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream7<DMA2>,
Channel0,
MemoryToMemory<u8>,
MemoryToMemory<u8>
),
(
Stream0<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream1<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream2<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream3<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream4<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream5<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream6<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream7<DMA2>,
Channel0,
MemoryToMemory<u16>,
MemoryToMemory<u16>
),
(
Stream0<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream1<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream2<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream3<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream4<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream5<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream6<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
(
Stream7<DMA2>,
Channel0,
MemoryToMemory<u32>,
MemoryToMemory<u32>
),
);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(CCR1<pac::TIM1>, ccr1, u16),
(CCR2<pac::TIM1>, ccr2, u16),
(CCR3<pac::TIM1>, ccr3, u16),
(CCR4<pac::TIM1>, ccr4, u16),
(DMAR<pac::TIM1>, dmar, u16),
(CCR1<pac::TIM5>, ccr1, u16),
(CCR2<pac::TIM5>, ccr2, u16),
(CCR3<pac::TIM5>, ccr3, u16),
(CCR4<pac::TIM5>, ccr4, u16),
(DMAR<pac::TIM5>, dmar, u16),
(pac::ADC1, dr, u16),
(pac::I2C1, dr, u8),
(pac::I2C2, dr, u8),
(pac::SPI1, dr, u8),
(pac::SPI1, dr, u16),
(pac::SPI2, dr, u8),
(pac::SPI2, dr, u16),
(pac::USART1, dr, u8),
(pac::USART2, dr, u8),
(pac::USART6, dr, u8),
);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f446",
))]
dma_map!(
(Stream1<DMA1>, Channel1, pac::I2C3, PeripheralToMemory), //I2C3_RX
(Stream2<DMA1>, Channel3, pac::I2C3, PeripheralToMemory), //I2C3_RX:DMA_CHANNEL_3
);
#[cfg(any(feature = "stm32f401", feature = "stm32f411",))]
dma_map!(
(Stream1<DMA1>, Channel3, CCR3<pac::TIM2>, MemoryToPeripheral), //TIM2_CH3
(Stream1<DMA1>, Channel3, CCR3<pac::TIM2>, PeripheralToMemory), //TIM2_CH3
(Stream1<DMA1>, Channel3, DMAR<pac::TIM2>, MemoryToPeripheral), //TIM2_UP
(Stream1<DMA1>, Channel3, DMAR<pac::TIM2>, PeripheralToMemory), //TIM2_UP
(Stream7<DMA1>, Channel3, CCR4<pac::TIM2>, MemoryToPeripheral), //TIM2_CH4
(Stream7<DMA1>, Channel3, CCR4<pac::TIM2>, PeripheralToMemory), //TIM2_CH4
(Stream7<DMA1>, Channel3, DMAR<pac::TIM2>, MemoryToPeripheral), //TIM2_UP
(Stream7<DMA1>, Channel3, DMAR<pac::TIM2>, PeripheralToMemory), //TIM2_UP
);
#[cfg(any(feature = "stm32f401", feature = "stm32f411",))]
address!((CCR3<pac::TIM2>, ccr3, u16), (DMAR<pac::TIM2>, dmar, u16),);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
))]
dma_map!((Stream5<DMA1>, Channel6, pac::I2C3, MemoryToPeripheral),); //I2C3_TX:DMA_CHANNEL_6);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream6<DMA1>, Channel1, pac::I2C1, MemoryToPeripheral), //I2C1_TX
(Stream7<DMA1>, Channel1, pac::I2C1, MemoryToPeripheral), //I2C1_TX
(Stream3<DMA2>, Channel3, pac::SPI1, MemoryToPeripheral), //SPI1_TX
(Stream5<DMA2>, Channel3, pac::SPI1, MemoryToPeripheral), //SPI1_TX
);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream0<DMA2>, Channel4, pac::SPI4, PeripheralToMemory), //SPI4_RX
(Stream1<DMA2>, Channel4, pac::SPI4, MemoryToPeripheral), //SPI4_TX
(Stream3<DMA2>, Channel5, pac::SPI4, PeripheralToMemory), //SPI4_RX:DMA_CHANNEL_5
(Stream4<DMA2>, Channel5, pac::SPI4, MemoryToPeripheral), //SPI4_TX:DMA_CHANNEL_5
);
#[cfg(any(
feature = "stm32f401",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::SPI4, dr, u8),
(pac::SPI4, dr, u16),
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream0<DMA1>, Channel4, pac::UART5, PeripheralToMemory), //UART5_RX
(Stream2<DMA1>, Channel4, pac::UART4, PeripheralToMemory), //UART4_RX
(Stream4<DMA1>, Channel4, pac::UART4, MemoryToPeripheral), //UART4_TX
//(Stream6<DMA1>, Channel7, pac::DAC2, MemoryToPeripheral), //DAC2
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::UART4, dr, u8),
(pac::UART5, dr, u8),
//(pac::DAC, ??),
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream1<DMA1>, Channel3, DMAR<pac::TIM2>, MemoryToPeripheral), //TIM2_UP
(Stream1<DMA1>, Channel3, DMAR<pac::TIM2>, PeripheralToMemory), //TIM2_UP
(Stream1<DMA1>, Channel3, CCR3<pac::TIM2>, MemoryToPeripheral), //TIM2_CH3
(Stream1<DMA1>, Channel3, CCR3<pac::TIM2>, PeripheralToMemory), //TIM2_CH3
//(pac::DMA1, Stream2, Channel1, DMAR<pac::TIM7>, MemoryToPeripheral), //TIM7_UP //dmar register appears to be missing
//(pac::DMA1, Stream2, Channel1, DMAR<pac::TIM7>, PeripheralToMemory), //TIM7_UP //dmar register appears to be missing
//(pac::DMA1, Stream4, Channel1, DMAR<pac::TIM7>, MemoryToPeripheral), //TIM7_UP //dmar register appears to be missing
//(pac::DMA1, Stream4, Channel1, DMAR<pac::TIM7>, PeripheralToMemory), //TIM7_UP //dmar register appears to be missing
(Stream7<DMA1>, Channel3, DMAR<pac::TIM2>, MemoryToPeripheral), //TIM2_UP
(Stream7<DMA1>, Channel3, DMAR<pac::TIM2>, PeripheralToMemory), //TIM2_UP
(Stream7<DMA1>, Channel3, CCR4<pac::TIM2>, MemoryToPeripheral), //TIM2_CH4
(Stream7<DMA1>, Channel3, CCR4<pac::TIM2>, PeripheralToMemory), //TIM2_CH4
(Stream1<DMA2>, Channel7, DMAR<pac::TIM8>, MemoryToPeripheral), //TIM8_UP
(Stream1<DMA2>, Channel7, DMAR<pac::TIM8>, PeripheralToMemory), //TIM8_UP
(Stream2<DMA2>, Channel0, CCR1<pac::TIM8>, MemoryToPeripheral), //TIM8_CH1
(Stream2<DMA2>, Channel0, CCR1<pac::TIM8>, PeripheralToMemory), //TIM8_CH1
(Stream2<DMA2>, Channel0, CCR2<pac::TIM8>, MemoryToPeripheral), //TIM8_CH2
(Stream2<DMA2>, Channel0, CCR2<pac::TIM8>, PeripheralToMemory), //TIM8_CH2
(Stream2<DMA2>, Channel0, CCR3<pac::TIM8>, MemoryToPeripheral), //TIM8_CH3
(Stream2<DMA2>, Channel0, CCR3<pac::TIM8>, PeripheralToMemory), //TIM8_CH3
(Stream2<DMA2>, Channel7, CCR1<pac::TIM8>, MemoryToPeripheral), //TIM8_CH1
(Stream2<DMA2>, Channel7, CCR1<pac::TIM8>, PeripheralToMemory), //TIM8_CH1
(Stream3<DMA2>, Channel7, CCR2<pac::TIM8>, MemoryToPeripheral), //TIM8_CH2
(Stream3<DMA2>, Channel7, CCR2<pac::TIM8>, PeripheralToMemory), //TIM8_CH2
(Stream4<DMA2>, Channel7, CCR3<pac::TIM8>, MemoryToPeripheral), //TIM8_CH3
(Stream4<DMA2>, Channel7, CCR3<pac::TIM8>, PeripheralToMemory), //TIM8_CH3
(Stream7<DMA2>, Channel7, CCR4<pac::TIM8>, MemoryToPeripheral), //TIM8_CH4
(Stream7<DMA2>, Channel7, CCR4<pac::TIM8>, PeripheralToMemory), //TIM8_CH4
(Stream7<DMA2>, Channel7, DMAR<pac::TIM8>, MemoryToPeripheral), //TIM8_COM/TRIG
(Stream7<DMA2>, Channel7, DMAR<pac::TIM8>, PeripheralToMemory), //TIM8_COM/TRIG
(Stream1<DMA1>, Channel4, pac::USART3, PeripheralToMemory), //USART3_RX
(Stream3<DMA1>, Channel4, pac::USART3, MemoryToPeripheral), //USART3_TX
(Stream4<DMA1>, Channel7, pac::USART3, MemoryToPeripheral), //USART3_TX:DMA_CHANNEL_7
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(CCR1<pac::TIM8>, ccr1, u16),
(CCR2<pac::TIM8>, ccr2, u16),
(CCR3<pac::TIM8>, ccr3, u16),
(CCR4<pac::TIM8>, ccr4, u16),
(DMAR<pac::TIM8>, dmar, u16),
(CCR3<pac::TIM2>, ccr3, u16),
(DMAR<pac::TIM2>, dmar, u16),
//(DMAR<pac::TIM7>, dmar), //Missing?
(pac::USART3, dr, u8),
);
/*
DMAR register appears to be missing from TIM6 derived timers on these devices
Not sure how _UP is supposed to work without DMAR or if this is just an SVD issue
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(pac::DMA1, Stream1, Channel7, DMAR<pac::TIM6>, MemoryToPeripheral), //TIM6_UP
(pac::DMA1, Stream1, Channel7, DMAR<pac::TIM6>, PeripheralToMemory), //TIM6_UP
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(DMAR<pac::TIM6>, dmar),
);
*/
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream2<DMA1>, Channel3, pac::I2C3, PeripheralToMemory), //I2C3_RX
(Stream5<DMA2>, Channel2, pac::CRYP, PeripheralToMemory), //CRYP_OUT
(Stream6<DMA2>, Channel2, pac::CRYP, MemoryToPeripheral), //CRYP_IN
(Stream7<DMA2>, Channel2, pac::HASH, MemoryToPeripheral), //HASH_IN
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
address!((pac::HASH, din, u32), (pac::CRYP, din, u32),);
/* Not sure how DAC works with DMA
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(pac::DMA1, Stream5, Channel7, pac::DAC, MemoryToPeripheral), //DAC1
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f410",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::DAC, ??),
);
*/
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream7<DMA1>, Channel4, pac::UART5, MemoryToPeripheral), //UART5_TX
(Stream0<DMA2>, Channel2, pac::ADC3, PeripheralToMemory), //ADC3
(Stream1<DMA2>, Channel1, pac::DCMI, PeripheralToMemory), //DCMI
(Stream1<DMA2>, Channel2, pac::ADC3, PeripheralToMemory), //ADC3
(Stream2<DMA2>, Channel1, pac::ADC2, PeripheralToMemory), //ADC2
(Stream3<DMA2>, Channel1, pac::ADC2, PeripheralToMemory), //ADC2
(Stream7<DMA2>, Channel1, pac::DCMI, PeripheralToMemory), //DCMI
);
#[cfg(any(
feature = "stm32f417",
feature = "stm32f415",
feature = "stm32f405",
feature = "stm32f407",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::ADC2, dr, u16),
(pac::ADC3, dr, u16),
(pac::DCMI, dr, u32),
);
/* FMPI2C missing from peripheral crates (?)
#[cfg(any(
feature = "stm32f410",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
))]
dma_map!(
(pac::DMA1, Stream0, Channel7, pac::FMPI2C1, PeripheralToMemory), //FMPI2C1_RX
(pac::DMA1, Stream1, Channel2, pac::FMPI2C1, MemoryToPeripheral), //FMPI2C1_TX
(pac::DMA1, Stream3, Channel1, pac::FMPI2C1, PeripheralToMemory), //FMPI2C1_RX:DMA_CHANNEL_1
(pac::DMA1, Stream7, Channel4, pac::FMPI2C1, MemoryToPeripheral), //FMPI2C1_TX:DMA_CHANNEL_4
);
// TODO: Probably need to create other type for tx_dr and rx_dr
#[cfg(any(
feature = "stm32f410",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
))]
address!(
(pac::FMPI2C1, dr),
);
*/
#[cfg(any(
feature = "stm32f410",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
))]
dma_map!(
(Stream1<DMA1>, Channel0, pac::I2C1, MemoryToPeripheral), //I2C1_TX
(Stream6<DMA1>, Channel1, pac::I2C1, MemoryToPeripheral), //I2C1_TX:DMA_CHANNEL_1
(Stream7<DMA1>, Channel1, pac::I2C1, MemoryToPeripheral), //I2C1_TX:DMA_CHANNEL_1
(Stream7<DMA1>, Channel6, pac::USART2, PeripheralToMemory), //USART2_RX:DMA_CHANNEL_6
(Stream2<DMA2>, Channel2, pac::SPI1, MemoryToPeripheral), //SPI1_TX
(Stream3<DMA2>, Channel3, pac::SPI1, MemoryToPeripheral), //SPI1_TX:DMA_CHANNEL_3
(Stream5<DMA2>, Channel3, pac::SPI1, MemoryToPeripheral), //SPI1_TX:DMA_CHANNEL_3
(Stream5<DMA2>, Channel5, pac::SPI5, MemoryToPeripheral), //SPI5_TX:DMA_CHANNEL_5
);
#[cfg(any(
feature = "stm32f410",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream3<DMA2>, Channel2, pac::SPI5, PeripheralToMemory), //SPI5_RX
(Stream4<DMA2>, Channel2, pac::SPI5, MemoryToPeripheral), //SPI5_TX
(Stream5<DMA2>, Channel7, pac::SPI5, PeripheralToMemory), //SPI5_RX:DMA_CHANNEL_7
(Stream6<DMA2>, Channel7, pac::SPI5, MemoryToPeripheral), //SPI5_TX:DMA_CHANNEL_7
);
#[cfg(any(
feature = "stm32f410",
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::SPI5, dr, u8),
(pac::SPI5, dr, u16),
);
#[cfg(any(
feature = "stm32f411",
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
))]
dma_map!((Stream4<DMA2>, Channel4, pac::SPI4, PeripheralToMemory),); //SPI4_RX);
/* TODO: DFSDM support
#[cfg(feature = "stm32f412")]
dma_map!(
(Stream0<pac::DMA2>, Channel7, pac::DFSDM, PeripheralToMemory), //DFSDM1_FLT0
(Stream1<pac::DMA2>, Channel3, pac::DFSDM, PeripheralToMemory), //DFSDM1_FLT1
(Stream4<pac::DMA2>, Channel3, pac::DFSDM, PeripheralToMemory), //DFSDM1_FLT1
(Stream6<pac::DMA2>, Channel3, pac::DFSDM, PeripheralToMemory), //DFSDM1_FLT0:DMA_CHANNEL_3
);
#[cfg(feature = "stm32f412")]
address!((pac::DFSDM, dr),);
#[cfg(any(feature = "stm32f413", feature = "stm32f423"))]
dma_map!(
(
Stream0<pac::DMA2>,
Channel7,
pac::DFSDM1,
PeripheralToMemory
), //DFSDM1_FLT0
(
Stream1<pac::DMA2>,
Channel3,
pac::DFSDM1,
PeripheralToMemory
), //DFSDM1_FLT1
(
Stream4<pac::DMA2>,
Channel3,
pac::DFSDM1,
PeripheralToMemory
), //DFSDM1_FLT1
(
Stream6<pac::DMA2>,
Channel3,
pac::DFSDM1,
PeripheralToMemory
), //DFSDM1_FLT0:DMA_CHANNEL_3
(
Stream0<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT0
(
Stream1<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT1
(
Stream2<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT2
(
Stream3<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT3
(
Stream4<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT0
(
Stream5<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT1
(
Stream6<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT2
(
Stream7<pac::DMA2>,
Channel8,
pac::DFSDM2,
PeripheralToMemory
), //DFSDM2_FLT3
);
#[cfg(any(feature = "stm32f413", feature = "stm32f423"))]
address!((pac::DFSDM1, dr), (pac::DFSDM2, dr),);
*/
#[cfg(any(
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream7<DMA2>, Channel3, pac::QUADSPI, MemoryToPeripheral), //QUADSPI
(Stream7<DMA2>, Channel3, pac::QUADSPI, PeripheralToMemory), //QUADSPI
);
#[cfg(any(
feature = "stm32f412",
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!((pac::QUADSPI, dr, u32),);
#[cfg(any(
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream0<DMA1>, Channel5, pac::UART8, MemoryToPeripheral), //UART8_TX
(Stream1<DMA1>, Channel5, pac::UART7, MemoryToPeripheral), //UART7_TX
(Stream3<DMA1>, Channel5, pac::UART7, PeripheralToMemory), //UART7_RX
(Stream6<DMA1>, Channel5, pac::UART8, PeripheralToMemory), //UART8_RX
);
#[cfg(any(
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
address!((pac::UART7, dr, u8), (pac::UART8, dr, u8),);
#[cfg(any(feature = "stm32f413", feature = "stm32f423",))]
dma_map!(
(Stream7<DMA1>, Channel8, pac::UART5, MemoryToPeripheral), //UART5_TX
(Stream0<DMA2>, Channel1, pac::UART9, MemoryToPeripheral), //UART9_TX
(Stream0<DMA2>, Channel5, pac::UART10, PeripheralToMemory), //UART10_RX
(Stream3<DMA2>, Channel9, pac::UART10, PeripheralToMemory), //UART10_RX:DMA_CHANNEL_9
(Stream5<DMA2>, Channel9, pac::UART10, MemoryToPeripheral), //UART10_TX
(Stream7<DMA2>, Channel0, pac::UART9, PeripheralToMemory), //UART9_RX
(Stream7<DMA2>, Channel6, pac::UART10, MemoryToPeripheral), //UART10_TX:DMA_CHANNEL_6
//(pac::DMA2, Stream6, Channel2, IN<pac::AES>, MemoryToPeripheral), //AES_IN
//(pac::DMA2, Stream5, Channel2, OUT<pac::AES>, PeripheralToMemory), //AES_OUT
);
#[cfg(any(feature = "stm32f413", feature = "stm32f423",))]
address!(
//(IN<pac::AES>, dinr),
//(OUT<pac::AES>, doutr),
(pac::UART9, dr, u8),
(pac::UART10, dr, u8),
);
/* Not sure how SAI works
#[cfg(any(
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(pac::DMA2, Stream1, Channel0, pac::SAI, MemoryToPeripheral), //SAI1_A
(pac::DMA2, Stream1, Channel0, pac::SAI, PeripheralToMemory), //SAI1_A
(pac::DMA2, Stream3, Channel0, pac::SAI, MemoryToPeripheral), //SAI1_A
(pac::DMA2, Stream3, Channel0, pac::SAI, PeripheralToMemory), //SAI1_A
(pac::DMA2, Stream4, Channel1, pac::SAI, MemoryToPeripheral), //SAI1_B
(pac::DMA2, Stream4, Channel1, pac::SAI, PeripheralToMemory), //SAI1_B
(pac::DMA2, Stream5, Channel0, pac::SAI, MemoryToPeripheral), //SAI1_B:DMA_CHANNEL_0
(pac::DMA2, Stream5, Channel0, pac::SAI, PeripheralToMemory), //SAI1_B:DMA_CHANNEL_0
);
#[cfg(any(
feature = "stm32f413",
feature = "stm32f423",
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f446",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::SAI, dr),
);
*/
#[cfg(any(
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
dma_map!(
(Stream5<DMA2>, Channel1, pac::SPI6, MemoryToPeripheral), //SPI6_TX
(Stream6<DMA2>, Channel1, pac::SPI6, PeripheralToMemory), //SPI6_RX
);
#[cfg(any(
feature = "stm32f427",
feature = "stm32f439",
feature = "stm32f437",
feature = "stm32f429",
feature = "stm32f469",
feature = "stm32f479",
))]
address!(
(pac::SPI6, dr, u8),
(pac::SPI6, dr, u16),
);
/*
#[cfg(any(
feature = "stm32f446",
))]
dma_map!(
(pac::DMA1, Stream1, Channel0, pac::SPDIFRX, PeripheralToMemory), //SPDIF_RX_DT
(pac::DMA1, Stream2, Channel2, pac::FMPI2C1, PeripheralToMemory), //FMPI2C1_RX
(pac::DMA1, Stream5, Channel2, pac::FMPI2C1, MemoryToPeripheral), //FMPI2C1_TX
(pac::DMA1, Stream6, Channel0, pac::SPDIFRX, PeripheralToMemory), //SPDIF_RX_CS
(pac::DMA2, Stream4, Channel3, pac::SAI2, MemoryToPeripheral), //SAI2_A
(pac::DMA2, Stream4, Channel3, pac::SAI2, PeripheralToMemory), //SAI2_A
(pac::DMA2, Stream6, Channel3, pac::SAI2, MemoryToPeripheral), //SAI2_B
(pac::DMA2, Stream6, Channel3, pac::SAI2, PeripheralToMemory), //SAI2_B
(pac::DMA2, Stream7, Channel0, pac::SAI2, MemoryToPeripheral), //SAI2_B:DMA_CHANNEL_0
(pac::DMA2, Stream7, Channel0, pac::SAI2, PeripheralToMemory), //SAI2_B:DMA_CHANNEL_0
);
#[cfg(any(
feature = "stm32f446",
))]
address!(
(pac::SPDIFRX, ??),
(pac::FMPI2C1, ??),
(pac::SAI2, ??),
);
*/
| 32.919332 | 141 | 0.628248 |
0afa438f7246560a1569f15899d5da0aa653d9fa | 6,189 |
use api::eds_grpc::EndpointDiscoveryService;
use futures::Future;
use config::Config;
use std::sync::Arc;
use consul::Client as ConsulClient;
#[derive(Clone)]
pub struct Service {
pub config: Config,
pub consul: ConsulClient,
}
use api::discovery::{DiscoveryRequest, DiscoveryResponse};
use grpcio::{RpcStatus, RpcStatusCode, UnarySinkResult};
impl EndpointDiscoveryService for Service {
/// The resource_names field in DiscoveryRequest specifies a list of clusters
/// to subscribe to updates for.
fn stream_endpoints(
&self,
ctx: ::grpcio::RpcContext,
stream: ::grpcio::RequestStream<::api::discovery::DiscoveryRequest>,
sink: ::grpcio::DuplexSink<::api::discovery::DiscoveryResponse>,
) {
}
fn fetch_endpoints(
&self,
ctx: ::grpcio::RpcContext,
req: DiscoveryRequest,
sink: ::grpcio::UnarySink<DiscoveryResponse>,
) {
// req is:
// pub version_info: ::std::string::String,
// pub node: ::protobuf::SingularPtrField<super::base::Node>,
// pub resource_names: ::protobuf::RepeatedField<::std::string::String>,
// pub type_url: ::std::string::String,
// pub response_nonce: ::std::string::String,
// let y = match resp {
// Ok(x) => sink.success(x),
// Err(_) => sink.fail(RpcStatus::new(RpcStatusCode::Internal, None)),
// };
// let f = y.map_err(move |e| error!("failed to reply {:?}: {:?}", req, e));
let resp = fetch_endpoints();
let f = sink.success(resp).map_err(move |e| {
error!("failed to reply {:?}: {:?}", req, e)
});
ctx.spawn(f)
}
/// Advanced API to allow for multi-dimensional load balancing by remote
/// server. For receiving LB assignments, the steps are:
/// 1, The management server is configured with per cluster/zone/load metric
/// capacity configuration. The capacity configuration definition is
/// outside of the scope of this document.
/// 2. Envoy issues a standard {Stream,Fetch}Endpoints request for the clusters
/// to balance.
///
/// Independently, Envoy will initiate a StreamLoadStats bidi stream with a
/// management server:
/// 1. Once a connection establishes, the management server publishes a
/// LoadStatsResponse for all clusters it is interested in learning load
/// stats about.
/// 2. For each cluster, Envoy load balances incoming traffic to upstream hosts
/// based on per-zone weights and/or per-instance weights (if specified)
/// based on intra-zone LbPolicy. This information comes from the above
/// {Stream,Fetch}Endpoints.
/// 3. When upstream hosts reply, they optionally add header <define header
/// name> with ASCII representation of EndpointLoadMetricStats.
/// 4. Envoy aggregates load reports over the period of time given to it in
/// LoadStatsResponse.load_reporting_interval. This includes aggregation
/// stats Envoy maintains by itself (total_requests, rpc_errors etc.) as
/// well as load metrics from upstream hosts.
/// 5. When the timer of load_reporting_interval expires, Envoy sends new
/// LoadStatsRequest filled with load reports for each cluster.
/// 6. The management server uses the load reports from all reported Envoys
/// from around the world, computes global assignment and prepares traffic
/// assignment destined for each zone Envoys are located in. Goto 2.
fn stream_load_stats(
&self,
ctx: ::grpcio::RpcContext,
stream: ::grpcio::RequestStream<::api::eds::LoadStatsRequest>,
sink: ::grpcio::DuplexSink<::api::eds::LoadStatsResponse>,
) {
}
}
use consul::catalog::Catalog;
use protobuf::{Message, RepeatedField};
use protobuf::error::ProtobufError;
use protobuf::well_known_types::Any;
use super::MessageType;
use std::string::ToString;
use api::base::Locality;
use api::eds::{LocalityLbEndpoints, ClusterLoadAssignment};
// TODO: This is going to need to do something more useful that return
// hardcoded values. This should really talk to consul or whatever
// abstraction we figure out.
fn fetch_endpoints() -> DiscoveryResponse {
let mut loc = Locality::new();
loc.set_region("us-west-1".to_string());
loc.set_zone("a".to_string());
let mut az1 = LocalityLbEndpoints::new();
az1.set_locality(loc);
let mut azs = Vec::new();
azs.push(az1);
let mut cla = ClusterLoadAssignment::new();
cla.set_cluster_name("foo".to_string());
cla.set_endpoints(RepeatedField::from_vec(azs));
let mut items = Vec::new();
items.push(cla);
create_discovery_response(items, MessageType::ClusterLoadAssignment)
}
/// here we're taking any `A` that has a `::protobuf::Message` implementation, such that
/// we can encode the response (using protobuf); its turtles all the way down.
/// TODO: currently this function assumes success, this should be refactored to properly
/// handle bad results and take action accordingly.
fn create_discovery_response<A: Message>(
r: Vec<A>,
nested_type_url: MessageType,
) -> DiscoveryResponse {
let serialized: Vec<Any> = r.iter()
.map(|x| pack_to_any(x.write_to_bytes(), nested_type_url.clone()))
.collect();
let repeated = RepeatedField::from_vec(serialized);
let mut d = DiscoveryResponse::new();
d.set_canary(false);
//TODO we'll need to set a version here that is the md5 of the payload to faithfully
// represent the 'version' to Envoy, but for now we're just hardcoding it, because fuck it.
d.set_version_info("1".to_string());
// This should really be an Enum
d.set_type_url(MessageType::DiscoveryResponse.to_string());
d.set_resources(repeated);
d
}
fn pack_to_any(r: Result<Vec<u8>, ProtobufError>, turl: MessageType) -> Any {
match r {
Ok(bytes) => any_from_bytes(bytes, turl),
Err(_) => Any::new(),
}
}
fn any_from_bytes(bytes: Vec<u8>, turl: MessageType) -> Any {
let mut a = Any::new();
a.set_value(bytes);
a.set_type_url(turl.to_string());
a
}
| 37.509091 | 95 | 0.666828 |
e8b4500c565be958e29b34c010c2a91e70cfd6a0 | 33,768 | // Copyright 2017 TiKV Project Authors. Licensed under Apache-2.0.
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use slog::Level;
use batch_system::Config as BatchSystemConfig;
use collections::HashSet;
use encryption::{EncryptionConfig, FileConfig, MasterKeyConfig};
use engine_rocks::config::{BlobRunMode, CompressionType, LogLevel};
use engine_rocks::raw::{
CompactionPriority, DBCompactionStyle, DBCompressionType, DBRateLimiterMode, DBRecoveryMode,
};
use engine_traits::config::PerfLevel;
use file_system::{IOPriority, IORateLimitMode};
use kvproto::encryptionpb::EncryptionMethod;
use pd_client::Config as PdConfig;
use raftstore::coprocessor::{Config as CopConfig, ConsistencyCheckMethod};
use raftstore::store::Config as RaftstoreConfig;
use security::SecurityConfig;
use tikv::config::*;
use tikv::import::Config as ImportConfig;
use tikv::server::config::GrpcCompressionType;
use tikv::server::gc_worker::GcConfig;
use tikv::server::lock_manager::Config as PessimisticTxnConfig;
use tikv::server::Config as ServerConfig;
use tikv::storage::config::{BlockCacheConfig, Config as StorageConfig, IORateLimitConfig};
use tikv_util::config::{LogFormat, OptionReadableSize, ReadableDuration, ReadableSize};
mod dynamic;
mod test_config_client;
#[test]
fn test_toml_serde() {
let value = TiKvConfig::default();
let dump = toml::to_string_pretty(&value).unwrap();
let load = toml::from_str(&dump).unwrap();
assert_eq!(value, load);
}
// Read a file in project directory. It is similar to `include_str!`,
// but `include_str!` a large string literal increases compile time.
// See more: https://github.com/rust-lang/rust/issues/39352
fn read_file_in_project_dir(path: &str) -> String {
let mut p = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
p.push(path);
let mut f = File::open(p).unwrap();
let mut buffer = String::new();
f.read_to_string(&mut buffer).unwrap();
buffer
}
#[test]
fn test_serde_custom_tikv_config() {
let mut value = TiKvConfig::default();
value.log_level = Level::Debug;
value.log_file = "foo".to_owned();
value.log_format = LogFormat::Json;
value.slow_log_file = "slow_foo".to_owned();
value.slow_log_threshold = ReadableDuration::secs(1);
value.abort_on_panic = true;
value.memory_usage_limit = OptionReadableSize(Some(ReadableSize::gb(10)));
value.memory_usage_high_water = 0.65;
value.server = ServerConfig {
cluster_id: 0, // KEEP IT ZERO, it is skipped by serde.
addr: "example.com:443".to_owned(),
labels: map! { "a".to_owned() => "b".to_owned() },
advertise_addr: "example.com:443".to_owned(),
status_addr: "example.com:443".to_owned(),
advertise_status_addr: "example.com:443".to_owned(),
status_thread_pool_size: 1,
max_grpc_send_msg_len: 6 * (1 << 20),
raft_client_grpc_send_msg_buffer: 1234 * 1024,
raft_client_queue_size: 1234,
raft_msg_max_batch_size: 123,
concurrent_send_snap_limit: 4,
concurrent_recv_snap_limit: 4,
grpc_compression_type: GrpcCompressionType::Gzip,
grpc_concurrency: 123,
grpc_concurrent_stream: 1_234,
grpc_memory_pool_quota: ReadableSize(123_456),
grpc_raft_conn_num: 123,
grpc_stream_initial_window_size: ReadableSize(12_345),
grpc_keepalive_time: ReadableDuration::secs(3),
grpc_keepalive_timeout: ReadableDuration::secs(60),
end_point_concurrency: None,
end_point_max_tasks: None,
end_point_stack_size: None,
end_point_recursion_limit: 100,
end_point_stream_channel_size: 16,
end_point_batch_row_limit: 64,
end_point_stream_batch_row_limit: 4096,
end_point_enable_batch_if_possible: true,
end_point_request_max_handle_duration: ReadableDuration::secs(12),
end_point_max_concurrency: 10,
snap_max_write_bytes_per_sec: ReadableSize::mb(10),
snap_max_total_size: ReadableSize::gb(10),
stats_concurrency: 10,
heavy_load_threshold: 1000,
heavy_load_wait_duration: ReadableDuration::millis(2),
enable_request_batch: false,
background_thread_count: 999,
raft_client_backoff_step: ReadableDuration::secs(1),
end_point_slow_log_threshold: ReadableDuration::secs(1),
forward_max_connections_per_address: 5,
reject_messages_on_memory_ratio: 0.8,
};
value.readpool = ReadPoolConfig {
unified: UnifiedReadPoolConfig {
min_thread_count: 5,
max_thread_count: 10,
stack_size: ReadableSize::mb(20),
max_tasks_per_worker: 2200,
},
storage: StorageReadPoolConfig {
use_unified_pool: Some(true),
high_concurrency: 1,
normal_concurrency: 3,
low_concurrency: 7,
max_tasks_per_worker_high: 1000,
max_tasks_per_worker_normal: 1500,
max_tasks_per_worker_low: 2500,
stack_size: ReadableSize::mb(20),
},
coprocessor: CoprReadPoolConfig {
use_unified_pool: Some(false),
high_concurrency: 2,
normal_concurrency: 4,
low_concurrency: 6,
max_tasks_per_worker_high: 2000,
max_tasks_per_worker_normal: 1000,
max_tasks_per_worker_low: 3000,
stack_size: ReadableSize::mb(12),
},
};
value.metric = MetricConfig {
interval: ReadableDuration::secs(15),
address: "".to_string(),
job: "tikv_1".to_owned(),
};
let mut apply_batch_system = BatchSystemConfig::default();
apply_batch_system.max_batch_size = Some(22);
apply_batch_system.pool_size = 4;
apply_batch_system.reschedule_duration = ReadableDuration::secs(3);
let mut store_batch_system = BatchSystemConfig::default();
store_batch_system.max_batch_size = Some(21);
store_batch_system.pool_size = 3;
store_batch_system.reschedule_duration = ReadableDuration::secs(2);
value.raft_store = RaftstoreConfig {
prevote: false,
raftdb_path: "/var".to_owned(),
capacity: ReadableSize(123),
raft_base_tick_interval: ReadableDuration::secs(12),
raft_heartbeat_ticks: 1,
raft_election_timeout_ticks: 12,
raft_min_election_timeout_ticks: 14,
raft_max_election_timeout_ticks: 20,
raft_max_size_per_msg: ReadableSize::mb(12),
raft_max_inflight_msgs: 123,
raft_entry_max_size: ReadableSize::mb(12),
raft_log_gc_tick_interval: ReadableDuration::secs(12),
raft_log_gc_threshold: 12,
raft_log_gc_count_limit: 12,
raft_log_gc_size_limit: ReadableSize::kb(1),
raft_log_reserve_max_ticks: 100,
raft_engine_purge_interval: ReadableDuration::minutes(20),
raft_entry_cache_life_time: ReadableDuration::secs(12),
raft_reject_transfer_leader_duration: ReadableDuration::secs(3),
split_region_check_tick_interval: ReadableDuration::secs(12),
region_split_check_diff: ReadableSize::mb(20),
region_compact_check_interval: ReadableDuration::secs(12),
clean_stale_peer_delay: ReadableDuration::secs(0),
region_compact_check_step: 1_234,
region_compact_min_tombstones: 999,
region_compact_tombstones_percent: 33,
pd_heartbeat_tick_interval: ReadableDuration::minutes(12),
pd_store_heartbeat_tick_interval: ReadableDuration::secs(12),
notify_capacity: 12_345,
snap_mgr_gc_tick_interval: ReadableDuration::minutes(12),
snap_gc_timeout: ReadableDuration::hours(12),
messages_per_tick: 12_345,
max_peer_down_duration: ReadableDuration::minutes(12),
max_leader_missing_duration: ReadableDuration::hours(12),
abnormal_leader_missing_duration: ReadableDuration::hours(6),
peer_stale_state_check_interval: ReadableDuration::hours(2),
leader_transfer_max_log_lag: 123,
snap_apply_batch_size: ReadableSize::mb(12),
lock_cf_compact_interval: ReadableDuration::minutes(12),
lock_cf_compact_bytes_threshold: ReadableSize::mb(123),
consistency_check_interval: ReadableDuration::secs(12),
report_region_flow_interval: ReadableDuration::minutes(12),
raft_store_max_leader_lease: ReadableDuration::secs(12),
right_derive_when_split: false,
allow_remove_leader: true,
merge_max_log_gap: 3,
merge_check_tick_interval: ReadableDuration::secs(11),
use_delete_range: true,
cleanup_import_sst_interval: ReadableDuration::minutes(12),
region_max_size: ReadableSize(0),
region_split_size: ReadableSize(0),
local_read_batch_size: 33,
apply_batch_system,
store_batch_system,
future_poll_size: 2,
hibernate_regions: false,
dev_assert: true,
apply_yield_duration: ReadableDuration::millis(333),
perf_level: PerfLevel::Disable,
evict_cache_on_memory_ratio: 0.8,
};
value.pd = PdConfig::new(vec!["example.com:443".to_owned()]);
let titan_cf_config = TitanCfConfig {
min_blob_size: ReadableSize(2018),
blob_file_compression: CompressionType::Zstd,
blob_cache_size: ReadableSize::gb(12),
min_gc_batch_size: ReadableSize::kb(12),
max_gc_batch_size: ReadableSize::mb(12),
discardable_ratio: 0.00156,
sample_ratio: 0.982,
merge_small_file_threshold: ReadableSize::kb(21),
blob_run_mode: BlobRunMode::Fallback,
level_merge: true,
range_merge: true,
max_sorted_runs: 100,
gc_merge_rewrite: true,
};
let titan_db_config = TitanDBConfig {
enabled: true,
dirname: "bar".to_owned(),
disable_gc: false,
max_background_gc: 9,
purge_obsolete_files_period: ReadableDuration::secs(1),
};
value.rocksdb = DbConfig {
wal_recovery_mode: DBRecoveryMode::AbsoluteConsistency,
wal_dir: "/var".to_owned(),
wal_ttl_seconds: 1,
wal_size_limit: ReadableSize::kb(1),
max_total_wal_size: ReadableSize::gb(1),
max_background_jobs: 12,
max_background_flushes: 4,
max_manifest_file_size: ReadableSize::mb(12),
create_if_missing: false,
max_open_files: 12_345,
enable_statistics: false,
stats_dump_period: ReadableDuration::minutes(12),
compaction_readahead_size: ReadableSize::kb(1),
info_log_max_size: ReadableSize::kb(1),
info_log_roll_time: ReadableDuration::secs(12),
info_log_keep_log_file_num: 1000,
info_log_dir: "/var".to_owned(),
info_log_level: LogLevel::Info,
rate_bytes_per_sec: ReadableSize::kb(1),
rate_limiter_refill_period: ReadableDuration::millis(10),
rate_limiter_mode: DBRateLimiterMode::AllIo,
auto_tuned: None,
rate_limiter_auto_tuned: false,
bytes_per_sync: ReadableSize::mb(1),
wal_bytes_per_sync: ReadableSize::kb(32),
max_sub_compactions: 12,
writable_file_max_buffer_size: ReadableSize::mb(12),
use_direct_io_for_flush_and_compaction: true,
enable_pipelined_write: false,
enable_multi_batch_write: false,
enable_unordered_write: true,
defaultcf: DefaultCfConfig {
block_size: ReadableSize::kb(12),
block_cache_size: ReadableSize::gb(12),
disable_block_cache: false,
cache_index_and_filter_blocks: false,
pin_l0_filter_and_index_blocks: false,
use_bloom_filter: false,
optimize_filters_for_hits: false,
whole_key_filtering: true,
bloom_filter_bits_per_key: 123,
block_based_bloom_filter: true,
read_amp_bytes_per_bit: 0,
compression_per_level: [
DBCompressionType::No,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Zstd,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Lz4,
],
write_buffer_size: ReadableSize::mb(1),
max_write_buffer_number: 12,
min_write_buffer_number_to_merge: 12,
max_bytes_for_level_base: ReadableSize::kb(12),
target_file_size_base: ReadableSize::kb(123),
level0_file_num_compaction_trigger: 123,
level0_slowdown_writes_trigger: 123,
level0_stop_writes_trigger: 123,
max_compaction_bytes: ReadableSize::gb(1),
compaction_pri: CompactionPriority::MinOverlappingRatio,
dynamic_level_bytes: true,
num_levels: 4,
max_bytes_for_level_multiplier: 8,
compaction_style: DBCompactionStyle::Universal,
disable_auto_compactions: true,
soft_pending_compaction_bytes_limit: ReadableSize::gb(12),
hard_pending_compaction_bytes_limit: ReadableSize::gb(12),
force_consistency_checks: true,
titan: titan_cf_config.clone(),
prop_size_index_distance: 4000000,
prop_keys_index_distance: 40000,
enable_doubly_skiplist: false,
enable_compaction_guard: false,
compaction_guard_min_output_file_size: ReadableSize::mb(12),
compaction_guard_max_output_file_size: ReadableSize::mb(34),
bottommost_level_compression: DBCompressionType::Disable,
bottommost_zstd_compression_dict_size: 1024,
bottommost_zstd_compression_sample_size: 1024,
},
writecf: WriteCfConfig {
block_size: ReadableSize::kb(12),
block_cache_size: ReadableSize::gb(12),
disable_block_cache: false,
cache_index_and_filter_blocks: false,
pin_l0_filter_and_index_blocks: false,
use_bloom_filter: false,
optimize_filters_for_hits: true,
whole_key_filtering: true,
bloom_filter_bits_per_key: 123,
block_based_bloom_filter: true,
read_amp_bytes_per_bit: 0,
compression_per_level: [
DBCompressionType::No,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Zstd,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Lz4,
],
write_buffer_size: ReadableSize::mb(1),
max_write_buffer_number: 12,
min_write_buffer_number_to_merge: 12,
max_bytes_for_level_base: ReadableSize::kb(12),
target_file_size_base: ReadableSize::kb(123),
level0_file_num_compaction_trigger: 123,
level0_slowdown_writes_trigger: 123,
level0_stop_writes_trigger: 123,
max_compaction_bytes: ReadableSize::gb(1),
compaction_pri: CompactionPriority::MinOverlappingRatio,
dynamic_level_bytes: true,
num_levels: 4,
max_bytes_for_level_multiplier: 8,
compaction_style: DBCompactionStyle::Universal,
disable_auto_compactions: true,
soft_pending_compaction_bytes_limit: ReadableSize::gb(12),
hard_pending_compaction_bytes_limit: ReadableSize::gb(12),
force_consistency_checks: true,
titan: TitanCfConfig {
min_blob_size: ReadableSize(1024), // default value
blob_file_compression: CompressionType::Lz4,
blob_cache_size: ReadableSize::mb(0),
min_gc_batch_size: ReadableSize::mb(16),
max_gc_batch_size: ReadableSize::mb(64),
discardable_ratio: 0.5,
sample_ratio: 0.1,
merge_small_file_threshold: ReadableSize::mb(8),
blob_run_mode: BlobRunMode::ReadOnly,
level_merge: false,
range_merge: true,
max_sorted_runs: 20,
gc_merge_rewrite: false,
},
prop_size_index_distance: 4000000,
prop_keys_index_distance: 40000,
enable_doubly_skiplist: true,
enable_compaction_guard: false,
compaction_guard_min_output_file_size: ReadableSize::mb(12),
compaction_guard_max_output_file_size: ReadableSize::mb(34),
bottommost_level_compression: DBCompressionType::Zstd,
bottommost_zstd_compression_dict_size: 0,
bottommost_zstd_compression_sample_size: 0,
},
lockcf: LockCfConfig {
block_size: ReadableSize::kb(12),
block_cache_size: ReadableSize::gb(12),
disable_block_cache: false,
cache_index_and_filter_blocks: false,
pin_l0_filter_and_index_blocks: false,
use_bloom_filter: false,
optimize_filters_for_hits: true,
whole_key_filtering: true,
bloom_filter_bits_per_key: 123,
block_based_bloom_filter: true,
read_amp_bytes_per_bit: 0,
compression_per_level: [
DBCompressionType::No,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Zstd,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Lz4,
],
write_buffer_size: ReadableSize::mb(1),
max_write_buffer_number: 12,
min_write_buffer_number_to_merge: 12,
max_bytes_for_level_base: ReadableSize::kb(12),
target_file_size_base: ReadableSize::kb(123),
level0_file_num_compaction_trigger: 123,
level0_slowdown_writes_trigger: 123,
level0_stop_writes_trigger: 123,
max_compaction_bytes: ReadableSize::gb(1),
compaction_pri: CompactionPriority::MinOverlappingRatio,
dynamic_level_bytes: true,
num_levels: 4,
max_bytes_for_level_multiplier: 8,
compaction_style: DBCompactionStyle::Universal,
disable_auto_compactions: true,
soft_pending_compaction_bytes_limit: ReadableSize::gb(12),
hard_pending_compaction_bytes_limit: ReadableSize::gb(12),
force_consistency_checks: true,
titan: TitanCfConfig {
min_blob_size: ReadableSize(1024), // default value
blob_file_compression: CompressionType::Lz4,
blob_cache_size: ReadableSize::mb(0),
min_gc_batch_size: ReadableSize::mb(16),
max_gc_batch_size: ReadableSize::mb(64),
discardable_ratio: 0.5,
sample_ratio: 0.1,
merge_small_file_threshold: ReadableSize::mb(8),
blob_run_mode: BlobRunMode::ReadOnly, // default value
level_merge: false,
range_merge: true,
max_sorted_runs: 20,
gc_merge_rewrite: false,
},
prop_size_index_distance: 4000000,
prop_keys_index_distance: 40000,
enable_doubly_skiplist: true,
enable_compaction_guard: true,
compaction_guard_min_output_file_size: ReadableSize::mb(12),
compaction_guard_max_output_file_size: ReadableSize::mb(34),
bottommost_level_compression: DBCompressionType::Disable,
bottommost_zstd_compression_dict_size: 0,
bottommost_zstd_compression_sample_size: 0,
},
raftcf: RaftCfConfig {
block_size: ReadableSize::kb(12),
block_cache_size: ReadableSize::gb(12),
disable_block_cache: false,
cache_index_and_filter_blocks: false,
pin_l0_filter_and_index_blocks: false,
use_bloom_filter: false,
optimize_filters_for_hits: false,
whole_key_filtering: true,
bloom_filter_bits_per_key: 123,
block_based_bloom_filter: true,
read_amp_bytes_per_bit: 0,
compression_per_level: [
DBCompressionType::No,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Zstd,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Lz4,
],
write_buffer_size: ReadableSize::mb(1),
max_write_buffer_number: 12,
min_write_buffer_number_to_merge: 12,
max_bytes_for_level_base: ReadableSize::kb(12),
target_file_size_base: ReadableSize::kb(123),
level0_file_num_compaction_trigger: 123,
level0_slowdown_writes_trigger: 123,
level0_stop_writes_trigger: 123,
max_compaction_bytes: ReadableSize::gb(1),
compaction_pri: CompactionPriority::MinOverlappingRatio,
dynamic_level_bytes: true,
num_levels: 4,
max_bytes_for_level_multiplier: 8,
compaction_style: DBCompactionStyle::Universal,
disable_auto_compactions: true,
soft_pending_compaction_bytes_limit: ReadableSize::gb(12),
hard_pending_compaction_bytes_limit: ReadableSize::gb(12),
force_consistency_checks: true,
titan: TitanCfConfig {
min_blob_size: ReadableSize(1024), // default value
blob_file_compression: CompressionType::Lz4,
blob_cache_size: ReadableSize::mb(0),
min_gc_batch_size: ReadableSize::mb(16),
max_gc_batch_size: ReadableSize::mb(64),
discardable_ratio: 0.5,
sample_ratio: 0.1,
merge_small_file_threshold: ReadableSize::mb(8),
blob_run_mode: BlobRunMode::ReadOnly, // default value
level_merge: false,
range_merge: true,
max_sorted_runs: 20,
gc_merge_rewrite: false,
},
prop_size_index_distance: 4000000,
prop_keys_index_distance: 40000,
enable_doubly_skiplist: true,
enable_compaction_guard: true,
compaction_guard_min_output_file_size: ReadableSize::mb(12),
compaction_guard_max_output_file_size: ReadableSize::mb(34),
bottommost_level_compression: DBCompressionType::Disable,
bottommost_zstd_compression_dict_size: 0,
bottommost_zstd_compression_sample_size: 0,
},
titan: titan_db_config.clone(),
};
value.raftdb = RaftDbConfig {
info_log_level: LogLevel::Info,
wal_recovery_mode: DBRecoveryMode::SkipAnyCorruptedRecords,
wal_dir: "/var".to_owned(),
wal_ttl_seconds: 1,
wal_size_limit: ReadableSize::kb(12),
max_total_wal_size: ReadableSize::gb(1),
max_background_jobs: 12,
max_background_flushes: 4,
max_manifest_file_size: ReadableSize::mb(12),
create_if_missing: false,
max_open_files: 12_345,
enable_statistics: false,
stats_dump_period: ReadableDuration::minutes(12),
compaction_readahead_size: ReadableSize::kb(1),
info_log_max_size: ReadableSize::kb(1),
info_log_roll_time: ReadableDuration::secs(1),
info_log_keep_log_file_num: 1000,
info_log_dir: "/var".to_owned(),
max_sub_compactions: 12,
writable_file_max_buffer_size: ReadableSize::mb(12),
use_direct_io_for_flush_and_compaction: true,
enable_pipelined_write: false,
enable_unordered_write: false,
allow_concurrent_memtable_write: false,
bytes_per_sync: ReadableSize::mb(1),
wal_bytes_per_sync: ReadableSize::kb(32),
defaultcf: RaftDefaultCfConfig {
block_size: ReadableSize::kb(12),
block_cache_size: ReadableSize::gb(12),
disable_block_cache: false,
cache_index_and_filter_blocks: false,
pin_l0_filter_and_index_blocks: false,
use_bloom_filter: false,
optimize_filters_for_hits: false,
whole_key_filtering: true,
bloom_filter_bits_per_key: 123,
block_based_bloom_filter: true,
read_amp_bytes_per_bit: 0,
compression_per_level: [
DBCompressionType::No,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Zstd,
DBCompressionType::No,
DBCompressionType::Zstd,
DBCompressionType::Lz4,
],
write_buffer_size: ReadableSize::mb(1),
max_write_buffer_number: 12,
min_write_buffer_number_to_merge: 12,
max_bytes_for_level_base: ReadableSize::kb(12),
target_file_size_base: ReadableSize::kb(123),
level0_file_num_compaction_trigger: 123,
level0_slowdown_writes_trigger: 123,
level0_stop_writes_trigger: 123,
max_compaction_bytes: ReadableSize::gb(1),
compaction_pri: CompactionPriority::MinOverlappingRatio,
dynamic_level_bytes: true,
num_levels: 4,
max_bytes_for_level_multiplier: 8,
compaction_style: DBCompactionStyle::Universal,
disable_auto_compactions: true,
soft_pending_compaction_bytes_limit: ReadableSize::gb(12),
hard_pending_compaction_bytes_limit: ReadableSize::gb(12),
force_consistency_checks: true,
titan: titan_cf_config,
prop_size_index_distance: 4000000,
prop_keys_index_distance: 40000,
enable_doubly_skiplist: true,
enable_compaction_guard: true,
compaction_guard_min_output_file_size: ReadableSize::mb(12),
compaction_guard_max_output_file_size: ReadableSize::mb(34),
bottommost_level_compression: DBCompressionType::Disable,
bottommost_zstd_compression_dict_size: 0,
bottommost_zstd_compression_sample_size: 0,
},
titan: titan_db_config,
};
value.raft_engine.enable = true;
value.raft_engine.mut_config().dir = "test-dir".to_owned();
value.storage = StorageConfig {
data_dir: "/var".to_owned(),
gc_ratio_threshold: 1.2,
max_key_size: 8192,
scheduler_concurrency: 123,
scheduler_worker_pool_size: 1,
scheduler_pending_write_threshold: ReadableSize::kb(123),
reserve_space: ReadableSize::gb(10),
enable_async_apply_prewrite: true,
enable_ttl: true,
ttl_check_poll_interval: ReadableDuration::hours(0),
block_cache: BlockCacheConfig {
shared: true,
capacity: OptionReadableSize(Some(ReadableSize::gb(40))),
num_shard_bits: 10,
strict_capacity_limit: true,
high_pri_pool_ratio: 0.8,
memory_allocator: Some(String::from("nodump")),
},
io_rate_limit: IORateLimitConfig {
max_bytes_per_sec: ReadableSize::mb(1000),
mode: IORateLimitMode::AllIo,
strict: true,
foreground_read_priority: IOPriority::Low,
foreground_write_priority: IOPriority::Low,
flush_priority: IOPriority::Low,
level_zero_compaction_priority: IOPriority::Low,
compaction_priority: IOPriority::High,
replication_priority: IOPriority::Low,
load_balance_priority: IOPriority::Low,
gc_priority: IOPriority::High,
import_priority: IOPriority::High,
export_priority: IOPriority::High,
other_priority: IOPriority::Low,
},
};
value.coprocessor = CopConfig {
split_region_on_table: false,
batch_split_limit: 1,
region_max_size: ReadableSize::mb(12),
region_split_size: ReadableSize::mb(12),
region_max_keys: 100000,
region_split_keys: 100000,
consistency_check_method: ConsistencyCheckMethod::Raw,
perf_level: PerfLevel::EnableTime,
};
let mut cert_allowed_cn = HashSet::default();
cert_allowed_cn.insert("example.tikv.com".to_owned());
value.security = SecurityConfig {
ca_path: "invalid path".to_owned(),
cert_path: "invalid path".to_owned(),
key_path: "invalid path".to_owned(),
override_ssl_target: "".to_owned(),
cert_allowed_cn,
redact_info_log: Some(true),
encryption: EncryptionConfig {
data_encryption_method: EncryptionMethod::Aes128Ctr,
data_key_rotation_period: ReadableDuration::days(14),
enable_file_dictionary_log: false,
file_dictionary_rewrite_threshold: 123456,
master_key: MasterKeyConfig::File {
config: FileConfig {
path: "/master/key/path".to_owned(),
},
},
previous_master_key: MasterKeyConfig::Plaintext,
},
};
value.backup = BackupConfig {
num_threads: 456,
batch_size: 7,
sst_max_size: ReadableSize::mb(789),
};
value.import = ImportConfig {
num_threads: 123,
stream_channel_window: 123,
import_mode_timeout: ReadableDuration::secs(1453),
};
value.panic_when_unexpected_key_or_data = true;
value.enable_io_snoop = false;
value.gc = GcConfig {
ratio_threshold: 1.2,
batch_keys: 256,
max_write_bytes_per_sec: ReadableSize::mb(10),
enable_compaction_filter: false,
compaction_filter_skip_version_check: true,
};
value.pessimistic_txn = PessimisticTxnConfig {
wait_for_lock_timeout: ReadableDuration::millis(10),
wake_up_delay_duration: ReadableDuration::millis(100),
pipelined: false,
};
value.cdc = CdcConfig {
min_ts_interval: ReadableDuration::secs(4),
old_value_cache_size: 512,
hibernate_regions_compatible: false,
incremental_scan_threads: 3,
incremental_scan_concurrency: 4,
incremental_scan_speed_limit: ReadableSize(7),
old_value_cache_memory_quota: ReadableSize::mb(14),
sink_memory_quota: ReadableSize::mb(7),
};
value.resolved_ts = ResolvedTsConfig {
enable: true,
advance_ts_interval: ReadableDuration::secs(5),
scan_lock_pool_size: 1,
};
let custom = read_file_in_project_dir("integrations/config/test-custom.toml");
let load = toml::from_str(&custom).unwrap();
if value != load {
diff_config(&value, &load);
}
let dump = toml::to_string_pretty(&load).unwrap();
let load_from_dump = toml::from_str(&dump).unwrap();
if load != load_from_dump {
diff_config(&load, &load_from_dump);
}
}
fn diff_config(lhs: &TiKvConfig, rhs: &TiKvConfig) {
let lhs_str = format!("{:?}", lhs);
let rhs_str = format!("{:?}", rhs);
fn find_index(l: impl Iterator<Item = (u8, u8)>) -> usize {
let it = l
.enumerate()
.take_while(|(_, (l, r))| l == r)
.filter(|(_, (l, _))| *l == b' ');
let mut last = None;
let mut second = None;
for a in it {
second = last;
last = Some(a);
}
second.map_or(0, |(i, _)| i)
}
let cpl = find_index(lhs_str.bytes().zip(rhs_str.bytes()));
let csl = find_index(lhs_str.bytes().rev().zip(rhs_str.bytes().rev()));
if cpl + csl > lhs_str.len() || cpl + csl > rhs_str.len() {
assert_eq!(lhs, rhs);
}
let lhs_diff = String::from_utf8_lossy(&lhs_str.as_bytes()[cpl..lhs_str.len() - csl]);
let rhs_diff = String::from_utf8_lossy(&rhs_str.as_bytes()[cpl..rhs_str.len() - csl]);
panic!(
"config not matched:\nlhs: ...{}...,\nrhs: ...{}...",
lhs_diff, rhs_diff
);
}
#[test]
fn test_serde_default_config() {
let cfg: TiKvConfig = toml::from_str("").unwrap();
assert_eq!(cfg, TiKvConfig::default());
let content = read_file_in_project_dir("integrations/config/test-default.toml");
let cfg: TiKvConfig = toml::from_str(&content).unwrap();
assert_eq!(cfg, TiKvConfig::default());
}
#[test]
fn test_readpool_default_config() {
let content = r#"
[readpool.unified]
max-thread-count = 1
"#;
let cfg: TiKvConfig = toml::from_str(content).unwrap();
let mut expected = TiKvConfig::default();
expected.readpool.unified.max_thread_count = 1;
assert_eq!(cfg, expected);
}
#[test]
fn test_do_not_use_unified_readpool_with_legacy_config() {
let content = r#"
[readpool.storage]
normal-concurrency = 1
[readpool.coprocessor]
normal-concurrency = 1
"#;
let cfg: TiKvConfig = toml::from_str(content).unwrap();
assert!(!cfg.readpool.is_unified_pool_enabled());
}
#[test]
fn test_block_cache_backward_compatible() {
    let raw = read_file_in_project_dir("integrations/config/test-cache-compatible.toml");
    let mut cfg: TiKvConfig = toml::from_str(&raw).unwrap();
    // The legacy file enables the shared cache without an explicit capacity.
    assert!(cfg.storage.block_cache.shared);
    assert!(cfg.storage.block_cache.capacity.0.is_none());
    // The compatibility pass must derive the capacity from the per-CF sizes.
    cfg.compatible_adjust();
    let derived = cfg.storage.block_cache.capacity.0;
    assert!(derived.is_some());
    let legacy_total = cfg.rocksdb.defaultcf.block_cache_size.0
        + cfg.rocksdb.writecf.block_cache_size.0
        + cfg.rocksdb.lockcf.block_cache_size.0
        + cfg.raftdb.defaultcf.block_cache_size.0;
    assert_eq!(derived.unwrap().0, legacy_total);
}
| 42.528967 | 96 | 0.647773 |
1877c8d31f211ab004b244797c2a462548caae5a | 9,545 | //! A resource manager to load textures.
use image::{self, DynamicImage};
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::path::Path;
use std::rc::Rc;
use context::{Context, Texture};
#[path = "../error.rs"]
mod error;
/// Wrapping parameters for a texture.
///
/// Each variant maps to the matching raw context wrap-mode constant via
/// the `u32` conversion below.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum TextureWrapping {
    /// Repeats the texture when a texture coordinate is out of bounds.
    Repeat,
    /// Repeats the mirrored texture when a texture coordinate is out of bounds.
    MirroredRepeat,
    /// Repeats the nearest edge point texture color when a texture coordinate is out of bounds.
    ClampToEdge,
}
/// Conversion to the raw context wrap-mode constant.
///
/// Implemented as `From` rather than `Into`, per Rust convention (clippy's
/// `from_over_into`): the standard blanket `impl Into` keeps every existing
/// `wrapping.into()` call site working unchanged.
impl From<TextureWrapping> for u32 {
    #[inline]
    fn from(wrapping: TextureWrapping) -> u32 {
        match wrapping {
            TextureWrapping::Repeat => Context::REPEAT,
            TextureWrapping::MirroredRepeat => Context::MIRRORED_REPEAT,
            TextureWrapping::ClampToEdge => Context::CLAMP_TO_EDGE,
        }
    }
}
impl Texture {
    /// Allocates a new texture on the gpu. The texture is not configured.
    pub fn new() -> Rc<Texture> {
        let texture = verify!(Context::get()
            .create_texture()
            .expect("Could not create texture."));
        Rc::new(texture)
    }

    // Shared implementation of the per-axis wrapping setters: binds this
    // texture and writes the wrap mode for the given axis parameter.
    fn set_wrapping(&mut self, axis: u32, wrapping: TextureWrapping) {
        let context = Context::get();
        verify!(context.bind_texture(Context::TEXTURE_2D, Some(&self)));
        let mode: u32 = wrapping.into();
        verify!(context.tex_parameteri(Context::TEXTURE_2D, axis, mode as i32));
    }

    /// Sets the wrapping of this texture along the `s` texture coordinate.
    pub fn set_wrapping_s(&mut self, wrapping: TextureWrapping) {
        self.set_wrapping(Context::TEXTURE_WRAP_S, wrapping)
    }

    /// Sets the wrapping of this texture along the `t` texture coordinate.
    pub fn set_wrapping_t(&mut self, wrapping: TextureWrapping) {
        self.set_wrapping(Context::TEXTURE_WRAP_T, wrapping)
    }
}
impl Drop for Texture {
    fn drop(&mut self) {
        // Delete the GPU object only if the context still considers this
        // handle a live texture — presumably guards against the context
        // having been torn down (or the handle already freed) first.
        unsafe {
            let ctxt = Context::get();
            if ctxt.is_texture(Some(self)) {
                verify!(Context::get().delete_texture(Some(self)));
            }
        }
    }
}
// Per-thread singleton holding the global texture manager; accessed through
// `TextureManager::get_global_manager`.
thread_local!(static KEY_TEXTURE_MANAGER: RefCell<TextureManager> = RefCell::new(TextureManager::new()));
/// The texture manager.
///
/// It keeps a cache of already-loaded textures, and can load new textures.
pub struct TextureManager {
    // Fallback 1x1 all-white texture returned by `get_default`.
    default_texture: Rc<Texture>,
    // Cache keyed by user-supplied name, storing each texture with its
    // (width, height) in pixels.
    textures: HashMap<String, (Rc<Texture>, (u32, u32))>,
}
impl TextureManager {
    /// Creates a new texture manager.
    ///
    /// Also uploads the default texture: a 1x1 all-white RGB image. (Only 3
    /// of the 12 supplied bytes are needed for a 1x1 RGB upload; the extras
    /// are ignored.)
    pub fn new() -> TextureManager {
        let ctxt = Context::get();
        let default_tex = Texture::new();
        let default_tex_pixels: [u8; 12] = [255; 12];
        verify!(ctxt.active_texture(Context::TEXTURE0));
        verify!(ctxt.bind_texture(Context::TEXTURE_2D, Some(&*default_tex)));
        // verify!(ctxt.tex_parameteri(Context::TEXTURE_2D, Context::TEXTURE_BASE_LEVEL, 0));
        // verify!(ctxt.tex_parameteri(Context::TEXTURE_2D, Context::TEXTURE_MAX_LEVEL, 0));
        verify!(ctxt.tex_parameteri(
            Context::TEXTURE_2D,
            Context::TEXTURE_WRAP_S,
            Context::REPEAT as i32
        ));
        verify!(ctxt.tex_parameteri(
            Context::TEXTURE_2D,
            Context::TEXTURE_WRAP_T,
            Context::REPEAT as i32
        ));
        verify!(ctxt.tex_parameteri(
            Context::TEXTURE_2D,
            Context::TEXTURE_MAG_FILTER,
            Context::LINEAR as i32
        ));
        verify!(ctxt.tex_parameteri(
            Context::TEXTURE_2D,
            Context::TEXTURE_MIN_FILTER,
            Context::LINEAR_MIPMAP_LINEAR as i32
        ));
        verify!(ctxt.tex_image2d(
            Context::TEXTURE_2D,
            0,
            Context::RGB as i32,
            1,
            1,
            0,
            Context::RGB,
            Some(&default_tex_pixels)
        ));
        TextureManager {
            textures: HashMap::new(),
            default_texture: default_tex,
        }
    }
    /// Mutably applies a function to the texture manager.
    pub fn get_global_manager<T, F: FnMut(&mut TextureManager) -> T>(mut f: F) -> T {
        KEY_TEXTURE_MANAGER.with(|manager| f(&mut *manager.borrow_mut()))
    }
    /// Gets the default, completely white, texture.
    pub fn get_default(&self) -> Rc<Texture> {
        self.default_texture.clone()
    }
    /// Get a texture with the specified name. Returns `None` if the texture is not registered.
    pub fn get(&mut self, name: &str) -> Option<Rc<Texture>> {
        self.textures.get(&name.to_string()).map(|t| t.0.clone())
    }
    /// Get a texture (and its size) with the specified name. Returns `None` if the texture is not registered.
    pub fn get_with_size(&mut self, name: &str) -> Option<(Rc<Texture>, (u32, u32))> {
        self.textures
            .get(&name.to_string())
            .map(|t| (t.0.clone(), t.1))
    }
    /// Allocates a new texture that is not yet configured.
    ///
    /// If a texture with same name exists, nothing is created and the old texture is returned.
    /// The cached size for a fresh entry is (0, 0) until real data is uploaded.
    pub fn add_empty(&mut self, name: &str) -> Rc<Texture> {
        match self.textures.entry(name.to_string()) {
            Entry::Occupied(entry) => entry.into_mut().0.clone(),
            Entry::Vacant(entry) => entry.insert((Texture::new(), (0, 0))).0.clone(),
        }
    }
    /// Allocates a new texture read from a `DynamicImage` object.
    ///
    /// If a texture with same name exists, nothing is created and the old texture is returned.
    pub fn add_image(&mut self, dynamic_image: DynamicImage, name: &str) -> Rc<Texture> {
        self.textures
            .entry(name.to_string())
            .or_insert_with(|| TextureManager::load_texture_into_context(dynamic_image).unwrap())
            .0
            .clone()
    }
    /// Allocates a new texture and tries to decode it from bytes array
    /// Panics if unable to do so
    /// If a texture with same name exists, nothing is created and the old texture is returned.
    pub fn add_image_from_memory(&mut self, image_data: &[u8], name: &str) -> Rc<Texture> {
        self.add_image(
            image::load_from_memory(image_data).expect("Invalid data"),
            name,
        )
    }
    /// Allocates a new texture read from a file.
    ///
    /// Panics (reporting the path) if the file cannot be opened or uploaded.
    fn load_texture_from_file(path: &Path) -> (Rc<Texture>, (u32, u32)) {
        TextureManager::load_texture_into_context(image::open(path).unwrap())
            .expect(path.to_str().unwrap())
    }
    // Uploads a decoded image to the GPU and returns the texture with its
    // pixel dimensions. Only 8-bit RGB and RGBA images are supported; any
    // other pixel format yields an `Err`.
    fn load_texture_into_context(
        dynamic_image: DynamicImage,
    ) -> Result<(Rc<Texture>, (u32, u32)), &'static str> {
        let ctxt = Context::get();
        let tex = Texture::new();
        let width;
        let height;
        unsafe {
            verify!(ctxt.active_texture(Context::TEXTURE0));
            verify!(ctxt.bind_texture(Context::TEXTURE_2D, Some(&*tex)));
            match dynamic_image {
                DynamicImage::ImageRgb8(image) => {
                    width = image.width();
                    height = image.height();
                    verify!(ctxt.tex_image2d(
                        Context::TEXTURE_2D,
                        0,
                        Context::RGB as i32,
                        image.width() as i32,
                        image.height() as i32,
                        0,
                        Context::RGB,
                        Some(&image.into_raw()[..])
                    ));
                }
                DynamicImage::ImageRgba8(image) => {
                    width = image.width();
                    height = image.height();
                    verify!(ctxt.tex_image2d(
                        Context::TEXTURE_2D,
                        0,
                        Context::RGBA as i32,
                        image.width() as i32,
                        image.height() as i32,
                        0,
                        Context::RGBA,
                        Some(&image.into_raw()[..])
                    ));
                }
                _ => {
                    return Err("Failed to load texture, unsuported pixel format.");
                }
            }
            verify!(ctxt.tex_parameteri(
                Context::TEXTURE_2D,
                Context::TEXTURE_WRAP_S,
                Context::CLAMP_TO_EDGE as i32
            ));
            verify!(ctxt.tex_parameteri(
                Context::TEXTURE_2D,
                Context::TEXTURE_WRAP_T,
                Context::CLAMP_TO_EDGE as i32
            ));
            verify!(ctxt.tex_parameteri(
                Context::TEXTURE_2D,
                Context::TEXTURE_MIN_FILTER,
                Context::LINEAR as i32
            ));
            verify!(ctxt.tex_parameteri(
                Context::TEXTURE_2D,
                Context::TEXTURE_MAG_FILTER,
                Context::LINEAR as i32
            ));
        }
        Ok((tex, (width, height)))
    }
    /// Allocates a new texture read from a file. If a texture with same name exists, nothing is
    /// created and the old texture is returned.
    pub fn add(&mut self, path: &Path, name: &str) -> Rc<Texture> {
        self.textures
            .entry(name.to_string())
            .or_insert_with(|| TextureManager::load_texture_from_file(path))
            .0
            .clone()
    }
}
| 34.96337 | 110 | 0.557779 |
0a65d7620728eeeca260a7bfab609416c0828c24 | 143,588 | /*!
# DX11 backend internals.
## Pipeline Layout
In D3D11 there are tables of CBVs, SRVs, UAVs, and samplers.
Each descriptor type can take 1 or two of those entry points.
The descriptor pool is just and array of handles, belonging to descriptor set 1, descriptor set 2, etc.
Each range of descriptors in a descriptor set area of the pool is split into shader stages,
which in turn is split into CBS/SRV/UAV/Sampler parts. That allows binding a descriptor set as a list
of continuous descriptor ranges (per type, per shader stage).
!*/
//#[deny(missing_docs)]
#[macro_use]
extern crate bitflags;
#[macro_use]
extern crate log;
use auxil::ShaderStage;
use hal::{
adapter, buffer, command, format, image, memory, pass, pso, query, queue, window, DrawCount,
IndexCount, InstanceCount, Limits, TaskCount, VertexCount, VertexOffset, WorkGroupCount,
};
use range_alloc::RangeAllocator;
use crate::{
device::DepthStencilState,
debug::set_debug_name,
};
use winapi::{shared::{
dxgi::{IDXGIAdapter, IDXGIFactory, IDXGISwapChain},
dxgiformat,
minwindef::{FALSE, HMODULE, UINT},
windef::{HWND, RECT},
winerror,
}, um::{d3d11, d3d11_1, d3dcommon, winuser::GetClientRect}, Interface as _};
use wio::com::ComPtr;
use arrayvec::ArrayVec;
use parking_lot::{Condvar, Mutex, RwLock};
use std::{borrow::Borrow, cell::RefCell, fmt, mem, ops::Range, os::raw::c_void, ptr, sync::{Arc, Weak}};
// Creates an RAII debug scope (named with `format_args!`-style arguments) on
// the given context in debug builds; expands to `()` in release builds.
macro_rules! debug_scope {
    ($context:expr, $($arg:tt)+) => ({
        #[cfg(debug_assertions)]
        {
            $crate::debug::DebugScope::with_name(
                $context,
                format_args!($($arg)+),
            )
        }
        #[cfg(not(debug_assertions))]
        {
            ()
        }
    });
}
// Emits a one-shot debug marker string on the given context in debug builds;
// expands to nothing in release builds.
macro_rules! debug_marker {
    ($context:expr, $($arg:tt)+) => ({
        #[cfg(debug_assertions)]
        {
            $crate::debug::debug_marker(
                $context,
                &format!($($arg)+),
            );
        }
    });
}
mod conv;
mod debug;
mod device;
mod dxgi;
mod internal;
mod shader;
// Signature of `D3D11CreateDevice`, resolved dynamically from `d3d11.dll`
// so the backend does not link against it at build time.
type CreateFun = unsafe extern "system" fn(
    *mut IDXGIAdapter,
    UINT,
    HMODULE,
    UINT,
    *const UINT,
    UINT,
    UINT,
    *mut *mut d3d11::ID3D11Device,
    *mut UINT,
    *mut *mut d3d11::ID3D11DeviceContext,
) -> winerror::HRESULT;
// Parameters describing a view to be created over a D3D11 resource
// (format, kind, and the mip-level / array-layer sub-ranges it covers).
#[derive(Clone)]
pub(crate) struct ViewInfo {
    // Raw pointer to the underlying resource; lifetime managed elsewhere.
    resource: *mut d3d11::ID3D11Resource,
    kind: image::Kind,
    caps: image::ViewCapabilities,
    view_kind: image::ViewKind,
    format: dxgiformat::DXGI_FORMAT,
    levels: Range<image::Level>,
    layers: Range<image::Layer>,
}
impl fmt::Debug for ViewInfo {
    // The raw resource pointer makes a derived `Debug` unhelpful; print the
    // type name only.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "ViewInfo")
    }
}
#[derive(Debug)]
pub struct Instance {
    pub(crate) factory: ComPtr<IDXGIFactory>,
    pub(crate) dxgi_version: dxgi::DxgiVersion,
    // Kept alive (and shared with physical devices) so dynamically resolved
    // function pointers remain valid.
    library_d3d11: Arc<libloading::Library>,
    library_dxgi: libloading::Library,
}
// SAFETY(review): relies on the wrapped DXGI/COM objects being usable across
// threads (DXGI factories are documented free-threaded) — confirm.
unsafe impl Send for Instance {}
unsafe impl Sync for Instance {}
impl Instance {
    /// Builds a `Surface` that will present into the given Win32 window handle.
    pub fn create_surface_from_hwnd(&self, hwnd: *mut c_void) -> Surface {
        let wnd_handle = hwnd as *mut _;
        Surface {
            factory: self.factory.clone(),
            wnd_handle,
            presentation: None,
        }
    }
}
/// Maps a D3D feature level to the set of `hal::Features` the device exposes.
fn get_features(
    _device: ComPtr<d3d11::ID3D11Device>,
    feature_level: d3dcommon::D3D_FEATURE_LEVEL,
) -> hal::Features {
    // Features assumed available on every supported feature level.
    let mut features = hal::Features::empty()
        | hal::Features::ROBUST_BUFFER_ACCESS // TODO: verify
        | hal::Features::INSTANCE_RATE
        | hal::Features::INDEPENDENT_BLENDING // TODO: verify
        | hal::Features::SAMPLER_BORDER_COLOR
        | hal::Features::SAMPLER_MIP_LOD_BIAS
        | hal::Features::SAMPLER_MIRROR_CLAMP_EDGE
        | hal::Features::SAMPLER_ANISOTROPY
        | hal::Features::DEPTH_CLAMP
        | hal::Features::NDC_Y_UP;
    // Feature level 10.0 and above.
    features.set(
        hal::Features::TEXTURE_DESCRIPTOR_ARRAY
            | hal::Features::FULL_DRAW_INDEX_U32
            | hal::Features::GEOMETRY_SHADER,
        feature_level >= d3dcommon::D3D_FEATURE_LEVEL_10_0
    );
    // Feature level 10.1 and above.
    features.set(
        hal::Features::IMAGE_CUBE_ARRAY,
        feature_level >= d3dcommon::D3D_FEATURE_LEVEL_10_1
    );
    // Feature level 11.0 and above.
    features.set(
        hal::Features::VERTEX_STORES_AND_ATOMICS
            | hal::Features::FRAGMENT_STORES_AND_ATOMICS
            | hal::Features::FORMAT_BC
            | hal::Features::TESSELLATION_SHADER
            | hal::Features::DRAW_INDIRECT_FIRST_INSTANCE,
        feature_level >= d3dcommon::D3D_FEATURE_LEVEL_11_0
    );
    // Feature level 11.1 and above.
    features.set(
        hal::Features::LOGIC_OP, // TODO: Optional at 10_0 -> 11_0
        feature_level >= d3dcommon::D3D_FEATURE_LEVEL_11_1
    );
    features
}
// Size in bytes of the emulated push-constant space (reported in `Limits`).
const MAX_PUSH_CONSTANT_SIZE: usize = 256;
/// Derives `hal::Limits` from the D3D feature level, following the published
/// D3D11 feature-level capability tables.
fn get_limits(feature_level: d3dcommon::D3D_FEATURE_LEVEL) -> hal::Limits {
    // Maximum 1D/2D texture dimension per feature level.
    let max_texture_uv_dimension = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1 | d3dcommon::D3D_FEATURE_LEVEL_9_2 => 2048,
        d3dcommon::D3D_FEATURE_LEVEL_9_3 => 4096,
        d3dcommon::D3D_FEATURE_LEVEL_10_0 | d3dcommon::D3D_FEATURE_LEVEL_10_1 => 8192,
        d3dcommon::D3D_FEATURE_LEVEL_11_0 | d3dcommon::D3D_FEATURE_LEVEL_11_1 | _ => 16384,
    };
    // Maximum 3D texture dimension per feature level.
    let max_texture_w_dimension = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1
        | d3dcommon::D3D_FEATURE_LEVEL_9_2
        | d3dcommon::D3D_FEATURE_LEVEL_9_3 => 256,
        d3dcommon::D3D_FEATURE_LEVEL_10_0
        | d3dcommon::D3D_FEATURE_LEVEL_10_1
        | d3dcommon::D3D_FEATURE_LEVEL_11_0
        | d3dcommon::D3D_FEATURE_LEVEL_11_1
        | _ => 2048,
    };
    // Maximum cube-map face dimension per feature level.
    let max_texture_cube_dimension = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1
        | d3dcommon::D3D_FEATURE_LEVEL_9_2 => 512,
        _ => max_texture_uv_dimension,
    };
    // UAV register budget: a couple reserved for images, the rest for buffers.
    let max_image_uav = 2;
    let max_buffer_uav = d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT as usize - max_image_uav;
    let max_input_slots = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1
        | d3dcommon::D3D_FEATURE_LEVEL_9_2
        | d3dcommon::D3D_FEATURE_LEVEL_9_3
        | d3dcommon::D3D_FEATURE_LEVEL_10_0 => 16,
        d3dcommon::D3D_FEATURE_LEVEL_10_1
        | d3dcommon::D3D_FEATURE_LEVEL_11_0
        | d3dcommon::D3D_FEATURE_LEVEL_11_1
        | _ => 32,
    };
    let max_color_attachments = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1
        | d3dcommon::D3D_FEATURE_LEVEL_9_2
        | d3dcommon::D3D_FEATURE_LEVEL_9_3
        | d3dcommon::D3D_FEATURE_LEVEL_10_0 => 4,
        d3dcommon::D3D_FEATURE_LEVEL_10_1
        | d3dcommon::D3D_FEATURE_LEVEL_11_0
        | d3dcommon::D3D_FEATURE_LEVEL_11_1
        | _ => 8,
    };
    // https://docs.microsoft.com/en-us/windows/win32/api/d3d11/nf-d3d11-id3d11device-checkmultisamplequalitylevels#remarks
    // for more information.
    let max_samples = match feature_level {
        d3dcommon::D3D_FEATURE_LEVEL_9_1
        | d3dcommon::D3D_FEATURE_LEVEL_9_2
        | d3dcommon::D3D_FEATURE_LEVEL_9_3
        | d3dcommon::D3D_FEATURE_LEVEL_10_0 => 0b0001, // Conservative, MSAA isn't required.
        d3dcommon::D3D_FEATURE_LEVEL_10_1 => 0b0101, // Optimistic, 4xMSAA is required on all formats _but_ RGBA32.
        d3dcommon::D3D_FEATURE_LEVEL_11_0
        | d3dcommon::D3D_FEATURE_LEVEL_11_1
        | _ => 0b1101, // Optimistic, 8xMSAA and 4xMSAA is required on all formats _but_ RGBA32 which requires 4x.
    };
    let max_constant_buffers = d3d11::D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT - 1;
    hal::Limits {
        max_image_1d_size: max_texture_uv_dimension,
        max_image_2d_size: max_texture_uv_dimension,
        max_image_3d_size: max_texture_w_dimension,
        max_image_cube_size: max_texture_cube_dimension,
        max_image_array_layers: max_texture_cube_dimension as _,
        max_per_stage_descriptor_samplers: d3d11::D3D11_COMMONSHADER_SAMPLER_SLOT_COUNT as _,
        // Leave top buffer for push constants
        max_per_stage_descriptor_uniform_buffers: max_constant_buffers as _,
        max_per_stage_descriptor_storage_buffers: max_buffer_uav,
        max_per_stage_descriptor_storage_images: max_image_uav,
        max_per_stage_descriptor_sampled_images: d3d11::D3D11_COMMONSHADER_INPUT_RESOURCE_REGISTER_COUNT as _,
        max_descriptor_set_uniform_buffers_dynamic: max_constant_buffers as _,
        max_descriptor_set_storage_buffers_dynamic: 0, // TODO: Implement dynamic offsets for storage buffers
        max_texel_elements: max_texture_uv_dimension as _, //TODO
        max_patch_size: d3d11::D3D11_IA_PATCH_MAX_CONTROL_POINT_COUNT as _,
        max_viewports: d3d11::D3D11_VIEWPORT_AND_SCISSORRECT_OBJECT_COUNT_PER_PIPELINE as _,
        max_viewport_dimensions: [d3d11::D3D11_VIEWPORT_BOUNDS_MAX; 2],
        max_framebuffer_extent: hal::image::Extent {
            //TODO
            width: 4096,
            height: 4096,
            depth: 1,
        },
        max_compute_work_group_count: [
            d3d11::D3D11_CS_DISPATCH_MAX_THREAD_GROUPS_PER_DIMENSION,
            d3d11::D3D11_CS_DISPATCH_MAX_THREAD_GROUPS_PER_DIMENSION,
            d3d11::D3D11_CS_DISPATCH_MAX_THREAD_GROUPS_PER_DIMENSION,
        ],
        max_compute_work_group_invocations: d3d11::D3D11_CS_THREAD_GROUP_MAX_THREADS_PER_GROUP as _,
        max_compute_work_group_size: [
            d3d11::D3D11_CS_THREAD_GROUP_MAX_X,
            d3d11::D3D11_CS_THREAD_GROUP_MAX_Y,
            d3d11::D3D11_CS_THREAD_GROUP_MAX_Z,
        ], // TODO
        max_vertex_input_attribute_offset: 255, // TODO
        max_vertex_input_attributes: max_input_slots,
        max_vertex_input_binding_stride: d3d11::D3D11_REQ_MULTI_ELEMENT_STRUCTURE_SIZE_IN_BYTES as _,
        max_vertex_input_bindings: d3d11::D3D11_IA_VERTEX_INPUT_RESOURCE_SLOT_COUNT as _, // TODO: verify same as attributes
        max_vertex_output_components: d3d11::D3D11_VS_OUTPUT_REGISTER_COUNT as _, // TODO
        min_texel_buffer_offset_alignment: 1, // TODO
        min_uniform_buffer_offset_alignment: 16,
        min_storage_buffer_offset_alignment: 16, // TODO
        framebuffer_color_sample_counts: max_samples,
        framebuffer_depth_sample_counts: max_samples,
        framebuffer_stencil_sample_counts: max_samples,
        max_color_attachments,
        buffer_image_granularity: 1,
        non_coherent_atom_size: 1, // TODO
        max_sampler_anisotropy: 16.,
        optimal_buffer_copy_offset_alignment: 1, // TODO
        // buffer -> image and image -> buffer paths use compute shaders that, at maximum, read 4 pixels from the buffer
        // at a time, so need an alignment of at least 4.
        optimal_buffer_copy_pitch_alignment: 4,
        min_vertex_input_binding_stride_alignment: 1,
        max_push_constants_size: MAX_PUSH_CONSTANT_SIZE,
        max_uniform_buffer_range: 1 << 16,
        ..hal::Limits::default() //TODO
    }
}
/// Queries the device for per-format capabilities and translates them into
/// gfx-hal `format::Properties` for every known format (index 0 is skipped,
/// as are formats with no DXGI equivalent).
fn get_format_properties(
    device: ComPtr<d3d11::ID3D11Device>,
) -> [format::Properties; format::NUM_FORMATS] {
    let mut format_properties = [format::Properties::default(); format::NUM_FORMATS];
    for (i, props) in &mut format_properties.iter_mut().enumerate().skip(1) {
        let format: format::Format = unsafe { mem::transmute(i as u32) };
        let dxgi_format = match conv::map_format(format) {
            Some(format) => format,
            None => continue,
        };
        let mut support = d3d11::D3D11_FEATURE_DATA_FORMAT_SUPPORT {
            InFormat: dxgi_format,
            OutFormatSupport: 0,
        };
        let mut support_2 = d3d11::D3D11_FEATURE_DATA_FORMAT_SUPPORT2 {
            InFormat: dxgi_format,
            OutFormatSupport2: 0,
        };
        // First-level support query (buffer/texture/render-target bits).
        let hr = unsafe {
            device.CheckFeatureSupport(
                d3d11::D3D11_FEATURE_FORMAT_SUPPORT,
                &mut support as *mut _ as *mut _,
                mem::size_of::<d3d11::D3D11_FEATURE_DATA_FORMAT_SUPPORT>() as UINT,
            )
        };
        if hr == winerror::S_OK {
            let can_buffer = 0 != support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_BUFFER;
            let can_image = 0
                != support.OutFormatSupport
                    & (d3d11::D3D11_FORMAT_SUPPORT_TEXTURE1D
                        | d3d11::D3D11_FORMAT_SUPPORT_TEXTURE2D
                        | d3d11::D3D11_FORMAT_SUPPORT_TEXTURE3D
                        | d3d11::D3D11_FORMAT_SUPPORT_TEXTURECUBE);
            // Compressed formats are not exposed with linear tiling.
            let can_linear = can_image && !format.surface_desc().is_compressed();
            if can_image {
                props.optimal_tiling |=
                    format::ImageFeature::SAMPLED | format::ImageFeature::BLIT_SRC;
            }
            if can_linear {
                props.linear_tiling |=
                    format::ImageFeature::SAMPLED | format::ImageFeature::BLIT_SRC;
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_IA_VERTEX_BUFFER != 0 {
                props.buffer_features |= format::BufferFeature::VERTEX;
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_SHADER_SAMPLE != 0 {
                props.optimal_tiling |= format::ImageFeature::SAMPLED_LINEAR;
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_RENDER_TARGET != 0 {
                props.optimal_tiling |=
                    format::ImageFeature::COLOR_ATTACHMENT | format::ImageFeature::BLIT_DST;
                if can_linear {
                    props.linear_tiling |=
                        format::ImageFeature::COLOR_ATTACHMENT | format::ImageFeature::BLIT_DST;
                }
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_BLENDABLE != 0 {
                props.optimal_tiling |= format::ImageFeature::COLOR_ATTACHMENT_BLEND;
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_DEPTH_STENCIL != 0 {
                props.optimal_tiling |= format::ImageFeature::DEPTH_STENCIL_ATTACHMENT;
            }
            if support.OutFormatSupport & d3d11::D3D11_FORMAT_SUPPORT_SHADER_LOAD != 0 {
                //TODO: check d3d12::D3D12_FORMAT_SUPPORT2_UAV_TYPED_LOAD ?
                if can_buffer {
                    props.buffer_features |= format::BufferFeature::UNIFORM_TEXEL;
                }
            }
            // Second-level support query (UAV atomic/typed-store bits); only
            // attempted when the first query succeeded.
            let hr = unsafe {
                device.CheckFeatureSupport(
                    d3d11::D3D11_FEATURE_FORMAT_SUPPORT2,
                    &mut support_2 as *mut _ as *mut _,
                    mem::size_of::<d3d11::D3D11_FEATURE_DATA_FORMAT_SUPPORT2>() as UINT,
                )
            };
            if hr == winerror::S_OK {
                if support_2.OutFormatSupport2 & d3d11::D3D11_FORMAT_SUPPORT2_UAV_ATOMIC_ADD != 0 {
                    //TODO: other atomic flags?
                    if can_buffer {
                        props.buffer_features |= format::BufferFeature::STORAGE_TEXEL_ATOMIC;
                    }
                    if can_image {
                        props.optimal_tiling |= format::ImageFeature::STORAGE_ATOMIC;
                    }
                }
                if support_2.OutFormatSupport2 & d3d11::D3D11_FORMAT_SUPPORT2_UAV_TYPED_STORE != 0 {
                    if can_buffer {
                        props.buffer_features |= format::BufferFeature::STORAGE_TEXEL;
                    }
                    if can_image {
                        props.optimal_tiling |= format::ImageFeature::STORAGE;
                    }
                }
            }
        }
        //TODO: blits, linear tiling
    }
    format_properties
}
impl hal::Instance<Backend> for Instance {
    // Loads the DXGI factory and d3d11.dll; fails with `UnsupportedBackend`
    // when either is unavailable on this system.
    fn create(_: &str, _: u32) -> Result<Self, hal::UnsupportedBackend> {
        // TODO: get the latest factory we can find
        match dxgi::get_dxgi_factory() {
            Ok((library_dxgi, factory, dxgi_version)) => {
                info!("DXGI version: {:?}", dxgi_version);
                let library_d3d11 = Arc::new(
                    libloading::Library::new("d3d11.dll").map_err(|_| hal::UnsupportedBackend)?,
                );
                Ok(Instance {
                    factory,
                    dxgi_version,
                    library_d3d11,
                    library_dxgi,
                })
            }
            Err(hr) => {
                info!("Failed on factory creation: {:?}", hr);
                Err(hal::UnsupportedBackend)
            }
        }
    }
    // Walks the DXGI adapters, probing each one by actually creating a device
    // at its best feature level; adapters that fail device creation are skipped.
    fn enumerate_adapters(&self) -> Vec<adapter::Adapter<Backend>> {
        let mut adapters = Vec::new();
        let mut idx = 0;
        let func: libloading::Symbol<CreateFun> =
            match unsafe { self.library_d3d11.get(b"D3D11CreateDevice") } {
                Ok(func) => func,
                Err(e) => {
                    error!("Unable to get device creation function: {:?}", e);
                    return Vec::new();
                }
            };
        while let Ok((adapter, info)) =
            dxgi::get_adapter(idx, self.factory.as_raw(), self.dxgi_version)
        {
            idx += 1;
            use hal::memory::Properties;
            // TODO: move into function?
            let (device, feature_level) = {
                let feature_level = get_feature_level(&func, adapter.as_raw());
                let mut device = ptr::null_mut();
                let hr = unsafe {
                    func(
                        adapter.as_raw() as *mut _,
                        d3dcommon::D3D_DRIVER_TYPE_UNKNOWN,
                        ptr::null_mut(),
                        0,
                        [feature_level].as_ptr(),
                        1,
                        d3d11::D3D11_SDK_VERSION,
                        &mut device as *mut *mut _ as *mut *mut _,
                        ptr::null_mut(),
                        ptr::null_mut(),
                    )
                };
                if !winerror::SUCCEEDED(hr) {
                    continue;
                }
                (
                    unsafe { ComPtr::<d3d11::ID3D11Device>::from_raw(device) },
                    feature_level,
                )
            };
            // Two heaps: device-local, and CPU-visible (coherent and
            // non-coherent variants share the second heap).
            let memory_properties = adapter::MemoryProperties {
                memory_types: vec![
                    adapter::MemoryType {
                        properties: Properties::DEVICE_LOCAL,
                        heap_index: 0,
                    },
                    adapter::MemoryType {
                        properties: Properties::CPU_VISIBLE
                            | Properties::COHERENT
                            | Properties::CPU_CACHED,
                        heap_index: 1,
                    },
                    adapter::MemoryType {
                        properties: Properties::CPU_VISIBLE | Properties::CPU_CACHED,
                        heap_index: 1,
                    },
                ],
                // TODO: would using *VideoMemory and *SystemMemory from
                // DXGI_ADAPTER_DESC be too optimistic? :)
                memory_heaps: vec![
                    adapter::MemoryHeap {
                        size: !0,
                        flags: memory::HeapFlags::DEVICE_LOCAL,
                    },
                    adapter::MemoryHeap {
                        size: !0,
                        flags: memory::HeapFlags::empty(),
                    },
                ],
            };
            let limits = get_limits(feature_level);
            let features = get_features(device.clone(), feature_level);
            let format_properties = get_format_properties(device.clone());
            let hints = hal::Hints::BASE_VERTEX_INSTANCE_DRAWING;
            let physical_device = PhysicalDevice {
                adapter,
                library_d3d11: Arc::clone(&self.library_d3d11),
                features,
                hints,
                limits,
                memory_properties,
                format_properties,
            };
            info!("{:#?}", info);
            adapters.push(adapter::Adapter {
                info,
                physical_device,
                queue_families: vec![QueueFamily],
            });
        }
        adapters
    }
    unsafe fn create_surface(
        &self,
        has_handle: &impl raw_window_handle::HasRawWindowHandle,
    ) -> Result<Surface, hal::window::InitError> {
        // Only Win32 window handles are supported by this backend.
        match has_handle.raw_window_handle() {
            raw_window_handle::RawWindowHandle::Windows(handle) => {
                Ok(self.create_surface_from_hwnd(handle.hwnd))
            }
            _ => Err(hal::window::InitError::UnsupportedWindowHandle),
        }
    }
    unsafe fn destroy_surface(&self, _surface: Surface) {
        // TODO: Implement Surface cleanup
    }
}
// An enumerated adapter plus the capability data gathered at enumeration
// time (features, limits, memory and per-format properties).
pub struct PhysicalDevice {
    adapter: ComPtr<IDXGIAdapter>,
    // Shared with `Instance` to keep the dynamically loaded d3d11.dll alive.
    library_d3d11: Arc<libloading::Library>,
    features: hal::Features,
    hints: hal::Hints,
    limits: hal::Limits,
    memory_properties: adapter::MemoryProperties,
    format_properties: [format::Properties; format::NUM_FORMATS],
}
impl fmt::Debug for PhysicalDevice {
    // COM pointers and large capability tables make a derived `Debug`
    // impractical; emit the type name only.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "PhysicalDevice")
    }
}
// SAFETY(review): relies on IDXGIAdapter being usable across threads; the
// remaining fields are plain data or `Arc`-shared — confirm.
unsafe impl Send for PhysicalDevice {}
unsafe impl Sync for PhysicalDevice {}
// TODO: does the adapter we get earlier matter for feature level?
// Probes the highest D3D feature level the adapter supports by calling
// `D3D11CreateDevice` without actually keeping a device. Falls back to a
// second probe without FL11.1 when the 11.1 runtime is not installed.
fn get_feature_level(func: &CreateFun, adapter: *mut IDXGIAdapter) -> d3dcommon::D3D_FEATURE_LEVEL {
    // Ordered highest-first; the driver picks the first one it supports.
    let requested_feature_levels = [
        d3dcommon::D3D_FEATURE_LEVEL_11_1,
        d3dcommon::D3D_FEATURE_LEVEL_11_0,
        d3dcommon::D3D_FEATURE_LEVEL_10_1,
        d3dcommon::D3D_FEATURE_LEVEL_10_0,
        d3dcommon::D3D_FEATURE_LEVEL_9_3,
        d3dcommon::D3D_FEATURE_LEVEL_9_2,
        d3dcommon::D3D_FEATURE_LEVEL_9_1,
    ];
    let mut feature_level = d3dcommon::D3D_FEATURE_LEVEL_9_1;
    let hr = unsafe {
        func(
            adapter,
            d3dcommon::D3D_DRIVER_TYPE_UNKNOWN,
            ptr::null_mut(),
            0,
            requested_feature_levels[..].as_ptr(),
            requested_feature_levels.len() as _,
            d3d11::D3D11_SDK_VERSION,
            ptr::null_mut(),
            &mut feature_level as *mut _,
            ptr::null_mut(),
        )
    };
    if !winerror::SUCCEEDED(hr) {
        // if there is no 11.1 runtime installed, requesting
        // `D3D_FEATURE_LEVEL_11_1` will return E_INVALIDARG so we just retry
        // without that
        if hr == winerror::E_INVALIDARG {
            let hr = unsafe {
                func(
                    adapter,
                    d3dcommon::D3D_DRIVER_TYPE_UNKNOWN,
                    ptr::null_mut(),
                    0,
                    requested_feature_levels[1..].as_ptr(),
                    (requested_feature_levels.len() - 1) as _,
                    d3d11::D3D11_SDK_VERSION,
                    ptr::null_mut(),
                    &mut feature_level as *mut _,
                    ptr::null_mut(),
                )
            };
            if !winerror::SUCCEEDED(hr) {
                // TODO: device might not support any feature levels?
                unimplemented!();
            }
        }
    }
    feature_level
}
// TODO: PhysicalDevice
impl adapter::PhysicalDevice<Backend> for PhysicalDevice {
    // Creates the logical device and one command queue per requested family.
    unsafe fn open(
        &self,
        families: &[(&QueueFamily, &[queue::QueuePriority])],
        requested_features: hal::Features,
    ) -> Result<adapter::Gpu<Backend>, hal::device::CreationError> {
        let func: libloading::Symbol<CreateFun> =
            self.library_d3d11.get(b"D3D11CreateDevice").unwrap();
        let (device, cxt) = {
            if !self.features().contains(requested_features) {
                return Err(hal::device::CreationError::MissingFeature);
            }
            let feature_level = get_feature_level(&func, self.adapter.as_raw());
            let mut returned_level = d3dcommon::D3D_FEATURE_LEVEL_9_1;
            // Debug layer is enabled for debug builds only.
            #[cfg(debug_assertions)]
            let create_flags = d3d11::D3D11_CREATE_DEVICE_DEBUG;
            #[cfg(not(debug_assertions))]
            let create_flags = 0;
            // TODO: request debug device only on debug config?
            let mut device: *mut d3d11::ID3D11Device = ptr::null_mut();
            let mut cxt = ptr::null_mut();
            let hr = func(
                self.adapter.as_raw() as *mut _,
                d3dcommon::D3D_DRIVER_TYPE_UNKNOWN,
                ptr::null_mut(),
                create_flags,
                [feature_level].as_ptr(),
                1,
                d3d11::D3D11_SDK_VERSION,
                &mut device as *mut *mut _ as *mut *mut _,
                &mut returned_level as *mut _,
                &mut cxt as *mut *mut _ as *mut *mut _,
            );
            // NOTE: returns error if adapter argument is non-null and driver
            // type is not unknown; or if debug device is requested but not
            // present
            if !winerror::SUCCEEDED(hr) {
                return Err(hal::device::CreationError::InitializationFailed);
            }
            info!("feature level={:x}=FL{}_{}", feature_level, feature_level >> 12, feature_level >> 8 & 0xF);
            (ComPtr::from_raw(device), ComPtr::from_raw(cxt))
        };
        // The 11.1 interface is optional; `None` when only 11.0 is available.
        let device1 = device.cast::<d3d11_1::ID3D11Device1>().ok();
        let device = device::Device::new(
            device,
            device1,
            cxt,
            requested_features,
            self.memory_properties.clone(),
        );
        // TODO: deferred context => 1 cxt/queue?
        let queue_groups = families
            .into_iter()
            .map(|&(_family, prio)| {
                assert_eq!(prio.len(), 1);
                let mut group = queue::QueueGroup::new(queue::QueueFamilyId(0));
                // TODO: multiple queues?
                let queue = CommandQueue {
                    context: device.context.clone(),
                };
                group.add_queue(queue);
                group
            })
            .collect();
        Ok(adapter::Gpu {
            device,
            queue_groups,
        })
    }
    // Returns the properties cached at enumeration time; `None` maps to
    // index 0 (the default/unknown format).
    fn format_properties(&self, fmt: Option<format::Format>) -> format::Properties {
        let idx = fmt.map(|fmt| fmt as usize).unwrap_or(0);
        self.format_properties[idx]
    }
    fn image_format_properties(
        &self,
        format: format::Format,
        dimensions: u8,
        tiling: image::Tiling,
        usage: image::Usage,
        view_caps: image::ViewCapabilities,
    ) -> Option<image::FormatProperties> {
        conv::map_format(format)?; //filter out unknown formats
        // Translate the cached per-format features into a supported-usage mask.
        let supported_usage = {
            use hal::image::Usage as U;
            let format_props = &self.format_properties[format as usize];
            let props = match tiling {
                image::Tiling::Optimal => format_props.optimal_tiling,
                image::Tiling::Linear => format_props.linear_tiling,
            };
            let mut flags = U::empty();
            // Note: these checks would have been nicer if we had explicit BLIT usage
            if props.contains(format::ImageFeature::BLIT_SRC) {
                flags |= U::TRANSFER_SRC;
            }
            if props.contains(format::ImageFeature::BLIT_DST) {
                flags |= U::TRANSFER_DST;
            }
            if props.contains(format::ImageFeature::SAMPLED) {
                flags |= U::SAMPLED;
            }
            if props.contains(format::ImageFeature::STORAGE) {
                flags |= U::STORAGE;
            }
            if props.contains(format::ImageFeature::COLOR_ATTACHMENT) {
                flags |= U::COLOR_ATTACHMENT;
            }
            if props.contains(format::ImageFeature::DEPTH_STENCIL_ATTACHMENT) {
                flags |= U::DEPTH_STENCIL_ATTACHMENT;
            }
            flags
        };
        if !supported_usage.contains(usage) {
            return None;
        }
        let max_resource_size =
            (d3d11::D3D11_REQ_RESOURCE_SIZE_IN_MEGABYTES_EXPRESSION_A_TERM as usize) << 20;
        Some(match tiling {
            image::Tiling::Optimal => image::FormatProperties {
                max_extent: match dimensions {
                    1 => image::Extent {
                        width: d3d11::D3D11_REQ_TEXTURE1D_U_DIMENSION,
                        height: 1,
                        depth: 1,
                    },
                    2 => image::Extent {
                        width: d3d11::D3D11_REQ_TEXTURE2D_U_OR_V_DIMENSION,
                        height: d3d11::D3D11_REQ_TEXTURE2D_U_OR_V_DIMENSION,
                        depth: 1,
                    },
                    3 => image::Extent {
                        width: d3d11::D3D11_REQ_TEXTURE3D_U_V_OR_W_DIMENSION,
                        height: d3d11::D3D11_REQ_TEXTURE3D_U_V_OR_W_DIMENSION,
                        depth: d3d11::D3D11_REQ_TEXTURE3D_U_V_OR_W_DIMENSION,
                    },
                    _ => return None,
                },
                max_levels: d3d11::D3D11_REQ_MIP_LEVELS as _,
                max_layers: match dimensions {
                    1 => d3d11::D3D11_REQ_TEXTURE1D_ARRAY_AXIS_DIMENSION as _,
                    2 => d3d11::D3D11_REQ_TEXTURE2D_ARRAY_AXIS_DIMENSION as _,
                    _ => return None,
                },
                // MSAA is only advertised for 2D non-cube attachments.
                sample_count_mask: if dimensions == 2
                    && !view_caps.contains(image::ViewCapabilities::KIND_CUBE)
                    && (usage.contains(image::Usage::COLOR_ATTACHMENT)
                        | usage.contains(image::Usage::DEPTH_STENCIL_ATTACHMENT))
                {
                    0x3F //TODO: use D3D12_FEATURE_DATA_FORMAT_SUPPORT
                } else {
                    0x1
                },
                max_resource_size,
            },
            // Linear tiling is restricted to single-level, single-layer 2D.
            image::Tiling::Linear => image::FormatProperties {
                max_extent: match dimensions {
                    2 => image::Extent {
                        width: d3d11::D3D11_REQ_TEXTURE2D_U_OR_V_DIMENSION,
                        height: d3d11::D3D11_REQ_TEXTURE2D_U_OR_V_DIMENSION,
                        depth: 1,
                    },
                    _ => return None,
                },
                max_levels: 1,
                max_layers: 1,
                sample_count_mask: 0x1,
                max_resource_size,
            },
        })
    }
    fn memory_properties(&self) -> adapter::MemoryProperties {
        self.memory_properties.clone()
    }
    fn features(&self) -> hal::Features {
        self.features
    }
    fn hints(&self) -> hal::Hints {
        self.hints
    }
    fn limits(&self) -> Limits {
        self.limits
    }
}
// Live swapchain state for a configured surface. `format`/`size` are the
// values the chain was configured with, used to decide whether an existing
// chain can be reused on reconfiguration.
struct Presentation {
    swapchain: ComPtr<IDXGISwapChain>,
    view: ComPtr<d3d11::ID3D11RenderTargetView>,
    format: format::Format,
    size: window::Extent2D,
    mode: window::PresentMode,
    image: Arc<Image>,
    // NOTE(review): presumably true until the first present — confirm
    // against the present path (not visible in this chunk).
    is_init: bool,
}
pub struct Surface {
    pub(crate) factory: ComPtr<IDXGIFactory>,
    // Win32 window this surface presents into.
    wnd_handle: HWND,
    // `None` until `configure_swapchain` creates the swapchain.
    presentation: Option<Presentation>,
}
impl fmt::Debug for Surface {
    // Raw window handles and COM pointers make a derived `Debug`
    // impractical; emit the type name only.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "Surface")
    }
}
// SAFETY(review): relies on the HWND and DXGI factory being usable from any
// thread for the operations this backend performs — confirm.
unsafe impl Send for Surface {}
unsafe impl Sync for Surface {}
impl window::Surface<Backend> for Surface {
    fn supports_queue_family(&self, _queue_family: &QueueFamily) -> bool {
        true
    }
    fn capabilities(&self, _physical_device: &PhysicalDevice) -> window::SurfaceCapabilities {
        // Query the current client-area size of the window directly.
        let current_extent = unsafe {
            let mut rect: RECT = mem::zeroed();
            assert_ne!(
                0,
                GetClientRect(self.wnd_handle as *mut _, &mut rect as *mut RECT)
            );
            Some(window::Extent2D {
                width: (rect.right - rect.left) as u32,
                height: (rect.bottom - rect.top) as u32,
            })
        };
        // TODO: flip swap effects require dx11.1/windows8
        // NOTE: some swap effects affect msaa capabilities..
        // TODO: _DISCARD swap effects can only have one image?
        window::SurfaceCapabilities {
            present_modes: window::PresentMode::IMMEDIATE
                | window::PresentMode::FIFO,
            composite_alpha_modes: window::CompositeAlphaMode::OPAQUE, //TODO
            image_count: 1..=16, // TODO:
            current_extent,
            extents: window::Extent2D {
                width: 16,
                height: 16,
            }..=window::Extent2D {
                width: 4096,
                height: 4096,
            },
            max_image_layers: 1,
            usage: image::Usage::COLOR_ATTACHMENT,
        }
    }
    // Fixed list of presentable formats; not queried from the device.
    fn supported_formats(&self, _physical_device: &PhysicalDevice) -> Option<Vec<format::Format>> {
        Some(vec![
            format::Format::Bgra8Srgb,
            format::Format::Bgra8Unorm,
            format::Format::Rgba8Srgb,
            format::Format::Rgba8Unorm,
            format::Format::A2b10g10r10Unorm,
            format::Format::Rgba16Sfloat,
        ])
    }
}
/// An acquired swapchain image: shared ownership of the backbuffer image plus
/// a non-owning view into it (`view.owned == false`, see `acquire_image`).
#[derive(Debug)]
pub struct SwapchainImage {
    image: Arc<Image>,
    view: ImageView,
}
impl Borrow<Image> for SwapchainImage {
    /// Borrow the backbuffer image shared with the presentation engine.
    fn borrow(&self) -> &Image {
        self.image.as_ref()
    }
}
impl Borrow<ImageView> for SwapchainImage {
    /// Borrow the render-target view created over the backbuffer.
    fn borrow(&self) -> &ImageView {
        &self.view
    }
}
impl window::PresentationSurface<Backend> for Surface {
    type SwapchainImage = SwapchainImage;
    /// (Re)creates or resizes the swapchain for this surface.
    ///
    /// If an existing presentation matches `config` in format and extent it is
    /// kept as-is. Otherwise the old view is dropped and the swapchain buffers
    /// are resized, or a fresh swapchain is created when none exists yet.
    unsafe fn configure_swapchain(
        &mut self,
        device: &device::Device,
        config: window::SwapchainConfig,
    ) -> Result<(), window::CreationError> {
        // Only color-attachment usage is supported for swapchain images here.
        assert!(image::Usage::COLOR_ATTACHMENT.contains(config.image_usage));
        let swapchain = match self.presentation.take() {
            Some(present) => {
                if present.format == config.format && present.size == config.extent {
                    // Unchanged configuration — reuse everything.
                    self.presentation = Some(present);
                    return Ok(());
                }
                let non_srgb_format = conv::map_format_nosrgb(config.format).unwrap();
                // The RTV must be released before ResizeBuffers can succeed.
                drop(present.view);
                let result = present.swapchain.ResizeBuffers(
                    config.image_count,
                    config.extent.width,
                    config.extent.height,
                    non_srgb_format,
                    0,
                );
                if result != winerror::S_OK {
                    error!("ResizeBuffers failed with 0x{:x}", result as u32);
                    return Err(window::CreationError::WindowInUse(hal::device::WindowInUse));
                }
                present.swapchain
            }
            None => {
                let (swapchain, _) =
                    device.create_swapchain_impl(&config, self.wnd_handle, self.factory.clone())?;
                swapchain
            }
        };
        // Disable automatic Alt+Enter handling by DXGI.
        const DXGI_MWA_NO_WINDOW_CHANGES: u32 = 1;
        const DXGI_MWA_NO_ALT_ENTER: u32 = 2;
        // NOTE(review): the HRESULT of MakeWindowAssociation is ignored —
        // consider at least logging a failure here.
        self.factory.MakeWindowAssociation(self.wnd_handle, DXGI_MWA_NO_WINDOW_CHANGES | DXGI_MWA_NO_ALT_ENTER);
        // Grab backbuffer 0; this backend only ever addresses buffer 0.
        let mut resource: *mut d3d11::ID3D11Resource = ptr::null_mut();
        assert_eq!(
            winerror::S_OK,
            swapchain.GetBuffer(
                0 as _,
                &d3d11::ID3D11Resource::uuidof(),
                &mut resource as *mut *mut _ as *mut *mut _,
            )
        );
        set_debug_name(&*resource, "Swapchain Image");
        let kind = image::Kind::D2(config.extent.width, config.extent.height, 1, 1);
        let format = conv::map_format(config.format).unwrap();
        let decomposed_format = conv::DecomposedDxgiFormat::from_dxgi_format(format);
        let view_info = ViewInfo {
            resource,
            kind,
            caps: image::ViewCapabilities::empty(),
            view_kind: image::ViewKind::D2,
            format: decomposed_format.rtv.unwrap(),
            levels: 0..1,
            layers: 0..1,
        };
        let view = device.view_image_as_render_target(&view_info).unwrap();
        set_debug_name(&view, "Swapchain Image View");
        self.presentation = Some(Presentation {
            swapchain,
            view,
            format: config.format,
            size: config.extent,
            mode: config.present_mode,
            image: Arc::new(Image {
                kind,
                usage: config.image_usage,
                format: config.format,
                view_caps: image::ViewCapabilities::empty(),
                decomposed_format,
                mip_levels: 1,
                internal: InternalImage {
                    raw: resource,
                    copy_srv: None, //TODO
                    srv: None, //TODO
                    unordered_access_views: Vec::new(),
                    depth_stencil_views: Vec::new(),
                    render_target_views: Vec::new(),
                    debug_name: None,
                },
                bind: conv::map_image_usage(config.image_usage, config.format.surface_desc()),
                // Swapchain memory is owned by DXGI; dummy requirements.
                requirements: memory::Requirements {
                    size: 0,
                    alignment: 1,
                    type_mask: 0,
                },
            }),
            is_init: true,
        });
        Ok(())
    }
    /// Drops the presentation state, releasing swapchain, view and image refs.
    unsafe fn unconfigure_swapchain(&mut self, _device: &device::Device) {
        self.presentation = None;
    }
    /// Hands out the single backbuffer as a non-owning swapchain image.
    ///
    /// Panics if the swapchain has not been configured.
    unsafe fn acquire_image(
        &mut self,
        _timeout_ns: u64, //TODO: use the timeout
    ) -> Result<(SwapchainImage, Option<window::Suboptimal>), window::AcquireError> {
        let present = self.presentation.as_ref().unwrap();
        let swapchain_image = SwapchainImage {
            image: Arc::clone(&present.image),
            view: ImageView {
                subresource: d3d11::D3D11CalcSubresource(0, 0, 1),
                format: present.format,
                rtv_handle: Some(present.view.as_raw()),
                dsv_handle: None,
                srv_handle: None,
                uav_handle: None,
                rodsv_handle: None,
                // The RTV is owned by `Presentation`; don't release on drop.
                owned: false,
            },
        };
        Ok((swapchain_image, None))
    }
}
/// The single queue family exposed by this backend (see the impl below:
/// general-purpose, one queue, id 0).
#[derive(Debug, Clone, Copy)]
pub struct QueueFamily;
impl queue::QueueFamily for QueueFamily {
    /// Graphics + compute + transfer in one family.
    fn queue_type(&self) -> queue::QueueType {
        queue::QueueType::General
    }
    /// D3D11 has a single implicit queue, so only one is offered.
    fn max_queues(&self) -> usize {
        1
    }
    fn id(&self) -> queue::QueueFamilyId {
        queue::QueueFamilyId(0)
    }
}
/// The backend's command queue: a handle to the immediate D3D11 device
/// context, on which recorded command lists are executed at submission.
#[derive(Clone)]
pub struct CommandQueue {
    context: ComPtr<d3d11::ID3D11DeviceContext>,
}
impl fmt::Debug for CommandQueue {
    /// Opaque debug output; the raw device-context pointer is not useful.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("CommandQueue")
    }
}
// SAFETY(review): the wrapped ID3D11DeviceContext pointer is not inherently
// thread-safe; this relies on gfx-hal's external-synchronization contract for
// queues — confirm no two threads submit on the same queue concurrently.
unsafe impl Send for CommandQueue {}
unsafe impl Sync for CommandQueue {}
impl queue::CommandQueue<Backend> for CommandQueue {
    /// Executes each recorded command list on the immediate context.
    ///
    /// Coherent-memory flushes deferred during recording run before each list
    /// executes, and invalidates run after it; the fence (if any) is signalled
    /// once all lists have been handed to the driver.
    unsafe fn submit<'a, T, Ic, S, Iw, Is>(
        &mut self,
        submission: queue::Submission<Ic, Iw, Is>,
        fence: Option<&Fence>,
    ) where
        T: 'a + Borrow<CommandBuffer>,
        Ic: IntoIterator<Item = &'a T>,
        S: 'a + Borrow<Semaphore>,
        Iw: IntoIterator<Item = (&'a S, pso::PipelineStage)>,
        Is: IntoIterator<Item = &'a S>,
    {
        let _scope = debug_scope!(&self.context, "Submit(fence={:?})", fence);
        for cmd_buf in submission.command_buffers {
            let cmd_buf = cmd_buf.borrow();
            let _scope = debug_scope!(
                &self.context,
                "CommandBuffer ({}/{})",
                cmd_buf.flush_coherent_memory.len(),
                cmd_buf.invalidate_coherent_memory.len()
            );
            {
                let _scope = debug_scope!(&self.context, "Pre-Exec: Flush");
                for sync in &cmd_buf.flush_coherent_memory {
                    sync.do_flush(&self.context);
                }
            }
            // FALSE: do not restore the immediate context's state afterwards.
            self.context
                .ExecuteCommandList(cmd_buf.as_raw_list().as_raw(), FALSE);
            {
                let _scope = debug_scope!(&self.context, "Post-Exec: Invalidate");
                for sync in &cmd_buf.invalidate_coherent_memory {
                    sync.do_invalidate(&self.context);
                }
            }
        }
        // Fences are emulated with a mutex + condvar pair.
        if let Some(fence) = fence {
            *fence.mutex.lock() = true;
            fence.condvar.notify_all();
        }
    }
    /// Presents the configured swapchain; the acquired image and wait
    /// semaphore are unused because D3D11 presentation is implicit.
    unsafe fn present(
        &mut self,
        surface: &mut Surface,
        _image: SwapchainImage,
        _wait_semaphore: Option<&Semaphore>,
    ) -> Result<Option<window::Suboptimal>, window::PresentError> {
        let mut presentation = surface.presentation.as_mut().unwrap();
        // Map the HAL present mode to DXGI sync interval / flags.
        let (interval, flags) = match presentation.mode {
            window::PresentMode::IMMEDIATE => (0, 0),
            //Note: this ends up not presenting anything for some reason
            //window::PresentMode::MAILBOX if !presentation.is_init => (1, DXGI_PRESENT_DO_NOT_SEQUENCE),
            window::PresentMode::FIFO => (1, 0),
            _ => (0, 0),
        };
        presentation.is_init = false;
        // NOTE(review): the HRESULT of Present is ignored.
        presentation.swapchain.Present(interval, flags);
        Ok(None)
    }
    /// D3D11's immediate context has no exposed idle wait; treated as a no-op.
    fn wait_idle(&self) -> Result<(), hal::device::OutOfMemory> {
        // unimplemented!()
        Ok(())
    }
}
/// A pending load-op clear for one render-pass attachment, tagged with the
/// first subpass that uses the attachment (clears fire when that subpass starts).
#[derive(Debug)]
pub struct AttachmentClear {
    /// First subpass using the attachment; `None` if no subpass uses it.
    subpass_id: Option<pass::SubpassId>,
    /// Index of the attachment within the render pass / framebuffer.
    attachment_id: usize,
    raw: command::AttachmentClear,
}
/// Per-command-buffer record of the render pass currently being recorded:
/// the pass, its framebuffer, pending load-op clears, the render area, and
/// which subpass is active.
#[derive(Debug)]
pub struct RenderPassCache {
    pub render_pass: RenderPass,
    pub framebuffer: Framebuffer,
    pub attachment_clear_values: Vec<AttachmentClear>,
    pub target_rect: pso::Rect,
    pub current_subpass: pass::SubpassId,
}
impl RenderPassCache {
    /// Begins the current subpass: performs its pending load-op clears, then
    /// binds the subpass' color/depth targets into the state cache.
    pub fn start_subpass(
        &mut self,
        internal: &internal::Internal,
        context: &ComPtr<d3d11::ID3D11DeviceContext>,
        cache: &mut CommandBufferState,
    ) {
        // Only clears scheduled for this subpass fire now.
        let attachments = self
            .attachment_clear_values
            .iter()
            .filter(|clear| clear.subpass_id == Some(self.current_subpass))
            .map(|clear| clear.raw);
        // clear_attachments below tramples this pipeline state, so mark it
        // dirty up front; `cache.bind` at the end restores it.
        cache.dirty_flag.insert(
            DirtyStateFlag::GRAPHICS_PIPELINE
                | DirtyStateFlag::DEPTH_STENCIL_STATE
                | DirtyStateFlag::PIPELINE_PS
                | DirtyStateFlag::VIEWPORTS
                | DirtyStateFlag::RENDER_TARGETS_AND_UAVS,
        );
        internal.clear_attachments(
            context,
            attachments,
            &[pso::ClearRect {
                rect: self.target_rect,
                layers: 0..1,
            }],
            &self,
        );
        let subpass = &self.render_pass.subpasses[self.current_subpass as usize];
        // Gather raw RTVs for this subpass' color attachments; panics if an
        // attachment lacks an RTV.
        let color_views = subpass
            .color_attachments
            .iter()
            .map(|&(id, _)| {
                self.framebuffer.attachments[id]
                    .rtv_handle
                    .unwrap()
            })
            .collect::<Vec<_>>();
        // Both the writable and the read-only DSV are fetched; which one gets
        // bound is decided later by the pipeline's read-only flag.
        let (ds_view, rods_view) = match subpass.depth_stencil_attachment {
            Some((id, _)) => {
                let attachment = &self.framebuffer.attachments[id];
                let ds_view = attachment
                    .dsv_handle
                    .unwrap();
                let rods_view = attachment
                    .rodsv_handle
                    .unwrap();
                (Some(ds_view), Some(rods_view))
            },
            None => (None, None),
        };
        cache.set_render_targets(&color_views, ds_view, rods_view);
        cache.bind(context);
    }
    /// Resolves every (color, resolve) attachment pair of the current subpass
    /// via `ResolveSubresource`; called when the subpass is left.
    fn resolve_msaa(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        let subpass: &SubpassDesc = &self.render_pass.subpasses[self.current_subpass as usize];
        for (&(color_id, _), &(resolve_id, _)) in subpass.color_attachments.iter().zip(subpass.resolve_attachments.iter()) {
            if color_id == pass::ATTACHMENT_UNUSED || resolve_id == pass::ATTACHMENT_UNUSED {
                continue;
            }
            let color_framebuffer = &self.framebuffer.attachments[color_id];
            let resolve_framebuffer = &self.framebuffer.attachments[resolve_id];
            let mut color_resource: *mut d3d11::ID3D11Resource = ptr::null_mut();
            let mut resolve_resource: *mut d3d11::ID3D11Resource = ptr::null_mut();
            unsafe {
                // GetResource AddRefs; the matching Releases are below.
                (&*color_framebuffer.rtv_handle.expect("Framebuffer must have COLOR_ATTACHMENT usage")).GetResource(&mut color_resource as *mut *mut _);
                (&*resolve_framebuffer.rtv_handle.expect("Resolve texture must have COLOR_ATTACHMENT usage")).GetResource(&mut resolve_resource as *mut *mut _);
                context.ResolveSubresource(
                    resolve_resource,
                    resolve_framebuffer.subresource,
                    color_resource,
                    color_framebuffer.subresource,
                    conv::map_format(color_framebuffer.format).unwrap()
                );
                (&*color_resource).Release();
                (&*resolve_resource).Release();
            }
        }
    }
    /// Finishes the current subpass (MSAA resolve) and advances the counter;
    /// the caller is expected to invoke `start_subpass` afterwards.
    pub fn next_subpass(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        self.resolve_msaa(context);
        self.current_subpass += 1;
    }
}
bitflags! {
    // Tracks which pieces of cached pipeline state must be re-bound to the
    // device context before the next draw.
    // NOTE(review): bit 0 is unused — flags start at (1 << 1).
    struct DirtyStateFlag : u32 {
        const RENDER_TARGETS_AND_UAVS = (1 << 1);
        const VERTEX_BUFFERS = (1 << 2);
        const GRAPHICS_PIPELINE = (1 << 3);
        const PIPELINE_GS = (1 << 4);
        const PIPELINE_HS = (1 << 5);
        const PIPELINE_DS = (1 << 6);
        const PIPELINE_PS = (1 << 7);
        const VIEWPORTS = (1 << 8);
        const BLEND_STATE = (1 << 9);
        const DEPTH_STENCIL_STATE = (1 << 10);
    }
}
/// Shadow copy of the D3D11 pipeline state for one command buffer.
///
/// State setters record into these fields and mark the matching
/// `DirtyStateFlag`; the `bind_*` methods push dirty state to the device
/// context lazily.
pub struct CommandBufferState {
    dirty_flag: DirtyStateFlag,
    // Bound render targets (raw pointers, not owned).
    render_target_len: u32,
    render_targets: [*mut d3d11::ID3D11RenderTargetView; 8],
    // Bound UAVs, packed at the top of the register range (see
    // bind_render_targets).
    uav_len: u32,
    uavs: [*mut d3d11::ID3D11UnorderedAccessView; d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT as _],
    // Writable and read-only depth views; which one is bound depends on
    // `depth_target_read_only` (driven by the bound pipeline).
    depth_target: Option<*mut d3d11::ID3D11DepthStencilView>,
    readonly_depth_target: Option<*mut d3d11::ID3D11DepthStencilView>,
    depth_target_read_only: bool,
    graphics_pipeline: Option<GraphicsPipeline>,
    // a bitmask that keeps track of what vertex buffer bindings have been "bound" into
    // our vec
    bound_bindings: u32,
    // a bitmask that hold the required binding slots to be bound for the currently
    // bound pipeline
    required_bindings: Option<u32>,
    // the highest binding number in currently bound pipeline
    max_bindings: Option<u32>,
    viewports: Vec<d3d11::D3D11_VIEWPORT>,
    vertex_buffers: Vec<*mut d3d11::ID3D11Buffer>,
    vertex_offsets: Vec<u32>,
    vertex_strides: Vec<u32>,
    blend_factor: Option<[f32; 4]>,
    // we can only support one face (rather, both faces must have the same value)
    stencil_ref: Option<pso::StencilValue>,
    stencil_read_mask: Option<pso::StencilValue>,
    stencil_write_mask: Option<pso::StencilValue>,
    current_blend: Option<*mut d3d11::ID3D11BlendState>,
}
impl fmt::Debug for CommandBufferState {
    /// Opaque debug output; the cached raw pointers are not useful to print.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("CommandBufferState")
    }
}
impl CommandBufferState {
    /// Creates an empty state cache with nothing bound and nothing dirty.
    fn new() -> Self {
        CommandBufferState {
            dirty_flag: DirtyStateFlag::empty(),
            render_target_len: 0,
            render_targets: [ptr::null_mut(); 8],
            uav_len: 0,
            // Sized by the named register-count constant to stay consistent
            // with the field's declared type (was a hard-coded `8`).
            uavs: [ptr::null_mut(); d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT as usize],
            depth_target: None,
            readonly_depth_target: None,
            depth_target_read_only: false,
            graphics_pipeline: None,
            bound_bindings: 0,
            required_bindings: None,
            max_bindings: None,
            viewports: Vec::new(),
            vertex_buffers: Vec::new(),
            vertex_offsets: Vec::new(),
            vertex_strides: Vec::new(),
            blend_factor: None,
            stencil_ref: None,
            stencil_read_mask: None,
            stencil_write_mask: None,
            current_blend: None,
        }
    }
    /// Resets all cached bindings (dirty flags are left untouched).
    fn clear(&mut self) {
        self.render_target_len = 0;
        self.uav_len = 0;
        self.depth_target = None;
        self.readonly_depth_target = None;
        self.depth_target_read_only = false;
        self.graphics_pipeline = None;
        self.bound_bindings = 0;
        self.required_bindings = None;
        self.max_bindings = None;
        self.viewports.clear();
        self.vertex_buffers.clear();
        self.vertex_offsets.clear();
        self.vertex_strides.clear();
        self.blend_factor = None;
        self.stencil_ref = None;
        self.stencil_read_mask = None;
        self.stencil_write_mask = None;
        self.current_blend = None;
    }
    /// Records a vertex buffer + offset for binding slot `index`.
    ///
    /// NOTE(review): when `index` exceeds the current length the pair is
    /// appended rather than placed at `index` — this assumes slots are set in
    /// increasing order without gaps; confirm against callers.
    pub fn set_vertex_buffer(
        &mut self,
        index: usize,
        offset: u32,
        buffer: *mut d3d11::ID3D11Buffer,
    ) {
        self.bound_bindings |= 1 << index as u32;
        if index >= self.vertex_buffers.len() {
            self.vertex_buffers.push(buffer);
            self.vertex_offsets.push(offset);
        } else {
            self.vertex_buffers[index] = buffer;
            self.vertex_offsets[index] = offset;
        }
        self.dirty_flag.insert(DirtyStateFlag::VERTEX_BUFFERS);
    }
    /// Pushes the cached vertex buffers to the context, but only once the
    /// pipeline's strides are known and enough slots have been populated.
    pub fn bind_vertex_buffers(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if !self.dirty_flag.contains(DirtyStateFlag::VERTEX_BUFFERS) {
            return;
        }
        if let Some(binding_count) = self.max_bindings {
            if self.vertex_buffers.len() >= binding_count as usize
                && self.vertex_strides.len() >= binding_count as usize
            {
                unsafe {
                    context.IASetVertexBuffers(
                        0,
                        binding_count,
                        self.vertex_buffers.as_ptr(),
                        self.vertex_strides.as_ptr(),
                        self.vertex_offsets.as_ptr(),
                    );
                }
                self.dirty_flag.remove(DirtyStateFlag::VERTEX_BUFFERS);
            }
        }
    }
    /// Replaces the cached viewport list.
    pub fn set_viewports(&mut self, viewports: &[d3d11::D3D11_VIEWPORT]) {
        self.viewports.clear();
        self.viewports.extend(viewports);
        self.dirty_flag.insert(DirtyStateFlag::VIEWPORTS);
    }
    /// Pushes viewports to the context; a pipeline-baked viewport, if any,
    /// overrides the dynamically set list.
    pub fn bind_viewports(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if !self.dirty_flag.contains(DirtyStateFlag::VIEWPORTS) {
            return;
        }
        if let Some(ref pipeline) = self.graphics_pipeline {
            if let Some(ref viewport) = pipeline.baked_states.viewport {
                unsafe {
                    context.RSSetViewports(1, [conv::map_viewport(&viewport)].as_ptr());
                }
            } else {
                unsafe {
                    context.RSSetViewports(self.viewports.len() as u32, self.viewports.as_ptr());
                }
            }
        } else {
            unsafe {
                context.RSSetViewports(self.viewports.len() as u32, self.viewports.as_ptr());
            }
        }
        self.dirty_flag.remove(DirtyStateFlag::VIEWPORTS);
    }
    /// Records the render targets plus both depth-view flavors (writable and
    /// read-only); selection between them happens at bind time.
    pub fn set_render_targets(
        &mut self,
        render_targets: &[*mut d3d11::ID3D11RenderTargetView],
        depth_target: Option<*mut d3d11::ID3D11DepthStencilView>,
        readonly_depth_target: Option<*mut d3d11::ID3D11DepthStencilView>
    ) {
        for (idx, &rt) in render_targets.iter().enumerate() {
            self.render_targets[idx] = rt;
        }
        self.render_target_len = render_targets.len() as u32;
        self.depth_target = depth_target;
        self.readonly_depth_target = readonly_depth_target;
        self.dirty_flag.insert(DirtyStateFlag::RENDER_TARGETS_AND_UAVS);
    }
    /// Pushes render targets (and UAVs, packed at the top of the register
    /// range) to the output-merger stage.
    pub fn bind_render_targets(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if !self.dirty_flag.contains(DirtyStateFlag::RENDER_TARGETS_AND_UAVS) {
            return;
        }
        // Pick the depth view matching the pipeline's read-only flag.
        let depth_target = if self.depth_target_read_only {
            self.readonly_depth_target
        } else {
            self.depth_target
        }.unwrap_or(ptr::null_mut());
        let uav_start_index = d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT - self.uav_len;
        unsafe {
            if self.uav_len > 0 {
                context.OMSetRenderTargetsAndUnorderedAccessViews(
                    self.render_target_len,
                    self.render_targets.as_ptr(),
                    depth_target,
                    uav_start_index,
                    self.uav_len,
                    &self.uavs[uav_start_index as usize] as *const *mut _,
                    ptr::null(),
                )
            } else {
                context.OMSetRenderTargets(
                    self.render_target_len,
                    self.render_targets.as_ptr(),
                    depth_target,
                )
            };
        }
        self.dirty_flag.remove(DirtyStateFlag::RENDER_TARGETS_AND_UAVS);
    }
    /// Records a dynamic blend factor.
    pub fn set_blend_factor(&mut self, factor: [f32; 4]) {
        self.blend_factor = Some(factor);
        self.dirty_flag.insert(DirtyStateFlag::BLEND_STATE);
    }
    /// Pushes the current blend state; a pipeline-baked blend color takes
    /// precedence over the dynamic factor. No-op until a pipeline has set
    /// `current_blend`.
    pub fn bind_blend_state(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if let Some(blend) = self.current_blend {
            let blend_color = if let Some(ref pipeline) = self.graphics_pipeline {
                pipeline
                    .baked_states
                    .blend_color
                    .or(self.blend_factor)
                    .unwrap_or([0f32; 4])
            } else {
                self.blend_factor.unwrap_or([0f32; 4])
            };
            // TODO: MSAA
            unsafe {
                context.OMSetBlendState(blend, &blend_color, !0);
            }
            self.dirty_flag.remove(DirtyStateFlag::BLEND_STATE);
        }
    }
    /// Records a dynamic stencil reference value (single value for both faces).
    pub fn set_stencil_ref(&mut self, value: pso::StencilValue) {
        self.stencil_ref = Some(value);
        self.dirty_flag.insert(DirtyStateFlag::DEPTH_STENCIL_STATE);
    }
    /// Pushes the pipeline's depth-stencil state with the resolved stencil ref
    /// (static value from the pipeline, else the dynamic one, else 0).
    pub fn bind_depth_stencil_state(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if !self.dirty_flag.contains(DirtyStateFlag::DEPTH_STENCIL_STATE) {
            return;
        }
        let pipeline = match self.graphics_pipeline {
            Some(ref pipeline) => pipeline,
            None => return,
        };
        if let Some(ref state) = pipeline.depth_stencil_state {
            let stencil_ref = state.stencil_ref.static_or(self.stencil_ref.unwrap_or(0));
            unsafe {
                context.OMSetDepthStencilState(state.raw.as_raw(), stencil_ref);
            }
        }
        self.dirty_flag.remove(DirtyStateFlag::DEPTH_STENCIL_STATE)
    }
    /// Records a new graphics pipeline, marking exactly the shader stages that
    /// changed between the old and new pipelines as dirty.
    pub fn set_graphics_pipeline(&mut self, pipeline: GraphicsPipeline) {
        let prev = self.graphics_pipeline.take();
        let mut prev_has_ps = false;
        let mut prev_has_gs = false;
        let mut prev_has_ds = false;
        let mut prev_has_hs = false;
        if let Some(p) = prev {
            prev_has_ps = p.ps.is_some();
            prev_has_gs = p.gs.is_some();
            prev_has_ds = p.ds.is_some();
            prev_has_hs = p.hs.is_some();
        }
        if prev_has_ps || pipeline.ps.is_some() {
            self.dirty_flag.insert(DirtyStateFlag::PIPELINE_PS);
        }
        if prev_has_gs || pipeline.gs.is_some() {
            self.dirty_flag.insert(DirtyStateFlag::PIPELINE_GS);
        }
        if prev_has_ds || pipeline.ds.is_some() {
            self.dirty_flag.insert(DirtyStateFlag::PIPELINE_DS);
        }
        if prev_has_hs || pipeline.hs.is_some() {
            self.dirty_flag.insert(DirtyStateFlag::PIPELINE_HS);
        }
        // If we don't have depth stencil state, we use the old value, so we don't bother changing anything.
        let depth_target_read_only =
            pipeline
                .depth_stencil_state
                .as_ref()
                .map_or(self.depth_target_read_only, |ds| ds.read_only);
        if self.depth_target_read_only != depth_target_read_only {
            self.depth_target_read_only = depth_target_read_only;
            self.dirty_flag.insert(DirtyStateFlag::RENDER_TARGETS_AND_UAVS);
        }
        self.dirty_flag.insert(DirtyStateFlag::GRAPHICS_PIPELINE | DirtyStateFlag::DEPTH_STENCIL_STATE);
        self.graphics_pipeline = Some(pipeline);
    }
    /// Pushes the cached pipeline to the context: IA/shaders/rasterizer state,
    /// then blend and depth-stencil via their own bind methods.
    pub fn bind_graphics_pipeline(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        if !self.dirty_flag.contains(DirtyStateFlag::GRAPHICS_PIPELINE) {
            return;
        }
        if let Some(ref pipeline) = self.graphics_pipeline {
            self.vertex_strides.clear();
            self.vertex_strides.extend(&pipeline.strides);
            self.required_bindings = Some(pipeline.required_bindings);
            self.max_bindings = Some(pipeline.max_vertex_bindings);
        };
        // Re-bind vertex buffers now that strides/bindings are known.
        self.bind_vertex_buffers(context);
        if let Some(ref pipeline) = self.graphics_pipeline {
            unsafe {
                context.IASetPrimitiveTopology(pipeline.topology);
                context.IASetInputLayout(pipeline.input_layout.as_raw());
                context.VSSetShader(pipeline.vs.as_raw(), ptr::null_mut(), 0);
                if self.dirty_flag.contains(DirtyStateFlag::PIPELINE_PS) {
                    let ps = pipeline.ps.as_ref().map_or(ptr::null_mut(), |ps| ps.as_raw());
                    context.PSSetShader(ps, ptr::null_mut(), 0);
                    self.dirty_flag.remove(DirtyStateFlag::PIPELINE_PS)
                }
                if self.dirty_flag.contains(DirtyStateFlag::PIPELINE_GS) {
                    let gs = pipeline.gs.as_ref().map_or(ptr::null_mut(), |gs| gs.as_raw());
                    context.GSSetShader(gs, ptr::null_mut(), 0);
                    self.dirty_flag.remove(DirtyStateFlag::PIPELINE_GS)
                }
                if self.dirty_flag.contains(DirtyStateFlag::PIPELINE_HS) {
                    let hs = pipeline.hs.as_ref().map_or(ptr::null_mut(), |hs| hs.as_raw());
                    context.HSSetShader(hs, ptr::null_mut(), 0);
                    self.dirty_flag.remove(DirtyStateFlag::PIPELINE_HS)
                }
                if self.dirty_flag.contains(DirtyStateFlag::PIPELINE_DS) {
                    let ds = pipeline.ds.as_ref().map_or(ptr::null_mut(), |ds| ds.as_raw());
                    context.DSSetShader(ds, ptr::null_mut(), 0);
                    self.dirty_flag.remove(DirtyStateFlag::PIPELINE_DS)
                }
                context.RSSetState(pipeline.rasterizer_state.as_raw());
                if let Some(ref viewport) = pipeline.baked_states.viewport {
                    context.RSSetViewports(1, [conv::map_viewport(&viewport)].as_ptr());
                }
                if let Some(ref scissor) = pipeline.baked_states.scissor {
                    context.RSSetScissorRects(1, [conv::map_rect(&scissor)].as_ptr());
                }
                self.current_blend = Some(pipeline.blend_state.as_raw());
            }
        };
        self.bind_blend_state(context);
        self.bind_depth_stencil_state(context);
        self.dirty_flag.remove(DirtyStateFlag::GRAPHICS_PIPELINE);
    }
    /// Flushes all dirty state categories to the context.
    pub fn bind(&mut self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        self.bind_render_targets(context);
        self.bind_graphics_pipeline(context);
        self.bind_vertex_buffers(context);
        self.bind_viewports(context);
    }
}
/// Fixed-capacity vector sized to the number of constant-buffer slots D3D11
/// exposes per shader stage.
type PerConstantBufferVec<T> = ArrayVec<[T; d3d11::D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT as _]>;
/// Computes per-stage first-constant offsets (in 16-byte units) for the
/// uniform buffers of a descriptor-set layout, consuming one value from
/// `offset_iter` per dynamic binding. Static uniform bindings contribute a
/// zero offset; dynamic storage buffers consume their offset but are ignored.
fn generate_graphics_dynamic_constant_buffer_offsets<'a>(
    bindings: impl IntoIterator<Item = &'a pso::DescriptorSetLayoutBinding>,
    offset_iter: &mut impl Iterator<Item = u32>,
    context1_some: bool,
) -> (PerConstantBufferVec<UINT>, PerConstantBufferVec<UINT>) {
    let mut vs_offsets = ArrayVec::new();
    let mut fs_offsets = ArrayVec::new();
    let mut saw_dynamic_uniform = false;
    for binding in bindings {
        match binding.ty {
            pso::DescriptorType::Buffer {
                ty: pso::BufferDescriptorType::Uniform,
                format: pso::BufferDescriptorFormat::Structured { dynamic_offset },
            } => {
                // Dynamic bindings take a caller-supplied byte offset,
                // converted to 16-byte constant units; static ones are 0.
                let quads = if dynamic_offset {
                    saw_dynamic_uniform = true;
                    offset_iter.next().unwrap() / 16
                } else {
                    0
                };
                if binding.stage_flags.contains(pso::ShaderStageFlags::VERTEX) {
                    vs_offsets.push(quads);
                }
                if binding.stage_flags.contains(pso::ShaderStageFlags::FRAGMENT) {
                    fs_offsets.push(quads);
                }
            }
            pso::DescriptorType::Buffer {
                ty: pso::BufferDescriptorType::Storage { .. },
                format: pso::BufferDescriptorFormat::Structured { dynamic_offset: true },
            } => {
                // TODO: Storage buffer offsets require new buffer views with correct sizes.
                //       Might also require D3D11_BUFFEREX_SRV to act like RBA is happening.
                let _ = offset_iter.next().unwrap();
                warn!("Dynamic offsets into storage buffers are currently unsupported on DX11.");
            }
            _ => {}
        }
    }
    if saw_dynamic_uniform && !context1_some {
        warn!("D3D11.1 runtime required for dynamic offsets into constant buffers. Offsets will be ignored.");
    }
    (vs_offsets, fs_offsets)
}
/// Compute-stage counterpart of
/// `generate_graphics_dynamic_constant_buffer_offsets`: one first-constant
/// offset (in 16-byte units) per uniform binding visible to COMPUTE.
fn generate_compute_dynamic_constant_buffer_offsets<'a>(
    bindings: impl IntoIterator<Item = &'a pso::DescriptorSetLayoutBinding>,
    offset_iter: &mut impl Iterator<Item = u32>,
    context1_some: bool,
) -> PerConstantBufferVec<UINT> {
    let mut cs_offsets = ArrayVec::new();
    let mut saw_dynamic_uniform = false;
    for binding in bindings {
        match binding.ty {
            pso::DescriptorType::Buffer {
                ty: pso::BufferDescriptorType::Uniform,
                format: pso::BufferDescriptorFormat::Structured { dynamic_offset },
            } => {
                let quads = if dynamic_offset {
                    saw_dynamic_uniform = true;
                    offset_iter.next().unwrap() / 16
                } else {
                    0
                };
                if binding.stage_flags.contains(pso::ShaderStageFlags::COMPUTE) {
                    cs_offsets.push(quads);
                }
            }
            pso::DescriptorType::Buffer {
                ty: pso::BufferDescriptorType::Storage { .. },
                format: pso::BufferDescriptorFormat::Structured { dynamic_offset: true },
            } => {
                // TODO: Storage buffer offsets require new buffer views with correct sizes.
                //       Might also require D3D11_BUFFEREX_SRV to act like RBA is happening.
                let _ = offset_iter.next().unwrap();
                warn!("Dynamic offsets into storage buffers are currently unsupported on DX11.");
            }
            _ => {}
        }
    }
    if saw_dynamic_uniform && !context1_some {
        warn!("D3D11.1 runtime required for dynamic offsets into constant buffers. Offsets will be ignored.");
    }
    cs_offsets
}
/// A command buffer recorded on a D3D11 deferred context and replayed on the
/// queue's immediate context as an `ID3D11CommandList`.
pub struct CommandBuffer {
    internal: Arc<internal::Internal>,
    /// The deferred context commands are recorded into.
    context: ComPtr<d3d11::ID3D11DeviceContext>,
    /// D3D11.1 context, when available (needed for dynamic CB offsets).
    context1: Option<ComPtr<d3d11_1::ID3D11DeviceContext1>>,
    /// Finished command list, produced by `finish`.
    list: RefCell<Option<ComPtr<d3d11::ID3D11CommandList>>>,
    // since coherent memory needs to be synchronized at submission, we need to gather up all
    // coherent resources that are used in the command buffer and flush/invalidate them accordingly
    // before executing.
    flush_coherent_memory: Vec<MemoryFlush>,
    invalidate_coherent_memory: Vec<MemoryInvalidate>,
    // holds information about the active render pass
    render_pass_cache: Option<RenderPassCache>,
    // Have to update entire push constant buffer at once, keep whole buffer data local.
    push_constant_data: [u32; MAX_PUSH_CONSTANT_SIZE / 4],
    push_constant_buffer: ComPtr<d3d11::ID3D11Buffer>,
    /// Lazy shadow of the D3D11 pipeline state for this recording.
    cache: CommandBufferState,
    /// Set from `CommandBufferFlags::ONE_TIME_SUBMIT`; affects `as_raw_list`.
    one_time_submit: bool,
    debug_name: Option<String>,
    debug_scopes: Vec<Option<debug::DebugScope>>,
}
impl fmt::Debug for CommandBuffer {
    /// Opaque debug output; internal COM pointers carry no printable state.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        write!(fmt, "CommandBuffer")
    }
}
// SAFETY(review): CommandBuffer wraps deferred-context COM pointers and a
// RefCell; soundness relies on gfx-hal's rule that command buffers are
// externally synchronized — confirm no concurrent recording is possible.
unsafe impl Send for CommandBuffer {}
unsafe impl Sync for CommandBuffer {}
impl CommandBuffer {
    /// Creates a command buffer backed by a new deferred context, preferring a
    /// D3D11.1 context when `device1` is available, and allocates the
    /// per-buffer push-constant constant buffer.
    fn create_deferred(
        device: &d3d11::ID3D11Device,
        device1: Option<&d3d11_1::ID3D11Device1>,
        internal: Arc<internal::Internal>,
    ) -> Self {
        let (context, context1) = if let Some(device1) = device1 {
            let mut context1: *mut d3d11_1::ID3D11DeviceContext1 = ptr::null_mut();
            let hr =
                unsafe { device1.CreateDeferredContext1(0, &mut context1 as *mut *mut _) };
            assert_eq!(hr, winerror::S_OK);
            let context1 = unsafe { ComPtr::from_raw(context1) };
            // The 11.1 context also serves as the base 11.0 context.
            let context = context1.cast::<d3d11::ID3D11DeviceContext>().unwrap();
            (context, Some(context1))
        } else {
            let mut context: *mut d3d11::ID3D11DeviceContext = ptr::null_mut();
            let hr =
                unsafe { device.CreateDeferredContext(0, &mut context as *mut *mut _) };
            assert_eq!(hr, winerror::S_OK);
            let context = unsafe { ComPtr::from_raw(context) };
            (context, None)
        };
        let push_constant_buffer = {
            let desc = d3d11::D3D11_BUFFER_DESC {
                ByteWidth: MAX_PUSH_CONSTANT_SIZE as _,
                Usage: d3d11::D3D11_USAGE_DEFAULT,
                BindFlags: d3d11::D3D11_BIND_CONSTANT_BUFFER,
                CPUAccessFlags: 0,
                MiscFlags: 0,
                StructureByteStride: 0,
            };
            let mut buffer: *mut d3d11::ID3D11Buffer = ptr::null_mut();
            let hr = unsafe {
                device.CreateBuffer(&desc as *const _, ptr::null_mut(), &mut buffer as *mut _)
            };
            assert_eq!(hr, winerror::S_OK);
            unsafe { ComPtr::from_raw(buffer) }
        };
        // Sized from the shared constant instead of a hard-coded `64` so the
        // initializer tracks the field type `[u32; MAX_PUSH_CONSTANT_SIZE / 4]`.
        let push_constant_data = [0_u32; MAX_PUSH_CONSTANT_SIZE / 4];
        CommandBuffer {
            internal,
            context,
            context1,
            list: RefCell::new(None),
            flush_coherent_memory: Vec::new(),
            invalidate_coherent_memory: Vec::new(),
            render_pass_cache: None,
            push_constant_data,
            push_constant_buffer,
            cache: CommandBufferState::new(),
            one_time_submit: false,
            debug_name: None,
            debug_scopes: Vec::new(),
        }
    }
    /// Returns the finished command list; a one-time-submit buffer gives up
    /// its list (so it cannot be submitted twice), others hand out a clone.
    ///
    /// Panics if `finish` has not been called.
    fn as_raw_list(&self) -> ComPtr<d3d11::ID3D11CommandList> {
        if self.one_time_submit {
            self.list.replace(None).unwrap()
        } else {
            self.list.borrow().clone().unwrap()
        }
    }
    /// Schedules a whole-buffer flush of coherent host memory before this
    /// command buffer executes; deduplicated per underlying resource.
    fn defer_coherent_flush(&mut self, buffer: &Buffer) {
        if !self
            .flush_coherent_memory
            .iter()
            .any(|m| m.buffer == buffer.internal.raw)
        {
            self.flush_coherent_memory.push(MemoryFlush {
                host_memory: buffer.memory_ptr,
                sync_range: SyncRange::Whole,
                buffer: buffer.internal.raw,
            });
        }
    }
    /// Schedules an invalidate (device -> host readback) of coherent memory
    /// after this command buffer executes; deduplicated per resource.
    fn defer_coherent_invalidate(&mut self, buffer: &Buffer) {
        if !self
            .invalidate_coherent_memory
            .iter()
            .any(|m| m.buffer == buffer.internal.raw)
        {
            self.invalidate_coherent_memory.push(MemoryInvalidate {
                working_buffer: Some(self.internal.working_buffer.clone()),
                working_buffer_size: self.internal.working_buffer_size,
                host_memory: buffer.memory_ptr,
                host_sync_range: buffer.bound_range.clone(),
                buffer_sync_range: buffer.bound_range.clone(),
                buffer: buffer.internal.raw,
            });
        }
    }
    /// Clears all recording state so the buffer can be recorded again.
    fn reset(&mut self) {
        self.flush_coherent_memory.clear();
        self.invalidate_coherent_memory.clear();
        self.render_pass_cache = None;
        self.cache.clear();
        self.debug_scopes.clear();
    }
}
impl command::CommandBuffer<Backend> for CommandBuffer {
/// Starts recording: resets state and binds the push-constant buffer to the
/// last constant-buffer slot of the VS/PS/CS stages so it never needs
/// re-binding during recording.
unsafe fn begin(
    &mut self,
    flags: command::CommandBufferFlags,
    _info: command::CommandBufferInheritanceInfo<Backend>,
) {
    self.one_time_submit = flags.contains(command::CommandBufferFlags::ONE_TIME_SUBMIT);
    self.reset();
    // Push constants are at the top register to allow them to be bound only once.
    let raw_push_constant_buffer = self.push_constant_buffer.as_raw();
    self.context.VSSetConstantBuffers(
        d3d11::D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT - 1,
        1,
        &raw_push_constant_buffer as *const _
    );
    self.context.PSSetConstantBuffers(
        d3d11::D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT - 1,
        1,
        &raw_push_constant_buffer as *const _
    );
    self.context.CSSetConstantBuffers(
        d3d11::D3D11_COMMONSHADER_CONSTANT_BUFFER_API_SLOT_COUNT - 1,
        1,
        &raw_push_constant_buffer as *const _
    );
}
/// Ends recording: bakes the deferred context into an ID3D11CommandList
/// (without restoring context state) and stores it for submission.
unsafe fn finish(&mut self) {
    let mut list: *mut d3d11::ID3D11CommandList = ptr::null_mut();
    let hr = self
        .context
        .FinishCommandList(FALSE, &mut list as *mut *mut _);
    assert_eq!(hr, winerror::S_OK);
    if let Some(ref name) = self.debug_name {
        set_debug_name(&*list, name);
    }
    self.list.replace(Some(ComPtr::from_raw(list)));
}
/// Resets the command buffer for re-recording; resources are not released
/// eagerly regardless of `_release_resources`.
unsafe fn reset(&mut self, _release_resources: bool) {
    self.reset();
}
/// Begins a render pass: translates each attachment's load ops into pending
/// `AttachmentClear`s (scheduled for the first subpass that uses the
/// attachment), caches the pass state, and starts subpass 0.
unsafe fn begin_render_pass<T>(
    &mut self,
    render_pass: &RenderPass,
    framebuffer: &Framebuffer,
    target_rect: pso::Rect,
    clear_values: T,
    _first_subpass: command::SubpassContents,
) where
    T: IntoIterator,
    T::Item: Borrow<command::ClearValue>,
{
    use pass::AttachmentLoadOp as Alo;
    let mut clear_iter = clear_values.into_iter();
    let mut attachment_clears = Vec::new();
    for (idx, attachment) in render_pass.attachments.iter().enumerate() {
        //let attachment = render_pass.attachments[attachment_ref];
        let format = attachment.format.unwrap();
        // First subpass that references this attachment.
        let subpass_id = render_pass
            .subpasses
            .iter()
            .position(|sp| sp.is_using(idx))
            .map(|i| i as pass::SubpassId);
        // One clear value is consumed per attachment that has any clear op.
        if attachment.has_clears() {
            let value = *clear_iter.next().unwrap().borrow();
            match (attachment.ops.load, attachment.stencil_ops.load) {
                // Depth format, both ops clear: single depth+stencil clear.
                (Alo::Clear, Alo::Clear) if format.is_depth() => {
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::DepthStencil {
                            depth: Some(value.depth_stencil.depth),
                            stencil: Some(value.depth_stencil.stencil),
                        },
                    });
                }
                // Color format with a stencil clear requested alongside.
                (Alo::Clear, Alo::Clear) => {
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::Color {
                            index: idx,
                            value: value.color,
                        },
                    });
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::DepthStencil {
                            depth: None,
                            stencil: Some(value.depth_stencil.stencil),
                        },
                    });
                }
                // Depth-only clear.
                (Alo::Clear, _) if format.is_depth() => {
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::DepthStencil {
                            depth: Some(value.depth_stencil.depth),
                            stencil: None,
                        },
                    });
                }
                // Color-only clear.
                (Alo::Clear, _) => {
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::Color {
                            index: idx,
                            value: value.color,
                        },
                    });
                }
                // Stencil-only clear.
                (_, Alo::Clear) => {
                    attachment_clears.push(AttachmentClear {
                        subpass_id,
                        attachment_id: idx,
                        raw: command::AttachmentClear::DepthStencil {
                            depth: None,
                            stencil: Some(value.depth_stencil.stencil),
                        },
                    });
                }
                _ => {}
            }
        }
    }
    self.render_pass_cache = Some(RenderPassCache {
        render_pass: render_pass.clone(),
        framebuffer: framebuffer.clone(),
        attachment_clear_values: attachment_clears,
        target_rect,
        current_subpass: 0,
    });
    if let Some(ref mut current_render_pass) = self.render_pass_cache {
        current_render_pass.start_subpass(&self.internal, &self.context, &mut self.cache);
    }
}
/// Advances to the next subpass (resolving MSAA for the finished one) and
/// starts it; silently a no-op outside a render pass.
unsafe fn next_subpass(&mut self, _contents: command::SubpassContents) {
    if let Some(ref mut current_render_pass) = self.render_pass_cache {
        current_render_pass.next_subpass(&self.context);
        current_render_pass.start_subpass(&self.internal, &self.context, &mut self.cache);
    }
}
/// Ends the render pass: resolves MSAA for the final subpass, unbinds all
/// render targets from the OM stage, and drops the cached pass state.
unsafe fn end_render_pass(&mut self) {
    if let Some(ref mut current_render_pass) = self.render_pass_cache {
        current_render_pass.resolve_msaa(&self.context);
    }
    self.context
        .OMSetRenderTargets(8, [ptr::null_mut(); 8].as_ptr(), ptr::null_mut());
    self.render_pass_cache = None;
}
/// No-op: D3D11 tracks resource hazards in the driver, so explicit barriers
/// are not needed by this backend.
unsafe fn pipeline_barrier<'a, T>(
    &mut self,
    _stages: Range<pso::PipelineStage>,
    _dependencies: memory::Dependencies,
    _barriers: T,
) where
    T: IntoIterator,
    T::Item: Borrow<memory::Barrier<'a, Backend>>,
{
    // TODO: should we track and assert on resource states?
    // unimplemented!()
}
// Clears color or depth/stencil subresources of `image` outside a render
// pass, issuing one view clear per (mip level, array layer) in each range.
unsafe fn clear_image<T>(
    &mut self,
    image: &Image,
    _: image::Layout,
    value: command::ClearValue,
    subresource_ranges: T,
) where
    T: IntoIterator,
    T::Item: Borrow<image::SubresourceRange>,
{
    for range in subresource_ranges {
        let range = range.borrow();
        let num_levels = range.resolve_level_count(image.mip_levels);
        let num_layers = range.resolve_layer_count(image.kind.num_layers());
        // Translate requested aspects into D3D11 clear flags (may be both).
        let mut depth_stencil_flags = 0;
        if range.aspects.contains(format::Aspects::DEPTH) {
            depth_stencil_flags |= d3d11::D3D11_CLEAR_DEPTH;
        }
        if range.aspects.contains(format::Aspects::STENCIL) {
            depth_stencil_flags |= d3d11::D3D11_CLEAR_STENCIL;
        }
        // TODO: clear Int/Uint depending on format
        for rel_layer in 0..num_layers {
            for rel_level in 0..num_levels {
                let level = range.level_start + rel_level;
                let layer = range.layer_start + rel_layer;
                if range.aspects.contains(format::Aspects::COLOR) {
                    // NOTE(review): assumes an RTV exists for every requested
                    // subresource — `unwrap` panics otherwise.
                    self.context.ClearRenderTargetView(
                        image.get_rtv(level, layer).unwrap().as_raw(),
                        &value.color.float32,
                    );
                } else {
                    self.context.ClearDepthStencilView(
                        image.get_dsv(level, layer).unwrap().as_raw(),
                        depth_stencil_flags,
                        value.depth_stencil.depth,
                        value.depth_stencil.stencil as _,
                    );
                }
            }
        }
    }
}
// Clears rectangles of the attachments bound by the current render pass.
// Implemented internally as a draw, so it dirties and re-binds pipeline state.
// Panics when called outside a render pass (required by the API contract).
unsafe fn clear_attachments<T, U>(&mut self, clears: T, rects: U)
where
    T: IntoIterator,
    T::Item: Borrow<command::AttachmentClear>,
    U: IntoIterator,
    U::Item: Borrow<pso::ClearRect>,
{
    if let Some(ref pass) = self.render_pass_cache {
        // The internal clear draw clobbers these pieces of state; mark them
        // dirty so `bind` restores the application's state afterwards.
        self.cache.dirty_flag.insert(
            DirtyStateFlag::GRAPHICS_PIPELINE
                | DirtyStateFlag::DEPTH_STENCIL_STATE
                | DirtyStateFlag::PIPELINE_PS
                | DirtyStateFlag::VIEWPORTS
                | DirtyStateFlag::RENDER_TARGETS_AND_UAVS,
        );
        self.internal
            .clear_attachments(&self.context, clears, rects, pass);
        self.cache.bind(&self.context);
    } else {
        panic!("`clear_attachments` can only be called inside a renderpass")
    }
}
// MSAA resolve as an explicit transfer op is not implemented for this backend.
unsafe fn resolve_image<T>(
    &mut self,
    _src: &Image,
    _src_layout: image::Layout,
    _dst: &Image,
    _dst_layout: image::Layout,
    _regions: T,
) where
    T: IntoIterator,
    T::Item: Borrow<command::ImageResolve>,
{
    unimplemented!()
}
// Blits (scaled/filtered copy) between 2D images. Implemented with an
// internal draw, so graphics pipeline state is dirtied and re-bound.
unsafe fn blit_image<T>(
    &mut self,
    src: &Image,
    _src_layout: image::Layout,
    dst: &Image,
    _dst_layout: image::Layout,
    filter: image::Filter,
    regions: T,
) where
    T: IntoIterator,
    T::Item: Borrow<command::ImageBlit>,
{
    self.cache
        .dirty_flag
        .insert(DirtyStateFlag::GRAPHICS_PIPELINE | DirtyStateFlag::PIPELINE_PS);
    self.internal
        .blit_2d_image(&self.context, src, dst, filter, regions);
    self.cache.bind(&self.context);
}
// Binds the index buffer directly to the input assembler.
unsafe fn bind_index_buffer(&mut self, ibv: buffer::IndexBufferView<Backend>) {
    self.context.IASetIndexBuffer(
        ibv.buffer.internal.raw,
        conv::map_index_type(ibv.index_type),
        ibv.range.offset as u32,
    );
}
// Records vertex buffer bindings into the state cache (flushed in one
// IASetVertexBuffers call), scheduling coherent-memory flushes as needed.
unsafe fn bind_vertex_buffers<I, T>(&mut self, first_binding: pso::BufferIndex, buffers: I)
where
    I: IntoIterator<Item = (T, buffer::SubRange)>,
    T: Borrow<Buffer>,
{
    for (i, (buf, sub)) in buffers.into_iter().enumerate() {
        let idx = i + first_binding as usize;
        let buf = buf.borrow();
        if buf.is_coherent {
            // Host-visible coherent memory must be uploaded before the GPU reads it.
            self.defer_coherent_flush(buf);
        }
        self.cache
            .set_vertex_buffer(idx, sub.offset as u32, buf.internal.raw);
    }
    self.cache.bind_vertex_buffers(&self.context);
}
// Records the viewport set in the state cache and binds it immediately.
// D3D11 only allows setting every viewport in a single call, so all of
// them are gathered into one slice first.
unsafe fn set_viewports<T>(&mut self, _first_viewport: u32, viewports: T)
where
    T: IntoIterator,
    T::Item: Borrow<pso::Viewport>,
{
    let mut mapped = Vec::new();
    for vp in viewports {
        mapped.push(conv::map_viewport(vp.borrow()));
    }
    self.cache.set_viewports(&mapped);
    self.cache.bind_viewports(&self.context);
}
// Sets all scissor rectangles at once — like viewports, D3D11 replaces the
// whole set in a single RSSetScissorRects call.
unsafe fn set_scissors<T>(&mut self, _first_scissor: u32, scissors: T)
where
    T: IntoIterator,
    T::Item: Borrow<pso::Rect>,
{
    let mut mapped = Vec::new();
    for rect in scissors {
        mapped.push(conv::map_rect(rect.borrow()));
    }
    self.context
        .RSSetScissorRects(mapped.len() as _, mapped.as_ptr());
}
// Updates the cached blend factor and re-binds blend state.
unsafe fn set_blend_constants(&mut self, color: pso::ColorValue) {
    self.cache.set_blend_factor(color);
    self.cache.bind_blend_state(&self.context);
}
// D3D11 has a single stencil reference for both faces, so `_faces` is ignored.
unsafe fn set_stencil_reference(&mut self, _faces: pso::Face, value: pso::StencilValue) {
    self.cache.set_stencil_ref(value);
    self.cache.bind_depth_stencil_state(&self.context);
}
// Stencil masks are only recorded here; they take effect when the
// depth-stencil state is next (re)built and bound.
unsafe fn set_stencil_read_mask(&mut self, _faces: pso::Face, value: pso::StencilValue) {
    self.cache.stencil_read_mask = Some(value);
}
unsafe fn set_stencil_write_mask(&mut self, _faces: pso::Face, value: pso::StencilValue) {
    self.cache.stencil_write_mask = Some(value);
}
// Depth-bounds test has no D3D11 equivalent.
unsafe fn set_depth_bounds(&mut self, _bounds: Range<f32>) {
    unimplemented!()
}
// Only validates the width; D3D11 has no wide-line support.
unsafe fn set_line_width(&mut self, width: f32) {
    validate_line_width(width);
}
unsafe fn set_depth_bias(&mut self, _depth_bias: pso::DepthBias) {
    // TODO:
    // unimplemented!()
}
// Stores the pipeline in the cache and binds all dependent state.
unsafe fn bind_graphics_pipeline(&mut self, pipeline: &GraphicsPipeline) {
    self.cache.set_graphics_pipeline(pipeline.clone());
    self.cache.bind(&self.context);
}
// Binds descriptor sets for the graphics pipeline: schedules coherent-buffer
// flushes/invalidates for resources referenced by each set, then plugs
// CBVs/SRVs/samplers/UAVs into the VS and PS stages according to the
// pipeline layout's register remapping. Dynamic offsets are consumed from
// `offsets` in set/binding order.
unsafe fn bind_graphics_descriptor_sets<'a, I, J>(
    &mut self,
    layout: &PipelineLayout,
    first_set: usize,
    sets: I,
    offsets: J,
) where
    I: IntoIterator,
    I::Item: Borrow<DescriptorSet>,
    J: IntoIterator,
    J::Item: Borrow<command::DescriptorSetOffset>,
{
    let _scope = debug_scope!(&self.context, "BindGraphicsDescriptorSets");
    // TODO: find a better solution to invalidating old bindings..
    let nulls = [ptr::null_mut(); d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT as usize];
    self.context.CSSetUnorderedAccessViews(
        0,
        d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT,
        nulls.as_ptr(),
        ptr::null_mut(),
    );
    let mut offset_iter = offsets.into_iter().map(|o: J::Item| *o.borrow());
    for (set, info) in sets.into_iter().zip(&layout.sets[first_set..]) {
        let set: &DescriptorSet = set.borrow();
        {
            // Queue host->device flushes and device->host invalidates for any
            // coherent buffers this set references, deduplicated by buffer.
            let coherent_buffers = set.coherent_buffers.lock();
            for sync in coherent_buffers.flush_coherent_buffers.borrow().iter() {
                // TODO: merge sync range if a flush already exists
                if !self
                    .flush_coherent_memory
                    .iter()
                    .any(|m| m.buffer == sync.device_buffer)
                {
                    self.flush_coherent_memory.push(MemoryFlush {
                        host_memory: sync.host_ptr,
                        sync_range: sync.range.clone(),
                        buffer: sync.device_buffer,
                    });
                }
            }
            for sync in coherent_buffers.invalidate_coherent_buffers.borrow().iter() {
                if !self
                    .invalidate_coherent_memory
                    .iter()
                    .any(|m| m.buffer == sync.device_buffer)
                {
                    self.invalidate_coherent_memory.push(MemoryInvalidate {
                        working_buffer: Some(self.internal.working_buffer.clone()),
                        working_buffer_size: self.internal.working_buffer_size,
                        host_memory: sync.host_ptr,
                        host_sync_range: sync.range.clone(),
                        buffer_sync_range: sync.range.clone(),
                        buffer: sync.device_buffer,
                    });
                }
            }
        }
        let (vs_offsets, fs_offsets) = generate_graphics_dynamic_constant_buffer_offsets(
            &*set.layout.bindings,
            &mut offset_iter,
            self.context1.is_some()
        );
        // Vertex-stage constant buffers (with dynamic offsets on 11.1+).
        if let Some(rd) = info.registers.vs.c.as_some() {
            let start_slot = rd.res_index as u32;
            let num_buffers = rd.count as u32;
            let constant_buffers = set.handles.offset(rd.pool_offset as isize);
            if let Some(ref context1) = self.context1 {
                // This call with offsets won't work right with command list emulation
                // unless we reset the first and last constant buffers to null.
                if self.internal.command_list_emulation {
                    let null_cbuf = [ptr::null_mut::<d3d11::ID3D11Buffer>()];
                    context1.VSSetConstantBuffers(start_slot, 1, &null_cbuf as *const _);
                    if num_buffers > 1 {
                        context1.VSSetConstantBuffers(start_slot + num_buffers - 1, 1, &null_cbuf as *const _);
                    }
                }
                // TODO: This should be the actual buffer length for RBA purposes,
                // but that information isn't easily accessible here.
                context1.VSSetConstantBuffers1(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _,
                    vs_offsets.as_ptr(),
                    self.internal.constant_buffer_count_buffer.as_ptr(),
                );
            } else {
                self.context.VSSetConstantBuffers(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _
                );
            }
        }
        if let Some(rd) = info.registers.vs.t.as_some() {
            self.context.VSSetShaderResources(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
        if let Some(rd) = info.registers.vs.s.as_some() {
            self.context.VSSetSamplers(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
        // Pixel-stage bindings mirror the vertex-stage logic above.
        if let Some(rd) = info.registers.ps.c.as_some() {
            let start_slot = rd.res_index as u32;
            let num_buffers = rd.count as u32;
            let constant_buffers = set.handles.offset(rd.pool_offset as isize);
            if let Some(ref context1) = self.context1 {
                // This call with offsets won't work right with command list emulation
                // unless we reset the first and last constant buffers to null.
                if self.internal.command_list_emulation {
                    let null_cbuf = [ptr::null_mut::<d3d11::ID3D11Buffer>()];
                    context1.PSSetConstantBuffers(start_slot, 1, &null_cbuf as *const _);
                    if num_buffers > 1 {
                        context1.PSSetConstantBuffers(start_slot + num_buffers - 1, 1, &null_cbuf as *const _);
                    }
                }
                context1.PSSetConstantBuffers1(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _,
                    fs_offsets.as_ptr(),
                    self.internal.constant_buffer_count_buffer.as_ptr(),
                );
            } else {
                self.context.PSSetConstantBuffers(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _
                );
            }
        }
        if let Some(rd) = info.registers.ps.t.as_some() {
            self.context.PSSetShaderResources(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
        if let Some(rd) = info.registers.ps.s.as_some() {
            self.context.PSSetSamplers(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
        // UAVs going to the graphics pipeline are always treated as pixel shader bindings.
        if let Some(rd) = info.registers.ps.u.as_some() {
            // We bind UAVs in inverse order from the top to prevent invalidation
            // when the render target count changes.
            for idx in (0..(rd.count)).rev() {
                let ptr = (*set.handles.offset(rd.pool_offset as isize + idx as isize)).0;
                let uav_register = d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT - 1 - rd.res_index as u32 - idx as u32;
                self.cache.uavs[uav_register as usize] = ptr as *mut _;
            }
            self.cache.uav_len = (rd.res_index + rd.count) as u32;
            self.cache.dirty_flag.insert(DirtyStateFlag::RENDER_TARGETS_AND_UAVS);
        }
    }
    self.cache.bind_render_targets(&self.context);
}
// Binds the compute shader directly; compute needs no cached fixed-function state.
unsafe fn bind_compute_pipeline(&mut self, pipeline: &ComputePipeline) {
    self.context
        .CSSetShader(pipeline.cs.as_raw(), ptr::null_mut(), 0);
}
// Compute-stage analogue of `bind_graphics_descriptor_sets`: schedules
// coherent-buffer flushes/invalidates, then binds CBVs/SRVs/UAVs/samplers
// to the CS stage per the layout's register remapping.
unsafe fn bind_compute_descriptor_sets<I, J>(
    &mut self,
    layout: &PipelineLayout,
    first_set: usize,
    sets: I,
    offsets: J,
) where
    I: IntoIterator,
    I::Item: Borrow<DescriptorSet>,
    J: IntoIterator,
    J::Item: Borrow<command::DescriptorSetOffset>,
{
    let _scope = debug_scope!(&self.context, "BindComputeDescriptorSets");
    // Unbind every CS UAV slot first so stale bindings can't alias new ones.
    let nulls = [ptr::null_mut(); d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT as usize];
    self.context.CSSetUnorderedAccessViews(
        0,
        d3d11::D3D11_PS_CS_UAV_REGISTER_COUNT,
        nulls.as_ptr(),
        ptr::null_mut(),
    );
    let mut offset_iter = offsets.into_iter().map(|o: J::Item| *o.borrow());
    for (set, info) in sets.into_iter().zip(&layout.sets[first_set..]) {
        let set: &DescriptorSet = set.borrow();
        {
            // Queue deduplicated flushes/invalidates for coherent buffers
            // referenced by this descriptor set.
            let coherent_buffers = set.coherent_buffers.lock();
            for sync in coherent_buffers.flush_coherent_buffers.borrow().iter() {
                if !self
                    .flush_coherent_memory
                    .iter()
                    .any(|m| m.buffer == sync.device_buffer)
                {
                    self.flush_coherent_memory.push(MemoryFlush {
                        host_memory: sync.host_ptr,
                        sync_range: sync.range.clone(),
                        buffer: sync.device_buffer,
                    });
                }
            }
            for sync in coherent_buffers.invalidate_coherent_buffers.borrow().iter() {
                if !self
                    .invalidate_coherent_memory
                    .iter()
                    .any(|m| m.buffer == sync.device_buffer)
                {
                    self.invalidate_coherent_memory.push(MemoryInvalidate {
                        working_buffer: Some(self.internal.working_buffer.clone()),
                        working_buffer_size: self.internal.working_buffer_size,
                        host_memory: sync.host_ptr,
                        host_sync_range: sync.range.clone(),
                        buffer_sync_range: sync.range.clone(),
                        buffer: sync.device_buffer,
                    });
                }
            }
        }
        let cs_offsets = generate_compute_dynamic_constant_buffer_offsets(
            &*set.layout.bindings,
            &mut offset_iter,
            self.context1.is_some()
        );
        if let Some(rd) = info.registers.cs.c.as_some() {
            let start_slot = rd.res_index as u32;
            let num_buffers = rd.count as u32;
            let constant_buffers = set.handles.offset(rd.pool_offset as isize);
            if let Some(ref context1) = self.context1 {
                // This call with offsets won't work right with command list emulation
                // unless we reset the first and last constant buffers to null.
                if self.internal.command_list_emulation {
                    let null_cbuf = [ptr::null_mut::<d3d11::ID3D11Buffer>()];
                    context1.CSSetConstantBuffers(start_slot, 1, &null_cbuf as *const _);
                    if num_buffers > 1 {
                        context1.CSSetConstantBuffers(start_slot + num_buffers - 1, 1, &null_cbuf as *const _);
                    }
                }
                // TODO: This should be the actual buffer length for RBA purposes,
                // but that information isn't easily accessible here.
                context1.CSSetConstantBuffers1(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _,
                    cs_offsets.as_ptr(),
                    self.internal.constant_buffer_count_buffer.as_ptr(),
                );
            } else {
                self.context.CSSetConstantBuffers(
                    start_slot,
                    num_buffers,
                    constant_buffers as *const *mut _
                );
            }
        }
        if let Some(rd) = info.registers.cs.t.as_some() {
            self.context.CSSetShaderResources(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
        if let Some(rd) = info.registers.cs.u.as_some() {
            self.context.CSSetUnorderedAccessViews(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
                ptr::null_mut(),
            );
        }
        if let Some(rd) = info.registers.cs.s.as_some() {
            self.context.CSSetSamplers(
                rd.res_index as u32,
                rd.count as u32,
                set.handles.offset(rd.pool_offset as isize) as *const *mut _,
            );
        }
    }
}
// Dispatches a compute grid of the given workgroup counts (x, y, z).
unsafe fn dispatch(&mut self, count: WorkGroupCount) {
    self.context.Dispatch(count[0], count[1], count[2]);
}
// Indirect dispatch and buffer fill/update transfers are not implemented
// in this backend yet.
unsafe fn dispatch_indirect(&mut self, _buffer: &Buffer, _offset: buffer::Offset) {
    unimplemented!()
}
unsafe fn fill_buffer(&mut self, _buffer: &Buffer, _sub: buffer::SubRange, _data: u32) {
    unimplemented!()
}
unsafe fn update_buffer(&mut self, _buffer: &Buffer, _offset: buffer::Offset, _data: &[u8]) {
    unimplemented!()
}
// Copies byte ranges between buffers via CopySubresourceRegion, treating
// each buffer as a 1D box. Also mirrors the copy into the destination's
// disjoint constant-buffer shadow when one exists.
unsafe fn copy_buffer<T>(&mut self, src: &Buffer, dst: &Buffer, regions: T)
where
    T: IntoIterator,
    T::Item: Borrow<command::BufferCopy>,
{
    if src.is_coherent {
        // Make sure host writes reach the device copy before the GPU reads it.
        self.defer_coherent_flush(src);
    }
    for region in regions.into_iter() {
        let info = region.borrow();
        // 1D source box: bytes [src, src + size).
        let dst_box = d3d11::D3D11_BOX {
            left: info.src as _,
            top: 0,
            front: 0,
            right: (info.src + info.size) as _,
            bottom: 1,
            back: 1,
        };
        self.context.CopySubresourceRegion(
            dst.internal.raw as _,
            0,
            info.dst as _,
            0,
            0,
            src.internal.raw as _,
            0,
            &dst_box,
        );
        if let Some(disjoint_cb) = dst.internal.disjoint_cb {
            // Keep the D3D11_BIND_CONSTANT_BUFFER shadow copy in sync.
            self.context.CopySubresourceRegion(
                disjoint_cb as _,
                0,
                info.dst as _,
                0,
                0,
                src.internal.raw as _,
                0,
                &dst_box,
            );
        }
    }
}
// Image-to-image copy; delegated to the internal 2D copy helper.
unsafe fn copy_image<T>(
    &mut self,
    src: &Image,
    _: image::Layout,
    dst: &Image,
    _: image::Layout,
    regions: T,
) where
    T: IntoIterator,
    T::Item: Borrow<command::ImageCopy>,
{
    self.internal
        .copy_image_2d(&self.context, src, dst, regions);
}
// Buffer-to-image upload; flushes coherent source memory first.
unsafe fn copy_buffer_to_image<T>(
    &mut self,
    buffer: &Buffer,
    image: &Image,
    _: image::Layout,
    regions: T,
) where
    T: IntoIterator,
    T::Item: Borrow<command::BufferImageCopy>,
{
    if buffer.is_coherent {
        self.defer_coherent_flush(buffer);
    }
    self.internal
        .copy_buffer_to_image(&self.context, buffer, image, regions);
}
// Image-to-buffer readback; schedules an invalidate so the host sees the result.
unsafe fn copy_image_to_buffer<T>(
    &mut self,
    image: &Image,
    _: image::Layout,
    buffer: &Buffer,
    regions: T,
) where
    T: IntoIterator,
    T::Item: Borrow<command::BufferImageCopy>,
{
    if buffer.is_coherent {
        self.defer_coherent_invalidate(buffer);
    }
    self.internal
        .copy_image_to_buffer(&self.context, image, buffer, regions);
}
// Non-indexed draw: half-open ranges are converted to (count, start) pairs.
unsafe fn draw(&mut self, vertices: Range<VertexCount>, instances: Range<InstanceCount>) {
    self.context.DrawInstanced(
        vertices.end - vertices.start,
        instances.end - instances.start,
        vertices.start,
        instances.start,
    );
}
// Indexed draw with a signed base-vertex offset.
unsafe fn draw_indexed(
    &mut self,
    indices: Range<IndexCount>,
    base_vertex: VertexOffset,
    instances: Range<InstanceCount>,
) {
    self.context.DrawIndexedInstanced(
        indices.end - indices.start,
        instances.end - instances.start,
        indices.start,
        base_vertex,
        instances.start,
    );
}
// Indirect draws: D3D11 consumes exactly one argument struct per call,
// hence the single-draw assertion.
unsafe fn draw_indirect(
    &mut self,
    buffer: &Buffer,
    offset: buffer::Offset,
    draw_count: DrawCount,
    _stride: u32,
) {
    assert_eq!(draw_count, 1, "DX11 doesn't support MULTI_DRAW_INDIRECT");
    self.context.DrawInstancedIndirect(
        buffer.internal.raw,
        offset as _,
    );
}
unsafe fn draw_indexed_indirect(
    &mut self,
    buffer: &Buffer,
    offset: buffer::Offset,
    draw_count: DrawCount,
    _stride: u32,
) {
    assert_eq!(draw_count, 1, "DX11 doesn't support MULTI_DRAW_INDIRECT");
    self.context.DrawIndexedInstancedIndirect(
        buffer.internal.raw,
        offset as _,
    );
}
// The following entry points cover features D3D11 does not expose
// (indirect-count draws and mesh shaders); calling them is a caller bug,
// so they panic rather than silently no-op.
unsafe fn draw_indirect_count(
    &mut self,
    _buffer: &Buffer,
    _offset: buffer::Offset,
    _count_buffer: &Buffer,
    _count_buffer_offset: buffer::Offset,
    _max_draw_count: u32,
    _stride: u32,
) {
    panic!("DX11 doesn't support DRAW_INDIRECT_COUNT")
}
unsafe fn draw_indexed_indirect_count(
    &mut self,
    _buffer: &Buffer,
    _offset: buffer::Offset,
    _count_buffer: &Buffer,
    _count_buffer_offset: buffer::Offset,
    _max_draw_count: u32,
    _stride: u32,
) {
    panic!("DX11 doesn't support DRAW_INDIRECT_COUNT")
}
unsafe fn draw_mesh_tasks(&mut self, _: TaskCount, _: TaskCount) {
    panic!("DX11 doesn't support MESH_SHADERS")
}
unsafe fn draw_mesh_tasks_indirect(
    &mut self,
    _: &Buffer,
    _: buffer::Offset,
    _: hal::DrawCount,
    _: u32,
) {
    panic!("DX11 doesn't support MESH_SHADERS")
}
unsafe fn draw_mesh_tasks_indirect_count(
    &mut self,
    _: &Buffer,
    _: buffer::Offset,
    _: &Buffer,
    _: buffer::Offset,
    _: hal::DrawCount,
    _: u32,
) {
    panic!("DX11 doesn't support MESH_SHADERS")
}
// Events and queries are not implemented in this backend yet.
unsafe fn set_event(&mut self, _: &(), _: pso::PipelineStage) {
    unimplemented!()
}
unsafe fn reset_event(&mut self, _: &(), _: pso::PipelineStage) {
    unimplemented!()
}
unsafe fn wait_events<'a, I, J>(&mut self, _: I, _: Range<pso::PipelineStage>, _: J)
where
    I: IntoIterator,
    I::Item: Borrow<()>,
    J: IntoIterator,
    J::Item: Borrow<memory::Barrier<'a, Backend>>,
{
    unimplemented!()
}
unsafe fn begin_query(&mut self, _query: query::Query<Backend>, _flags: query::ControlFlags) {
    unimplemented!()
}
unsafe fn end_query(&mut self, _query: query::Query<Backend>) {
    unimplemented!()
}
unsafe fn reset_query_pool(&mut self, _pool: &QueryPool, _queries: Range<query::Id>) {
    unimplemented!()
}
unsafe fn copy_query_pool_results(
    &mut self,
    _pool: &QueryPool,
    _queries: Range<query::Id>,
    _buffer: &Buffer,
    _offset: buffer::Offset,
    _stride: buffer::Offset,
    _flags: query::ResultFlags,
) {
    unimplemented!()
}
unsafe fn write_timestamp(&mut self, _: pso::PipelineStage, _query: query::Query<Backend>) {
    unimplemented!()
}
// Copies push-constant words into the CPU-side shadow array and uploads the
// whole shadow into the dedicated push-constant buffer. `offset` is in bytes,
// `constants` in 32-bit words.
unsafe fn push_graphics_constants(
    &mut self,
    _layout: &PipelineLayout,
    _stages: pso::ShaderStageFlags,
    offset: u32,
    constants: &[u32],
) {
    let start = (offset / 4) as usize;
    let end = start + constants.len();
    self.push_constant_data[start..end].copy_from_slice(constants);
    // Upload the entire shadow buffer; pitches are ignored for buffers.
    self.context.UpdateSubresource(
        self.push_constant_buffer.as_raw() as *mut _,
        0,
        ptr::null(),
        self.push_constant_data.as_ptr() as *const _,
        MAX_PUSH_CONSTANT_SIZE as _,
        1,
    );
}
// Compute-side twin of `push_graphics_constants`; both stages share the
// same shadow array and buffer.
unsafe fn push_compute_constants(
    &mut self,
    _layout: &PipelineLayout,
    offset: u32,
    constants: &[u32],
) {
    let start = (offset / 4) as usize;
    let end = start + constants.len();
    self.push_constant_data[start..end].copy_from_slice(constants);
    self.context.UpdateSubresource(
        self.push_constant_buffer.as_raw() as *mut _,
        0,
        ptr::null(),
        self.push_constant_data.as_ptr() as *const _,
        MAX_PUSH_CONSTANT_SIZE as _,
        1,
    );
}
// Secondary command buffer execution is not implemented.
unsafe fn execute_commands<'a, T, I>(&mut self, _buffers: I)
where
    T: 'a + Borrow<CommandBuffer>,
    I: IntoIterator<Item = &'a T>,
{
    unimplemented!()
}
// Emits a one-shot debug marker visible in graphics debuggers.
unsafe fn insert_debug_marker(&mut self, name: &str, _color: u32) {
    debug::debug_marker(&self.context, &format!("{}", name))
}
unsafe fn begin_debug_marker(&mut self, _name: &str, _color: u32) {
    // TODO: This causes everything after this to be part of this scope, why?
    // self.debug_scopes.push(debug::DebugScope::with_name(&self.context, format_args!("{}", name)))
}
unsafe fn end_debug_marker(&mut self) {
    // self.debug_scopes.pop();
}
}
// Describes how much of a coherent buffer needs syncing: the whole
// allocation or a byte sub-range.
#[derive(Clone, Debug)]
enum SyncRange {
    Whole,
    Partial(Range<u64>),
}
// A pending host -> device upload for one coherent buffer.
#[derive(Debug)]
pub struct MemoryFlush {
    // Base pointer of the host staging copy for this buffer.
    host_memory: *const u8,
    sync_range: SyncRange,
    buffer: *mut d3d11::ID3D11Buffer,
}
// A pending device -> host readback for one coherent buffer, staged
// through a shared working buffer in chunks of `working_buffer_size`.
pub struct MemoryInvalidate {
    working_buffer: Option<ComPtr<d3d11::ID3D11Buffer>>,
    working_buffer_size: u64,
    host_memory: *mut u8,
    host_sync_range: Range<u64>,
    buffer_sync_range: Range<u64>,
    buffer: *mut d3d11::ID3D11Buffer,
}
impl fmt::Debug for MemoryInvalidate {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("MemoryInvalidate")
    }
}
// Overlap of two half-open byte ranges, or `None` when they are disjoint
// (an empty overlap counts as no overlap).
fn intersection(a: &Range<u64>, b: &Range<u64>) -> Option<Range<u64>> {
    let start = a.start.max(b.start);
    let end = a.end.min(b.end);
    if start < end {
        Some(start..end)
    } else {
        None
    }
}
impl MemoryFlush {
    // Uploads the host staging copy into the device buffer. A `Partial`
    // range becomes a 1D box; `Whole` (or an empty partial) updates the
    // entire resource by passing a null box.
    fn do_flush(&self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        let src = self.host_memory;
        debug_marker!(context, "Flush({:?})", self.sync_range);
        let region = match self.sync_range {
            SyncRange::Partial(ref range) if range.start < range.end => Some(d3d11::D3D11_BOX {
                left: range.start as u32,
                top: 0,
                front: 0,
                right: range.end as u32,
                bottom: 1,
                back: 1,
            }),
            _ => None,
        };
        unsafe {
            context.UpdateSubresource(
                self.buffer as _,
                0,
                region.as_ref().map_or(ptr::null(), |r| r),
                src as _,
                0,
                0,
            );
        }
    }
}
impl MemoryInvalidate {
    // Copies one chunk of the device buffer into the CPU-readable working
    // buffer, then memcpys it into the host staging memory at `host_range`.
    // Both ranges must be the same length and no longer than the working buffer.
    fn download(
        &self,
        context: &ComPtr<d3d11::ID3D11DeviceContext>,
        buffer: *mut d3d11::ID3D11Buffer,
        host_range: Range<u64>,
        buffer_range: Range<u64>
    ) {
        // Range<u64> doesn't impl `len` for some bizarre reason relating to underflow
        debug_assert_eq!(host_range.end - host_range.start, buffer_range.end - buffer_range.start);
        unsafe {
            context.CopySubresourceRegion(
                self.working_buffer.clone().unwrap().as_raw() as _,
                0,
                0,
                0,
                0,
                buffer as _,
                0,
                &d3d11::D3D11_BOX {
                    left: buffer_range.start as _,
                    top: 0,
                    front: 0,
                    right: buffer_range.end as _,
                    bottom: 1,
                    back: 1,
                },
            );
            // copy over to our vec
            let dst = self.host_memory.offset(host_range.start as isize);
            let src = self.map(&context);
            ptr::copy(src, dst, (host_range.end - host_range.start) as usize);
            self.unmap(&context);
        }
    }
    // Performs the full readback, splitting the sync range into
    // working-buffer-sized chunks plus a final remainder.
    fn do_invalidate(&self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        let stride = self.working_buffer_size;
        let len = self.host_sync_range.end - self.host_sync_range.start;
        let chunks = len / stride;
        let remainder = len % stride;
        // we split up the copies into chunks the size of our working buffer
        for i in 0..chunks {
            let host_offset = self.host_sync_range.start + i * stride;
            let host_range = host_offset..(host_offset + stride);
            let buffer_offset = self.buffer_sync_range.start + i * stride;
            let buffer_range = buffer_offset..(buffer_offset + stride);
            self.download(context, self.buffer, host_range, buffer_range);
        }
        if remainder != 0 {
            let host_offset = self.host_sync_range.start + chunks * stride;
            let host_range = host_offset..self.host_sync_range.end;
            let buffer_offset = self.buffer_sync_range.start + chunks * stride;
            let buffer_range = buffer_offset..self.buffer_sync_range.end;
            debug_assert!(host_range.end - host_range.start <= stride);
            debug_assert!(buffer_range.end - buffer_range.start <= stride);
            self.download(context, self.buffer, host_range, buffer_range);
        }
    }
    // Maps the working buffer for CPU reads; panics if the map fails or no
    // working buffer was provided.
    fn map(&self, context: &ComPtr<d3d11::ID3D11DeviceContext>) -> *mut u8 {
        assert_eq!(self.working_buffer.is_some(), true);
        unsafe {
            let mut map = mem::zeroed();
            let hr = context.Map(
                self.working_buffer.clone().unwrap().as_raw() as _,
                0,
                d3d11::D3D11_MAP_READ,
                0,
                &mut map,
            );
            assert_eq!(hr, winerror::S_OK);
            map.pData as _
        }
    }
    fn unmap(&self, context: &ComPtr<d3d11::ID3D11DeviceContext>) {
        unsafe {
            context.Unmap(self.working_buffer.clone().unwrap().as_raw() as _, 0);
        }
    }
}
// Arena of (byte range within the memory allocation, resource) pairs.
type LocalResourceArena<T> = thunderdome::Arena<(Range<u64>, T)>;
// Since we don't have any heaps to work with directly, every time we bind a
// buffer/image to memory we allocate a dx11 resource and assign it a range.
//
// `HOST_VISIBLE` memory gets a `Vec<u8>` which covers the entire memory
// range. This forces us to only expose non-coherent memory, as this
// abstraction acts as a "cache" since the "staging buffer" vec is disjoint
// from all the dx11 resources we store in the struct.
pub struct Memory {
    properties: memory::Properties,
    size: u64,
    // pointer to staging memory, if it's HOST_VISIBLE
    host_ptr: *mut u8,
    // list of all buffers bound to this memory
    local_buffers: Arc<RwLock<LocalResourceArena<InternalBuffer>>>,
}
impl fmt::Debug for Memory {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("Memory")
    }
}
// SAFETY(review): relies on external synchronization of `host_ptr` and the
// arena lock — matches how the other raw-pointer types in this file are handled.
unsafe impl Send for Memory {}
unsafe impl Sync for Memory {}
impl Memory {
    // Converts a `Segment` (offset + optional size) into a concrete byte
    // range; `None` size means "to the end of the allocation".
    pub fn resolve(&self, segment: &memory::Segment) -> Range<u64> {
        segment.offset..segment.size.map_or(self.size, |s| segment.offset + s)
    }
    // Registers a buffer as occupying `range` of this allocation and
    // returns its arena handle.
    pub fn bind_buffer(&self, range: Range<u64>, buffer: InternalBuffer) -> thunderdome::Index {
        let mut local_buffers = self.local_buffers.write();
        local_buffers.insert((range, buffer))
    }
    // Uploads the host staging copy to every device buffer overlapping `range`.
    pub fn flush(&self, context: &ComPtr<d3d11::ID3D11DeviceContext>, range: Range<u64>) {
        use buffer::Usage;
        for (_, &(ref buffer_range, ref buffer)) in self.local_buffers.read().iter() {
            let range = match intersection(&range, &buffer_range) {
                Some(r) => r,
                None => continue,
            };
            // we need to handle 3 cases for updating buffers:
            //
            // 1. if our buffer was created as a `UNIFORM` buffer *and* other usage flags, we
            //    also have a disjoint buffer which only has `D3D11_BIND_CONSTANT_BUFFER` due
            //    to DX11 limitation. we then need to update both the original buffer and the
            //    disjoint one with the *whole* range
            //
            // 2. if our buffer was created with *only* `UNIFORM` usage we need to upload
            //    the whole range
            //
            // 3. the general case, without any `UNIFORM` usage has no restrictions on
            //    partial updates, so we upload the specified range
            //
            if let Some(disjoint) = buffer.disjoint_cb {
                MemoryFlush {
                    host_memory: unsafe { self.host_ptr.offset(buffer_range.start as _) },
                    sync_range: SyncRange::Whole,
                    buffer: disjoint,
                }
                .do_flush(&context);
            }
            let mem_flush = if buffer.usage == Usage::UNIFORM {
                MemoryFlush {
                    host_memory: unsafe { self.host_ptr.offset(buffer_range.start as _) },
                    sync_range: SyncRange::Whole,
                    buffer: buffer.raw,
                }
            } else {
                // Partial ranges are expressed relative to the buffer's start.
                let local_start = range.start - buffer_range.start;
                let local_end = range.end - buffer_range.start;
                MemoryFlush {
                    host_memory: unsafe { self.host_ptr.offset(range.start as _) },
                    sync_range: SyncRange::Partial(local_start..local_end),
                    buffer: buffer.raw,
                }
            };
            mem_flush.do_flush(&context)
        }
    }
    // Reads back every device buffer overlapping `range` into the host
    // staging copy, staged through `working_buffer`.
    pub fn invalidate(
        &self,
        context: &ComPtr<d3d11::ID3D11DeviceContext>,
        range: Range<u64>,
        working_buffer: ComPtr<d3d11::ID3D11Buffer>,
        working_buffer_size: u64,
    ) {
        for (_, &(ref buffer_range, ref buffer)) in self.local_buffers.read().iter() {
            if let Some(range) = intersection(&range, &buffer_range) {
                let buffer_start_offset = range.start - buffer_range.start;
                let buffer_end_offset = range.end - buffer_range.start;
                let buffer_sync_range = buffer_start_offset..buffer_end_offset;
                MemoryInvalidate {
                    working_buffer: Some(working_buffer.clone()),
                    working_buffer_size,
                    host_memory: self.host_ptr,
                    host_sync_range: range.clone(),
                    buffer_sync_range: buffer_sync_range,
                    buffer: buffer.raw,
                }
                .do_invalidate(&context);
            }
        }
    }
}
// Allocates deferred-context command buffers; holds the device handles and
// shared internal state each buffer needs.
#[derive(Debug)]
pub struct CommandPool {
    device: ComPtr<d3d11::ID3D11Device>,
    device1: Option<ComPtr<d3d11_1::ID3D11Device1>>,
    internal: Arc<internal::Internal>,
}
unsafe impl Send for CommandPool {}
unsafe impl Sync for CommandPool {}
impl hal::pool::CommandPool<Backend> for CommandPool {
    // Resetting is a no-op: command buffers reset themselves on `begin`.
    unsafe fn reset(&mut self, _release_resources: bool) {
        //unimplemented!()
    }
    // `_level` is ignored; only deferred (primary-style) buffers are produced.
    unsafe fn allocate_one(&mut self, _level: command::Level) -> CommandBuffer {
        CommandBuffer::create_deferred(&self.device, self.device1.as_deref(), Arc::clone(&self.internal))
    }
    // Freeing is a no-op; buffers release their resources on drop.
    unsafe fn free<I>(&mut self, _cbufs: I)
    where
        I: IntoIterator<Item = CommandBuffer>,
    {
        // TODO:
        // unimplemented!()
    }
}
/// Similarly to the dx12 backend, we can handle either precompiled dxbc or spirv.
pub enum ShaderModule {
    /// Precompiled DXBC bytecode.
    Dxbc(Vec<u8>),
    /// SPIR-V words.
    Spirv(Vec<u32>),
}
// TODO: temporary
impl fmt::Debug for ShaderModule {
    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
        // Write the placeholder directly instead of routing a string literal
        // through `write!("{}", …)` — same output, and consistent with the
        // other hand-written `Debug` impls in this file.
        f.write_str("ShaderModule { ... }")
    }
}
unsafe impl Send for ShaderModule {}
unsafe impl Sync for ShaderModule {}
// Flattened description of one subpass: which attachments it reads/writes.
#[derive(Clone, Debug)]
pub struct SubpassDesc {
    pub color_attachments: Vec<pass::AttachmentRef>,
    pub depth_stencil_attachment: Option<pass::AttachmentRef>,
    pub input_attachments: Vec<pass::AttachmentRef>,
    pub resolve_attachments: Vec<pass::AttachmentRef>,
}
impl SubpassDesc {
    /// Returns true if this subpass references `at_id` in any role
    /// (color, depth/stencil, input, or resolve).
    pub(crate) fn is_using(&self, at_id: pass::AttachmentId) -> bool {
        self.color_attachments
            .iter()
            .chain(self.depth_stencil_attachment.iter())
            .chain(self.input_attachments.iter())
            .chain(self.resolve_attachments.iter())
            .any(|&(id, _)| id == at_id)
    }
}
// A render pass: the attachment descriptions plus the flattened subpasses.
#[derive(Clone, Debug)]
pub struct RenderPass {
    pub attachments: Vec<pass::Attachment>,
    pub subpasses: Vec<SubpassDesc>,
}
// A framebuffer: concrete image views matching a render pass's attachments.
#[derive(Clone, Debug)]
pub struct Framebuffer {
    attachments: Vec<ImageView>,
    layers: image::Layer,
}
// Raw D3D11 buffer plus its optional views. Pointers are plain (not ComPtr)
// because ownership/refcounting is managed manually via `release_resources`.
#[derive(Clone, Debug)]
pub struct InternalBuffer {
    raw: *mut d3d11::ID3D11Buffer,
    // TODO: need to sync between `raw` and `disjoint_cb`, same way as we do with
    // `MemoryFlush/Invalidate`
    disjoint_cb: Option<*mut d3d11::ID3D11Buffer>, // if unbound this buffer might be null.
    srv: Option<*mut d3d11::ID3D11ShaderResourceView>,
    uav: Option<*mut d3d11::ID3D11UnorderedAccessView>,
    usage: buffer::Usage,
    debug_name: Option<String>,
}
impl InternalBuffer {
    // Releases every COM object held and nulls the handles so a later call
    // or use can't double-release.
    unsafe fn release_resources(&mut self) {
        (&*self.raw).Release();
        self.raw = ptr::null_mut();
        self.disjoint_cb.take().map(|cb| (&*cb).Release());
        self.uav.take().map(|uav| (&*uav).Release());
        self.srv.take().map(|srv| (&*srv).Release());
        self.usage = buffer::Usage::empty();
        self.debug_name = None;
    }
}
// Public buffer handle: wraps the raw resource plus its binding into a
// `Memory` allocation and the host-staging bookkeeping.
pub struct Buffer {
    internal: InternalBuffer,
    // Whether the backing memory is host-coherent (needs deferred flush/invalidate).
    is_coherent: bool,
    memory_ptr: *mut u8,     // null if unbound or non-cpu-visible
    bound_range: Range<u64>, // 0 if unbound
    /// Handle to the Memory arena storing this buffer.
    local_memory_arena: Weak<RwLock<LocalResourceArena<InternalBuffer>>>,
    /// Index into the above memory arena.
    ///
    /// Once memory is bound to a buffer, this should never be None.
    memory_index: Option<thunderdome::Index>,
    requirements: memory::Requirements,
    bind: d3d11::D3D11_BIND_FLAG,
}
impl fmt::Debug for Buffer {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("Buffer")
    }
}
unsafe impl Send for Buffer {}
unsafe impl Sync for Buffer {}
// Buffer views are unsupported; this is a placeholder type.
#[derive(Debug)]
pub struct BufferView;
// Public image handle: creation parameters plus the raw resource and its
// per-subresource views.
pub struct Image {
    kind: image::Kind,
    usage: image::Usage,
    format: format::Format,
    view_caps: image::ViewCapabilities,
    decomposed_format: conv::DecomposedDxgiFormat,
    mip_levels: image::Level,
    internal: InternalImage,
    bind: d3d11::D3D11_BIND_FLAG,
    requirements: memory::Requirements,
}
impl fmt::Debug for Image {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("Image")
    }
}
// Raw D3D11 image resource and its pre-created views, indexed by subresource.
pub struct InternalImage {
    raw: *mut d3d11::ID3D11Resource,
    copy_srv: Option<ComPtr<d3d11::ID3D11ShaderResourceView>>,
    srv: Option<ComPtr<d3d11::ID3D11ShaderResourceView>>,
    /// Contains UAVs for all subresources
    unordered_access_views: Vec<ComPtr<d3d11::ID3D11UnorderedAccessView>>,
    /// Contains DSVs for all subresources
    depth_stencil_views: Vec<ComPtr<d3d11::ID3D11DepthStencilView>>,
    /// Contains RTVs for all subresources
    render_target_views: Vec<ComPtr<d3d11::ID3D11RenderTargetView>>,
    debug_name: Option<String>
}
impl InternalImage {
    // Releases the raw resource manually; the view ComPtrs release
    // themselves when cleared/dropped.
    unsafe fn release_resources(&mut self) {
        (&*self.raw).Release();
        self.copy_srv = None;
        self.srv = None;
        self.unordered_access_views.clear();
        self.depth_stencil_views.clear();
        self.render_target_views.clear();
    }
}
impl fmt::Debug for InternalImage {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.write_str("InternalImage")
    }
}
unsafe impl Send for Image {}
unsafe impl Sync for Image {}
impl Image {
    // Linear index of a (mip, layer) subresource into the view vectors:
    // layer-major, mip-minor.
    fn calc_subresource(&self, mip_level: UINT, layer: UINT) -> UINT {
        mip_level + (layer * self.mip_levels as UINT)
    }
    // NOTE(review): `_layer` is ignored — UAV lookup always uses layer 0.
    fn get_uav(
        &self,
        mip_level: image::Level,
        _layer: image::Layer,
    ) -> Option<&ComPtr<d3d11::ID3D11UnorderedAccessView>> {
        self.internal
            .unordered_access_views
            .get(self.calc_subresource(mip_level as _, 0) as usize)
    }
    fn get_dsv(
        &self,
        mip_level: image::Level,
        layer: image::Layer,
    ) -> Option<&ComPtr<d3d11::ID3D11DepthStencilView>> {
        self.internal
            .depth_stencil_views
            .get(self.calc_subresource(mip_level as _, layer as _) as usize)
    }
    fn get_rtv(
        &self,
        mip_level: image::Level,
        layer: image::Layer,
    ) -> Option<&ComPtr<d3d11::ID3D11RenderTargetView>> {
        self.internal
            .render_target_views
            .get(self.calc_subresource(mip_level as _, layer as _) as usize)
    }
}
impl Drop for Image {
    // Only the raw resource is released manually; the ComPtr views drop themselves.
    fn drop(&mut self) {
        unsafe {
            (*self.internal.raw).Release();
        }
    }
}
pub struct ImageView {
subresource: UINT,
format: format::Format,
rtv_handle: Option<*mut d3d11::ID3D11RenderTargetView>,
srv_handle: Option<*mut d3d11::ID3D11ShaderResourceView>,
dsv_handle: Option<*mut d3d11::ID3D11DepthStencilView>,
rodsv_handle: Option<*mut d3d11::ID3D11DepthStencilView>,
uav_handle: Option<*mut d3d11::ID3D11UnorderedAccessView>,
owned: bool,
}
impl Clone for ImageView {
    /// Produces a non-owning copy of this view.
    ///
    /// Every handle field is an `Option<*mut _>`, which is `Copy`, so the raw
    /// COM pointers are copied bitwise — calling `.clone()` on them (as the
    /// previous version did) was redundant (clippy: `clone_on_copy`).
    ///
    /// The copy is marked `owned: false` so that only the original view
    /// releases the underlying COM objects in `Drop`, preventing a
    /// double-release of shared handles.
    fn clone(&self) -> Self {
        Self {
            subresource: self.subresource,
            format: self.format,
            rtv_handle: self.rtv_handle,
            srv_handle: self.srv_handle,
            dsv_handle: self.dsv_handle,
            rodsv_handle: self.rodsv_handle,
            uav_handle: self.uav_handle,
            owned: false,
        }
    }
}
impl Drop for ImageView {
    fn drop(&mut self) {
        // Non-owning copies are created with `owned: false` (see `Clone`), so
        // only the original view releases the COM objects; this prevents a
        // double-release of handles shared between a view and its clones.
        if self.owned {
            // `take()` empties each slot as it is released, so no handle can
            // ever be released twice.
            if let Some(rtv) = self.rtv_handle.take() {
                unsafe { (&*rtv).Release() };
            }
            if let Some(srv) = self.srv_handle.take() {
                unsafe { (&*srv).Release() };
            }
            if let Some(dsv) = self.dsv_handle.take() {
                unsafe { (&*dsv).Release() };
            }
            if let Some(rodsv) = self.rodsv_handle.take() {
                unsafe { (&*rodsv).Release() };
            }
            if let Some(uav) = self.uav_handle.take() {
                unsafe { (&*uav).Release() };
            }
        }
    }
}
impl fmt::Debug for ImageView {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("ImageView")
}
}
unsafe impl Send for ImageView {}
unsafe impl Sync for ImageView {}
pub struct Sampler {
sampler_handle: ComPtr<d3d11::ID3D11SamplerState>,
}
impl fmt::Debug for Sampler {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("Sampler")
}
}
unsafe impl Send for Sampler {}
unsafe impl Sync for Sampler {}
pub struct ComputePipeline {
cs: ComPtr<d3d11::ID3D11ComputeShader>,
}
impl fmt::Debug for ComputePipeline {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("ComputePipeline")
}
}
unsafe impl Send for ComputePipeline {}
unsafe impl Sync for ComputePipeline {}
/// NOTE: some objects are hashed internally and reused when created with the
/// same params[0], need to investigate which interfaces this applies
/// to.
///
/// [0]: https://msdn.microsoft.com/en-us/library/windows/desktop/ff476500(v=vs.85).aspx
#[derive(Clone)]
pub struct GraphicsPipeline {
vs: ComPtr<d3d11::ID3D11VertexShader>,
gs: Option<ComPtr<d3d11::ID3D11GeometryShader>>,
hs: Option<ComPtr<d3d11::ID3D11HullShader>>,
ds: Option<ComPtr<d3d11::ID3D11DomainShader>>,
ps: Option<ComPtr<d3d11::ID3D11PixelShader>>,
topology: d3d11::D3D11_PRIMITIVE_TOPOLOGY,
input_layout: ComPtr<d3d11::ID3D11InputLayout>,
rasterizer_state: ComPtr<d3d11::ID3D11RasterizerState>,
blend_state: ComPtr<d3d11::ID3D11BlendState>,
depth_stencil_state: Option<DepthStencilState>,
baked_states: pso::BakedStates,
required_bindings: u32,
max_vertex_bindings: u32,
strides: Vec<u32>,
}
impl fmt::Debug for GraphicsPipeline {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("GraphicsPipeline")
}
}
unsafe impl Send for GraphicsPipeline {}
unsafe impl Sync for GraphicsPipeline {}
type ResourceIndex = u8;
type DescriptorIndex = u16;
#[derive(Clone, Debug, Default)]
struct RegisterData<T> {
// CBV
c: T,
// SRV
t: T,
// UAV
u: T,
// Sampler
s: T,
}
impl<T> RegisterData<T> {
fn map<U, F: Fn(&T) -> U>(&self, fun: F) -> RegisterData<U> {
RegisterData {
c: fun(&self.c),
t: fun(&self.t),
u: fun(&self.u),
s: fun(&self.s),
}
}
}
impl RegisterData<DescriptorIndex> {
fn add_content_many(&mut self, content: DescriptorContent, many: DescriptorIndex) {
if content.contains(DescriptorContent::CBV) {
self.c += many;
}
if content.contains(DescriptorContent::SRV) {
self.t += many;
}
if content.contains(DescriptorContent::UAV) {
self.u += many;
}
if content.contains(DescriptorContent::SAMPLER) {
self.s += many;
}
}
fn sum(&self) -> DescriptorIndex {
self.c + self.t + self.u + self.s
}
}
#[derive(Clone, Debug, Default)]
struct MultiStageData<T> {
vs: T,
ps: T,
cs: T,
}
impl<T> MultiStageData<T> {
fn select(self, stage: ShaderStage) -> T {
match stage {
ShaderStage::Vertex => self.vs,
ShaderStage::Fragment => self.ps,
ShaderStage::Compute => self.cs,
_ => panic!("Unsupported stage {:?}", stage),
}
}
}
impl<T> MultiStageData<RegisterData<T>> {
fn map_register<U, F: Fn(&T) -> U>(&self, fun: F) -> MultiStageData<RegisterData<U>> {
MultiStageData {
vs: self.vs.map(&fun),
ps: self.ps.map(&fun),
cs: self.cs.map(&fun),
}
}
fn map_other<U, F: Fn(&RegisterData<T>) -> U>(&self, fun: F) -> MultiStageData<U> {
MultiStageData {
vs: fun(&self.vs),
ps: fun(&self.ps),
cs: fun(&self.cs),
}
}
}
impl MultiStageData<RegisterData<DescriptorIndex>> {
    /// Adds `count` descriptors of the given `content` to the register counts
    /// of every shader stage listed in `stages`.
    fn add_content_many(&mut self, content: DescriptorContent, stages: pso::ShaderStageFlags, count: DescriptorIndex) {
        if stages.contains(pso::ShaderStageFlags::VERTEX) {
            self.vs.add_content_many(content, count);
        }
        if stages.contains(pso::ShaderStageFlags::FRAGMENT) {
            self.ps.add_content_many(content, count);
        }
        if stages.contains(pso::ShaderStageFlags::COMPUTE) {
            self.cs.add_content_many(content, count);
        }
    }
    /// Total number of descriptors across all stages and register classes.
    fn sum(&self) -> DescriptorIndex {
        self.vs.sum() + self.ps.sum() + self.cs.sum()
    }
}
#[derive(Clone, Debug, Default)]
struct RegisterPoolMapping {
offset: DescriptorIndex,
count: ResourceIndex,
}
#[derive(Clone, Debug, Default)]
struct RegisterInfo {
res_index: ResourceIndex,
pool_offset: DescriptorIndex,
count: ResourceIndex,
}
impl RegisterInfo {
    /// Returns `Some(self)` only when this register range is non-empty.
    fn as_some(&self) -> Option<&Self> {
        // A `count` of zero means no registers were allocated for this slot.
        Some(self).filter(|info| info.count != 0)
    }
}
#[derive(Clone, Debug, Default)]
struct RegisterAccumulator {
res_index: ResourceIndex,
}
impl RegisterAccumulator {
fn to_mapping(&self, cur_offset: &mut DescriptorIndex) -> RegisterPoolMapping {
let offset = *cur_offset;
*cur_offset += self.res_index as DescriptorIndex;
RegisterPoolMapping {
offset,
count: self.res_index,
}
}
fn advance(&mut self, mapping: &RegisterPoolMapping) -> RegisterInfo {
let res_index = self.res_index;
self.res_index += mapping.count;
RegisterInfo {
res_index,
pool_offset: mapping.offset,
count: mapping.count,
}
}
}
impl RegisterData<RegisterAccumulator> {
fn to_mapping(&self, pool_offset: &mut DescriptorIndex) -> RegisterData<RegisterPoolMapping> {
RegisterData {
c: self.c.to_mapping(pool_offset),
t: self.t.to_mapping(pool_offset),
u: self.u.to_mapping(pool_offset),
s: self.s.to_mapping(pool_offset),
}
}
fn advance(
&mut self,
mapping: &RegisterData<RegisterPoolMapping>,
) -> RegisterData<RegisterInfo> {
RegisterData {
c: self.c.advance(&mapping.c),
t: self.t.advance(&mapping.t),
u: self.u.advance(&mapping.u),
s: self.s.advance(&mapping.s),
}
}
}
impl MultiStageData<RegisterData<RegisterAccumulator>> {
fn to_mapping(&self) -> MultiStageData<RegisterData<RegisterPoolMapping>> {
let mut pool_offset = 0;
MultiStageData {
vs: self.vs.to_mapping(&mut pool_offset),
ps: self.ps.to_mapping(&mut pool_offset),
cs: self.cs.to_mapping(&mut pool_offset),
}
}
fn advance(
&mut self,
mapping: &MultiStageData<RegisterData<RegisterPoolMapping>>,
) -> MultiStageData<RegisterData<RegisterInfo>> {
MultiStageData {
vs: self.vs.advance(&mapping.vs),
ps: self.ps.advance(&mapping.ps),
cs: self.cs.advance(&mapping.cs),
}
}
}
#[derive(Clone, Debug)]
struct DescriptorSetInfo {
bindings: Arc<Vec<pso::DescriptorSetLayoutBinding>>,
registers: MultiStageData<RegisterData<RegisterInfo>>,
}
impl DescriptorSetInfo {
    /// Walks this layout's bindings as seen by `stage` and returns the
    /// descriptor content of `binding_index` together with its starting
    /// register index in each register class (c/t/u/s).
    ///
    /// Offsets are re-derived linearly: starting from the stage's base
    /// register indices, every earlier binding visible to the stage advances
    /// the running offsets by one.
    ///
    /// Panics when `binding_index` is not present for this stage.
    fn find_register(
        &self,
        stage: ShaderStage,
        binding_index: pso::DescriptorBinding,
    ) -> (DescriptorContent, RegisterData<ResourceIndex>) {
        // Base register indices recorded for this stage.
        let mut res_offsets = self
            .registers
            .map_register(|info| info.res_index as DescriptorIndex)
            .select(stage);
        for binding in self.bindings.iter() {
            // Bindings invisible to this stage occupy no registers here.
            if !binding.stage_flags.contains(stage.to_flag()) {
                continue;
            }
            let content = DescriptorContent::from(binding.ty);
            if binding.binding == binding_index {
                return (content, res_offsets.map(|offset| *offset as ResourceIndex));
            }
            // Not the binding we are looking for: skip past its registers.
            res_offsets.add_content_many(content, 1);
        }
        panic!("Unable to find binding {:?}", binding_index);
    }
    /// Like `find_register`, but scans only UAV bindings, regardless of the
    /// stages they were declared for.
    ///
    /// Panics when `binding_index` is not found among the UAV bindings.
    fn find_uav_register(
        &self,
        stage: ShaderStage,
        binding_index: pso::DescriptorBinding,
    ) -> (DescriptorContent, RegisterData<ResourceIndex>) {
        // Look only where uavs are stored for that stage.
        let register_stage = if stage == ShaderStage::Compute {
            stage
        } else {
            ShaderStage::Fragment
        };
        let mut res_offsets = self
            .registers
            .map_register(|info| info.res_index as DescriptorIndex)
            .select(register_stage);
        for binding in self.bindings.iter() {
            // We don't care what stage they're in, only if they are UAVs or not.
            let content = DescriptorContent::from(binding.ty);
            if !content.contains(DescriptorContent::UAV) {
                continue;
            }
            if binding.binding == binding_index {
                return (content, res_offsets.map(|offset| *offset as ResourceIndex));
            }
            res_offsets.add_content_many(content, 1);
        }
        panic!("Unable to find binding {:?}", binding_index);
    }
}
/// The pipeline layout holds optimized (less api calls) ranges of objects for all descriptor sets
/// belonging to the pipeline object.
#[derive(Debug)]
pub struct PipelineLayout {
sets: Vec<DescriptorSetInfo>,
}
/// The descriptor set layout contains mappings from a given binding to the offset in our
/// descriptor pool storage and what type of descriptor it is (combined image sampler takes up two
/// handles).
#[derive(Debug)]
pub struct DescriptorSetLayout {
bindings: Arc<Vec<pso::DescriptorSetLayoutBinding>>,
pool_mapping: MultiStageData<RegisterData<RegisterPoolMapping>>,
}
#[derive(Debug)]
struct CoherentBufferFlushRange {
device_buffer: *mut d3d11::ID3D11Buffer,
host_ptr: *mut u8,
range: SyncRange,
}
#[derive(Debug)]
struct CoherentBufferInvalidateRange {
device_buffer: *mut d3d11::ID3D11Buffer,
host_ptr: *mut u8,
range: Range<u64>,
}
#[derive(Debug)]
struct CoherentBuffers {
// descriptor set writes containing coherent resources go into these vecs and are added to the
// command buffers own Vec on binding the set.
flush_coherent_buffers: RefCell<Vec<CoherentBufferFlushRange>>,
invalidate_coherent_buffers: RefCell<Vec<CoherentBufferInvalidateRange>>,
}
impl CoherentBuffers {
fn _add_flush(&self, old: *mut d3d11::ID3D11Buffer, buffer: &Buffer) {
let new = buffer.internal.raw;
if old != new {
let mut buffers = self.flush_coherent_buffers.borrow_mut();
let pos = buffers.iter().position(|sync| old == sync.device_buffer);
let sync_range = CoherentBufferFlushRange {
device_buffer: new,
host_ptr: buffer.memory_ptr,
range: SyncRange::Whole,
};
if let Some(pos) = pos {
buffers[pos] = sync_range;
} else {
buffers.push(sync_range);
}
if let Some(disjoint) = buffer.internal.disjoint_cb {
let pos = buffers
.iter()
.position(|sync| disjoint == sync.device_buffer);
let sync_range = CoherentBufferFlushRange {
device_buffer: disjoint,
host_ptr: buffer.memory_ptr,
range: SyncRange::Whole,
};
if let Some(pos) = pos {
buffers[pos] = sync_range;
} else {
buffers.push(sync_range);
}
}
}
}
fn _add_invalidate(&self, old: *mut d3d11::ID3D11Buffer, buffer: &Buffer) {
let new = buffer.internal.raw;
if old != new {
let mut buffers = self.invalidate_coherent_buffers.borrow_mut();
let pos = buffers.iter().position(|sync| old == sync.device_buffer);
let sync_range = CoherentBufferInvalidateRange {
device_buffer: new,
host_ptr: buffer.memory_ptr,
range: buffer.bound_range.clone(),
};
if let Some(pos) = pos {
buffers[pos] = sync_range;
} else {
buffers.push(sync_range);
}
}
}
}
/// Newtype around a common interface that all bindable resources inherit from.
#[derive(Debug, Copy, Clone)]
#[repr(transparent)]
struct Descriptor(*mut d3d11::ID3D11DeviceChild);
bitflags! {
/// A set of D3D11 descriptor types that need to be associated
/// with a single gfx-hal `DescriptorType`.
#[derive(Default)]
pub struct DescriptorContent: u8 {
const CBV = 0x1;
const SRV = 0x2;
const UAV = 0x4;
const SAMPLER = 0x8;
/// Indicates if the descriptor is a dynamic uniform/storage buffer.
/// Important as dynamic buffers are implemented as root descriptors.
const DYNAMIC = 0x10;
}
}
impl From<pso::DescriptorType> for DescriptorContent {
fn from(ty: pso::DescriptorType) -> Self {
use hal::pso::{
BufferDescriptorFormat as Bdf, BufferDescriptorType as Bdt, DescriptorType as Dt,
ImageDescriptorType as Idt,
};
match ty {
Dt::Sampler => DescriptorContent::SAMPLER,
Dt::Image {
ty: Idt::Sampled { with_sampler: true },
} => DescriptorContent::SRV | DescriptorContent::SAMPLER,
Dt::Image {
ty: Idt::Sampled {
with_sampler: false,
},
}
| Dt::InputAttachment => DescriptorContent::SRV,
Dt::Image {
ty: Idt::Storage { .. },
} => DescriptorContent::UAV,
Dt::Buffer {
ty: Bdt::Uniform,
format:
Bdf::Structured {
dynamic_offset: true,
},
} => DescriptorContent::CBV | DescriptorContent::DYNAMIC,
Dt::Buffer {
ty: Bdt::Uniform, ..
} => DescriptorContent::CBV,
Dt::Buffer {
ty: Bdt::Storage { read_only: true },
format:
Bdf::Structured {
dynamic_offset: true,
},
} => DescriptorContent::SRV | DescriptorContent::DYNAMIC,
Dt::Buffer {
ty: Bdt::Storage { read_only: false },
format:
Bdf::Structured {
dynamic_offset: true,
},
} => DescriptorContent::UAV | DescriptorContent::DYNAMIC,
Dt::Buffer {
ty: Bdt::Storage { read_only: true },
..
} => DescriptorContent::SRV,
Dt::Buffer {
ty: Bdt::Storage { read_only: false },
..
} => DescriptorContent::UAV,
}
}
}
pub struct DescriptorSet {
offset: DescriptorIndex,
len: DescriptorIndex,
handles: *mut Descriptor,
coherent_buffers: Mutex<CoherentBuffers>,
layout: DescriptorSetLayout,
}
impl fmt::Debug for DescriptorSet {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt.write_str("DescriptorSet")
}
}
unsafe impl Send for DescriptorSet {}
unsafe impl Sync for DescriptorSet {}
impl DescriptorSet {
fn _add_flush(&self, old: *mut d3d11::ID3D11Buffer, buffer: &Buffer) {
let new = buffer.internal.raw;
if old != new {
self.coherent_buffers.lock()._add_flush(old, buffer);
}
}
fn _add_invalidate(&self, old: *mut d3d11::ID3D11Buffer, buffer: &Buffer) {
let new = buffer.internal.raw;
if old != new {
self.coherent_buffers.lock()._add_invalidate(old, buffer);
}
}
unsafe fn assign(&self, offset: DescriptorIndex, value: *mut d3d11::ID3D11DeviceChild) {
*self.handles.offset(offset as isize) = Descriptor(value);
}
unsafe fn assign_stages(
&self,
offsets: &MultiStageData<DescriptorIndex>,
stages: pso::ShaderStageFlags,
value: *mut d3d11::ID3D11DeviceChild,
) {
if stages.contains(pso::ShaderStageFlags::VERTEX) {
self.assign(offsets.vs, value);
}
if stages.contains(pso::ShaderStageFlags::FRAGMENT) {
self.assign(offsets.ps, value);
}
if stages.contains(pso::ShaderStageFlags::COMPUTE) {
self.assign(offsets.cs, value);
}
}
}
#[derive(Debug)]
pub struct DescriptorPool {
handles: Vec<Descriptor>,
allocator: RangeAllocator<DescriptorIndex>,
}
unsafe impl Send for DescriptorPool {}
unsafe impl Sync for DescriptorPool {}
impl DescriptorPool {
fn with_capacity(size: DescriptorIndex) -> Self {
DescriptorPool {
handles: vec![Descriptor(ptr::null_mut()); size as usize],
allocator: RangeAllocator::new(0..size),
}
}
}
impl pso::DescriptorPool<Backend> for DescriptorPool {
unsafe fn allocate_set(
&mut self,
layout: &DescriptorSetLayout,
) -> Result<DescriptorSet, pso::AllocationError> {
let len = layout
.pool_mapping
.map_register(|mapping| mapping.count as DescriptorIndex)
.sum()
.max(1);
self.allocator
.allocate_range(len)
.map(|range| {
for handle in &mut self.handles[range.start as usize..range.end as usize] {
*handle = Descriptor(ptr::null_mut());
}
DescriptorSet {
offset: range.start,
len,
handles: self.handles.as_mut_ptr().offset(range.start as _),
coherent_buffers: Mutex::new(CoherentBuffers {
flush_coherent_buffers: RefCell::new(Vec::new()),
invalidate_coherent_buffers: RefCell::new(Vec::new()),
}),
layout: DescriptorSetLayout {
bindings: Arc::clone(&layout.bindings),
pool_mapping: layout.pool_mapping.clone(),
},
}
})
.map_err(|_| pso::AllocationError::OutOfPoolMemory)
}
unsafe fn free<I>(&mut self, descriptor_sets: I)
where
I: IntoIterator<Item = DescriptorSet>,
{
for set in descriptor_sets {
self.allocator
.free_range(set.offset..(set.offset + set.len))
}
}
unsafe fn reset(&mut self) {
self.allocator.reset();
}
}
#[derive(Debug)]
pub struct RawFence {
mutex: Mutex<bool>,
condvar: Condvar,
}
pub type Fence = Arc<RawFence>;
#[derive(Debug)]
pub struct Semaphore;
#[derive(Debug)]
pub struct QueryPool;
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)]
pub enum Backend {}
impl hal::Backend for Backend {
type Instance = Instance;
type PhysicalDevice = PhysicalDevice;
type Device = device::Device;
type Surface = Surface;
type QueueFamily = QueueFamily;
type CommandQueue = CommandQueue;
type CommandBuffer = CommandBuffer;
type Memory = Memory;
type CommandPool = CommandPool;
type ShaderModule = ShaderModule;
type RenderPass = RenderPass;
type Framebuffer = Framebuffer;
type Buffer = Buffer;
type BufferView = BufferView;
type Image = Image;
type ImageView = ImageView;
type Sampler = Sampler;
type ComputePipeline = ComputePipeline;
type GraphicsPipeline = GraphicsPipeline;
type PipelineLayout = PipelineLayout;
type PipelineCache = ();
type DescriptorSetLayout = DescriptorSetLayout;
type DescriptorPool = DescriptorPool;
type DescriptorSet = DescriptorSet;
type Fence = Fence;
type Semaphore = Semaphore;
type Event = ();
type QueryPool = QueryPool;
}
/// Asserts that the requested rasterizer line width is exactly `1.0`.
///
/// Per the Vulkan spec, unless the wide-lines feature is enabled, `lineWidth`
/// must be `1.0`. This backend never advertises `Features::LINE_WIDTH`, so any
/// other value is a caller error; on a valid width this is a no-op.
fn validate_line_width(width: f32) {
    assert_eq!(width, 1.0);
}
| 34.106413 | 160 | 0.55965 |
763ef5bdb11e484c71dc53cbd95f9e52972fb313 | 3,218 | use super::ast::*;
use nom::{
branch::alt,
bytes::complete::tag,
character::complete::{line_ending, not_line_ending},
combinator::{complete, cut, map},
error::context,
multi::separated_list,
sequence::delimited,
sequence::{preceded, terminated},
};
use std::num::NonZeroU16;
mod asm;
pub use asm::*;
mod utils;
pub use utils::*;
mod values;
pub use values::*;
mod playcmd;
pub use playcmd::*;
pub type ParseError<'a> = nom::error::VerboseError<&'a str>;
pub type ParseResult<'a, T> = nom::IResult<&'a str, T, ParseError<'a>>;
pub fn parse_file(input: &str) -> ParseResult<Vec<LangItem>> {
let (input, _) = multispace0(input)?;
let (input, res) = context(
"File root command parser",
separated_list(multispace1, parse_expr),
)(input)?;
let (input, _) = complete(multispace0)(input)?;
Ok((input, res))
}
pub fn parse_expr(input: &str) -> ParseResult<LangItem> {
context(
"Songlang Expression",
alt((
parse_loop,
map(parse_pressline, LangItem::NotePress),
map(parse_asm_command, LangItem::Asm),
)),
)(input)
}
pub fn parse_block(input: &str) -> ParseResult<Vec<LangItem>> {
let lines_parser = context(
"Sub-block lines parser",
separated_list(multispace1, parse_expr),
);
delimited(
terminated(tag("{"), multispace0),
lines_parser,
preceded(multispace0, tag("}")),
)(input)
}
/// Parses a `loop` statement: the `loop` keyword, an optional repetition
/// count, and a braced block of expressions.
///
/// `loop 4 { ... }` yields `repititions: Some(4)`; `loop { ... }` (no count)
/// yields `None`, i.e. an unbounded loop.
pub fn parse_loop(input: &str) -> ParseResult<LangItem> {
    let (input, _) = tag("loop")(input)?;
    // Counted variant: at least one space, then a non-zero u16, then
    // optional trailing spaces before the block.
    let loopcount_parser = |input| {
        let (input, _) = space1(input)?;
        let (input, res) = nonzerou16(input)?;
        let (input, _) = space0(input)?;
        Ok((input, Some(res)))
    };
    // Uncounted variant: only optional spaces before the block.
    let nocount_parser = |input| {
        let (input, _) = space0(input)?;
        Ok((input, None))
    };
    // Try the counted form first; fall back to the uncounted form.
    let (input, loopcount): (_, Option<NonZeroU16>) =
        alt((loopcount_parser, nocount_parser))(input)?;
    let (input, body) = parse_block(input)?;
    let res = LangItem::Loop {
        expr: body,
        repititions: loopcount,
    };
    Ok((input, res))
}
/// Parses an inline (`/* ... */`) comment, discarding its contents.
///
/// The comment body is scanned byte-by-byte (stepping over UTF-8
/// continuation bytes) until end-of-input, a `*/` terminator, or a line
/// ending; the trailing `*/` is then required (`cut`), so an unterminated
/// inline comment is a hard parse error.
pub fn parse_comment_inline(input: &str) -> ParseResult<()> {
    let body_parser = |inp: &str| {
        let endparser = alt((eof, tag("*/"), line_ending));
        let mut idx = 0;
        loop {
            // Split the *remaining* input (`inp`), not the outer `input`.
            // The previous version split `input`, which still contains the
            // already-consumed `/*` prefix; that desynchronized `idx` from
            // the `is_char_boundary` check below by two bytes and could make
            // `split_at` panic on multi-byte UTF-8 content inside a comment.
            let (head, tail) = inp.split_at(idx);
            if endparser(tail).is_ok() {
                return Ok((tail, head));
            }
            idx += 1;
            // Advance to the next UTF-8 character boundary so `split_at`
            // never lands inside a multi-byte character.
            while !inp.is_char_boundary(idx) {
                idx += 1;
            }
        }
    };
    let parser = delimited(tag("/*"), body_parser, cut(tag("*/")));
    context("CommentInline", map(parser, |_| ()))(input)
}
pub fn parse_comment_fullline(input: &str) -> ParseResult<()> {
let line_beginning = alt((tag("#"), tag("//")));
let parser = delimited(line_beginning, not_line_ending, alt((line_ending, eof)));
let (input, _) = context("CommentFullline", parser)(input)?;
Ok((input, ()))
}
pub fn parse_comment(input: &str) -> ParseResult<()> {
context(
"CommentAny",
alt((parse_comment_inline, parse_comment_fullline)),
)(input)
}
| 26.816667 | 85 | 0.575202 |
5dfeac5960078f399e1301143c724a0ea73e6135 | 4,704 | //! This is used in tests (both unit tests and integration tests) to provide useful distributions
//! of random numbers.
use rand::distributions::uniform::Uniform;
use rand::distributions::Distribution;
use rand::Rng;
/// Smallest number in our varint encoding that takes the given number of bytes
pub fn smallest_number_in_n_byte_varint(byte_length: usize) -> u64 {
assert!(byte_length <= 9 && byte_length >= 1);
match byte_length {
1 => 0,
// one greater than the largest of the previous length
_ => largest_number_in_n_byte_varint(byte_length - 1) + 1,
}
}
/// Largest number in our varint encoding that takes the given number of bytes
pub fn largest_number_in_n_byte_varint(byte_length: usize) -> u64 {
assert!(byte_length <= 9 && byte_length >= 1);
match byte_length {
9 => u64::max_value(),
_ => largest_number_in_7_bit_chunk(byte_length - 1),
}
}
/// The largest in the set of numbers that have at least 1 bit set in the n'th chunk of 7 bits.
fn largest_number_in_7_bit_chunk(chunk_index: usize) -> u64 {
// Our 9-byte varints do different encoding in the last byte, so we don't handle them here
assert!(chunk_index <= 7);
// 1 in every bit below the lowest bit in this chunk
let lower_bits = match chunk_index {
0 => 0,
_ => largest_number_in_7_bit_chunk(chunk_index - 1),
};
// 1 in every bit in this chunk
let this_chunk = 0x7F_u64 << (chunk_index * 7);
lower_bits | this_chunk
}
// Evenly distributed random numbers end up biased heavily towards longer encoded byte lengths:
// there are a lot more large numbers than there are small (duh), but for exercising serialization
// code paths, we'd like many at all byte lengths. This is also arguably more representative of
// real data. This should emit values whose varint lengths are uniformly distributed across the
// whole length range (1 to 9).
pub struct RandomVarintEncodedLengthIter<R: Rng> {
ranges: [Uniform<u64>; 9],
range_for_picking_range: Uniform<usize>,
rng: R,
}
impl<R: Rng> RandomVarintEncodedLengthIter<R> {
    /// Creates an iterator whose values are uniformly distributed across the
    /// nine varint encoded lengths: first a length (1..=9 bytes) is picked
    /// uniformly, then a value uniform within that length's range.
    pub fn new(rng: R) -> RandomVarintEncodedLengthIter<R> {
        // One inclusive sampling range per encoded length. Using
        // `Uniform::new_inclusive` (instead of the previous half-open
        // `Uniform::new(lo, hi + 1)`) lets the 9-byte range include its top
        // value: `largest_number_in_n_byte_varint(9)` is `u64::MAX`, whose
        // `+ 1` would overflow — the old code therefore used a half-open
        // range there and could never generate `u64::MAX`.
        let range_for = |byte_length: usize| {
            Uniform::new_inclusive(
                smallest_number_in_n_byte_varint(byte_length),
                largest_number_in_n_byte_varint(byte_length),
            )
        };
        RandomVarintEncodedLengthIter {
            ranges: [
                range_for(1),
                range_for(2),
                range_for(3),
                range_for(4),
                range_for(5),
                range_for(6),
                range_for(7),
                range_for(8),
                range_for(9),
            ],
            // Picks an index into `ranges`; 0..9 half-open covers 0..=8.
            range_for_picking_range: Uniform::new(0, 9),
            rng,
        }
    }
}
impl<R: Rng> Iterator for RandomVarintEncodedLengthIter<R> {
type Item = u64;
fn next(&mut self) -> Option<Self::Item> {
// pick the range we'll use
let value_range = self.ranges[self.range_for_picking_range.sample(&mut self.rng)];
Some(value_range.sample(&mut self.rng))
}
}
#[test]
fn largest_number_in_7_bit_chunk_correct() {
// 8 chunks (indices 0-7) of 7 bits gets you to 56 bits. Last byte in varint is handled
// differently, so we don't test that here.
for i in 0..8 {
let largest = largest_number_in_7_bit_chunk(i);
assert_eq!((i as u32 + 1) * 7, largest.count_ones());
assert_eq!(64 - ((i as u32) + 1) * 7, largest.leading_zeros());
// any larger and it will be in the next chunk
assert_eq!(largest.leading_zeros() - 1, (largest + 1).leading_zeros());
}
}
| 36.75 | 98 | 0.604379 |
76f0045e799a10dded59be4686fac2d44e2bb397 | 2,125 | // Copyright 2016 Johannes Köster.
// Licensed under the MIT license (http://opensource.org/licenses/MIT)
// This file may not be copied, modified, or distributed
// except according to those terms.
//! Utilities for Bayesian statistics.
pub mod bayes_factors;
pub use self::bayes_factors::BayesFactor;
use itertools::Itertools;
use ordered_float::OrderedFloat;
use stats::LogProb;
/// For each of the hypothesis tests given as posterior error probabilities
/// (PEPs, i.e. the posterior probability of the null hypothesis), estimate the FDR
/// for the case that all null hypotheses with at most this PEP are rejected.
/// FDR is calculated as presented by Müller, Parmigiani, and Rice,
/// "FDR and Bayesian Multiple Comparisons Rules" (July 2006).
/// Johns Hopkin's University, Dept. of Biostatistics Working Papers. Working Paper 115.
///
/// # Returns
///
/// A vector of expected FDRs in the same order as the given PEPs.
pub fn expected_fdr(peps: &[LogProb]) -> Vec<LogProb> {
    // sort indices
    // Order hypotheses by ascending PEP; `OrderedFloat` supplies the total
    // order that raw f64 comparison lacks.
    let sorted_idx =
        (0..peps.len()).sorted_by(|&i, &j| OrderedFloat(*peps[i]).cmp(&OrderedFloat(*peps[j])));
    // estimate FDR
    let mut expected_fdr = vec![LogProb::ln_zero(); peps.len()];
    // The running log-space sum of the j+1 smallest PEPs is the expected
    // number of false positives when rejecting exactly those hypotheses;
    // dividing by the rejection count (subtracting ln(j+1) in log space)
    // gives the expected FDR at that cutoff.
    for (j, (expected_fp, &i)) in LogProb::ln_cumsum_exp(sorted_idx.iter().map(|&i| peps[i]))
        .zip(sorted_idx.iter())
        .enumerate()
    {
        let fdr = LogProb(*expected_fp - ((j + 1) as f64).ln());
        // Clamp at probability one (an FDR above 1 is not meaningful).
        expected_fdr[i] = if fdr <= LogProb::ln_one() {
            fdr
        } else {
            LogProb::ln_one()
        };
    }
    expected_fdr
}
#[cfg(test)]
mod tests {
use super::*;
use stats::LogProb;
#[test]
fn test_expected_fdr() {
let peps = [
LogProb(0.1f64.ln()),
LogProb::ln_zero(),
LogProb(0.25f64.ln()),
];
let fdrs = expected_fdr(&peps);
println!("{:?}", fdrs);
assert_relative_eq!(*fdrs[1], *LogProb::ln_zero());
assert_relative_eq!(*fdrs[0], *LogProb(0.05f64.ln()));
assert_relative_eq!(*fdrs[2], *LogProb((0.35 / 3.0f64).ln()), epsilon = 0.000001);
}
}
| 31.716418 | 96 | 0.625882 |
183d39f45263eea2fc9b792c54afab8edd6a91a9 | 3,795 | /// Most of the code for this module comes from `rust-libp2p`.
use std::fmt;
use sha2::digest::Digest;
use unsigned_varint::{decode, encode};
use crate::handshake::handshake_struct::PublicKey;
const SHA256_CODE: u16 = 0x12;
const SHA256_SIZE: u8 = 32;
/// Identifier of a peer of the network
///
/// The data is a hash of the public key of the peer
#[derive(Clone, PartialOrd, PartialEq, Eq, Hash)]
pub struct PeerId {
inner: Vec<u8>,
}
impl PeerId {
    /// Builds a `PeerId` from a public key.
    ///
    /// The stored bytes follow the multihash layout this type also validates
    /// in `from_bytes`: a varint hash-function code (SHA-256), a one-byte
    /// digest length (32), then the SHA-256 digest of the raw public key.
    #[inline]
    pub fn from_public_key(public_key: &PublicKey) -> Self {
        let key_inner = public_key.inner_ref();
        let mut buf = encode::u16_buffer();
        let code = encode::u16(SHA256_CODE, &mut buf);
        // Header = varint-encoded hash code followed by the length byte.
        let header_len = code.len() + 1;
        let mut inner = Vec::new();
        inner.resize(header_len + SHA256_SIZE as usize, 0);
        inner[..code.len()].copy_from_slice(code);
        inner[code.len()] = SHA256_SIZE;
        let mut hasher = sha2::Sha256::default();
        hasher.input(key_inner);
        // Digest is written immediately after the header.
        inner[header_len..].copy_from_slice(hasher.result().as_ref());
        PeerId { inner }
    }
    /// If data is a valid `PeerId`, return `PeerId`, else return error
    pub fn from_bytes(data: Vec<u8>) -> Result<Self, ()> {
        if data.is_empty() {
            return Err(());
        }
        // `bytes` is everything after the varint hash-function code.
        let (code, bytes) = decode::u16(&data).map_err(|_| ())?;
        // Only SHA-256 multihashes are accepted.
        if code != SHA256_CODE {
            return Err(());
        }
        // Exactly one length byte plus the 32 digest bytes must remain.
        if bytes.len() != SHA256_SIZE as usize + 1 {
            return Err(());
        }
        // The length byte itself must declare a 32-byte digest.
        if bytes[0] != SHA256_SIZE {
            return Err(());
        }
        Ok(PeerId { inner: data })
    }
    /// Return raw bytes representation of this peer id
    #[inline]
    pub fn as_bytes(&self) -> &[u8] {
        &self.inner
    }
    /// Returns a base-58 encoded string of this `PeerId`.
    #[inline]
    pub fn to_base58(&self) -> String {
        bs58::encode(self.inner.clone()).into_string()
    }
    /// Returns the raw bytes of the hash of this `PeerId`.
    #[inline]
    pub fn digest(&self) -> &[u8] {
        // Skip the varint code, then the length byte, leaving the digest.
        let (_, bytes) = decode::u16(&self.inner).expect("a invalid digest");
        &bytes[1..]
    }
    /// Checks whether the public key passed as parameter matches the public key of this `PeerId`.
    pub fn is_public_key(&self, public_key: &PublicKey) -> bool {
        // Re-derive a PeerId from the key and compare byte-wise.
        let peer_id = Self::from_public_key(public_key);
        &peer_id == self
    }
}
impl fmt::Debug for PeerId {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "PeerId({})", self.to_base58())
}
}
impl From<PublicKey> for PeerId {
#[inline]
fn from(key: PublicKey) -> PeerId {
PeerId::from_public_key(&key)
}
}
impl ::std::str::FromStr for PeerId {
type Err = ();
#[inline]
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = bs58::decode(s).into_vec().map_err(|_| ())?;
PeerId::from_bytes(bytes)
}
}
#[cfg(test)]
mod tests {
    use crate::{peer_id::PeerId, SecioKeyPair};
    /// A `PeerId` derived from a public key must match that same key.
    #[test]
    fn peer_id_is_public_key() {
        let pub_key = SecioKeyPair::secp256k1_generated().to_public_key();
        let peer_id = PeerId::from_public_key(&pub_key);
        // `assert!` rather than `assert_eq!(…, true)`
        // (clippy: bool_assert_comparison).
        assert!(peer_id.is_public_key(&pub_key));
    }
    /// Round-trip through the raw byte representation.
    #[test]
    fn peer_id_into_bytes_then_from_bytes() {
        let peer_id = SecioKeyPair::secp256k1_generated().to_peer_id();
        let second = PeerId::from_bytes(peer_id.as_bytes().to_vec()).unwrap();
        assert_eq!(peer_id, second);
    }
    /// Round-trip through the base-58 string representation.
    #[test]
    fn peer_id_to_base58_then_back() {
        let peer_id = SecioKeyPair::secp256k1_generated().to_peer_id();
        let second: PeerId = peer_id.to_base58().parse().unwrap();
        assert_eq!(peer_id, second);
    }
}
| 27.302158 | 98 | 0.591041 |
1ed7d422c8c4bd04647e721bf8b41b5270efa158 | 2,504 | use crate::aabb::surrounding_box;
use crate::rtweekend::random_i32;
use crate::HitRecord;
use crate::Hittable;
use crate::Ray;
use crate::Vec3;
use crate::AABB;
use std::boxed::Box;
use std::vec::Vec;
/// A collection of hittable objects, itself hittable: rays are tested against
/// every contained object and the closest hit wins.
pub struct HittableList {
    /// Owned scene objects, in insertion order.
    pub objects: Vec<Box<dyn Hittable>>,
}
impl HittableList {
    /// Creates an empty list.
    pub fn new() -> Self {
        Self {
            objects: Vec::new(),
        }
    }
    /// Appends an object to the list.
    pub fn add(&mut self, object: Box<dyn Hittable>) {
        self.objects.push(object);
    }
}
/// `Default` mirrors `new()` (clippy: `new_without_default`).
impl Default for HittableList {
    fn default() -> Self {
        Self::new()
    }
}
impl Hittable for HittableList {
    /// Returns the hit record of the closest object intersected by `r`
    /// within `(t_min, t_max)`, or `None` if nothing is hit.
    fn hit(&self, r: Ray, t_min: f64, t_max: f64) -> Option<HitRecord> {
        let mut closest_so_far = t_max;
        let mut closest_hit: Option<HitRecord> = None;
        for object in &self.objects {
            // Shrinking the upper bound to the closest hit so far keeps the
            // search correct while testing each object exactly once. The
            // previous version called `object.hit` twice per candidate and a
            // third time to re-derive the winning record at the end.
            if let Some(rec) = object.hit(r, t_min, closest_so_far) {
                closest_so_far = rec.t;
                closest_hit = Some(rec);
            }
        }
        closest_hit
    }
    /// Computes the bounding box of the whole list as the surrounding box of
    /// all member boxes. Returns `false` (possibly leaving `output_box`
    /// partially updated) when the list is empty or any member reports no
    /// bounding box.
    fn bounding_box(&self, time0: f64, time1: f64, output_box: &mut crate::aabb::AABB) -> bool {
        if self.objects.is_empty() {
            return false;
        }
        let mut temp_box: AABB = AABB::new(Vec3::new(0.0, 0.0, 0.0), Vec3::new(0.0, 0.0, 0.0));
        let mut first_box: bool = true;
        for object in &self.objects {
            if !object.bounding_box(time0, time1, &mut temp_box) {
                return false;
            }
            // The first member seeds the result; later members enlarge it.
            *output_box = if first_box {
                temp_box
            } else {
                surrounding_box(*output_box, temp_box)
            };
            first_box = false;
        }
        true
    }
    /// The PDF of the list is the equally-weighted average of the member PDFs.
    fn pdf_value(&self, o: Vec3, v: Vec3) -> f64 {
        let weight = 1.0 / (self.objects.len() as f64);
        let mut sum = 0.0;
        for object in &self.objects {
            sum += weight * object.pdf_value(o, v);
        }
        sum
    }
    /// Samples a direction toward a uniformly chosen member object.
    fn random(&self, o: Vec3) -> Vec3 {
        let int_size = self.objects.len() as i32;
        self.objects[random_i32(0, int_size - 1) as usize].random(o)
    }
}
// NOTE(review): `Box<dyn Hittable>` is not automatically `Send`/`Sync`; these
// impls assert that every concrete `Hittable` used is safe to share across
// threads — TODO confirm at the call sites.
unsafe impl Sync for HittableList {}
unsafe impl Send for HittableList {}
| 29.809524 | 96 | 0.527955 |
62325ad33714b2c58edc44d7c65ade6847847b6a | 155 | pub mod ipv4;
pub mod ipv6;
use rlua::prelude::*;
pub fn init(lua: &Lua) -> crate::Result<()> {
ipv4::init(lua)?;
ipv6::init(lua)?;
Ok(())
} | 14.090909 | 45 | 0.541935 |
7a46c263d1bf469013a302157a50cac86e18876b | 77 | pub const SLOT_COUNT: usize = 16;
pub type PlayerSlots = [u32; SLOT_COUNT];
| 19.25 | 41 | 0.727273 |
e953b6398f9c08e6eab61bfc34e807b28c92e740 | 5,788 | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Export recording
//
// This pass simply determines what all "export" keywords refer to and
// writes the results into the export map.
//
// FIXME #4953 This pass will be removed once exports change to per-item.
// Then this operation can simply be performed as part of item (or import)
// processing.
use {Module, NameBindings, Resolver};
use Namespace::{self, TypeNS, ValueNS};
use build_reduced_graph;
use module_to_string;
use rustc::middle::def::Export;
use syntax::ast;
use syntax::parse::token;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
/// Visitor that walks the module tree and records each local module's
/// exports into the resolver's export map.
struct ExportRecorder<'a, 'b:'a, 'tcx:'b> {
    resolver: &'a mut Resolver<'b, 'tcx>
}
// Deref and DerefMut impls allow treating ExportRecorder as Resolver.
impl<'a, 'b, 'tcx:'b> Deref for ExportRecorder<'a, 'b, 'tcx> {
    type Target = Resolver<'b, 'tcx>;

    /// Reborrows the wrapped resolver so `self.field` reaches `Resolver` fields.
    fn deref<'c>(&'c self) -> &'c Resolver<'b, 'tcx> {
        &*self.resolver
    }
}
impl<'a, 'b, 'tcx:'b> DerefMut for ExportRecorder<'a, 'b, 'tcx> {
    /// Mutable counterpart of `deref`; lets the recorder mutate resolver state.
    fn deref_mut<'c>(&'c mut self) -> &'c mut Resolver<'b, 'tcx> {
        &mut *self.resolver
    }
}
impl<'a, 'b, 'tcx> ExportRecorder<'a, 'b, 'tcx> {
    /// Recursively records exports for `module_` and all of its (named and
    /// anonymous) children, skipping subtrees owned by non-local crates.
    fn record_exports_for_module_subtree(&mut self,
                                         module_: Rc<Module>) {
        // If this isn't a local krate, then bail out. We don't need to record
        // exports for nonlocal crates.

        match module_.def_id.get() {
            Some(def_id) if def_id.krate == ast::LOCAL_CRATE => {
                // OK. Continue.
                debug!("(recording exports for module subtree) recording \
                        exports for local module `{}`",
                       module_to_string(&*module_));
            }
            None => {
                // Record exports for the root module.
                debug!("(recording exports for module subtree) recording \
                        exports for root module `{}`",
                       module_to_string(&*module_));
            }
            Some(_) => {
                // Bail out.
                debug!("(recording exports for module subtree) not recording \
                        exports for `{}`",
                       module_to_string(&*module_));
                return;
            }
        }

        self.record_exports_for_module(&*module_);
        // Children may be lazily populated; force population before descent.
        build_reduced_graph::populate_module_if_necessary(self.resolver, &module_);

        for (_, child_name_bindings) in &*module_.children.borrow() {
            match child_name_bindings.get_module_if_available() {
                None => {
                    // Nothing to do.
                }
                Some(child_module) => {
                    self.record_exports_for_module_subtree(child_module);
                }
            }
        }

        for (_, child_module) in &*module_.anonymous_children.borrow() {
            self.record_exports_for_module_subtree(child_module.clone());
        }
    }

    /// Collects this module's exports and, if the module has a def id,
    /// writes them into the resolver's export map keyed by node id.
    fn record_exports_for_module(&mut self, module_: &Module) {
        let mut exports = Vec::new();

        self.add_exports_for_module(&mut exports, module_);
        match module_.def_id.get() {
            Some(def_id) => {
                self.export_map.insert(def_id.node, exports);
                debug!("(computing exports) writing exports for {} (some)",
                       def_id.node);
            }
            None => {}
        }
    }

    /// Appends an `Export` entry for `name` if `namebindings` has a
    /// definition in namespace `ns`; otherwise only logs the miss.
    fn add_exports_of_namebindings(&mut self,
                                   exports: &mut Vec<Export>,
                                   name: ast::Name,
                                   namebindings: &NameBindings,
                                   ns: Namespace) {
        match namebindings.def_for_namespace(ns) {
            Some(d) => {
                debug!("(computing exports) YES: export '{}' => {:?}",
                       name, d.def_id());
                exports.push(Export {
                    name: name,
                    def_id: d.def_id()
                });
            }
            d_opt => {
                debug!("(computing exports) NO: {:?}", d_opt);
            }
        }
    }

    /// Walks the module's public import resolutions and records an export
    /// for each resolved target in both the type and value namespaces.
    fn add_exports_for_module(&mut self,
                              exports: &mut Vec<Export>,
                              module_: &Module) {
        for (name, import_resolution) in &*module_.import_resolutions.borrow() {
            if !import_resolution.is_public {
                continue
            }
            let xs = [TypeNS, ValueNS];
            for &ns in &xs {
                match import_resolution.target_for_namespace(ns) {
                    Some(target) => {
                        debug!("(computing exports) maybe export '{}'",
                               token::get_name(*name));
                        self.add_exports_of_namebindings(exports,
                                                         *name,
                                                         &*target.bindings,
                                                         ns)
                    }
                    _ => ()
                }
            }
        }
    }
}
/// Public entry point: records exports for the entire local module tree,
/// starting at the resolver's graph root.
pub fn record(resolver: &mut Resolver) {
    let root_module = resolver.graph_root.get_module();
    let mut recorder = ExportRecorder { resolver: resolver };
    recorder.record_exports_for_module_subtree(root_module);
}
| 35.292683 | 83 | 0.514167 |
875fcfda6824921ea6c90a1afcd492532e67552d | 18,703 | #![allow(clippy::or_fun_call)]
use basilisk_runtime::{
AccountId, AssetRegistryConfig, AuraId, Balance, BalancesConfig, CollatorSelectionConfig, CouncilConfig,
ElectionsConfig, GenesisConfig, MultiTransactionPaymentConfig, OrmlNftConfig, ParachainInfoConfig, SessionConfig,
Signature, SudoConfig, SystemConfig, TechnicalCommitteeConfig, TokensConfig, VestingConfig, BSX, CORE_ASSET_ID,
WASM_BINARY,
};
use cumulus_primitives_core::ParaId;
use hex_literal::hex;
use primitives::BlockNumber;
use sc_chain_spec::{ChainSpecExtension, ChainSpecGroup};
use sc_service::ChainType;
use sc_telemetry::TelemetryEndpoints;
use serde::{Deserialize, Serialize};
use serde_json::map::Map;
use sp_core::{crypto::UncheckedInto, sr25519, Pair, Public};
use sp_runtime::traits::{IdentifyAccount, Verify};
// On-chain token metadata advertised to wallets and UIs via `properties`.
const TOKEN_DECIMALS: u8 = 12;
const TOKEN_SYMBOL: &str = "BSX";
// Network protocol id used to separate Basilisk peer discovery/gossip.
const PROTOCOL_ID: &str = "bsx";
// The URL for the telemetry server.
const TELEMETRY_URLS: [&str; 2] = [
    "wss://telemetry.polkadot.io/submit/",
    "wss://telemetry.hydradx.io:9000/submit/",
];
/// The extensions for the [`ChainSpec`].
///
/// Serialized alongside the chain spec JSON; `deny_unknown_fields` makes
/// deserialization fail on unrecognized keys.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, ChainSpecGroup, ChainSpecExtension)]
#[serde(deny_unknown_fields)]
pub struct Extensions {
	/// The relay chain of the Parachain.
	pub relay_chain: String,
	/// The id of the Parachain.
	pub para_id: u32,
}
impl Extensions {
	/// Try to get the extension from the given `ChainSpec`.
	///
	/// Returns `None` when the spec carries no `Extensions` entry.
	#[allow(clippy::borrowed_box)]
	pub fn try_get(chain_spec: &Box<dyn sc_service::ChainSpec>) -> Option<&Self> {
		sc_chain_spec::get_extension(chain_spec.extensions())
	}
}
/// Specialized `ChainSpec`. This is a specialization of the general Substrate ChainSpec type.
///
/// Carries the Basilisk `GenesisConfig` plus the relay-chain [`Extensions`].
pub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig, Extensions>;
/// Generate a crypto pair from seed.
///
/// The seed is expanded to the standard dev-account URI form `//<seed>`.
pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
	let uri = format!("//{}", seed);
	TPublic::Pair::from_string(&uri, None)
		.expect("static values are valid; qed")
		.public()
}
type AccountPublic = <Signature as Verify>::Signer;
/// Generate an account ID from seed.
///
/// Derives the public key via [`get_from_seed`] and maps it to an account id.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId
where
	AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
	let public = get_from_seed::<TPublic>(seed);
	AccountPublic::from(public).into_account()
}
/// Loads the bundled LBP test vesting schedule and panics if the JSON is
/// malformed or any beneficiary account appears more than once.
pub fn get_vesting_config_for_test() -> Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)> {
	let raw = &include_bytes!("../res/basilisk-vesting-lbp-test.json")[..];
	let vesting_list: Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)> =
		serde_json::from_slice(raw).unwrap();

	// ensure no duplicates exist.
	let unique_vesting_accounts: std::collections::BTreeSet<_> =
		vesting_list.iter().map(|entry| &entry.0).collect();
	assert!(
		unique_vesting_accounts.len() == vesting_list.len(),
		"duplicate vesting accounts in genesis."
	);
	vesting_list
}
/// Loads the canonical Basilisk chain spec bundled with the binary.
pub fn basilisk_parachain_config() -> Result<ChainSpec, String> {
	ChainSpec::from_json_bytes(&include_bytes!("../res/basilisk.json")[..])
}
/// Builds the live Kusama staging chain spec ("Basilisk") for `para_id`.
///
/// Uses hard-coded Intergalactic sudo/collator keys, the production
/// bootnodes and both telemetry endpoints; genesis construction is
/// delegated to `parachain_genesis`.
pub fn kusama_staging_parachain_config(para_id: ParaId) -> Result<ChainSpec, String> {
	let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
	let mut properties = Map::new();
	properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
	properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());

	Ok(ChainSpec::from_genesis(
		// Name
		"Basilisk",
		// ID
		"basilisk",
		ChainType::Live,
		move || {
			parachain_genesis(
				wasm_binary,
				// Sudo account
				hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(), //TODO intergalactic
				//initial authorities & invulnerables
				vec![
					(
						hex!["f25e5d7b43266a5b4cca762c9be917f18852d7a5db85e734776206eeb539dd4f"].into(),
						hex!["f25e5d7b43266a5b4cca762c9be917f18852d7a5db85e734776206eeb539dd4f"].unchecked_into(),
					),
					(
						hex!["e84a7090cb18fe39eafebdae9a3ac1111c955247a202a3ab2a3cfe8573c03c60"].into(),
						hex!["e84a7090cb18fe39eafebdae9a3ac1111c955247a202a3ab2a3cfe8573c03c60"].unchecked_into(),
					),
					(
						hex!["c49e3fbebac92027e0d19c2fc1ddc288eb549971831e336550832a476727f601"].into(),
						hex!["c49e3fbebac92027e0d19c2fc1ddc288eb549971831e336550832a476727f601"].unchecked_into(),
					),
					(
						hex!["c856aabea6e433be2dfe233c6118d156133e4e663a1223da06421058ddb56712"].into(),
						hex!["c856aabea6e433be2dfe233c6118d156133e4e663a1223da06421058ddb56712"].unchecked_into(),
					),
					(
						hex!["e02a753fc885bde7ea5839df8619ab80b67be6c869bc19b41f20f865a2f90578"].into(),
						hex!["e02a753fc885bde7ea5839df8619ab80b67be6c869bc19b41f20f865a2f90578"].unchecked_into(),
					),
				],
				// Pre-funded accounts
				vec![],
				true,
				para_id,
				//technical committee
				hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(), // TREASURY - Fallback for multi tx payment
			)
		},
		// Bootnodes
		vec![
			"/dns/p2p-01.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWJRdTtgFnwrrcigrMRxdJ9zfmhtpH5qgAV9budWat4UtR"
				.parse()
				.unwrap(),
			"/dns/p2p-02.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWQNvuYebz6Zt34LnesFfdVh5i7FWP8GUe9QxuBmKE4b9R"
				.parse()
				.unwrap(),
			"/dns/p2p-03.basilisk.hydradx.io/tcp/30333/p2p/12D3KooWD2Y9VkfC9cmQEpKZLN26xWq7XPJXHDUH8LNVmhoNBrdJ"
				.parse()
				.unwrap(),
		],
		// Telemetry
		Some(
			TelemetryEndpoints::new(vec![
				(TELEMETRY_URLS[0].to_string(), 0),
				(TELEMETRY_URLS[1].to_string(), 0),
			])
			.expect("Telemetry url is valid"),
		),
		// Protocol ID
		Some(PROTOCOL_ID),
		// Properties
		Some(properties),
		// Extensions
		Extensions {
			relay_chain: "kusama".into(),
			para_id: para_id.into(),
		},
	))
}
/// Builds the public testnet chain spec ("Basilisk Egg") for `para_id`,
/// parachain on Westend, with hard-coded testnet keys and bootnodes.
///
/// Fix: the vesting-list argument was written as `vec![].into()` — an
/// identity `Vec -> Vec` conversion (clippy `useless_conversion`); the
/// redundant `.into()` is removed.
pub fn testnet_parachain_config(para_id: ParaId) -> Result<ChainSpec, String> {
	let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
	let mut properties = Map::new();
	properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
	properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());

	Ok(ChainSpec::from_genesis(
		// Name
		"Basilisk Egg",
		// ID
		"basilisk_egg",
		ChainType::Live,
		move || {
			testnet_parachain_genesis(
				wasm_binary,
				// Sudo account
				hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into(),
				//initial authorities & invulnerables
				vec![
					(
						hex!["da0fa4ab419def66fb4ac5224e594e82c34ee795268fc7787c8a096c4ff14f11"].into(),
						hex!["da0fa4ab419def66fb4ac5224e594e82c34ee795268fc7787c8a096c4ff14f11"].unchecked_into(),
					),
					(
						hex!["ecd7a5439c6ab0cd6550bc2f1cef5299d425bb95bb6d7afb32aa3d95ee4f7f1f"].into(),
						hex!["ecd7a5439c6ab0cd6550bc2f1cef5299d425bb95bb6d7afb32aa3d95ee4f7f1f"].unchecked_into(),
					),
					(
						hex!["f0ad6f1aae7a445c1e80cac883096ec8177eda276fec53ad9ccbe570f3090a26"].into(),
						hex!["f0ad6f1aae7a445c1e80cac883096ec8177eda276fec53ad9ccbe570f3090a26"].unchecked_into(),
					),
				],
				// Pre-funded accounts
				vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
				true,
				para_id,
				//council
				vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
				//technical committee
				vec![hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into()],
				hex!["30035c21ba9eda780130f2029a80c3e962f56588bc04c36be95a225cb536fb55"].into(), // SAME AS ROOT
				// vesting schedules (none at genesis)
				vec![],
			)
		},
		// Bootnodes
		vec![
			"/dns/p2p-01.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooW9qapYrocm6W1meShf8eQfeJzbry9PN2CN6SfBGbymxPL"
				.parse()
				.unwrap(),
			"/dns/p2p-02.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooWPS16BYW173YxmxEJpQBoDz1t3Ht4yaPwwg5qCTED7N66"
				.parse()
				.unwrap(),
			"/dns/p2p-03.basilisk-testnet.hydradx.io/tcp/30333/p2p/12D3KooWRMgQRtYrWsLvuwg3V3aQEvMgsbb88T29cKCTH6RAxTaj"
				.parse()
				.unwrap(),
		],
		// Telemetry
		Some(
			TelemetryEndpoints::new(vec![
				(TELEMETRY_URLS[0].to_string(), 0),
				(TELEMETRY_URLS[1].to_string(), 0),
			])
			.expect("Telemetry url is valid"),
		),
		// Protocol ID
		Some(PROTOCOL_ID),
		// Properties
		Some(properties),
		// Extensions
		Extensions {
			relay_chain: "westend".into(),
			para_id: para_id.into(),
		},
	))
}
/// Builds the single-machine development chain spec ("dev") for `para_id`,
/// using the well-known Alice/Bob dev keys for sudo, collators and funding.
pub fn parachain_development_config(para_id: ParaId) -> Result<ChainSpec, String> {
	let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
	let mut properties = Map::new();
	properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
	properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());

	Ok(ChainSpec::from_genesis(
		// Name
		"Basilisk Development",
		// ID
		"dev",
		ChainType::Development,
		move || {
			testnet_parachain_genesis(
				wasm_binary,
				// Sudo account
				get_account_id_from_seed::<sr25519::Public>("Alice"),
				//initial authorities & invulnerables
				vec![
					(
						get_account_id_from_seed::<sr25519::Public>("Alice"),
						get_from_seed::<AuraId>("Alice"),
					),
					(
						get_account_id_from_seed::<sr25519::Public>("Bob"),
						get_from_seed::<AuraId>("Bob"),
					),
				],
				// Pre-funded accounts
				vec![
					get_account_id_from_seed::<sr25519::Public>("Alice"),
					get_account_id_from_seed::<sr25519::Public>("Bob"),
					get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
					get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
				],
				true,
				para_id,
				//council
				vec![get_account_id_from_seed::<sr25519::Public>("Alice")],
				//technical_committe
				vec![
					get_account_id_from_seed::<sr25519::Public>("Alice"),
					get_account_id_from_seed::<sr25519::Public>("Bob"),
					get_account_id_from_seed::<sr25519::Public>("Eve"),
				],
				get_account_id_from_seed::<sr25519::Public>("Alice"), // SAME AS ROOT
				get_vesting_config_for_test(),
			)
		},
		// Bootnodes
		vec![],
		// Telemetry
		None,
		// Protocol ID
		Some(PROTOCOL_ID),
		// Properties
		Some(properties),
		// Extensions
		Extensions {
			relay_chain: "rococo-dev".into(),
			para_id: para_id.into(),
		},
	))
}
/// Builds the multi-node local testnet chain spec ("local_testnet") for
/// `para_id`; funds all six well-known dev accounts plus their stashes.
pub fn local_parachain_config(para_id: ParaId) -> Result<ChainSpec, String> {
	let wasm_binary = WASM_BINARY.ok_or("Development wasm binary not available".to_string())?;
	let mut properties = Map::new();
	properties.insert("tokenDecimals".into(), TOKEN_DECIMALS.into());
	properties.insert("tokenSymbol".into(), TOKEN_SYMBOL.into());

	Ok(ChainSpec::from_genesis(
		// Name
		"Basilisk Local Testnet",
		// ID
		"local_testnet",
		ChainType::Local,
		move || {
			testnet_parachain_genesis(
				wasm_binary,
				// Sudo account
				get_account_id_from_seed::<sr25519::Public>("Alice"),
				//initial authorities & invulnerables
				vec![
					(
						get_account_id_from_seed::<sr25519::Public>("Alice"),
						get_from_seed::<AuraId>("Alice"),
					),
					(
						get_account_id_from_seed::<sr25519::Public>("Bob"),
						get_from_seed::<AuraId>("Bob"),
					),
				],
				// Pre-funded accounts
				vec![
					get_account_id_from_seed::<sr25519::Public>("Alice"),
					get_account_id_from_seed::<sr25519::Public>("Bob"),
					get_account_id_from_seed::<sr25519::Public>("Charlie"),
					get_account_id_from_seed::<sr25519::Public>("Dave"),
					get_account_id_from_seed::<sr25519::Public>("Eve"),
					get_account_id_from_seed::<sr25519::Public>("Ferdie"),
					get_account_id_from_seed::<sr25519::Public>("Alice//stash"),
					get_account_id_from_seed::<sr25519::Public>("Bob//stash"),
					get_account_id_from_seed::<sr25519::Public>("Charlie//stash"),
					get_account_id_from_seed::<sr25519::Public>("Dave//stash"),
					get_account_id_from_seed::<sr25519::Public>("Eve//stash"),
					get_account_id_from_seed::<sr25519::Public>("Ferdie//stash"),
				],
				true,
				para_id,
				//council
				vec![get_account_id_from_seed::<sr25519::Public>("Alice")],
				//technical_committe
				vec![
					get_account_id_from_seed::<sr25519::Public>("Alice"),
					get_account_id_from_seed::<sr25519::Public>("Bob"),
					get_account_id_from_seed::<sr25519::Public>("Eve"),
				],
				get_account_id_from_seed::<sr25519::Public>("Alice"), // SAME AS ROOT
				get_vesting_config_for_test(),
			)
		},
		// Bootnodes
		vec![],
		// Telemetry
		None,
		// Protocol ID
		Some(PROTOCOL_ID),
		// Properties
		Some(properties),
		// Extensions
		Extensions {
			relay_chain: "rococo-local".into(),
			para_id: para_id.into(),
		},
	))
}
/// Configure initial storage state for FRAME modules.
///
/// Production genesis: endows only the Intergalactic and Treasury accounts,
/// seeds the collator/session sets from `initial_authorities`, and leaves
/// token/vesting/NFT state empty. `_endowed_accounts` is intentionally
/// unused here (balances are hard-coded below).
fn parachain_genesis(
	wasm_binary: &[u8],
	root_key: AccountId,
	initial_authorities: Vec<(AccountId, AuraId)>,
	_endowed_accounts: Vec<AccountId>,
	_enable_println: bool,
	parachain_id: ParaId,
	tx_fee_payment_account: AccountId,
) -> GenesisConfig {
	GenesisConfig {
		system: SystemConfig {
			// Add Wasm runtime to storage.
			code: wasm_binary.to_vec(),
			changes_trie_config: Default::default(),
		},
		balances: BalancesConfig {
			// Configure endowed accounts with initial balance of a lot.
			balances: vec![
				(
					// Intergalactic HDX Tokens 15%
					hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(),
					15_000_000_000u128 * BSX,
				),
				(
					// Treasury 9%
					hex!["6d6f646c70792f74727372790000000000000000000000000000000000000000"].into(),
					9_000_000_000 * BSX,
				),
			],
		},
		sudo: SudoConfig {
			// Assign network admin rights.
			key: root_key,
		},
		collator_selection: CollatorSelectionConfig {
			invulnerables: initial_authorities.iter().cloned().map(|(acc, _)| acc).collect(),
			candidacy_bond: 10_000,
			..Default::default()
		},
		session: SessionConfig {
			keys: initial_authorities
				.iter()
				.cloned()
				.map(|(acc, aura)| {
					(
						acc.clone(),                                    // account id
						acc,                                            // validator id
						basilisk_runtime::opaque::SessionKeys { aura }, // session keys
					)
				})
				.collect(),
		},
		// no need to pass anything, it will panic if we do. Session will take care
		// of this.
		aura: Default::default(),
		asset_registry: AssetRegistryConfig {
			core_asset_id: CORE_ASSET_ID,
			asset_ids: vec![],
			next_asset_id: 1,
		},
		multi_transaction_payment: MultiTransactionPaymentConfig {
			currencies: vec![],
			authorities: vec![],
			fallback_account: tx_fee_payment_account,
		},
		tokens: TokensConfig { balances: vec![] },
		treasury: Default::default(),
		elections: ElectionsConfig {
			// Intergalactic elections
			members: vec![(
				hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into(),
				14_999_900_000u128 * BSX,
			)],
		},
		council: CouncilConfig {
			// Intergalactic council member
			members: vec![hex!["bca8eeb9c7cf74fc28ebe4091d29ae1c12ed622f7e3656aae080b54d5ff9a23c"].into()],
			phantom: Default::default(),
		},
		technical_committee: TechnicalCommitteeConfig {
			members: vec![
				hex!["d6cf8789dce651cb54a4036406f4aa0c771914d345c004ad0567b814c71fb637"].into(),
				hex!["bc96ec00952efa8f0e3e08b36bf5096bcb877acac536e478aecb72868db5db02"].into(),
				hex!["2875dd47bc1bcb70e23de79e7538c312be12c716033bbae425130e46f5f2b35e"].into(),
				hex!["644643bf953233d08c4c9bae0acd49f3baa7658d9b342b7e6879bb149ee6e44c"].into(),
				hex!["ccdb435892c9883656d0398b2b67023ba1e11bda0c7f213f70fdac54c6abab3f"].into(),
				hex!["f461c5ae6e80bf4af5b84452789c17b0b0a095a2d77c2a407978147de2d5b572"].into(),
			],
			phantom: Default::default(),
		},
		orml_nft: OrmlNftConfig {
			tokens: Default::default(),
		},
		vesting: VestingConfig { vesting: vec![] },
		parachain_info: ParachainInfoConfig { parachain_id }, //TODO
		aura_ext: Default::default(),
	}
}
/// Test/dev genesis: endows every account in `endowed_accounts` with native
/// and test tokens (hKSM/hDOT/hETH/hUSDT), registers those test assets, and
/// installs the supplied council, technical committee and vesting schedules.
fn testnet_parachain_genesis(
	wasm_binary: &[u8],
	root_key: AccountId,
	initial_authorities: Vec<(AccountId, AuraId)>,
	endowed_accounts: Vec<AccountId>,
	_enable_println: bool,
	parachain_id: ParaId,
	council_members: Vec<AccountId>,
	tech_committee_members: Vec<AccountId>,
	tx_fee_payment_account: AccountId,
	vesting_list: Vec<(AccountId, BlockNumber, BlockNumber, u32, Balance)>,
) -> GenesisConfig {
	GenesisConfig {
		system: SystemConfig {
			// Add Wasm runtime to storage.
			code: wasm_binary.to_vec(),
			changes_trie_config: Default::default(),
		},
		balances: BalancesConfig {
			// Configure endowed accounts with initial balance of a lot.
			balances: endowed_accounts
				.iter()
				.cloned()
				.map(|k| (k, 1_000_000_000u128 * BSX))
				.collect(),
		},
		sudo: SudoConfig {
			// Assign network admin rights.
			key: root_key,
		},
		collator_selection: CollatorSelectionConfig {
			invulnerables: initial_authorities.iter().cloned().map(|(acc, _)| acc).collect(),
			candidacy_bond: 10_000,
			..Default::default()
		},
		session: SessionConfig {
			keys: initial_authorities
				.iter()
				.cloned()
				.map(|(acc, aura)| {
					(
						acc.clone(),                                    // account id
						acc,                                            // validator id
						basilisk_runtime::opaque::SessionKeys { aura }, // session keys
					)
				})
				.collect(),
		},
		// no need to pass anything, it will panic if we do. Session will take care
		// of this.
		aura: Default::default(),
		asset_registry: AssetRegistryConfig {
			core_asset_id: CORE_ASSET_ID,
			// Pre-registered test assets; ids 1-4, next free id is 5.
			asset_ids: vec![
				(b"hKSM".to_vec(), 1),
				(b"hDOT".to_vec(), 2),
				(b"hETH".to_vec(), 3),
				(b"hUSDT".to_vec(), 4),
			],
			next_asset_id: 5,
		},
		multi_transaction_payment: MultiTransactionPaymentConfig {
			currencies: vec![],
			authorities: vec![],
			fallback_account: tx_fee_payment_account,
		},
		tokens: TokensConfig {
			// Every endowed account also gets a balance in each test asset.
			balances: endowed_accounts
				.iter()
				.flat_map(|x| {
					vec![
						(x.clone(), 1, 1_000_000_000u128 * BSX),
						(x.clone(), 2, 1_000_000_000u128 * BSX),
						(x.clone(), 3, 1_000_000_000u128 * BSX),
						(x.clone(), 4, 1_000_000_000u128 * BSX),
					]
				})
				.collect(),
		},
		treasury: Default::default(),
		elections: ElectionsConfig {
			// Intergalactic elections
			members: vec![(
				get_account_id_from_seed::<sr25519::Public>("Alice"),
				100_000_000u128 * BSX,
			)],
		},
		council: CouncilConfig {
			members: council_members,
			phantom: Default::default(),
		},
		technical_committee: TechnicalCommitteeConfig {
			members: tech_committee_members,
			phantom: Default::default(),
		},
		vesting: VestingConfig { vesting: vesting_list },
		orml_nft: OrmlNftConfig {
			tokens: Default::default(),
		},
		parachain_info: ParachainInfoConfig { parachain_id },
		aura_ext: Default::default(),
	}
}
| 31.75382 | 128 | 0.690798 |
26fcf507c0d126d0720c6e672c4c5f1d897c4e15 | 2,493 | use std::io::Read;
use itertools::Itertools;
use crate::common::ordered;
/// Reads the comma-separated crab positions from `input` and returns them
/// sorted ascending. Panics on I/O errors or malformed numbers.
fn read_input(input: &mut dyn Read) -> Vec<usize> {
    let mut raw = String::new();
    input.read_to_string(&mut raw).unwrap();

    let mut positions = raw
        .trim_end()
        .split(',')
        .map(|token| token.parse::<usize>().unwrap())
        .collect::<Vec<_>>();
    positions.sort_unstable();
    positions
}
/// Total part-1 fuel cost of aligning every crab to `pos`:
/// the sum of absolute distances.
fn cost_at(pos: usize, crabs: &[usize]) -> usize {
    crabs
        .iter()
        .map(|&crab_pos| crab_pos.max(pos) - crab_pos.min(pos))
        .sum()
}
/// Solves part 1: minimum total fuel with linear cost.
///
/// The cost Σ|x − pos| is minimized at a median of the (sorted) positions.
/// Fix: the previous index `len/2 + len%2` overshot the median for
/// odd-length inputs (e.g. [0, 0, 10] picked 10 instead of 0); index
/// `len/2` is a valid minimizer for both parities (upper median when even).
pub fn part1(input: &mut dyn Read) -> String {
    let crabs = read_input(input);
    let median = crabs[crabs.len() / 2];
    cost_at(median, &crabs).to_string()
}
/// Gauss sum 1 + 2 + … + `end` — the part-2 fuel cost of moving `end` steps.
pub fn sum_until(end: usize) -> usize {
    end * (end + 1) / 2
}
fn cost_at2(pos: usize, groups: &[(usize, usize)]) -> usize {
groups
.iter()
.map(|&(number, new_pos)| {
let (first, last) = ordered(pos, new_pos);
number * sum_until(last - first)
})
.sum()
}
/// Finds the minimum value of the unimodal `callback` over `[min, max]`
/// (inclusive) by ternary search, finishing small ranges by direct scan.
fn ternary_search(mut min: usize, mut max: usize, callback: impl Fn(usize) -> usize) -> usize {
    while max - min > 6 {
        let third = (max - min) / 3;
        let (probe_lo, probe_hi) = (min + third, max - third);
        if callback(probe_lo) < callback(probe_hi) {
            // Minimum cannot lie right of probe_hi.
            max = probe_hi - 1;
        } else {
            // Minimum cannot lie left of probe_lo.
            min = probe_lo + 1;
        }
    }

    // Ternary search isn't effective at such small intervals so we iterate the remaining part
    (min..=max).map(callback).min().unwrap()
}
/// Solves part 2: minimum total fuel with triangular cost, found by
/// ternary search over the occupied position range.
pub fn part2(input: &mut dyn Read) -> String {
    let counted: Vec<_> = read_input(input).into_iter().dedup_with_count().collect();
    let (lo, hi) = (counted.first().unwrap().1, counted.last().unwrap().1);
    let best = ternary_search(lo, hi, |candidate| cost_at2(candidate, &counted));
    best.to_string()
}
#[cfg(test)]
mod tests {
    use crate::test_implementation;

    use super::*;

    // Example crab positions from the puzzle statement.
    const SAMPLE: &[u8] = &*b"16,1,2,0,4,2,7,1,2,14";

    #[test]
    fn sample_part1() {
        // Expected alignment cost from the worked example.
        test_implementation(part1, SAMPLE, 37);
    }

    #[test]
    fn sample_part2() {
        test_implementation(part2, SAMPLE, 168);
    }

    #[test]
    fn test_maths() {
        // Triangular-number helper used by the part-2 cost function.
        assert_eq!(sum_until(1), 1);
        assert_eq!(sum_until(2), 3);
        assert_eq!(sum_until(3), 6);
        assert_eq!(sum_until(4), 10);
    }
}
01819ae6098eac5ab8a8f32c4787be4fc4fdd907 | 2,378 | extern crate kiddo;
use kiddo::distance::squared_euclidean;
use kiddo::KdTree;
use std::sync::atomic::{AtomicUsize, Ordering};
// Sample points on the diagonal y = x, each paired with an index payload.
static POINT_A: ([f64; 2], usize) = ([0f64, 0f64], 0);
static POINT_B: ([f64; 2], usize) = ([1f64, 1f64], 1);
static POINT_C: ([f64; 2], usize) = ([2f64, 2f64], 2);
static POINT_D: ([f64; 2], usize) = ([3f64, 3f64], 3);
#[test]
fn it_works() {
    let capacity_per_node = 2;
    let mut kdtree = KdTree::with_per_node_capacity(capacity_per_node).unwrap();

    // Wrap the distance function so every invocation is counted; the
    // assertions below pin exactly how many candidate points each query
    // evaluates (tree has 4 points split across 2-point leaves).
    let count = AtomicUsize::new(0);
    let new_dist = |a: &[f64; 2], b: &[f64; 2]| {
        count.fetch_add(1, Ordering::SeqCst);
        squared_euclidean(a, b)
    };

    kdtree.add(&POINT_A.0, POINT_A.1).unwrap();
    kdtree.add(&POINT_B.0, POINT_B.1).unwrap();
    kdtree.add(&POINT_C.0, POINT_C.1).unwrap();
    kdtree.add(&POINT_D.0, POINT_D.1).unwrap();

    // nearest(k): evaluates one leaf (2 points) per k up to the 4-point
    // total, then plateaus at 6 evaluations.
    kdtree.nearest(&POINT_A.0, 0, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 0);
    kdtree.nearest(&POINT_A.0, 1, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 2);
    kdtree.nearest(&POINT_A.0, 2, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 4);
    kdtree.nearest(&POINT_A.0, 3, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 6);
    kdtree.nearest(&POINT_A.0, 4, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 6);
    kdtree.nearest(&POINT_A.0, 5, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 6);
    kdtree.nearest(&POINT_B.0, 4, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 6);

    // within(radius): evaluation count grows as the ball spans more leaves.
    kdtree.within(&POINT_A.0, 0.0, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 2);
    kdtree.within(&POINT_B.0, 1.0, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 3);
    kdtree.within(&POINT_B.0, 2.0, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 6);

    // iter_nearest is lazy: distances are computed leaf-by-leaf as the
    // iterator advances, not up front.
    let mut iter = kdtree.iter_nearest(&POINT_A.0, &new_dist).unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 0);
    iter.next().unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 2);
    iter.next().unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 2);
    iter.next().unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 2);
    iter.next().unwrap();
    assert_eq!(count.swap(0, Ordering::SeqCst), 0);
}
1dfb5825ce4ed9521a840f047f0d455f23278666 | 2,603 | // Copyright 2019 The Tari Project
//
// Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
// following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
// disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
// following disclaimer in the documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
// products derived from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use std::sync::Arc;
use rand::rngs::OsRng;
use crate::{
peer_manager::{NodeId, NodeIdentity, PeerFeatures},
transports::MemoryTransport,
};
/// Creates a fresh random node identity listening on a unique in-memory
/// transport address.
pub fn build_node_identity(features: PeerFeatures) -> Arc<NodeIdentity> {
    let port = MemoryTransport::acquire_next_memsocket_port();
    let public_addr = format!("/memory/{}", port).parse().unwrap();
    Arc::new(NodeIdentity::random(&mut OsRng, public_addr, features))
}
/// Builds `n` random identities, sorted ascending by node id.
pub fn ordered_node_identities(n: usize, features: PeerFeatures) -> Vec<Arc<NodeIdentity>> {
    let mut identities = build_many_node_identities(n, features);
    identities.sort_unstable_by(|left, right| left.node_id().cmp(right.node_id()));
    identities
}
/// Builds `n` independent random node identities.
pub fn build_many_node_identities(n: usize, features: PeerFeatures) -> Vec<Arc<NodeIdentity>> {
    std::iter::repeat_with(|| build_node_identity(features))
        .take(n)
        .collect()
}
pub fn ordered_node_identities_by_distance(
node_id: &NodeId,
n: usize,
features: PeerFeatures,
) -> Vec<Arc<NodeIdentity>> {
let mut ids = build_many_node_identities(n, features);
ids.sort_unstable_by_key(|a| a.node_id().distance(node_id));
ids
}
| 44.87931 | 118 | 0.751056 |
fcab3fd0badce93e2e177c76cb859c61bb200c20 | 3,715 | #![unstable(feature = "wake_trait", issue = "69912")]
//! Types and Traits for working with asynchronous tasks.
use core::mem::ManuallyDrop;
use core::task::{RawWaker, RawWakerVTable, Waker};
use crate::sync::Arc;
/// The implementation of waking a task on an executor.
///
/// This trait can be used to create a [`Waker`]. An executor can define an
/// implementation of this trait, and use that to construct a Waker to pass
/// to the tasks that are executed on that executor.
///
/// This trait is a memory-safe and ergonomic alternative to constructing a
/// [`RawWaker`]. It supports the common executor design in which the data used
/// to wake up a task is stored in an [`Arc`]. Some executors (especially
/// those for embedded systems) cannot use this API, which is why [`RawWaker`]
/// exists as an alternative for those systems.
#[unstable(feature = "wake_trait", issue = "69912")]
pub trait Wake {
    /// Wake this task.
    ///
    /// Consumes this strong reference to the task's wake data.
    #[unstable(feature = "wake_trait", issue = "69912")]
    fn wake(self: Arc<Self>);

    /// Wake this task without consuming the waker.
    ///
    /// If an executor supports a cheaper way to wake without consuming the
    /// waker, it should override this method. By default, it clones the
    /// [`Arc`] and calls `wake` on the clone.
    #[unstable(feature = "wake_trait", issue = "69912")]
    fn wake_by_ref(self: &Arc<Self>) {
        // Default path: one refcount bump plus the consuming wake.
        self.clone().wake();
    }
}
#[allow(rustc::ineffective_unstable_trait_impl)]
#[unstable(feature = "wake_trait", issue = "69912")]
impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
    /// Converts the executor-owned `Arc` into a [`Waker`] whose vtable
    /// dispatches back to the `Wake` impl (see `raw_waker`).
    fn from(waker: Arc<W>) -> Waker {
        // SAFETY: This is safe because raw_waker safely constructs
        // a RawWaker from Arc<W>.
        unsafe { Waker::from_raw(raw_waker(waker)) }
    }
}
#[allow(rustc::ineffective_unstable_trait_impl)]
#[unstable(feature = "wake_trait", issue = "69912")]
impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
    /// Same conversion as `From<Arc<W>> for Waker`, but exposing the raw
    /// waker for executors that need it directly.
    fn from(waker: Arc<W>) -> RawWaker {
        raw_waker(waker)
    }
}
// NB: This private function for constructing a RawWaker is used, rather than
// inlining this into the `From<Arc<W>> for RawWaker` impl, to ensure that
// the safety of `From<Arc<W>> for Waker` does not depend on the correct
// trait dispatch - instead both impls call this function directly and
// explicitly.
#[inline(always)]
fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
    // Increment the reference count of the arc to clone it.
    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
        unsafe { Arc::incr_strong_count(waker as *const W) };
        // NOTE: this vtable must stay identical to the one built at the
        // bottom of `raw_waker`, so cloned wakers behave the same.
        RawWaker::new(
            waker as *const (),
            &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
        )
    }

    // Wake by value, moving the Arc into the Wake::wake function
    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        let waker = unsafe { Arc::from_raw(waker as *const W) };
        <W as Wake>::wake(waker);
    }

    // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        let waker = unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
        <W as Wake>::wake_by_ref(&waker);
    }

    // Decrement the reference count of the Arc on drop
    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
        unsafe { Arc::decr_strong_count(waker as *const W) };
    }

    // Hand ownership of one strong count to the RawWaker; `drop_waker`
    // releases it when the waker is dropped.
    RawWaker::new(
        Arc::into_raw(waker) as *const (),
        &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
    )
}
7606fef0cf78077cdaa5ec26ad0aeef84b1ad75c | 32,317 | // Copyright 2018 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Simple data-oriented GUI.
#![deny(intra_doc_link_resolution_failure, unsafe_code)]
pub use druid_shell::{self as shell, kurbo, piet};
mod app;
mod app_delegate;
pub mod command;
mod data;
mod env;
mod event;
mod lens;
pub mod localization;
pub mod menu;
pub mod theme;
pub mod widget;
mod win_handler;
mod window;
use std::collections::VecDeque;
use std::ops::{Deref, DerefMut};
use std::time::Instant;
use log::{error, warn};
use kurbo::{Affine, Point, Rect, Shape, Size, Vec2};
use piet::{Piet, RenderContext};
pub use unicode_segmentation;
// TODO: remove these unused annotations when we wire these up; they're
// placeholders for functionality not yet implemented.
#[allow(unused)]
use druid_shell::application::Application;
pub use druid_shell::clipboard::ClipboardItem;
pub use druid_shell::dialog::{FileDialogOptions, FileDialogType};
pub use druid_shell::keyboard::{KeyCode, KeyEvent, KeyModifiers};
#[allow(unused)]
use druid_shell::platform::IdleHandle;
pub use druid_shell::window::{Cursor, MouseButton, MouseEvent, TimerToken};
use druid_shell::window::{Text, WinCtx, WindowHandle};
pub use shell::hotkey::{HotKey, RawMods, SysMods};
pub use app::{AppLauncher, WindowDesc};
pub use app_delegate::{AppDelegate, DelegateCtx};
pub use command::{Command, Selector};
pub use data::Data;
pub use env::{Env, Key, Value};
pub use event::{Event, WheelEvent};
pub use lens::{Lens, LensWrap};
pub use localization::LocalizedString;
pub use menu::MenuDesc;
pub use win_handler::DruidHandler;
pub use window::{Window, WindowId};
/// A container for one widget in the hierarchy.
///
/// Generally, container widgets don't contain other widgets directly,
/// but rather contain a `WidgetPod`, which has additional state needed
/// for layout and for the widget to participate in event flow.
///
/// This struct also contains the previous data for a widget, which is
/// essential for the [`update`] method, both to decide when the update
/// needs to propagate, and to provide the previous data so that a
/// widget can process a diff between the old value and the new.
///
/// [`update`]: trait.Widget.html#tymethod.update
pub struct WidgetPod<T: Data, W: Widget<T>> {
    // Layout rect, hot/active/focus flags, and pending requests for this node.
    state: BaseState,
    // Data as of the last `update` pass; `None` before the first update.
    old_data: Option<T>,
    // Env as of the last `update` pass; used to skip no-op updates.
    env: Option<Env>,
    // The wrapped widget itself.
    inner: W,
}
/// Convenience type for a dynamically boxed widget: a [`WidgetPod`] whose
/// inner widget is a `Box<dyn Widget<T>>` trait object (see
/// [`WidgetPod::boxed`](struct.WidgetPod.html#method.boxed)).
pub type BoxedWidget<T> = WidgetPod<T, Box<dyn Widget<T>>>;
/// Generic state for all widgets in the hierarchy.
///
/// This struct contains the widget's layout rect, flags
/// indicating when the widget is active or focused, and other
/// state necessary for the widget to participate in event
/// flow.
///
/// It is provided to [`paint`] calls as a non-mutable reference,
/// largely so a widget can know its size, also because active
/// and focus state can affect the widget's appearance. Other than
/// that, widgets will generally not interact with it directly,
/// but it is an important part of the [`WidgetPod`] struct.
///
/// [`paint`]: trait.Widget.html#tymethod.paint
/// [`WidgetPod`]: struct.WidgetPod.html
#[derive(Default)]
pub struct BaseState {
    // The widget's layout rect in the parent's coordinate space, as set by
    // the parent via `WidgetPod::set_layout_rect`.
    layout_rect: Rect,
    // TODO: consider using bitflags for the booleans.
    // This should become an invalidation rect.
    needs_inval: bool,
    // Mouse is currently over this widget's layout rect.
    is_hot: bool,
    // Set via `EventCtx::set_active` (e.g. mouse button held on this widget).
    is_active: bool,
    /// Any descendant is active.
    has_active: bool,
    /// Any descendant has requested an animation frame.
    request_anim: bool,
    /// Any descendant has requested a timer.
    ///
    /// Note: we don't have any way of clearing this request, as it's
    /// likely not worth the complexity.
    request_timer: bool,
    /// This widget or a descendant has focus.
    has_focus: bool,
    /// This widget or a descendant has requested focus.
    request_focus: bool,
}
/// The trait implemented by all widgets.
///
/// All appearance and behavior for a widget is encapsulated in an
/// object that implements this trait.
///
/// The trait is parametrized by a type (`T`) for associated data.
/// All trait methods are provided with access to this data, and
/// in the case of `event` the reference is mutable, so that events
/// can directly update the data.
///
/// Whenever the application data changes, the framework traverses
/// the widget hierarchy with an [`update`] method. The framework
/// needs to know whether the data has actually changed or not, which
/// is why `T` has a [`Data`] bound.
///
/// All the trait methods are provided with a corresponding context.
/// The widget can request things and cause actions by calling methods
/// on that context.
///
/// In addition, all trait methods are provided with an environment
/// ([`Env`](struct.Env.html)).
///
/// Container widgets will generally not call `Widget` methods directly
/// on their child widgets, but rather will own their widget wrapped in
/// a [`WidgetPod`], and call the corresponding method on that. The
/// `WidgetPod` contains state and logic for these traversals. On the
/// other hand, particularly light-weight containers might contain their
/// child `Widget` directly (when no layout or event flow logic is
/// needed), and in those cases will call these methods.
///
/// As a general pattern, container widgets will call the corresponding
/// `WidgetPod` method on all their children. The `WidgetPod` applies
/// logic to determine whether to recurse, as needed.
///
/// [`event`]: #tymethod.event
/// [`update`]: #tymethod.update
/// [`Data`]: trait.Data.html
/// [`WidgetPod`]: struct.WidgetPod.html
pub trait Widget<T> {
    /// Paint the widget appearance.
    ///
    /// The widget calls methods on the `render_ctx` field of the
    /// `paint_ctx` in order to paint its appearance. `paint_ctx` auto
    /// derefs to `render_ctx` for convenience.
    ///
    /// Container widgets can paint a background before recursing to their
    /// children, or annotations (for example, scrollbars) by painting
    /// afterwards. In addition, they can apply masks and transforms on
    /// the render context, which is especially useful for scrolling.
    ///
    /// `base_state` gives read access to the widget's size and its
    /// hot/active/focus flags, which commonly affect appearance.
    fn paint(&mut self, paint_ctx: &mut PaintCtx, base_state: &BaseState, data: &T, env: &Env);
    /// Compute layout.
    ///
    /// A leaf widget should determine its size (subject to the provided
    /// constraints) and return it.
    ///
    /// A container widget will recursively call [`WidgetPod::layout`] on its
    /// child widgets, providing each of them an appropriate box constraint,
    /// compute layout, then call [`set_layout_rect`] on each of its children.
    /// Finally, it should return the size of the container. The container
    /// can recurse in any order, which can be helpful to, for example, compute
    /// the size of non-flex widgets first, to determine the amount of space
    /// available for the flex widgets.
    ///
    /// For efficiency, a container should only invoke layout of a child widget
    /// once, though there is nothing enforcing this.
    ///
    /// The layout strategy is strongly inspired by Flutter.
    ///
    /// [`WidgetPod::layout`]: struct.WidgetPod.html#method.layout
    /// [`set_layout_rect`]: struct.LayoutCtx.html#method.set_layout_rect
    fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size;
    /// Handle an event.
    ///
    /// A number of different events (in the [`Event`] enum) are handled in this
    /// method call. A widget can handle these events in a number of ways:
    /// requesting things from the [`EventCtx`], mutating the data, or submitting
    /// a [`Command`].
    ///
    /// Note that `data` is `&mut` here: event handlers update application
    /// state directly, and the framework then runs an [`update`] pass.
    ///
    /// [`Event`]: struct.Event.html
    /// [`EventCtx`]: struct.EventCtx.html
    /// [`Command`]: struct.Command.html
    /// [`update`]: #tymethod.update
    fn event(&mut self, event: &Event, ctx: &mut EventCtx, data: &mut T, env: &Env);
    /// Handle a change of data.
    ///
    /// This method is called whenever the data changes. When the appearance of
    /// the widget depends on data, call [`invalidate`] so that it's scheduled
    /// for repaint.
    ///
    /// The previous value of the data is provided in case the widget wants to
    /// compute a fine-grained delta. Before any paint operation, this method
    /// will be called with `None` for `old_data`. Thus, this method can also be
    /// used to build resources that will be retained for painting.
    ///
    /// [`invalidate`]: struct.UpdateCtx.html#method.invalidate
    // Consider a no-op default impl. One reason against is that containers might
    // inadvertently forget to propagate.
    fn update(&mut self, ctx: &mut UpdateCtx, old_data: Option<&T>, data: &T, env: &Env);
}
// TODO: explore getting rid of this (ie be consistent about using
// `dyn Widget` only).
impl<T> Widget<T> for Box<dyn Widget<T>> {
    /// Forward to the boxed widget. Each method dereferences the box
    /// explicitly and delegates unchanged.
    fn paint(&mut self, paint_ctx: &mut PaintCtx, base_state: &BaseState, data: &T, env: &Env) {
        (**self).paint(paint_ctx, base_state, data, env);
    }
    fn layout(&mut self, ctx: &mut LayoutCtx, bc: &BoxConstraints, data: &T, env: &Env) -> Size {
        (**self).layout(ctx, bc, data, env)
    }
    fn event(&mut self, event: &Event, ctx: &mut EventCtx, data: &mut T, env: &Env) {
        (**self).event(event, ctx, data, env)
    }
    fn update(&mut self, ctx: &mut UpdateCtx, old_data: Option<&T>, data: &T, env: &Env) {
        (**self).update(ctx, old_data, data, env);
    }
}
/// A context passed to paint methods of widgets.
///
/// Widgets paint their appearance by calling methods on the
/// `render_ctx`, which PaintCtx derefs to for convenience.
/// This struct is expected to grow, for example to include the
/// "damage region" indicating that only a subset of the entire
/// widget hierarchy needs repainting.
pub struct PaintCtx<'a, 'b: 'a> {
    /// The render context for actually painting.
    pub render_ctx: &'a mut Piet<'b>,
    /// Id of the window currently being painted.
    pub window_id: WindowId,
}
impl<'a, 'b: 'a> Deref for PaintCtx<'a, 'b> {
    type Target = Piet<'b>;
    /// Immutably reborrow the wrapped render context, so `PaintCtx`
    /// exposes all of `Piet`'s read-only methods directly.
    fn deref(&self) -> &Self::Target {
        &*self.render_ctx
    }
}
impl<'a, 'b: 'a> DerefMut for PaintCtx<'a, 'b> {
    /// Mutably reborrow the wrapped render context for painting calls.
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut *self.render_ctx
    }
}
/// A context provided to layout handling methods of widgets.
///
/// As of now, the main service provided is access to a factory for
/// creating text layout objects, which are likely to be useful
/// during widget layout.
pub struct LayoutCtx<'a, 'b: 'a> {
    // Factory for creating text layouts during layout.
    text_factory: &'a mut Text<'b>,
    // Id of the window whose widget tree is being laid out.
    window_id: WindowId,
}
/// A mutable context provided to event handling methods of widgets.
///
/// Widgets should call [`invalidate`] whenever an event causes a change
/// in the widget's appearance, to schedule a repaint.
///
/// [`invalidate`]: #method.invalidate
pub struct EventCtx<'a, 'b> {
    // Note: there's a bunch of state that's just passed down, might
    // want to group that into a single struct.
    win_ctx: &'a mut dyn WinCtx<'b>,
    // The cursor the window should show; last `set_cursor` in a pass wins.
    cursor: &'a mut Option<Cursor>,
    /// Commands submitted to be run after this event.
    command_queue: &'a mut VecDeque<(WindowId, Command)>,
    window_id: WindowId,
    // TODO: migrate most usage of `WindowHandle` to `WinCtx` instead.
    window: &'a WindowHandle,
    // State of the widget this context belongs to; flags set here are
    // merged upward by `WidgetPod::event`.
    base_state: &'a mut BaseState,
    // Whether the parent had an active descendant before this event; used
    // to decide recursion for mouse events in `WidgetPod::event`.
    had_active: bool,
    // Set by `set_handled` to stop further propagation.
    is_handled: bool,
    // True only at the root of the widget tree; `Size`/`OpenFile` events
    // recurse only when set.
    is_root: bool,
}
/// A mutable context provided to data update methods of widgets.
///
/// Widgets should call [`invalidate`] whenever a data change causes a change
/// in the widget's appearance, to schedule a repaint.
///
/// [`invalidate`]: #method.invalidate
pub struct UpdateCtx<'a, 'b: 'a> {
    // Factory for creating text layouts during update.
    text_factory: &'a mut Text<'b>,
    window: &'a WindowHandle,
    // Discussion: we probably want to propagate more fine-grained
    // invalidations, which would mean a structure very much like
    // `EventCtx` (and possibly using the same structure). But for
    // now keep it super-simple.
    needs_inval: bool,
    window_id: WindowId,
}
/// Constraints for layout.
///
/// The layout strategy for druid is strongly inspired by Flutter,
/// and this struct is similar to the [Flutter BoxConstraints] class.
///
/// At the moment, it represents simply a minimum and maximum size.
/// A widget's [`layout`] method should choose an appropriate size that
/// meets these constraints.
///
/// Further, a container widget should compute appropriate constraints
/// for each of its child widgets, and pass those down when recursing.
///
/// [`layout`]: trait.Widget.html#tymethod.layout
/// [Flutter BoxConstraints]: https://api.flutter.dev/flutter/rendering/BoxConstraints-class.html
#[derive(Clone, Copy, Debug)]
pub struct BoxConstraints {
    // Smallest acceptable size.
    min: Size,
    // Largest acceptable size; may be infinite in either dimension.
    max: Size,
}
impl<T: Data, W: Widget<T>> WidgetPod<T, W> {
/// Create a new widget pod.
///
/// In a widget hierarchy, each widget is wrapped in a `WidgetPod`
/// so it can participate in layout and event flow. The process of
/// adding a child widget to a container should call this method.
pub fn new(inner: W) -> WidgetPod<T, W> {
WidgetPod {
state: Default::default(),
old_data: None,
env: None,
inner,
}
}
/// Set layout rectangle.
///
/// Intended to be called on child widget in container's `layout`
/// implementation.
pub fn set_layout_rect(&mut self, layout_rect: Rect) {
self.state.layout_rect = layout_rect;
}
/// Get the layout rectangle.
///
/// This will be same value as set by `set_layout_rect`.
pub fn get_layout_rect(&self) -> Rect {
self.state.layout_rect
}
/// Paint a child widget.
///
/// Generally called by container widgets as part of their [`paint`]
/// method.
///
/// Note that this method does not apply the offset of the layout rect.
/// If that is desired, use [`paint_with_offset`] instead.
///
/// [`layout`]: trait.Widget.html#method.layout
/// [`paint`]: trait.Widget.html#method.paint
/// [`paint_with_offset`]: #method.paint_with_offset
pub fn paint(&mut self, paint_ctx: &mut PaintCtx, data: &T, env: &Env) {
self.inner.paint(paint_ctx, &self.state, data, &env);
}
/// Paint the widget, translating it by the origin of its layout rectangle.
// Discussion: should this be `paint` and the other `paint_raw`?
pub fn paint_with_offset(&mut self, paint_ctx: &mut PaintCtx, data: &T, env: &Env) {
if let Err(e) = paint_ctx.save() {
error!("saving render context failed: {:?}", e);
return;
}
paint_ctx.transform(Affine::translate(self.state.layout_rect.origin().to_vec2()));
self.paint(paint_ctx, data, env);
if let Err(e) = paint_ctx.restore() {
error!("restoring render context failed: {:?}", e);
}
}
/// Compute layout of a widget.
///
/// Generally called by container widgets as part of their [`layout`]
/// method.
///
/// [`layout`]: trait.Widget.html#method.layout
pub fn layout(
&mut self,
layout_ctx: &mut LayoutCtx,
bc: &BoxConstraints,
data: &T,
env: &Env,
) -> Size {
self.inner.layout(layout_ctx, bc, data, &env)
}
/// Propagate an event.
///
/// Generally the [`event`] method of a container widget will call this
/// method on all its children. Here is where a great deal of the event
/// flow logic resides, particularly whether to continue propagating
/// the event.
///
/// [`event`]: trait.Widget.html#method.event
pub fn event(&mut self, event: &Event, ctx: &mut EventCtx, data: &mut T, env: &Env) {
// TODO: factor as much logic as possible into monomorphic functions.
if ctx.is_handled || !event.recurse() {
// This function is called by containers to propagate an event from
// containers to children. Non-recurse events will be invoked directly
// from other points in the library.
return;
}
let had_active = self.state.has_active;
let mut child_ctx = EventCtx {
win_ctx: ctx.win_ctx,
cursor: ctx.cursor,
command_queue: ctx.command_queue,
window: &ctx.window,
window_id: ctx.window_id,
base_state: &mut self.state,
had_active,
is_handled: false,
is_root: false,
};
let rect = child_ctx.base_state.layout_rect;
// Note: could also represent this as `Option<Event>`.
let mut recurse = true;
let mut hot_changed = None;
let child_event = match event {
Event::OpenFile(file) => {
recurse = ctx.is_root;
Event::OpenFile(file.clone())
}
Event::Size(size) => {
recurse = ctx.is_root;
Event::Size(*size)
}
Event::MouseDown(mouse_event) => {
recurse = had_active || !ctx.had_active && rect.winding(mouse_event.pos) != 0;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseDown(mouse_event)
}
Event::MouseUp(mouse_event) => {
recurse = had_active || !ctx.had_active && rect.winding(mouse_event.pos) != 0;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseUp(mouse_event)
}
Event::MouseMoved(mouse_event) => {
let had_hot = child_ctx.base_state.is_hot;
child_ctx.base_state.is_hot = rect.winding(mouse_event.pos) != 0;
if had_hot != child_ctx.base_state.is_hot {
hot_changed = Some(child_ctx.base_state.is_hot);
}
recurse = had_active || had_hot || child_ctx.base_state.is_hot;
let mut mouse_event = mouse_event.clone();
mouse_event.pos -= rect.origin().to_vec2();
Event::MouseMoved(mouse_event)
}
Event::KeyDown(e) => {
recurse = child_ctx.base_state.has_focus;
Event::KeyDown(*e)
}
Event::KeyUp(e) => {
recurse = child_ctx.base_state.has_focus;
Event::KeyUp(*e)
}
Event::Paste(e) => {
recurse = child_ctx.base_state.has_focus;
Event::Paste(e.clone())
}
Event::Wheel(wheel_event) => {
recurse = had_active || child_ctx.base_state.is_hot;
Event::Wheel(wheel_event.clone())
}
Event::HotChanged(is_hot) => Event::HotChanged(*is_hot),
Event::FocusChanged(_is_focused) => {
let had_focus = child_ctx.base_state.has_focus;
let focus = child_ctx.base_state.request_focus;
child_ctx.base_state.request_focus = false;
child_ctx.base_state.has_focus = focus;
recurse = focus || had_focus;
Event::FocusChanged(focus)
}
Event::AnimFrame(interval) => {
recurse = child_ctx.base_state.request_anim;
child_ctx.base_state.request_anim = false;
Event::AnimFrame(*interval)
}
Event::Timer(id) => {
recurse = child_ctx.base_state.request_timer;
Event::Timer(*id)
}
Event::Command(cmd) => Event::Command(cmd.clone()),
};
child_ctx.base_state.needs_inval = false;
if let Some(is_hot) = hot_changed {
let hot_changed_event = Event::HotChanged(is_hot);
self.inner
.event(&hot_changed_event, &mut child_ctx, data, &env);
}
if recurse {
child_ctx.base_state.has_active = false;
self.inner.event(&child_event, &mut child_ctx, data, &env);
child_ctx.base_state.has_active |= child_ctx.base_state.is_active;
};
ctx.base_state.needs_inval |= child_ctx.base_state.needs_inval;
ctx.base_state.request_anim |= child_ctx.base_state.request_anim;
ctx.base_state.request_timer |= child_ctx.base_state.request_timer;
ctx.base_state.is_hot |= child_ctx.base_state.is_hot;
ctx.base_state.has_active |= child_ctx.base_state.has_active;
ctx.base_state.request_focus |= child_ctx.base_state.request_focus;
ctx.is_handled |= child_ctx.is_handled;
}
/// Propagate a data update.
///
/// Generally called by container widgets as part of their [`update`]
/// method.
///
/// [`update`]: trait.Widget.html#method.update
pub fn update(&mut self, ctx: &mut UpdateCtx, data: &T, env: &Env) {
let data_same = if let Some(ref old_data) = self.old_data {
old_data.same(data)
} else {
false
};
let env_same = if let Some(ref old_env) = self.env {
old_env.same(env)
} else {
false
};
if data_same && env_same {
return;
}
self.inner.update(ctx, self.old_data.as_ref(), data, env);
self.old_data = Some(data.clone());
self.env = Some(env.clone());
}
}
impl<T: Data, W: Widget<T> + 'static> WidgetPod<T, W> {
    /// Box the contained widget.
    ///
    /// Erases the concrete widget type, yielding a pod holding a trait
    /// object. Layout state, cached data, and env are carried over.
    pub fn boxed(self) -> BoxedWidget<T> {
        let WidgetPod {
            state,
            old_data,
            env,
            inner,
        } = self;
        WidgetPod {
            state,
            old_data,
            env,
            inner: Box::new(inner) as Box<dyn Widget<T>>,
        }
    }
}
impl BaseState {
    /// The "hot" (aka hover) status of a widget.
    ///
    /// A widget is "hot" when the mouse is hovered over it. Widgets will
    /// often change their appearance as a visual indication that they
    /// will respond to mouse interaction.
    ///
    /// The hot status is computed from the widget's layout rect. In a
    /// container hierarchy, all widgets with layout rects containing the
    /// mouse position have hot status.
    ///
    /// Discussion: there is currently some confusion about whether a
    /// widget can be considered hot when some other widget is active (for
    /// example, when clicking to one widget and dragging to the next).
    /// The documentation should clearly state the resolution.
    pub fn is_hot(&self) -> bool {
        self.is_hot
    }
    /// The active status of a widget.
    ///
    /// Active status generally corresponds to a mouse button down. Widgets
    /// with behavior similar to a button will call [`set_active`] on mouse
    /// down and then up.
    ///
    /// When a widget is active, it gets mouse events even when the mouse
    /// is dragged away.
    ///
    /// [`set_active`]: struct.EventCtx.html#method.set_active
    pub fn is_active(&self) -> bool {
        self.is_active
    }
    /// The focus status of a widget.
    ///
    /// Focus means that the widget receives keyboard events.
    ///
    /// A widget can request focus using the [`request_focus`] method.
    /// This will generally result in a separate event propagation of
    /// a `FocusChanged` method, including sending `false` to the previous
    /// widget that held focus.
    ///
    /// Only one leaf widget at a time has focus. However, in a container
    /// hierarchy, all ancestors of that leaf widget are also invoked with
    /// `FocusChanged(true)`.
    ///
    /// Discussion question: is "is_focused" a better name?
    ///
    /// [`request_focus`]: struct.EventCtx.html#method.request_focus
    pub fn has_focus(&self) -> bool {
        self.has_focus
    }
    /// The layout size.
    ///
    /// This is the layout size as ultimately determined by the parent
    /// container. Generally it will be the same as the size returned by
    /// the child widget's [`layout`] method.
    ///
    /// [`layout`]: trait.Widget.html#tymethod.layout
    // Derived from `layout_rect`, which the parent sets via
    // `WidgetPod::set_layout_rect`.
    pub fn size(&self) -> Size {
        self.layout_rect.size()
    }
}
impl BoxConstraints {
/// Create a new box constraints object.
///
/// Create constraints based on minimum and maximum size.
pub fn new(min: Size, max: Size) -> BoxConstraints {
BoxConstraints { min, max }
}
/// Create a "tight" box constraints object.
///
/// A "tight" constraint can only be satisfied by a single size.
pub fn tight(size: Size) -> BoxConstraints {
BoxConstraints {
min: size,
max: size,
}
}
/// Create a "loose" version of the constraints.
///
/// Make a version with zero minimum size, but the same maximum size.
pub fn loosen(&self) -> BoxConstraints {
BoxConstraints {
min: Size::ZERO,
max: self.max,
}
}
/// Clamp a given size so that fits within the constraints.
pub fn constrain(&self, size: impl Into<Size>) -> Size {
size.into().clamp(self.min, self.max)
}
/// Returns the max size of these constraints.
pub fn max(&self) -> Size {
self.max
}
/// Returns the min size of these constraints.
pub fn min(&self) -> Size {
self.min
}
/// Whether there is an upper bound on the width.
pub fn is_width_bounded(&self) -> bool {
self.max.width.is_finite()
}
/// Whether there is an upper bound on the height.
pub fn is_height_bounded(&self) -> bool {
self.max.height.is_finite()
}
/// Check to see if these constraints are legit.
pub fn check(&self, name: &str) {
if !(0.0 <= self.min.width && self.min.width <= self.max.width)
|| !(0.0 <= self.min.height && self.min.height <= self.max.height)
{
warn!("Bad BoxConstraints passed to {}:", name);
warn!("{:?}", self);
}
}
}
impl<'a, 'b> EventCtx<'a, 'b> {
    /// Schedule a repaint.
    ///
    /// For now this invalidates the entire window. The request is recorded
    /// on the widget's `BaseState` and propagated upward (rather than sent
    /// straight to the window) so that fine-grained invalidation regions
    /// can be computed later.
    pub fn invalidate(&mut self) {
        self.base_state.needs_inval = true;
    }
    /// An object that can create text layouts.
    pub fn text(&mut self) -> &mut Text<'b> {
        self.win_ctx.text_factory()
    }
    /// Set the cursor icon.
    ///
    /// Within one event propagation pass, only the last call takes effect,
    /// so a container can safely set a cursor and then recurse to its
    /// children without flicker. Typically called while handling
    /// [`MouseMoved`], but valid from any event handler (e.g. a key press
    /// changing a widget's mode).
    ///
    /// [`MouseMoved`]: enum.Event.html#variant.MouseDown
    pub fn set_cursor(&mut self, cursor: &Cursor) {
        *self.cursor = Some(cursor.clone());
    }
    /// Set the "active" state of the widget.
    ///
    /// See [`BaseState::is_active`](struct.BaseState.html#method.is_hot).
    pub fn set_active(&mut self, active: bool) {
        // TODO: plumb mouse grab through to platform (through druid-shell)
        self.base_state.is_active = active;
    }
    /// Query the "hot" state of the widget.
    ///
    /// See [`BaseState::is_hot`](struct.BaseState.html#method.is_hot).
    pub fn is_hot(&self) -> bool {
        self.base_state.is_hot
    }
    /// Query the "active" state of the widget, as set via
    /// [`set_active`](#method.set_active).
    pub fn is_active(&self) -> bool {
        self.base_state.is_active
    }
    /// A reference to the current `WindowHandle`.
    ///
    /// Note: functionality is gradually migrating from `WindowHandle` to
    /// `WinCtx`; prefer extending `WinCtx` over adding new uses of this.
    pub fn window(&self) -> &WindowHandle {
        self.window
    }
    /// Mark the event as handled, which stops its propagation to other
    /// widgets.
    pub fn set_handled(&mut self) {
        self.is_handled = true;
    }
    /// Whether some widget has already handled this event.
    pub fn is_handled(&self) -> bool {
        self.is_handled
    }
    /// Query the focus state of the widget.
    ///
    /// See [`BaseState::has_focus`](struct.BaseState.html#method.has_focus).
    pub fn has_focus(&self) -> bool {
        self.base_state.has_focus
    }
    /// Request keyboard focus.
    ///
    /// Discussion question: is method needed in contexts other than event?
    pub fn request_focus(&mut self) {
        self.base_state.request_focus = true;
    }
    /// Request an animation frame.
    pub fn request_anim_frame(&mut self) {
        self.base_state.request_anim = true;
    }
    /// Request a timer event at `deadline`.
    ///
    /// The returned token identifies the matching timer event when it
    /// fires.
    pub fn request_timer(&mut self, deadline: Instant) -> TimerToken {
        self.base_state.request_timer = true;
        self.win_ctx.request_timer(deadline)
    }
    /// The layout size of the current widget.
    pub fn size(&self) -> Size {
        self.base_state.size()
    }
    /// Submit a [`Command`] to be run after this event is handled.
    ///
    /// Commands run in submission order, and all commands submitted while
    /// handling an event execute before the [`update()`] pass. Passing
    /// `None` for `window_id` targets the current window.
    ///
    /// [`Command`]: struct.Command.html
    /// [`update()`]: trait.Widget.html#tymethod.update
    pub fn submit_command(&mut self, command: Command, window_id: impl Into<Option<WindowId>>) {
        let target = match window_id.into() {
            Some(id) => id,
            None => self.window_id,
        };
        self.command_queue.push_back((target, command))
    }
    /// Get the window id.
    pub fn window_id(&self) -> WindowId {
        self.window_id
    }
}
impl<'a, 'b> LayoutCtx<'a, 'b> {
    /// Get an object which can create text layouts.
    pub fn text(&mut self) -> &mut Text<'b> {
        // `&mut` on an already-`&mut` field; this yields `&mut &mut Text`
        // which deref-coerces to the `&mut Text<'b>` return type.
        &mut self.text_factory
    }
    /// Get the window id.
    pub fn window_id(&self) -> WindowId {
        self.window_id
    }
}
impl<'a, 'b> UpdateCtx<'a, 'b> {
    /// Invalidate.
    ///
    /// See [`EventCtx::invalidate`](struct.EventCtx.html#method.invalidate) for
    /// more discussion.
    pub fn invalidate(&mut self) {
        self.needs_inval = true;
    }
    /// Get an object which can create text layouts.
    pub fn text(&mut self) -> &mut Text<'b> {
        self.text_factory
    }
    /// Returns a reference to the current `WindowHandle`.
    ///
    /// Note: For the most part we're trying to migrate `WindowHandle`
    /// functionality to `WinCtx`, but the update flow is the exception, as
    /// it's shared across multiple windows.
    pub fn window(&self) -> &WindowHandle {
        &self.window
    }
    /// Get the window id.
    pub fn window_id(&self) -> WindowId {
        self.window_id
    }
}
| 35.987751 | 97 | 0.630659 |
fb69f961f907371329a5855e4fb05728bfcb2b7c | 14,636 | use crate::mechanics::Queue;
use crate::{
Event, ParkingSimState, ParkingSpot, PersonID, SidewalkSpot, TripID, TripPhaseType, Vehicle,
};
use geom::Distance;
use map_model::{
BuildingID, IntersectionID, LaneID, Map, Path, PathConstraints, PathRequest, PathStep,
Position, Traversable, TurnID, TurnType,
};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
pub struct Router {
    // Front is always the current step
    path: Path,
    // What to do once the path has been traversed (park, exit the map, ...).
    goal: Goal,
}
// What a vehicle should do when it reaches the end of its route, as decided
// by `Router::maybe_handle_end`.
#[derive(Debug)]
pub enum ActionAtEnd {
    // Leave the map at this border intersection.
    VanishAtBorder(IntersectionID),
    // Begin occupying this parking spot.
    StartParking(ParkingSpot),
    // NOTE(review): presumably "keep driving to the end of the current
    // lane" (e.g. towards newly chosen parking) — confirm against callers.
    GotoLaneEnd,
    // Dismount and continue the trip on foot from this sidewalk spot.
    StopBiking(SidewalkSpot),
    // The bus has arrived at a stop.
    BusAtStop,
    // No parking was found anywhere (see Goal::ParkNearBuilding's
    // stuck_end_dist).
    GiveUpOnParking,
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]
enum Goal {
    // Spot and cached distance along the last driving lane
    // TODO Right now, the building is ignored when choosing the best spot.
    ParkNearBuilding {
        target: BuildingID,
        spot: Option<(ParkingSpot, Distance)>,
        // No parking available at all!
        stuck_end_dist: Option<Distance>,
        // Set once the driver has actually begun scanning for free spots
        // (in maybe_handle_end).
        started_looking: bool,
    },
    // Drive off the map at border intersection `i`, `end_dist` along the
    // final lane.
    EndAtBorder {
        end_dist: Distance,
        i: IntersectionID,
    },
    // Bike to `end_dist` on the last lane, then stop and continue on foot.
    BikeThenStop {
        end_dist: Distance,
    },
    // Bus route: stop `end_dist` along the final step.
    FollowBusRoute {
        end_dist: Distance,
    },
}
impl Router {
pub fn end_at_border(path: Path, end_dist: Distance, i: IntersectionID) -> Router {
Router {
path,
goal: Goal::EndAtBorder { end_dist, i },
}
}
pub fn vanish_bus(l: LaneID, map: &Map) -> Router {
let lane = map.get_l(l);
Router {
path: Path::one_step(l, map),
goal: Goal::EndAtBorder {
end_dist: lane.length(),
i: lane.dst_i,
},
}
}
pub fn park_near(path: Path, bldg: BuildingID) -> Router {
Router {
path,
goal: Goal::ParkNearBuilding {
target: bldg,
spot: None,
stuck_end_dist: None,
started_looking: false,
},
}
}
pub fn bike_then_stop(path: Path, end_dist: Distance, map: &Map) -> Option<Router> {
let last_lane = path.get_steps().iter().last().unwrap().as_lane();
if map
.get_parent(last_lane)
.bike_to_sidewalk(last_lane)
.is_some()
{
Some(Router {
path,
goal: Goal::BikeThenStop { end_dist },
})
} else {
println!("{} is the end of a bike route, with no sidewalk", last_lane);
None
}
}
pub fn follow_bus_route(path: Path, end_dist: Distance) -> Router {
Router {
path,
goal: Goal::FollowBusRoute { end_dist },
}
}
    /// The traversable (lane or turn) the vehicle currently occupies.
    pub fn head(&self) -> Traversable {
        self.path.current_step().as_traversable()
    }
    /// The traversable after the current step. Presumably only valid when
    /// this isn't the last step — use `maybe_next` for the checked variant.
    pub fn next(&self) -> Traversable {
        self.path.next_step().as_traversable()
    }
    /// Like `next`, but returns None on the last step instead of asking the
    /// path for a step that doesn't exist.
    pub fn maybe_next(&self) -> Option<Traversable> {
        if self.last_step() {
            None
        } else {
            Some(self.path.next_step().as_traversable())
        }
    }
    /// True when the current step is the final one in the path.
    pub fn last_step(&self) -> bool {
        self.path.is_last_step()
    }
    /// Distance along the final step where the trip actually ends. Only
    /// meaningful — and only allowed — once `last_step()` is true.
    pub fn get_end_dist(&self) -> Distance {
        // Shouldn't ask earlier!
        assert!(self.last_step());
        match self.goal {
            Goal::EndAtBorder { end_dist, .. } => end_dist,
            Goal::ParkNearBuilding {
                spot,
                stuck_end_dist,
                ..
            // If we're not stuck, a spot must have been chosen by now
            // (panics otherwise) — invariant maintained by maybe_handle_end.
            } => stuck_end_dist.unwrap_or_else(|| spot.unwrap().1),
            Goal::BikeThenStop { end_dist } => end_dist,
            Goal::FollowBusRoute { end_dist } => end_dist,
        }
    }
    /// The full path being followed.
    pub fn get_path(&self) -> &Path {
        &self.path
    }
// Returns the step just finished
    // Shift to the next step of the path, returning the step just finished.
    // When this lands on the last step, it also kicks off the search for
    // parking (via maybe_handle_end's side effects).
    pub fn advance(
        &mut self,
        vehicle: &Vehicle,
        parking: &ParkingSimState,
        map: &Map,
        trip_and_person: Option<(TripID, PersonID)>,
        events: &mut Vec<Event>,
    ) -> Traversable {
        let prev = self.path.shift(map).as_traversable();
        if self.last_step() {
            // Do this to trigger the side-effect of looking for parking.
            self.maybe_handle_end(
                Distance::ZERO,
                vehicle,
                parking,
                map,
                trip_and_person,
                events,
            );
        }
        // Sanity check laws haven't been broken
        if let Traversable::Lane(l) = self.head() {
            let lane = map.get_l(l);
            if !vehicle.vehicle_type.to_constraints().can_use(lane, map) {
                panic!(
                    "{} just wound up on {}, a {:?} (check the OSM tags)",
                    vehicle.id, l, lane.lane_type
                );
            }
        }
        prev
    }
    // Called when the car is Queued at the last step, or when they initially advance to the last
    // step.
    //
    // Decides what happens at the end of the path based on the goal. For
    // parking, this mutates the goal's state: it (re)reserves a spot, may
    // extend the path to reach free parking elsewhere, or records a "stuck"
    // distance when no parking is reachable. Returns None when there's
    // nothing to do yet (the car hasn't reached the relevant distance).
    pub fn maybe_handle_end(
        &mut self,
        front: Distance,
        vehicle: &Vehicle,
        parking: &ParkingSimState,
        map: &Map,
        // TODO Not so nice to plumb all of this here
        trip_and_person: Option<(TripID, PersonID)>,
        events: &mut Vec<Event>,
    ) -> Option<ActionAtEnd> {
        match self.goal {
            Goal::EndAtBorder { end_dist, i } => {
                if end_dist == front {
                    Some(ActionAtEnd::VanishAtBorder(i))
                } else {
                    None
                }
            }
            Goal::ParkNearBuilding {
                ref mut spot,
                ref mut stuck_end_dist,
                target,
                ref mut started_looking,
            } => {
                // Already gave up on finding parking; once the car reaches the
                // stuck point, bail out entirely.
                if let Some(d) = stuck_end_dist {
                    if *d == front {
                        return Some(ActionAtEnd::GiveUpOnParking);
                    } else {
                        return None;
                    }
                }
                // Re-validate any previously reserved spot -- somebody else
                // may have claimed it since.
                let need_new_spot = match spot {
                    Some((s, _)) => !parking.is_free(*s),
                    None => true,
                };
                if need_new_spot {
                    *started_looking = true;
                    let current_lane = self.path.current_step().as_lane();
                    let candidates = parking.get_all_free_spots(
                        Position::new(current_lane, front),
                        vehicle,
                        target,
                        map,
                    );
                    let best = if let Some(ref p) = map.get_b(target).parking {
                        if p.driving_pos.lane() == current_lane {
                            let target_dist = p.driving_pos.dist_along();
                            // Closest to the building
                            candidates
                                .into_iter()
                                .min_by_key(|(_, pos)| (pos.dist_along() - target_dist).abs())
                        } else {
                            // Closest to the road endpoint, I guess
                            candidates
                                .into_iter()
                                .min_by_key(|(_, pos)| pos.dist_along())
                        }
                    } else {
                        // Closest to the road endpoint, I guess
                        candidates
                            .into_iter()
                            .min_by_key(|(_, pos)| pos.dist_along())
                    };
                    if let Some((new_spot, new_pos)) = best {
                        if let Some((t, p)) = trip_and_person {
                            events.push(Event::TripPhaseStarting(
                                t,
                                p,
                                Some(PathRequest {
                                    start: Position::new(current_lane, front),
                                    end: new_pos,
                                    constraints: PathConstraints::Car,
                                }),
                                TripPhaseType::Parking,
                            ));
                        }
                        *spot = Some((new_spot, new_pos.dist_along()));
                    } else {
                        // Nothing free on the current lane; try to extend the
                        // path towards free parking somewhere reachable.
                        if let Some((new_path_steps, new_spot, new_pos)) =
                            parking.path_to_free_parking_spot(current_lane, vehicle, target, map)
                        {
                            *spot = Some((new_spot, new_pos.dist_along()));
                            for step in new_path_steps {
                                self.path.add(step, map);
                            }
                            events.push(Event::PathAmended(self.path.clone()));
                            // TODO This path might not be the same as the one found here...
                            if let Some((t, p)) = trip_and_person {
                                events.push(Event::TripPhaseStarting(
                                    t,
                                    p,
                                    Some(PathRequest {
                                        start: Position::new(current_lane, front),
                                        end: new_pos,
                                        constraints: PathConstraints::Car,
                                    }),
                                    TripPhaseType::Parking,
                                ));
                            }
                        } else {
                            println!(
                                "WARNING: {} can't find parking on {} or anywhere reachable from \
                                 it. Possibly we're just totally out of parking space!",
                                vehicle.id, current_lane
                            );
                            *stuck_end_dist = Some(map.get_l(current_lane).length());
                        }
                        return Some(ActionAtEnd::GotoLaneEnd);
                    }
                }
                // A spot is reserved at this point; start parking once the
                // car's front reaches it.
                if spot.unwrap().1 == front {
                    Some(ActionAtEnd::StartParking(spot.unwrap().0))
                } else {
                    None
                }
            }
            Goal::BikeThenStop { end_dist } => {
                if end_dist == front {
                    // Checked up-front that this exists
                    let last_lane = self.head().as_lane();
                    let sidewalk = map
                        .get_parent(last_lane)
                        .bike_to_sidewalk(last_lane)
                        .unwrap();
                    Some(ActionAtEnd::StopBiking(
                        SidewalkSpot::bike_rack(sidewalk, map).unwrap(),
                    ))
                } else {
                    None
                }
            }
            Goal::FollowBusRoute { end_dist } => {
                if end_dist == front {
                    Some(ActionAtEnd::BusAtStop)
                } else {
                    None
                }
            }
        }
    }
    /// Possibly swaps the upcoming (turn, lane, turn) portion of the path for
    /// a sibling lane with a shorter queue, without changing where the path
    /// ultimately goes. Only considers lanes of the same type as the one
    /// pathfinding originally chose, and only lanes that still connect to the
    /// lane two steps further along.
    pub fn opportunistically_lanechange(
        &mut self,
        queues: &BTreeMap<Traversable, Queue>,
        map: &Map,
    ) {
        // Never fiddle with the path in the middle of (or right before) an
        // uber-turn.
        if self.path.approaching_uber_turn() || self.path.currently_inside_ut().is_some() {
            return;
        }
        // We need the pattern steps[1] = Turn, steps[4] = Lane; anything else
        // (or a path too short for it) means there's nothing to optimize.
        let (current_turn, next_lane) = {
            let steps = self.path.get_steps();
            if steps.len() < 5 {
                return;
            }
            match (steps[1], steps[4]) {
                (PathStep::Turn(t), PathStep::Lane(l)) => (t, l),
                _ => {
                    return;
                }
            }
        };
        let orig_target_lane = current_turn.dst;
        let parent = map.get_parent(orig_target_lane);
        let next_parent = map.get_l(next_lane).src_i;
        // Look for other candidate lanes. Must be the same lane type -- if there was a bus/bike
        // lane originally and pathfinding already decided to use it, stick with that decision.
        let orig_lt = map.get_l(orig_target_lane).lane_type;
        let siblings = parent.children(parent.is_forwards(orig_target_lane));
        let (_, turn1, best_lane, turn2) = siblings
            .iter()
            .filter_map(|(l, lt)| {
                let turn1 = TurnID {
                    parent: current_turn.parent,
                    src: current_turn.src,
                    dst: *l,
                };
                if orig_lt == *lt && map.maybe_get_t(turn1).is_some() {
                    // All other things being equal, prefer to not change lanes at all.
                    let penalize_unnecessary_lc =
                        if map.get_t(turn1).turn_type == TurnType::Straight {
                            0
                        } else {
                            1
                        };
                    // Now make sure we can go from this lane to next_lane.
                    let turn2 = TurnID {
                        parent: next_parent,
                        src: *l,
                        dst: next_lane,
                    };
                    if map.maybe_get_t(turn2).is_some() {
                        // Cost = queue length on the candidate lane, plus the
                        // lane-change penalty.
                        let cost =
                            penalize_unnecessary_lc + queues[&Traversable::Lane(*l)].cars.len();
                        Some((cost, turn1, *l, turn2))
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .min_by_key(|(len, _, _, _)| *len)
            // The original target lane always survives the filter (both of its
            // turns are part of the current path), so presumably this unwrap
            // can't fail -- NOTE(review): confirm.
            .unwrap();
        // TODO Only switch if the target queue is some amount better; don't oscillate
        // unnecessarily.
        // TODO Better weight function... any slower vehicles in one?
        if best_lane == orig_target_lane {
            return;
        }
        self.path.modify_step(1, PathStep::Turn(turn1), map);
        self.path.modify_step(2, PathStep::Lane(best_lane), map);
        self.path.modify_step(3, PathStep::Turn(turn2), map);
    }
pub fn replace_path_for_serialization(&mut self, path: Path) -> Path {
std::mem::replace(&mut self.path, path)
}
pub fn is_parking(&self) -> bool {
match self.goal {
Goal::ParkNearBuilding {
started_looking, ..
} => started_looking,
_ => false,
}
}
}
| 35.098321 | 98 | 0.445545 |
fed6cc9c8f733ad34fb78df8828f2d51479987b7 | 2,009 | use anyhow::{Context, Result};
use jsonwebtoken::errors::Error as JwtError;
use jsonwebtoken::{encode, EncodingKey, Header};
use serde::{Deserialize, Serialize};
use std::process::Command;
use web3::types::Address;
pub fn run_external_command(command: &str, args: &[&str]) -> Result<String> {
let result = Command::new(command)
.args(args)
.output()
.context(format!("failed to execute command: {}", command))?;
let stdout = String::from_utf8(result.stdout).context("stdout is not valid utf8")?;
let stderr = String::from_utf8(result.stderr).context("stderr is not valid utf8")?;
if !result.status.success() {
return Err(anyhow::anyhow!(
"failed to run exetrnal command {}:\nstdout: {}\nstderr: {}",
command,
stdout,
stderr
));
}
Ok(stdout)
}
pub fn str_to_address(value: &str) -> Result<Address> {
let str_addr = value["0x".len()..].parse().context("Error parse address")?;
Ok(str_addr)
}
pub fn get_matches_from_lines(stream: &str, pattern: &str) -> Result<String> {
let lines = stream.split_whitespace().collect::<Vec<_>>();
for std_out_line in lines {
if std_out_line.starts_with(pattern) {
return Ok(std_out_line.to_string());
}
}
Err(anyhow::anyhow!(
"error of finding the pattern '{}' in stream",
pattern
))
}
/// Claims serialized into the JWT built by `encode_auth_token`: the standard
/// registered `sub` and `exp` claims.
#[derive(Debug, Serialize, Deserialize)]
struct PayloadAuthToken {
    /// Subject (whom auth token refers to).
    sub: String,
    /// Expiration time (as UTC timestamp).
    exp: usize,
}
/// Encodes a JSON Web Token signed with the shared `secret`, where `sub` is
/// the subject claim and `exp` the timestamp until which the token is valid.
pub fn encode_auth_token(secret: &str, sub: &str, exp: usize) -> Result<String, JwtError> {
    let claims = PayloadAuthToken {
        sub: sub.to_string(),
        exp,
    };
    let key = EncodingKey::from_secret(secret.as_ref());
    encode(&Header::default(), &claims, &key)
}
| 29.544118 | 91 | 0.620209 |
8a6941a451621930fb52c787037b74fec521dadb | 53,343 | //! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e., given a value, one can easily find the
//! corresponding tag, and vice versa.
use rustc_arena::DroplessArena;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
use rustc_data_structures::sync::Lock;
use rustc_macros::HashStable_Generic;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use std::cmp::{Ord, PartialEq, PartialOrd};
use std::fmt;
use std::hash::{Hash, Hasher};
use std::str;
use crate::{with_session_globals, Edition, Span, DUMMY_SP};
#[cfg(test)]
mod tests;
// The proc macro code for this is in `compiler/rustc_macros/src/symbols.rs`.
symbols! {
// After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`,
// this should be rarely necessary though if the keywords are kept in alphabetic order.
Keywords {
// Special reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
Empty: "",
PathRoot: "{{root}}",
DollarCrate: "$crate",
Underscore: "_",
// Keywords that are used in stable Rust.
As: "as",
Break: "break",
Const: "const",
Continue: "continue",
Crate: "crate",
Else: "else",
Enum: "enum",
Extern: "extern",
False: "false",
Fn: "fn",
For: "for",
If: "if",
Impl: "impl",
In: "in",
Let: "let",
Loop: "loop",
Match: "match",
Mod: "mod",
Move: "move",
Mut: "mut",
Pub: "pub",
Ref: "ref",
Return: "return",
SelfLower: "self",
SelfUpper: "Self",
Static: "static",
Struct: "struct",
Super: "super",
Trait: "trait",
True: "true",
Type: "type",
Unsafe: "unsafe",
Use: "use",
Where: "where",
While: "while",
// Keywords that are used in unstable Rust or reserved for future use.
Abstract: "abstract",
Become: "become",
Box: "box",
Do: "do",
Final: "final",
Macro: "macro",
Override: "override",
Priv: "priv",
Typeof: "typeof",
Unsized: "unsized",
Virtual: "virtual",
Yield: "yield",
// Edition-specific keywords that are used in stable Rust.
Async: "async", // >= 2018 Edition only
Await: "await", // >= 2018 Edition only
Dyn: "dyn", // >= 2018 Edition only
// Edition-specific keywords that are used in unstable Rust or reserved for future use.
Try: "try", // >= 2018 Edition only
// Special lifetime names
UnderscoreLifetime: "'_",
StaticLifetime: "'static",
// Weak keywords, have special meaning only in specific contexts.
Auto: "auto",
Catch: "catch",
Default: "default",
MacroRules: "macro_rules",
Raw: "raw",
Union: "union",
Yeet: "yeet",
}
// Pre-interned symbols that can be referred to with `rustc_span::sym::*`.
//
// The symbol is the stringified identifier unless otherwise specified, in
// which case the name should mention the non-identifier punctuation.
// E.g. `sym::proc_dash_macro` represents "proc-macro", and it shouldn't be
// called `sym::proc_macro` because then it's easy to mistakenly think it
// represents "proc_macro".
//
// As well as the symbols listed, there are symbols for the strings
// "0", "1", ..., "9", which are accessible via `sym::integer`.
//
// The proc macro will abort if symbols are not in alphabetical order (as
// defined by `impl Ord for str`) or if any symbols are duplicated. Vim
// users can sort the list by selecting it and executing the command
// `:'<,'>!LC_ALL=C sort`.
//
// There is currently no checking that all symbols are used; that would be
// nice to have.
Symbols {
AcqRel,
Acquire,
AddSubdiagnostic,
Alignment,
Any,
Arc,
Argument,
ArgumentV1,
ArgumentV1Methods,
Arguments,
AsMut,
AsRef,
AtomicBool,
AtomicI128,
AtomicI16,
AtomicI32,
AtomicI64,
AtomicI8,
AtomicIsize,
AtomicPtr,
AtomicU128,
AtomicU16,
AtomicU32,
AtomicU64,
AtomicU8,
AtomicUsize,
BTreeEntry,
BTreeMap,
BTreeSet,
BinaryHeap,
Borrow,
Break,
C,
CStr,
CString,
Capture,
Center,
Clone,
Continue,
Copy,
Count,
Cow,
Debug,
DebugStruct,
DebugTuple,
Decodable,
Decoder,
Default,
Deref,
DiagnosticMessage,
DirBuilder,
Display,
DoubleEndedIterator,
Duration,
Encodable,
Encoder,
Eq,
Equal,
Err,
Error,
File,
FileType,
Fn,
FnMut,
FnOnce,
FormatSpec,
Formatter,
From,
FromIterator,
FromResidual,
Future,
FxHashMap,
FxHashSet,
GlobalAlloc,
Hash,
HashMap,
HashMapEntry,
HashSet,
Hasher,
Implied,
Input,
Into,
IntoFuture,
IntoIterator,
IoRead,
IoWrite,
IrTyKind,
Is,
ItemContext,
Iterator,
Layout,
Left,
LinkedList,
LintPass,
Mutex,
N,
None,
Ok,
Option,
Ord,
Ordering,
OsStr,
OsString,
Output,
Param,
PartialEq,
PartialOrd,
Path,
PathBuf,
Pending,
Pin,
Pointer,
Poll,
ProcMacro,
ProcMacroHack,
ProceduralMasqueradeDummyType,
Range,
RangeFrom,
RangeFull,
RangeInclusive,
RangeTo,
RangeToInclusive,
Rc,
Ready,
Receiver,
Relaxed,
Release,
Result,
Return,
Right,
RustcDecodable,
RustcEncodable,
Send,
SeqCst,
SessionDiagnostic,
SliceIndex,
Some,
String,
StructuralEq,
StructuralPartialEq,
SubdiagnosticMessage,
Sync,
Target,
ToOwned,
ToString,
Try,
TryCaptureGeneric,
TryCapturePrintable,
TryFrom,
TryInto,
Ty,
TyCtxt,
TyKind,
Unknown,
UnsafeArg,
Vec,
VecDeque,
Wrapper,
Yield,
_DECLS,
_Self,
__D,
__H,
__S,
__awaitee,
__try_var,
_d,
_e,
_task_context,
a32,
aarch64_target_feature,
aarch64_ver_target_feature,
abi,
abi_amdgpu_kernel,
abi_avr_interrupt,
abi_c_cmse_nonsecure_call,
abi_efiapi,
abi_msp430_interrupt,
abi_ptx,
abi_sysv64,
abi_thiscall,
abi_unadjusted,
abi_vectorcall,
abi_x86_interrupt,
abort,
aborts,
add,
add_assign,
add_with_overflow,
address,
adt_const_params,
advanced_slice_patterns,
adx_target_feature,
alias,
align,
align_offset,
alignstack,
all,
alloc,
alloc_error_handler,
alloc_layout,
alloc_zeroed,
allocator,
allocator_api,
allocator_internals,
allow,
allow_fail,
allow_internal_unsafe,
allow_internal_unstable,
allowed,
alu32,
always,
and,
and_then,
any,
append_const_msg,
arbitrary_enum_discriminant,
arbitrary_self_types,
args,
arith_offset,
arm,
arm_target_feature,
array,
arrays,
as_ptr,
as_ref,
as_str,
asm,
asm_const,
asm_experimental_arch,
asm_sym,
asm_unwind,
assert,
assert_eq_macro,
assert_inhabited,
assert_macro,
assert_ne_macro,
assert_receiver_is_total_eq,
assert_uninit_valid,
assert_zero_valid,
asserting,
associated_const_equality,
associated_consts,
associated_type_bounds,
associated_type_defaults,
associated_types,
assume,
assume_init,
async_await,
async_closure,
atomic,
atomic_mod,
atomics,
att_syntax,
attr,
attr_literals,
attributes,
augmented_assignments,
auto_traits,
automatically_derived,
avx,
avx512_target_feature,
avx512bw,
avx512f,
await_macro,
bang,
begin_panic,
bench,
bin,
bind_by_move_pattern_guards,
bindings_after_at,
bitand,
bitand_assign,
bitor,
bitor_assign,
bitreverse,
bitxor,
bitxor_assign,
black_box,
block,
bool,
borrowck_graphviz_format,
borrowck_graphviz_postflow,
borrowck_graphviz_preflow,
box_free,
box_patterns,
box_syntax,
bpf_target_feature,
braced_empty_structs,
branch,
breakpoint,
bridge,
bswap,
c_str,
c_unwind,
c_variadic,
call,
call_mut,
call_once,
caller_location,
capture_disjoint_fields,
cdylib,
ceilf32,
ceilf64,
cfg,
cfg_accessible,
cfg_attr,
cfg_attr_multi,
cfg_doctest,
cfg_eval,
cfg_hide,
cfg_macro,
cfg_panic,
cfg_sanitize,
cfg_target_abi,
cfg_target_compact,
cfg_target_feature,
cfg_target_has_atomic,
cfg_target_has_atomic_equal_alignment,
cfg_target_has_atomic_load_store,
cfg_target_thread_local,
cfg_target_vendor,
cfg_version,
cfi,
char,
client,
clippy,
clobber_abi,
clone,
clone_closures,
clone_from,
closure,
closure_to_fn_coercion,
closure_track_caller,
cmp,
cmp_max,
cmp_min,
cmpxchg16b_target_feature,
cmse_nonsecure_entry,
coerce_unsized,
cold,
column,
column_macro,
compare_and_swap,
compare_exchange,
compare_exchange_weak,
compile_error,
compile_error_macro,
compiler,
compiler_builtins,
compiler_fence,
concat,
concat_bytes,
concat_idents,
concat_macro,
conservative_impl_trait,
console,
const_allocate,
const_async_blocks,
const_compare_raw_pointers,
const_constructor,
const_deallocate,
const_eval_limit,
const_eval_select,
const_eval_select_ct,
const_evaluatable_checked,
const_extern_fn,
const_fn,
const_fn_floating_point_arithmetic,
const_fn_fn_ptr_basics,
const_fn_trait_bound,
const_fn_transmute,
const_fn_union,
const_fn_unsize,
const_for,
const_format_args,
const_generic_defaults,
const_generics,
const_generics_defaults,
const_if_match,
const_impl_trait,
const_in_array_repeat_expressions,
const_indexing,
const_let,
const_loop,
const_mut_refs,
const_panic,
const_panic_fmt,
const_precise_live_drops,
const_raw_ptr_deref,
const_raw_ptr_to_usize_cast,
const_refs_to_cell,
const_trait,
const_trait_bound_opt_out,
const_trait_impl,
const_transmute,
const_try,
constant,
constructor,
contents,
context,
convert,
copy,
copy_closures,
copy_nonoverlapping,
copysignf32,
copysignf64,
core,
core_intrinsics,
core_panic,
core_panic_2015_macro,
core_panic_macro,
cosf32,
cosf64,
count,
cr,
crate_id,
crate_in_paths,
crate_local,
crate_name,
crate_type,
crate_visibility_modifier,
crt_dash_static: "crt-static",
cstring_type,
ctlz,
ctlz_nonzero,
ctpop,
cttz,
cttz_nonzero,
custom_attribute,
custom_derive,
custom_inner_attributes,
custom_test_frameworks,
d,
d32,
dbg_macro,
dead_code,
dealloc,
debug,
debug_assert_eq_macro,
debug_assert_macro,
debug_assert_ne_macro,
debug_assertions,
debug_struct,
debug_trait_builder,
debug_tuple,
debugger_visualizer,
decl_macro,
declare_lint_pass,
decode,
default_alloc_error_handler,
default_lib_allocator,
default_method_body_is_const,
default_type_parameter_fallback,
default_type_params,
delay_span_bug_from_inside_query,
deny,
deprecated,
deprecated_safe,
deprecated_suggestion,
deref,
deref_method,
deref_mut,
deref_target,
derive,
derive_default_enum,
destruct,
destructuring_assignment,
diagnostic,
direct,
discriminant_kind,
discriminant_type,
discriminant_value,
dispatch_from_dyn,
display_trait,
div,
div_assign,
doc,
doc_alias,
doc_auto_cfg,
doc_cfg,
doc_cfg_hide,
doc_keyword,
doc_masked,
doc_notable_trait,
doc_primitive,
doc_spotlight,
doctest,
document_private_items,
dotdot: "..",
dotdot_in_tuple_patterns,
dotdoteq_in_patterns,
dreg,
dreg_low16,
dreg_low8,
drop,
drop_in_place,
drop_types_in_const,
dropck_eyepatch,
dropck_parametricity,
dylib,
dyn_metadata,
dyn_trait,
e,
edition_macro_pats,
edition_panic,
eh_catch_typeinfo,
eh_personality,
emit_enum,
emit_enum_variant,
emit_enum_variant_arg,
emit_struct,
emit_struct_field,
enable,
enclosing_scope,
encode,
end,
env,
env_macro,
eprint_macro,
eprintln_macro,
eq,
ermsb_target_feature,
exact_div,
except,
exchange_malloc,
exclusive_range_pattern,
exhaustive_integer_patterns,
exhaustive_patterns,
existential_type,
exp2f32,
exp2f64,
expect,
expected,
expf32,
expf64,
explicit_generic_args_with_impl_trait,
export_name,
expr,
extended_key_value_attributes,
extern_absolute_paths,
extern_crate_item_prelude,
extern_crate_self,
extern_in_paths,
extern_prelude,
extern_types,
external_doc,
f,
f16c_target_feature,
f32,
f64,
fabsf32,
fabsf64,
fadd_fast,
fdiv_fast,
feature,
fence,
ferris: "🦀",
fetch_update,
ffi,
ffi_const,
ffi_pure,
ffi_returns_twice,
field,
field_init_shorthand,
file,
file_macro,
fill,
finish,
flags,
float,
float_to_int_unchecked,
floorf32,
floorf64,
fmaf32,
fmaf64,
fmt,
fmt_as_str,
fmt_internals,
fmul_fast,
fn_align,
fn_must_use,
fn_mut,
fn_once,
fn_once_output,
forbid,
forget,
format,
format_args,
format_args_capture,
format_args_macro,
format_args_nl,
format_macro,
fp,
freeze,
freg,
frem_fast,
from,
from_desugaring,
from_generator,
from_iter,
from_method,
from_output,
from_residual,
from_size_align_unchecked,
from_usize,
from_yeet,
fsub_fast,
fundamental,
future,
future_trait,
gdb_script_file,
ge,
gen_future,
gen_kill,
generator,
generator_return,
generator_state,
generators,
generic_arg_infer,
generic_assert,
generic_associated_types,
generic_associated_types_extended,
generic_const_exprs,
generic_param_attrs,
get_context,
global_allocator,
global_asm,
globs,
gt,
half_open_range_patterns,
hash,
hexagon_target_feature,
hidden,
homogeneous_aggregate,
html_favicon_url,
html_logo_url,
html_no_source,
html_playground_url,
html_root_url,
hwaddress,
i,
i128,
i128_type,
i16,
i32,
i64,
i8,
ident,
if_let,
if_let_guard,
if_while_or_patterns,
ignore,
impl_header_lifetime_elision,
impl_lint_pass,
impl_macros,
impl_trait_in_bindings,
import_shadowing,
imported_main,
in_band_lifetimes,
include,
include_bytes,
include_bytes_macro,
include_macro,
include_str,
include_str_macro,
inclusive_range_syntax,
index,
index_mut,
infer_outlives_requirements,
infer_static_outlives_requirements,
inherent_associated_types,
inlateout,
inline,
inline_const,
inline_const_pat,
inout,
instruction_set,
integer_: "integer",
integral,
intel,
into_future,
into_iter,
intra_doc_pointers,
intrinsics,
irrefutable_let_patterns,
isa_attribute,
isize,
issue,
issue_5723_bootstrap,
issue_tracker_base_url,
item,
item_like_imports,
iter,
iter_repeat,
keyword,
kind,
kreg,
kreg0,
label,
label_break_value,
lang,
lang_items,
large_assignments,
lateout,
lazy_normalization_consts,
le,
len,
let_chains,
let_else,
lhs,
lib,
libc,
lifetime,
likely,
line,
line_macro,
link,
link_args,
link_cfg,
link_llvm_intrinsics,
link_name,
link_ordinal,
link_section,
linkage,
linker,
lint_reasons,
literal,
load,
loaded_from_disk,
local,
local_inner_macros,
log10f32,
log10f64,
log2f32,
log2f64,
log_syntax,
logf32,
logf64,
loop_break_value,
lt,
macro_at_most_once_rep,
macro_attributes_in_derive_output,
macro_escape,
macro_export,
macro_lifetime_matcher,
macro_literal_matcher,
macro_metavar_expr,
macro_reexport,
macro_use,
macro_vis_matcher,
macros_in_extern,
main,
managed_boxes,
manually_drop,
map,
marker,
marker_trait_attr,
masked,
match_beginning_vert,
match_default_bindings,
matches_macro,
maxnumf32,
maxnumf64,
may_dangle,
may_unwind,
maybe_uninit,
maybe_uninit_uninit,
maybe_uninit_zeroed,
mem_discriminant,
mem_drop,
mem_forget,
mem_replace,
mem_size_of,
mem_size_of_val,
mem_uninitialized,
mem_variant_count,
mem_zeroed,
member_constraints,
memory,
memtag,
message,
meta,
metadata_type,
min_align_of,
min_align_of_val,
min_const_fn,
min_const_generics,
min_const_unsafe_fn,
min_specialization,
min_type_alias_impl_trait,
minnumf32,
minnumf64,
mips_target_feature,
miri,
misc,
mmx_reg,
modifiers,
module,
module_path,
module_path_macro,
more_qualified_paths,
more_struct_aliases,
movbe_target_feature,
move_ref_pattern,
move_size_limit,
mul,
mul_assign,
mul_with_overflow,
must_not_suspend,
must_use,
naked,
naked_functions,
name,
names,
native_link_modifiers,
native_link_modifiers_as_needed,
native_link_modifiers_bundle,
native_link_modifiers_verbatim,
native_link_modifiers_whole_archive,
natvis_file,
ne,
nearbyintf32,
nearbyintf64,
needs_allocator,
needs_drop,
needs_panic_runtime,
neg,
negate_unsigned,
negative_impls,
neon,
never,
never_type,
never_type_fallback,
new,
new_unchecked,
next,
nll,
no,
no_builtins,
no_core,
no_coverage,
no_crate_inject,
no_debug,
no_default_passes,
no_implicit_prelude,
no_inline,
no_link,
no_main,
no_mangle,
no_niche,
no_sanitize,
no_stack_check,
no_start,
no_std,
nomem,
non_ascii_idents,
non_exhaustive,
non_exhaustive_omitted_patterns_lint,
non_modrs_mods,
none_error,
nontemporal_store,
noop_method_borrow,
noop_method_clone,
noop_method_deref,
noreturn,
nostack,
not,
notable_trait,
note,
object_safe_for_dispatch,
of,
offset,
omit_gdb_pretty_printer_section,
on,
on_unimplemented,
oom,
opaque,
ops,
opt_out_copy,
optimize,
optimize_attribute,
optin_builtin_traits,
option,
option_env,
option_env_macro,
options,
or,
or_patterns,
other,
out,
overlapping_marker_traits,
owned_box,
packed,
panic,
panic_2015,
panic_2021,
panic_abort,
panic_bounds_check,
panic_display,
panic_fmt,
panic_handler,
panic_impl,
panic_implementation,
panic_info,
panic_location,
panic_no_unwind,
panic_runtime,
panic_str,
panic_unwind,
panicking,
param_attrs,
partial_cmp,
partial_ord,
passes,
pat,
pat_param,
path,
pattern_parentheses,
phantom_data,
pin,
platform_intrinsics,
plugin,
plugin_registrar,
plugins,
pointee_trait,
pointer,
pointer_trait_fmt,
poll,
position,
post_dash_lto: "post-lto",
powerpc_target_feature,
powf32,
powf64,
powif32,
powif64,
pre_dash_lto: "pre-lto",
precise_pointer_size_matching,
precision,
pref_align_of,
prefetch_read_data,
prefetch_read_instruction,
prefetch_write_data,
prefetch_write_instruction,
preg,
prelude,
prelude_import,
preserves_flags,
primitive,
print_macro,
println_macro,
proc_dash_macro: "proc-macro",
proc_macro,
proc_macro_attribute,
proc_macro_def_site,
proc_macro_derive,
proc_macro_expr,
proc_macro_gen,
proc_macro_hygiene,
proc_macro_internals,
proc_macro_mod,
proc_macro_non_items,
proc_macro_path_invoc,
profiler_builtins,
profiler_runtime,
ptr,
ptr_guaranteed_eq,
ptr_guaranteed_ne,
ptr_null,
ptr_null_mut,
ptr_offset_from,
ptr_offset_from_unsigned,
pub_macro_rules,
pub_restricted,
pure,
pushpop_unsafe,
qreg,
qreg_low4,
qreg_low8,
quad_precision_float,
question_mark,
quote,
range_inclusive_new,
raw_dylib,
raw_eq,
raw_identifiers,
raw_ref_op,
re_rebalance_coherence,
read_enum,
read_enum_variant,
read_enum_variant_arg,
read_struct,
read_struct_field,
readonly,
realloc,
reason,
receiver,
recursion_limit,
reexport_test_harness_main,
ref_unwind_safe_trait,
reference,
reflect,
reg,
reg16,
reg32,
reg64,
reg_abcd,
reg_byte,
reg_iw,
reg_nonzero,
reg_pair,
reg_ptr,
reg_upper,
register_attr,
register_tool,
relaxed_adts,
relaxed_struct_unsize,
rem,
rem_assign,
repr,
repr128,
repr_align,
repr_align_enum,
repr_no_niche,
repr_packed,
repr_simd,
repr_transparent,
residual,
result,
rhs,
rintf32,
rintf64,
riscv_target_feature,
rlib,
rotate_left,
rotate_right,
roundf32,
roundf64,
rt,
rtm_target_feature,
rust,
rust_2015,
rust_2015_preview,
rust_2018,
rust_2018_preview,
rust_2021,
rust_2021_preview,
rust_2024,
rust_2024_preview,
rust_begin_unwind,
rust_cold_cc,
rust_eh_catch_typeinfo,
rust_eh_personality,
rust_eh_register_frames,
rust_eh_unregister_frames,
rust_oom,
rustc,
rustc_allocator,
rustc_allocator_nounwind,
rustc_allow_const_fn_unstable,
rustc_allow_incoherent_impl,
rustc_attrs,
rustc_box,
rustc_builtin_macro,
rustc_capture_analysis,
rustc_clean,
rustc_coherence_is_core,
rustc_const_stable,
rustc_const_unstable,
rustc_conversion_suggestion,
rustc_def_path,
rustc_diagnostic_item,
rustc_diagnostic_macros,
rustc_dirty,
rustc_do_not_const_check,
rustc_dummy,
rustc_dump_env_program_clauses,
rustc_dump_program_clauses,
rustc_dump_user_substs,
rustc_dump_vtable,
rustc_error,
rustc_evaluate_where_clauses,
rustc_expected_cgu_reuse,
rustc_has_incoherent_inherent_impls,
rustc_if_this_changed,
rustc_inherit_overflow_checks,
rustc_insignificant_dtor,
rustc_layout,
rustc_layout_scalar_valid_range_end,
rustc_layout_scalar_valid_range_start,
rustc_legacy_const_generics,
rustc_lint_diagnostics,
rustc_lint_query_instability,
rustc_macro_transparency,
rustc_main,
rustc_mir,
rustc_must_implement_one_of,
rustc_nonnull_optimization_guaranteed,
rustc_object_lifetime_default,
rustc_on_unimplemented,
rustc_outlives,
rustc_paren_sugar,
rustc_partition_codegened,
rustc_partition_reused,
rustc_pass_by_value,
rustc_peek,
rustc_peek_definite_init,
rustc_peek_liveness,
rustc_peek_maybe_init,
rustc_peek_maybe_uninit,
rustc_polymorphize_error,
rustc_private,
rustc_proc_macro_decls,
rustc_promotable,
rustc_regions,
rustc_reservation_impl,
rustc_serialize,
rustc_skip_array_during_method_dispatch,
rustc_specialization_trait,
rustc_stable,
rustc_std_internal_symbol,
rustc_strict_coherence,
rustc_symbol_name,
rustc_test_marker,
rustc_then_this_would_need,
rustc_trivial_field_reads,
rustc_unsafe_specialization_marker,
rustc_variance,
rustdoc,
rustdoc_internals,
rustfmt,
rvalue_static_promotion,
s,
sanitize,
sanitizer_runtime,
saturating_add,
saturating_sub,
self_in_typedefs,
self_struct_ctor,
semitransparent,
shl,
shl_assign,
should_panic,
shr,
shr_assign,
simd,
simd_add,
simd_and,
simd_arith_offset,
simd_as,
simd_bitmask,
simd_cast,
simd_ceil,
simd_div,
simd_eq,
simd_extract,
simd_fabs,
simd_fcos,
simd_fexp,
simd_fexp2,
simd_ffi,
simd_flog,
simd_flog10,
simd_flog2,
simd_floor,
simd_fma,
simd_fmax,
simd_fmin,
simd_fpow,
simd_fpowi,
simd_fsin,
simd_fsqrt,
simd_gather,
simd_ge,
simd_gt,
simd_insert,
simd_le,
simd_lt,
simd_mul,
simd_ne,
simd_neg,
simd_or,
simd_reduce_add_ordered,
simd_reduce_add_unordered,
simd_reduce_all,
simd_reduce_and,
simd_reduce_any,
simd_reduce_max,
simd_reduce_max_nanless,
simd_reduce_min,
simd_reduce_min_nanless,
simd_reduce_mul_ordered,
simd_reduce_mul_unordered,
simd_reduce_or,
simd_reduce_xor,
simd_rem,
simd_round,
simd_saturating_add,
simd_saturating_sub,
simd_scatter,
simd_select,
simd_select_bitmask,
simd_shl,
simd_shr,
simd_shuffle,
simd_sub,
simd_trunc,
simd_xor,
since,
sinf32,
sinf64,
size,
size_of,
size_of_val,
sized,
skip,
slice,
slice_len_fn,
slice_patterns,
slicing_syntax,
soft,
specialization,
speed,
spotlight,
sqrtf32,
sqrtf64,
sreg,
sreg_low16,
sse,
sse4a_target_feature,
stable,
staged_api,
start,
state,
static_in_const,
static_nobundle,
static_recursion,
staticlib,
std,
std_inject,
std_panic,
std_panic_2015_macro,
std_panic_macro,
stmt,
stmt_expr_attributes,
stop_after_dataflow,
store,
str,
str_split_whitespace,
str_trim,
str_trim_end,
str_trim_start,
strict_provenance,
stringify,
stringify_macro,
struct_field_attributes,
struct_inherit,
struct_variant,
structural_match,
structural_peq,
structural_teq,
sty,
sub,
sub_assign,
sub_with_overflow,
suggestion,
sym,
sync,
t32,
target,
target_abi,
target_arch,
target_endian,
target_env,
target_family,
target_feature,
target_feature_11,
target_has_atomic,
target_has_atomic_equal_alignment,
target_has_atomic_load_store,
target_os,
target_pointer_width,
target_target_vendor,
target_thread_local,
target_vendor,
task,
tbm_target_feature,
termination,
termination_trait,
termination_trait_test,
test,
test_2018_feature,
test_accepted_feature,
test_case,
test_removed_feature,
test_runner,
test_unstable_lint,
then_with,
thread,
thread_local,
thread_local_macro,
thumb2,
thumb_mode: "thumb-mode",
tmm_reg,
todo_macro,
tool_attributes,
tool_lints,
trace_macros,
track_caller,
trait_alias,
trait_upcasting,
transmute,
transparent,
transparent_enums,
transparent_unions,
trivial_bounds,
truncf32,
truncf64,
try_blocks,
try_capture,
try_from,
try_into,
try_trait_v2,
tt,
tuple,
tuple_from_req,
tuple_indexing,
tuple_variadic,
two_phase,
ty,
type_alias_enum_variants,
type_alias_impl_trait,
type_ascription,
type_changing_struct_update,
type_id,
type_length_limit,
type_macros,
type_name,
u128,
u16,
u32,
u64,
u8,
unaligned_volatile_load,
unaligned_volatile_store,
unboxed_closures,
unchecked_add,
unchecked_div,
unchecked_mul,
unchecked_rem,
unchecked_shl,
unchecked_shr,
unchecked_sub,
underscore_const_names,
underscore_imports,
underscore_lifetimes,
uniform_paths,
unimplemented_macro,
unit,
universal_impl_trait,
unix,
unlikely,
unmarked_api,
unpin,
unreachable,
unreachable_2015,
unreachable_2015_macro,
unreachable_2021,
unreachable_2021_macro,
unreachable_code,
unreachable_display,
unreachable_macro,
unrestricted_attribute_tokens,
unsafe_block_in_unsafe_fn,
unsafe_cell,
unsafe_no_drop_flag,
unsafe_pin_internals,
unsize,
unsized_fn_params,
unsized_locals,
unsized_tuple_coercion,
unstable,
untagged_unions,
unused_imports,
unused_qualifications,
unwind,
unwind_attributes,
unwind_safe_trait,
unwrap,
unwrap_or,
use_extern_macros,
use_nested_groups,
used,
used_with_arg,
usize,
v1,
va_arg,
va_copy,
va_end,
va_list,
va_start,
val,
values,
var,
variant_count,
vec,
vec_macro,
version,
vfp2,
vis,
visible_private_types,
volatile,
volatile_copy_memory,
volatile_copy_nonoverlapping_memory,
volatile_load,
volatile_set_memory,
volatile_store,
vreg,
vreg_low16,
warn,
wasm_abi,
wasm_import_module,
wasm_target_feature,
while_let,
width,
windows,
windows_subsystem,
with_negative_coherence,
wrapping_add,
wrapping_mul,
wrapping_sub,
wreg,
write_bytes,
write_macro,
write_str,
writeln_macro,
x87_reg,
xer,
xmm_reg,
yeet_desugar_details,
yeet_expr,
ymm_reg,
zmm_reg,
}
}
/// An identifier: an interned name together with the span it occurred at.
/// Equality and hashing are custom (see the `PartialEq`/`Hash` impls below):
/// they compare the name plus the span's hygiene context, not its position.
#[derive(Copy, Clone, Eq, HashStable_Generic, Encodable, Decodable)]
pub struct Ident {
    pub name: Symbol,
    pub span: Span,
}
impl Ident {
    #[inline]
    /// Constructs a new identifier from a symbol and a span.
    pub const fn new(name: Symbol, span: Span) -> Ident {
        Ident { name, span }
    }
    /// Constructs a new identifier with a dummy span.
    #[inline]
    pub const fn with_dummy_span(name: Symbol) -> Ident {
        Ident::new(name, DUMMY_SP)
    }
    /// The identifier for the empty string (`kw::Empty`), with a dummy span.
    #[inline]
    pub fn empty() -> Ident {
        Ident::with_dummy_span(kw::Empty)
    }
    /// Maps a string to an identifier with a dummy span.
    pub fn from_str(string: &str) -> Ident {
        Ident::with_dummy_span(Symbol::intern(string))
    }
    /// Maps a string and a span to an identifier.
    pub fn from_str_and_span(string: &str, span: Span) -> Ident {
        Ident::new(Symbol::intern(string), span)
    }
    /// Replaces `lo` and `hi` with those from `span`, but keep hygiene context.
    pub fn with_span_pos(self, span: Span) -> Ident {
        Ident::new(self.name, span.with_ctxt(self.span.ctxt()))
    }
    /// Returns a copy of the identifier with any leading `'` characters
    /// stripped from its name (interning the trimmed string), keeping the
    /// same span.
    pub fn without_first_quote(self) -> Ident {
        Ident::new(Symbol::intern(self.as_str().trim_start_matches('\'')), self.span)
    }
    /// "Normalize" ident for use in comparisons using "item hygiene".
    /// Identifiers with same string value become same if they came from the same macro 2.0 macro
    /// (e.g., `macro` item, but not `macro_rules` item) and stay different if they came from
    /// different macro 2.0 macros.
    /// Technically, this operation strips all non-opaque marks from ident's syntactic context.
    pub fn normalize_to_macros_2_0(self) -> Ident {
        Ident::new(self.name, self.span.normalize_to_macros_2_0())
    }
    /// "Normalize" ident for use in comparisons using "local variable hygiene".
    /// Identifiers with same string value become same if they came from the same non-transparent
    /// macro (e.g., `macro` or `macro_rules!` items) and stay different if they came from different
    /// non-transparent macros.
    /// Technically, this operation strips all transparent marks from ident's syntactic context.
    pub fn normalize_to_macro_rules(self) -> Ident {
        Ident::new(self.name, self.span.normalize_to_macro_rules())
    }
    /// Access the underlying string. This is a slowish operation because it
    /// requires locking the symbol interner.
    ///
    /// Note that the lifetime of the return value is a lie. See
    /// `Symbol::as_str()` for details.
    pub fn as_str(&self) -> &str {
        self.name.as_str()
    }
}
impl PartialEq for Ident {
fn eq(&self, rhs: &Self) -> bool {
self.name == rhs.name && self.span.eq_ctxt(rhs.span)
}
}
impl Hash for Ident {
fn hash<H: Hasher>(&self, state: &mut H) {
self.name.hash(state);
self.span.ctxt().hash(state);
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)?;
fmt::Debug::fmt(&self.span.ctxt(), f)
}
}
/// This implementation is supposed to be used in error messages, so it's expected to be identical
/// to printing the original identifier token written in source code (`token_to_string`),
/// except that AST identifiers don't keep the rawness flag, so we have to guess it.
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&IdentPrinter::new(self.name, self.is_raw_guess(), None), f)
}
}
/// This is the most general way to print identifiers.
/// AST pretty-printer is used as a fallback for turning AST structures into token streams for
/// proc macros. Additionally, proc macros may stringify their input and expect it survive the
/// stringification (especially true for proc macro derives written between Rust 1.15 and 1.30).
/// So we need to somehow pretty-print `$crate` in a way preserving at least some of its
/// hygiene data, most importantly name of the crate it refers to.
/// As a result we print `$crate` as `crate` if it refers to the local crate
/// and as `::other_crate_name` if it refers to some other crate.
/// Note, that this is only done if the ident token is printed from inside of AST pretty-printing,
/// but not otherwise. Pretty-printing is the only way for proc macros to discover token contents,
/// so we should not perform this lossy conversion if the top level call to the pretty-printer was
/// done for a token stream or a single token.
pub struct IdentPrinter {
    symbol: Symbol,
    is_raw: bool,
    /// Span used for retrieving the crate name to which `$crate` refers to,
    /// if this field is `None` then the `$crate` conversion doesn't happen.
    convert_dollar_crate: Option<Span>,
}
impl IdentPrinter {
    /// The most general `IdentPrinter` constructor. Do not use this.
    pub fn new(symbol: Symbol, is_raw: bool, convert_dollar_crate: Option<Span>) -> IdentPrinter {
        IdentPrinter { symbol, is_raw, convert_dollar_crate }
    }
    /// This implementation is supposed to be used when printing identifiers
    /// as a part of pretty-printing for larger AST pieces.
    /// Do not use this either.
    pub fn for_ast_ident(ident: Ident, is_raw: bool) -> IdentPrinter {
        IdentPrinter::new(ident.name, is_raw, Some(ident.span))
    }
}
impl fmt::Display for IdentPrinter {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.is_raw {
            // Raw identifiers are printed with the `r#` prefix.
            f.write_str("r#")?;
        } else if self.symbol == kw::DollarCrate {
            // `$crate` special case: resolve it through the span's hygiene
            // context and print the concrete crate name (see type docs above).
            if let Some(span) = self.convert_dollar_crate {
                let converted = span.ctxt().dollar_crate_name();
                if !converted.is_path_segment_keyword() {
                    f.write_str("::")?;
                }
                return fmt::Display::fmt(&converted, f);
            }
        }
        fmt::Display::fmt(&self.symbol, f)
    }
}
/// An newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on
/// construction.
// FIXME(matthewj, petrochenkov) Use this more often, add a similar
// `ModernIdent` struct and use that as well.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct MacroRulesNormalizedIdent(Ident);
impl MacroRulesNormalizedIdent {
    /// Wraps `ident`, normalizing its hygiene context for
    /// "local variable hygiene" comparisons.
    pub fn new(ident: Ident) -> Self {
        Self(ident.normalize_to_macro_rules())
    }
}
// Both formatting impls simply delegate to the wrapped `Ident`.
impl fmt::Debug for MacroRulesNormalizedIdent {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
impl fmt::Display for MacroRulesNormalizedIdent {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
/// An interned string.
///
/// Internally, a `Symbol` is implemented as an index, and all operations
/// (including hashing, equality, and ordering) operate on that index. The use
/// of `rustc_index::newtype_index!` means that `Option<Symbol>` only takes up 4 bytes,
/// because `rustc_index::newtype_index!` reserves the last 256 values for tagging purposes.
///
/// Note that `Symbol` cannot directly be a `rustc_index::newtype_index!` because it
/// implements `fmt::Debug`, `Encodable`, and `Decodable` in special ways.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Symbol(SymbolIndex);
rustc_index::newtype_index! {
    struct SymbolIndex { .. }
}
impl Symbol {
    // Private: raw indices are only minted here and by the interner.
    const fn new(n: u32) -> Self {
        Symbol(SymbolIndex::from_u32(n))
    }
    /// Maps a string to its interned representation.
    pub fn intern(string: &str) -> Self {
        with_session_globals(|session_globals| session_globals.symbol_interner.intern(string))
    }
    /// Access the underlying string. This is a slowish operation because it
    /// requires locking the symbol interner.
    ///
    /// Note that the lifetime of the return value is a lie. It's not the same
    /// as `&self`, but actually tied to the lifetime of the underlying
    /// interner. Interners are long-lived, and there are very few of them, and
    /// this function is typically used for short-lived things, so in practice
    /// it works out ok.
    pub fn as_str(&self) -> &str {
        with_session_globals(|session_globals| unsafe {
            // SAFETY(as documented above): the transmute only erases the
            // borrow of the session globals; the interner the `&str` points
            // into outlives all practical uses of the return value.
            std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self))
        })
    }
    pub fn as_u32(self) -> u32 {
        self.0.as_u32()
    }
    pub fn is_empty(self) -> bool {
        self == kw::Empty
    }
    /// This method is supposed to be used in error messages, so it's expected to be
    /// identical to printing the original identifier token written in source code
    /// (`token_to_string`, `Ident::to_string`), except that symbols don't keep the rawness flag
    /// or edition, so we have to guess the rawness using the global edition.
    pub fn to_ident_string(self) -> String {
        Ident::with_dummy_span(self).to_string()
    }
}
// Formatting goes through the interned string, not the raw index.
impl fmt::Debug for Symbol {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(self.as_str(), f)
    }
}
impl fmt::Display for Symbol {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self.as_str(), f)
    }
}
// Serialization uses the string contents rather than the index, since
// indices are only meaningful within a single interner instance.
impl<S: Encoder> Encodable<S> for Symbol {
    fn encode(&self, s: &mut S) {
        s.emit_str(self.as_str());
    }
}
impl<D: Decoder> Decodable<D> for Symbol {
    #[inline]
    fn decode(d: &mut D) -> Symbol {
        Symbol::intern(&d.read_str())
    }
}
// Stable hashing likewise hashes the string, so hashes are reproducible
// across sessions with differently-populated interners.
impl<CTX> HashStable<CTX> for Symbol {
    #[inline]
    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
        self.as_str().hash_stable(hcx, hasher);
    }
}
impl<CTX> ToStableHashKey<CTX> for Symbol {
    type KeyType = String;
    #[inline]
    fn to_stable_hash_key(&self, _: &CTX) -> String {
        self.as_str().to_string()
    }
}
// Thread-local string interner: maps strings to `Symbol` indices and back.
#[derive(Default)]
pub(crate) struct Interner(Lock<InternerInner>);
// The `&'static str`s in this type actually point into the arena.
//
// The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278
// found that to regress performance up to 2% in some cases. This might be
// revisited after further improvements to `indexmap`.
//
// This type is private to prevent accidentally constructing more than one
// `Interner` on the same thread, which makes it easy to mix up `Symbol`s
// between `Interner`s.
#[derive(Default)]
struct InternerInner {
    arena: DroplessArena,
    names: FxHashMap<&'static str, Symbol>,
    strings: Vec<&'static str>,
}
impl Interner {
    // Seeds the interner so that `init[i]` is interned as `Symbol::new(i)`.
    // This is how the pre-generated keyword/symbol lists get fixed indices.
    fn prefill(init: &[&'static str]) -> Self {
        Interner(Lock::new(InternerInner {
            strings: init.into(),
            names: init.iter().copied().zip((0..).map(Symbol::new)).collect(),
            ..Default::default()
        }))
    }
    // Returns the existing `Symbol` for `string`, or allocates a new one.
    #[inline]
    fn intern(&self, string: &str) -> Symbol {
        let mut inner = self.0.lock();
        if let Some(&name) = inner.names.get(string) {
            return name;
        }
        // New entry: its index is the current number of interned strings.
        let name = Symbol::new(inner.strings.len() as u32);
        // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena,
        // and immediately convert the clone back to `&[u8], all because there
        // is no `inner.arena.alloc_str()` method. This is clearly safe.
        let string: &str =
            unsafe { str::from_utf8_unchecked(inner.arena.alloc_slice(string.as_bytes())) };
        // SAFETY: we can extend the arena allocation to `'static` because we
        // only access these while the arena is still alive.
        let string: &'static str = unsafe { &*(string as *const str) };
        inner.strings.push(string);
        // This second hash table lookup can be avoided by using `RawEntryMut`,
        // but this code path isn't hot enough for it to be worth it. See
        // #91445 for details.
        inner.names.insert(string, name);
        name
    }
    // Get the symbol as a string. `Symbol::as_str()` should be used in
    // preference to this function.
    fn get(&self, symbol: Symbol) -> &str {
        self.0.lock().strings[symbol.0.as_usize()]
    }
}
// This module has a very short name because it's used a lot.
/// This module contains all the defined keyword `Symbol`s.
///
/// Given that `kw` is imported, use them like `kw::keyword_name`.
/// For example `kw::Loop` or `kw::Break`.
pub mod kw {
    pub use super::kw_generated::*;
}
// This module has a very short name because it's used a lot.
/// This module contains all the defined non-keyword `Symbol`s.
///
/// Given that `sym` is imported, use them like `sym::symbol_name`.
/// For example `sym::rustfmt` or `sym::u8`.
pub mod sym {
    use super::Symbol;
    use std::convert::TryInto;
    #[doc(inline)]
    pub use super::sym_generated::*;
    // Used from a macro in `librustc_feature/accepted.rs`
    pub use super::kw::MacroRules as macro_rules;
    /// Get the symbol for an integer.
    ///
    /// The first few non-negative integers each have a static symbol and therefore
    /// are fast.
    pub fn integer<N: TryInto<usize> + Copy + ToString>(n: N) -> Symbol {
        if let Result::Ok(idx) = n.try_into() {
            if idx < 10 {
                // Fast path: digits 0..=9 map onto prefilled symbols
                // starting at `SYMBOL_DIGITS_BASE`.
                return Symbol::new(super::SYMBOL_DIGITS_BASE + idx as u32);
            }
        }
        // Slow path: intern the decimal representation.
        Symbol::intern(&n.to_string())
    }
}
// Keyword classification. The range comparisons below rely on the keyword
// symbols being prefilled into the interner in a fixed, contiguous order
// (e.g. everything from `kw::As` through `kw::While` is a "used keyword"),
// so these checks are cheap integer comparisons.
impl Symbol {
    fn is_special(self) -> bool {
        self <= kw::Underscore
    }
    fn is_used_keyword_always(self) -> bool {
        self >= kw::As && self <= kw::While
    }
    // Edition-dependent keywords; `edition` is a closure because computing
    // the edition can be relatively expensive (see the callers below).
    fn is_used_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool {
        (self >= kw::Async && self <= kw::Dyn) && edition() >= Edition::Edition2018
    }
    fn is_unused_keyword_always(self) -> bool {
        self >= kw::Abstract && self <= kw::Yield
    }
    fn is_unused_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool {
        self == kw::Try && edition() >= Edition::Edition2018
    }
    // True if the symbol cannot be used as a regular identifier.
    pub fn is_reserved(self, edition: impl Copy + FnOnce() -> Edition) -> bool {
        self.is_special()
            || self.is_used_keyword_always()
            || self.is_unused_keyword_always()
            || self.is_used_keyword_conditional(edition)
            || self.is_unused_keyword_conditional(edition)
    }
    /// A keyword or reserved identifier that can be used as a path segment.
    pub fn is_path_segment_keyword(self) -> bool {
        self == kw::Super
            || self == kw::SelfLower
            || self == kw::SelfUpper
            || self == kw::Crate
            || self == kw::PathRoot
            || self == kw::DollarCrate
    }
    /// Returns `true` if the symbol is `true` or `false`.
    pub fn is_bool_lit(self) -> bool {
        self == kw::True || self == kw::False
    }
    /// Returns `true` if this symbol can be a raw identifier.
    pub fn can_be_raw(self) -> bool {
        self != kw::Empty && self != kw::Underscore && !self.is_path_segment_keyword()
    }
}
// Ident-level wrappers: these forward to the `Symbol` predicates above,
// supplying the ident's span edition where the check is edition-dependent.
impl Ident {
    // Returns `true` for reserved identifiers used internally for elided lifetimes,
    // unnamed method parameters, crate root module, error recovery etc.
    pub fn is_special(self) -> bool {
        self.name.is_special()
    }
    /// Returns `true` if the token is a keyword used in the language.
    pub fn is_used_keyword(self) -> bool {
        // Note: `span.edition()` is relatively expensive, don't call it unless necessary.
        self.name.is_used_keyword_always()
            || self.name.is_used_keyword_conditional(|| self.span.edition())
    }
    /// Returns `true` if the token is a keyword reserved for possible future use.
    pub fn is_unused_keyword(self) -> bool {
        // Note: `span.edition()` is relatively expensive, don't call it unless necessary.
        self.name.is_unused_keyword_always()
            || self.name.is_unused_keyword_conditional(|| self.span.edition())
    }
    /// Returns `true` if the token is either a special identifier or a keyword.
    pub fn is_reserved(self) -> bool {
        // Note: `span.edition()` is relatively expensive, don't call it unless necessary.
        self.name.is_reserved(|| self.span.edition())
    }
    /// A keyword or reserved identifier that can be used as a path segment.
    pub fn is_path_segment_keyword(self) -> bool {
        self.name.is_path_segment_keyword()
    }
    /// We see this identifier in a normal identifier position, like variable name or a type.
    /// How was it written originally? Did it use the raw form? Let's try to guess.
    pub fn is_raw_guess(self) -> bool {
        self.name.can_be_raw() && self.is_reserved()
    }
}
| 26.238564 | 100 | 0.569222 |
1e81061310e5ee90c91f7d538e55a8ebdbafcb51 | 5,193 | use url::Url;
use crate::discovery::errors::LookupError;
use crate::discovery::GetLookupRequestParam;
use crate::discovery::request::utils::validate_lookup_response;
use crate::discovery::response::*;
use crate::message::codec::Message;
use crate::message::errors::server_error;
use crate::message::proto;
use crate::message::proto::{CommandLookupTopicResponse, CommandPartitionedTopicMetadataResponse};
use crate::message::proto::command_lookup_topic_response;
use crate::message::proto::command_partitioned_topic_metadata_response;
/// Lookup request asking the broker for a topic's partition count.
#[derive(Clone)]
pub struct GetPartitionTopicMetadata {
    /// Fully qualified topic name (e.g. `persistent://tenant/ns/topic`).
    pub topic: String
}
impl GetLookupRequestParam for GetPartitionTopicMetadata {
    type ReturnedVal = PartitionNumbersResponse;
    type ExtractedVal = CommandPartitionedTopicMetadataResponse;
    /// Builds the `PartitionedMetadata` wire command for this request.
    fn into_message(self, request_id: u64) -> Message {
        let Self { topic } = self;
        Message {
            command: proto::BaseCommand {
                type_: proto::base_command::Type::PartitionedMetadata as i32,
                partition_metadata: Some(proto::CommandPartitionedTopicMetadata {
                    topic,
                    request_id,
                    ..Default::default()
                }),
                ..Default::default()
            },
            payload: None,
        }
    }
    /// Pulls the partition-metadata payload out of a raw broker message.
    fn extract(response: Message) -> Option<Self::ExtractedVal> {
        response.command.partition_metadata_response
    }
    /// Validates the response code and wraps the payload.
    ///
    /// # Errors
    /// Returns `LookupError::Query` when the broker reported a failure code,
    /// or when the partition count is missing from an otherwise-successful
    /// response (the broker's error field, if any, is forwarded).
    fn map(extracted_val: Self::ExtractedVal) -> Result<Self::ReturnedVal, LookupError> {
        // The response code is a `Copy` scalar; no need for the previous
        // `.clone()`, nor for the `let _ = { ... }` wrapper around `?`.
        validate_lookup_response(
            extracted_val.response,
            |code| (command_partitioned_topic_metadata_response::LookupType::Failed as i32) == code,
        )?;
        // Check the field directly instead of cloning the whole response
        // struct just to inspect one `Option`.
        if extracted_val.partitions.is_none() {
            Err(LookupError::Query(extracted_val.error.and_then(server_error)))
        } else {
            Ok(PartitionNumbersResponse(extracted_val))
        }
    }
}
/// Lookup request asking which broker serves a given topic.
#[derive(Clone)]
pub struct LookupTopic {
    /// Fully qualified topic name.
    pub topic: String,
    /// Whether this lookup targets the authoritative broker (set when
    /// re-issuing the request after a redirect).
    pub authoritative: bool
}
impl GetLookupRequestParam for LookupTopic {
    type ReturnedVal = LookupTopicResponse;
    type ExtractedVal = CommandLookupTopicResponse;
    /// Builds the `Lookup` wire command for this request.
    fn into_message(self, request_id: u64) -> Message {
        let Self { topic, authoritative } = self;
        Message {
            command: proto::BaseCommand {
                type_: proto::base_command::Type::Lookup as i32,
                lookup_topic: Some(proto::CommandLookupTopic {
                    topic,
                    request_id,
                    authoritative: Some(authoritative),
                    ..Default::default()
                }),
                ..Default::default()
            },
            payload: None,
        }
    }
    /// Pulls the lookup payload out of a raw broker message.
    fn extract(response: Message) -> Option<Self::ExtractedVal> {
        response.command.lookup_topic_response
    }
    /// Validates the response and parses the broker URLs.
    ///
    /// # Errors
    /// * `LookupError::NotFound` when no broker service URL is present.
    /// * `LookupError::Query` / `ServiceNotReady` for broker failure codes.
    /// * A parse error when either broker URL is malformed.
    fn map(extracted_val: Self::ExtractedVal) -> Result<Self::ReturnedVal, LookupError> {
        // The response code is a `Copy` scalar; the previous `.clone()` was redundant.
        validate_lookup_response(
            extracted_val.response,
            |code| (command_lookup_topic_response::LookupType::Failed as i32) == code,
        )?;
        let proxy_through_service_url = extracted_val.proxy_through_service_url.unwrap_or(false);
        let is_authoritative = extracted_val.authoritative.unwrap_or(false);
        let redirect =
            extracted_val.response == Some(command_lookup_topic_response::LookupType::Redirect as i32);
        // Bind the URL by reference instead of `is_none()` + `clone().unwrap()`.
        match extracted_val.broker_service_url {
            None => Err(LookupError::NotFound("The broker service url is not found!".to_string())),
            Some(ref url) => {
                let broker_url = Url::parse(url).map_err(LookupError::from)?;
                let broker_url_tls = match extracted_val.broker_service_url_tls.as_deref() {
                    Some(tls_url) => Some(Url::parse(tls_url).map_err(LookupError::from)?),
                    None => None
                };
                Ok(LookupTopicResponse {
                    broker_url,
                    broker_url_tls,
                    proxy_through_service: proxy_through_service_url,
                    redirect,
                    is_authoritative
                })
            }
        }
    }
}
mod utils {
    use crate::discovery::errors::LookupError;
    use crate::message::errors::server_error;
    use crate::message::proto;
    /// Checks a lookup response code and converts failures into `LookupError`s.
    ///
    /// `is_error` decides whether `code` denotes a failure for the calling
    /// command type (each command uses its own response-code enum).
    /// Returns `Ok(())` for any code that is present and not an error.
    pub fn validate_lookup_response(
        code: Option<i32>,
        is_error: impl FnOnce(i32) -> bool,
    ) -> Result<(), LookupError> {
        match code {
            Some(code) if is_error(code) => {
                if let Some(err) = server_error(code) {
                    if err == proto::ServerError::ServiceNotReady {
                        // `ServiceNotReady` gets its own variant so callers
                        // can distinguish it from other query failures.
                        Err(LookupError::ServiceNotReady)
                    } else {
                        Err(LookupError::Query(Some(err)))
                    }
                } else {
                    // Error code with no known `ServerError` mapping.
                    Err(LookupError::Query(None))
                }
            },
            None => Err(LookupError::Unexpected("Receive none response code!".to_string())),
            _ => Ok(())
        }
    }
} | 34.164474 | 114 | 0.595802 |
b9140327e0670284c44e101ebf9d063aead59bfa | 1,999 | use std::error::Error;
use std::net::IpAddr;
use console::Style;
use dialoguer::{theme::ColorfulTheme, Confirm, Input, Select};
/// Answers collected by the interactive setup wizard (see `init_config`).
#[derive(Debug)]
#[allow(dead_code)]
struct Config {
    /// Address to bind to (defaults to 127.0.0.1 in the wizard).
    interface: IpAddr,
    /// Hostname entered by the user.
    hostname: String,
    /// `true` when TLS material is provisioned automatically via ACME.
    use_acme: bool,
    /// Path to the TLS private key; `None` when TLS is disabled.
    private_key: Option<String>,
    /// Path to the TLS certificate; `None` when TLS is disabled.
    cert: Option<String>,
}
/// Runs the interactive setup wizard and collects a `Config`.
///
/// Returns `Ok(None)` when the user declines to continue, and an error if
/// any terminal interaction fails.
fn init_config() -> Result<Option<Config>, Box<dyn Error>> {
    let theme = ColorfulTheme {
        values_style: Style::new().yellow().dim(),
        ..ColorfulTheme::default()
    };
    println!("Welcome to the setup wizard");
    if !Confirm::with_theme(&theme)
        .with_prompt("Do you want to continue?")
        .interact()?
    {
        return Ok(None);
    }
    let interface = Input::with_theme(&theme)
        .with_prompt("Interface")
        .default("127.0.0.1".parse().unwrap())
        .interact()?;
    let hostname = Input::with_theme(&theme)
        .with_prompt("Hostname")
        .interact()?;
    // Index into the items below: 0 = ACME, 1 = manual paths, 2 = no TLS.
    let tls = Select::with_theme(&theme)
        .with_prompt("Configure TLS")
        .default(0)
        .item("automatic with ACME")
        .item("manual")
        .item("no")
        .interact()?;
    let (private_key, cert, use_acme) = match tls {
        // ACME: fixed file names, key material managed automatically.
        0 => (Some("acme.pkey".into()), Some("acme.cert".into()), true),
        // Manual: prompt for both file paths.
        1 => (
            Some(
                Input::with_theme(&theme)
                    .with_prompt("  Path to private key")
                    .interact()?,
            ),
            Some(
                Input::with_theme(&theme)
                    .with_prompt("  Path to certificate")
                    .interact()?,
            ),
            false,
        ),
        // No TLS.
        _ => (None, None, false),
    };
    Ok(Some(Config {
        hostname,
        interface,
        private_key,
        cert,
        use_acme,
    }))
}
/// Entry point: run the wizard and report the outcome on stdout.
fn main() {
    let outcome = init_config();
    match outcome {
        Err(err) => println!("error: {}", err),
        Ok(Some(config)) => println!("{:#?}", config),
        Ok(None) => println!("Aborted."),
    }
}
| 24.378049 | 72 | 0.511256 |
0376a3f2ad0d27bb7eca87331447db36aa8329ef | 4,091 | use std::cell::RefCell;
use std::fmt::Display;
use std::rc::Rc;
/// Shared, mutable handle to a tree node.
type NodeRef<T> = Rc<RefCell<Node<T>>>;
/// A binary search tree over values of type `T`.
struct BinaryTree<T> {
    head: Option<NodeRef<T>>,
}
/// A single node with optional left/right children.
struct Node<T> {
    data: T,
    left: Option<NodeRef<T>>,
    right: Option<NodeRef<T>>,
}
impl<T> BinaryTree<T>
where
    T: std::cmp::PartialEq,
    T: std::cmp::PartialOrd,
    T: std::marker::Copy,
{
    /// Creates an empty tree.
    fn new() -> Self {
        Self { head: None }
    }
    /// Inserts `value` using BST ordering (duplicates go right) and
    /// returns a handle to the newly created node.
    fn insert(&mut self, value: T) -> NodeRef<T> {
        let node = Rc::new(RefCell::new(Node {
            data: value,
            left: None,
            right: None,
        }));
        match self.head.clone() {
            None => {
                self.head = Some(node.clone());
                node
            }
            Some(mut root) => self.insert_at(&mut root, node),
        }
    }
    /// Walks down from `parent_node` and links `new_node` into the first
    /// empty slot that respects BST ordering (smaller values go left).
    fn insert_at(&mut self, parent_node: &mut NodeRef<T>, new_node: NodeRef<T>) -> NodeRef<T> {
        let mut current = parent_node.clone();
        loop {
            let next = {
                let mut slot = current.borrow_mut();
                let go_left = new_node.borrow().data < slot.data;
                let child = if go_left { &mut slot.left } else { &mut slot.right };
                match child {
                    Some(existing) => existing.clone(),
                    None => {
                        *child = Some(new_node.clone());
                        return new_node;
                    }
                }
            };
            current = next;
        }
    }
    /// Pre-order traversal (node, left subtree, right subtree) below
    /// `parent_node`, calling `f` on every node handle.
    fn visit_from<F>(&self, parent_node: &NodeRef<T>, f: &mut F)
    where
        F: FnMut(&NodeRef<T>),
    {
        f(parent_node);
        let node = parent_node.borrow();
        if let Some(child) = node.left.as_ref() {
            self.visit_from(child, f);
        }
        if let Some(child) = node.right.as_ref() {
            self.visit_from(child, f);
        }
    }
    /// Pre-order traversal over the whole tree; no-op when empty.
    fn visit_all<F>(&self, mut f: F)
    where
        F: FnMut(&NodeRef<T>),
    {
        if let Some(root) = self.head.as_ref() {
            self.visit_from(root, &mut f)
        }
    }
    /// Inserts a sorted slice so the resulting tree is of minimal height:
    /// the middle element becomes the subtree root, then both halves are
    /// added recursively; slices of length <= 2 are inserted in order.
    fn add_vector(&mut self, arr: &[T]) {
        if arr.len() > 2 {
            let middle = arr.len() / 2;
            self.insert(arr[middle]);
            self.add_vector(&arr[..middle]);
            self.add_vector(&arr[middle + 1..]);
        } else {
            for &value in arr {
                self.insert(value);
            }
        }
    }
    /// Depth of the deepest node under `node`, where `node` itself is at
    /// depth `height`.
    fn height_inner(&self, node: &NodeRef<T>, height: usize) -> usize {
        let node = node.borrow();
        let left = node
            .left
            .as_ref()
            .map_or(height, |child| self.height_inner(child, height + 1));
        let right = node
            .right
            .as_ref()
            .map_or(height, |child| self.height_inner(child, height + 1));
        left.max(right)
    }
    /// Number of nodes on the longest root-to-leaf path (0 when empty).
    fn height(&self) -> usize {
        match self.head.as_ref() {
            Some(root) => self.height_inner(root, 1),
            None => 0,
        }
    }
}
/// Renders the tree as `[a, b, c, ]` in pre-order.
///
/// The values are first buffered into a `String` (writing to a `String`
/// cannot fail), so no `.unwrap()` is needed inside the visitor closure —
/// the original panicked on a formatter error instead of propagating it.
/// Any real formatter error now surfaces from the single final `write!`.
impl<T: Display> Display for BinaryTree<T>
where
    T: std::cmp::PartialEq,
    T: std::cmp::PartialOrd,
    T: std::marker::Copy,
{
    fn fmt(&self, w: &mut std::fmt::Formatter) -> std::result::Result<(), std::fmt::Error> {
        use std::fmt::Write as _;
        let mut body = String::new();
        self.visit_all(|v| {
            // Writing into a String is infallible; the result can be ignored.
            let _ = write!(body, "{}, ", v.borrow().data);
        });
        write!(w, "[{}]", body)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Adding 10 sorted values via `add_vector` should produce a tree of
    // minimal height: 4 levels for 10 nodes.
    #[test]
    fn test_minimal_tree() {
        let mut binary_tree = BinaryTree::<i32>::new();
        let arr: Vec<i32> = (0..10).collect();
        binary_tree.add_vector(&arr);
        assert_eq!(binary_tree.height(), 4);
    }
}
/// Demo entry point: build a balanced tree from 0..10 and compute its height.
fn main() {
    let mut tree = BinaryTree::<i32>::new();
    let values: Vec<i32> = (0..10).collect();
    tree.add_vector(&values);
    // Height is computed purely for demonstration; the result is discarded.
    tree.height();
}
| 27.456376 | 95 | 0.519922 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.