hexsha (stringlengths 40-40) | size (int64 4-1.05M) | content (stringlengths 4-1.05M) | avg_line_length (float64 1.33-100) | max_line_length (int64 1-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
1dd47140bfa23b424d135ca6a24c6f5788ea2077 | 1,966 | use dkfs::replies::*;
use dkfs::*;
use fuse::*;
use libc::*;
use time::Timespec;
pub fn file_type(mode: FileMode) -> FileType {
match mode & FileMode::FILE_TYPE_MASK {
FileMode::REGULAR_FILE => FileType::RegularFile,
FileMode::SOCKET => FileType::Socket,
FileMode::DIRECTORY => FileType::Directory,
FileMode::SYMBOLIC_LINK => FileType::Symlink,
FileMode::CHARACTER_DEVICE => FileType::CharDevice,
FileMode::BLOCK_DEVICE => FileType::BlockDevice,
FileMode::FIFO => FileType::NamedPipe,
_ => unreachable!(),
}
}
pub fn permission(mode: FileMode) -> u16 {
0o7777 & mode.bits() as u16
}
pub fn timespec(t: DkTimespec) -> Timespec {
Timespec {
sec: t.sec,
nsec: t.nsec as i32,
}
}
pub fn flags(flags: Flags) -> u32 {
let access_flags = flags & Flags::ACCESS_MODE_MASK;
let res = match access_flags {
Flags::READ_ONLY => O_RDONLY,
Flags::WRITE_ONLY => O_WRONLY,
Flags::READ_WRITE => O_RDWR,
_ => unreachable!(),
};
res as u32
}
pub fn file_attr(stat: &Stat) -> FileAttr {
FileAttr {
ino: stat.ino,
size: stat.size,
blocks: stat.blocks,
atime: timespec(stat.atime),
mtime: timespec(stat.mtime),
ctime: timespec(stat.ctime),
crtime: timespec(stat.crtime),
kind: file_type(stat.mode),
perm: permission(stat.mode),
nlink: stat.nlink as u32,
uid: stat.uid,
gid: stat.gid,
rdev: stat.rdev as u32,
flags: 0,
}
}
pub fn errno(error: &DkError) -> c_int {
use DkError::*;
match error {
IoError(_) | Corrupted(_) | Other(_) => EIO,
Exhausted => EDQUOT,
NotSupported => ENOSYS,
NotFound => ENOENT,
NotEmpty => ENOTEMPTY,
NotDirectory => ENOTDIR,
AlreadyExists => EEXIST,
Invalid(_) => EINVAL,
NameTooLong => ENAMETOOLONG,
}
}
| 26.213333 | 59 | 0.580366 |
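// The dkfs helpers above work by masking mode bits: FILE_TYPE_MASK selects the
// file-type bits and 0o7777 selects the permission bits. A minimal, self-contained
// sketch of that masking idea using plain u32 values follows; the constants mirror
// the traditional Unix S_IF* encoding and are illustrative stand-ins, not
// necessarily dkfs's actual values.
const FILE_TYPE_MASK: u32 = 0o170000; // selects the file-type bits
const REGULAR_FILE: u32 = 0o100000;
const DIRECTORY: u32 = 0o040000;

fn kind(mode: u32) -> &'static str {
    match mode & FILE_TYPE_MASK {
        REGULAR_FILE => "regular file",
        DIRECTORY => "directory",
        _ => "other",
    }
}

fn permission(mode: u32) -> u16 {
    // Keep only the rwx/setuid/setgid/sticky bits, as the helper above does.
    (mode & 0o7777) as u16
}

fn main() {
    let mode = 0o100644; // a regular file with rw-r--r--
    assert_eq!(kind(mode), "regular file");
    assert_eq!(permission(mode), 0o644);
    println!("{} with permissions {:o}", kind(mode), permission(mode));
}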
d7587a6f7f1d801e02ca11f934eb8a2af88ea3d5 | 584 | #[macro_use]
extern crate pest_derive;
mod error;
mod eval;
mod lenv;
mod lval;
mod parse;
mod run;
#[cfg(test)]
mod test;
use crate::run::run;
use std::{path::PathBuf, process::exit};
use structopt::StructOpt;
#[derive(StructOpt, Debug)]
#[structopt(name = "blispr")]
pub struct Opt {
/// debug mode
#[structopt(short = "d", long = "debug")]
debug: bool,
/// input file
#[structopt(short = "i", long = "input")]
input: Option<PathBuf>,
}
fn main() {
if let Err(e) = run(Opt::from_args()) {
eprintln!("Error: {}", e);
exit(1);
}
}
| 16.685714 | 45 | 0.589041 |
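// The Opt struct above gets its command-line parser from structopt's derive. As a
// quick sketch of how those flags map onto the struct, StructOpt::from_iter parses
// an explicit argument list (handy in tests). The struct shape is copied from the
// row above; the input file name is a hypothetical example.
use std::path::PathBuf;
use structopt::StructOpt;

#[derive(StructOpt, Debug)]
#[structopt(name = "blispr")]
struct Opt {
    /// debug mode
    #[structopt(short = "d", long = "debug")]
    debug: bool,
    /// input file
    #[structopt(short = "i", long = "input")]
    input: Option<PathBuf>,
}

fn main() {
    // Equivalent to running: blispr -d -i prelude.blispr
    let opt = Opt::from_iter(vec!["blispr", "-d", "-i", "prelude.blispr"]);
    assert!(opt.debug);
    assert_eq!(opt.input, Some(PathBuf::from("prelude.blispr")));
    println!("{:?}", opt);
}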
1c03c2f2a96c584c4b538b61af7691e6a7a9ae64 | 17,448 | #![allow(non_snake_case, non_upper_case_globals)]
#![allow(non_camel_case_types)]
//! USBPHYC
use crate::{RORegister, RWRegister};
#[cfg(not(feature = "nosync"))]
use core::marker::PhantomData;
/// This register is used to control the PLL of the HS PHY.
pub mod USBPHYC_PLL {
/// PLLNDIV
pub mod PLLNDIV {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (7 bits: 0x7f << 0)
pub const mask: u32 = 0x7f << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLODF
pub mod PLLODF {
/// Offset (7 bits)
pub const offset: u32 = 7;
/// Mask (3 bits: 0b111 << 7)
pub const mask: u32 = 0b111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLFRACIN
pub mod PLLFRACIN {
/// Offset (10 bits)
pub const offset: u32 = 10;
/// Mask (16 bits: 0xffff << 10)
pub const mask: u32 = 0xffff << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLEN
pub mod PLLEN {
/// Offset (26 bits)
pub const offset: u32 = 26;
/// Mask (1 bit: 1 << 26)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLSTRB
pub mod PLLSTRB {
/// Offset (27 bits)
pub const offset: u32 = 27;
/// Mask (1 bit: 1 << 27)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLSTRBYP
pub mod PLLSTRBYP {
/// Offset (28 bits)
pub const offset: u32 = 28;
/// Mask (1 bit: 1 << 28)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLFRACCTL
pub mod PLLFRACCTL {
/// Offset (29 bits)
pub const offset: u32 = 29;
/// Mask (1 bit: 1 << 29)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLDITHEN0
pub mod PLLDITHEN0 {
/// Offset (30 bits)
pub const offset: u32 = 30;
/// Mask (1 bit: 1 << 30)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PLLDITHEN1
pub mod PLLDITHEN1 {
/// Offset (31 bits)
pub const offset: u32 = 31;
/// Mask (1 bit: 1 << 31)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// This register is used to control the switch between controllers for the HS PHY.
pub mod USBPHYC_MISC {
/// SWITHOST
pub mod SWITHOST {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// PPCKDIS
pub mod PPCKDIS {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (2 bits: 0b11 << 1)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// This register is used to control the tune interface of the HS PHY, port #x.
pub mod USBPHYC_TUNE1 {
/// INCURREN
pub mod INCURREN {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (1 bit: 1 << 0)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// INCURRINT
pub mod INCURRINT {
/// Offset (1 bits)
pub const offset: u32 = 1;
/// Mask (1 bit: 1 << 1)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// LFSCAPEN
pub mod LFSCAPEN {
/// Offset (2 bits)
pub const offset: u32 = 2;
/// Mask (1 bit: 1 << 2)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVSLEW
pub mod HSDRVSLEW {
/// Offset (3 bits)
pub const offset: u32 = 3;
/// Mask (1 bit: 1 << 3)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVDCCUR
pub mod HSDRVDCCUR {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (1 bit: 1 << 4)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVDCLEV
pub mod HSDRVDCLEV {
/// Offset (5 bits)
pub const offset: u32 = 5;
/// Mask (1 bit: 1 << 5)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVCURINCR
pub mod HSDRVCURINCR {
/// Offset (6 bits)
pub const offset: u32 = 6;
/// Mask (1 bit: 1 << 6)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// FSDRVRFADJ
pub mod FSDRVRFADJ {
/// Offset (7 bits)
pub const offset: u32 = 7;
/// Mask (1 bit: 1 << 7)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVRFRED
pub mod HSDRVRFRED {
/// Offset (8 bits)
pub const offset: u32 = 8;
/// Mask (1 bit: 1 << 8)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVCHKITRM
pub mod HSDRVCHKITRM {
/// Offset (9 bits)
pub const offset: u32 = 9;
/// Mask (4 bits: 0b1111 << 9)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSDRVCHKZTRM
pub mod HSDRVCHKZTRM {
/// Offset (13 bits)
pub const offset: u32 = 13;
/// Mask (2 bits: 0b11 << 13)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// OTPCOMP
pub mod OTPCOMP {
/// Offset (15 bits)
pub const offset: u32 = 15;
/// Mask (5 bits: 0b11111 << 15)
pub const mask: u32 = 0b11111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// SQLCHCTL
pub mod SQLCHCTL {
/// Offset (20 bits)
pub const offset: u32 = 20;
/// Mask (2 bits: 0b11 << 20)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HDRXGNEQEN
pub mod HDRXGNEQEN {
/// Offset (22 bits)
pub const offset: u32 = 22;
/// Mask (1 bit: 1 << 22)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSRXOFF
pub mod HSRXOFF {
/// Offset (23 bits)
pub const offset: u32 = 23;
/// Mask (2 bits: 0b11 << 23)
pub const mask: u32 = 0b11 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// HSFALLPREEM
pub mod HSFALLPREEM {
/// Offset (25 bits)
pub const offset: u32 = 25;
/// Mask (1 bit: 1 << 25)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// SHTCCTCTLPROT
pub mod SHTCCTCTLPROT {
/// Offset (26 bits)
pub const offset: u32 = 26;
/// Mask (1 bit: 1 << 26)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// STAGSEL
pub mod STAGSEL {
/// Offset (27 bits)
pub const offset: u32 = 27;
/// Mask (1 bit: 1 << 27)
pub const mask: u32 = 1 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
/// This register is used to control the tune interface of the HS PHY, port #x.
pub mod USBPHYC_TUNE2 {
pub use super::USBPHYC_TUNE1::FSDRVRFADJ;
pub use super::USBPHYC_TUNE1::HDRXGNEQEN;
pub use super::USBPHYC_TUNE1::HSDRVCHKITRM;
pub use super::USBPHYC_TUNE1::HSDRVCHKZTRM;
pub use super::USBPHYC_TUNE1::HSDRVCURINCR;
pub use super::USBPHYC_TUNE1::HSDRVDCCUR;
pub use super::USBPHYC_TUNE1::HSDRVDCLEV;
pub use super::USBPHYC_TUNE1::HSDRVRFRED;
pub use super::USBPHYC_TUNE1::HSDRVSLEW;
pub use super::USBPHYC_TUNE1::HSFALLPREEM;
pub use super::USBPHYC_TUNE1::HSRXOFF;
pub use super::USBPHYC_TUNE1::INCURREN;
pub use super::USBPHYC_TUNE1::INCURRINT;
pub use super::USBPHYC_TUNE1::LFSCAPEN;
pub use super::USBPHYC_TUNE1::OTPCOMP;
pub use super::USBPHYC_TUNE1::SHTCCTCTLPROT;
pub use super::USBPHYC_TUNE1::SQLCHCTL;
pub use super::USBPHYC_TUNE1::STAGSEL;
}
/// This register defines the version of this IP.
pub mod USBPHYC_VERR {
/// MINREV
pub mod MINREV {
/// Offset (0 bits)
pub const offset: u32 = 0;
/// Mask (4 bits: 0b1111 << 0)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
/// MAJREV
pub mod MAJREV {
/// Offset (4 bits)
pub const offset: u32 = 4;
/// Mask (4 bits: 0b1111 << 4)
pub const mask: u32 = 0b1111 << offset;
/// Read-only values (empty)
pub mod R {}
/// Write-only values (empty)
pub mod W {}
/// Read-write values (empty)
pub mod RW {}
}
}
#[repr(C)]
pub struct RegisterBlock {
/// This register is used to control the PLL of the HS PHY.
pub USBPHYC_PLL: RWRegister<u32>,
_reserved1: [u32; 1],
/// This register is used to control the switch between controllers for the HS PHY.
pub USBPHYC_MISC: RWRegister<u32>,
_reserved2: [u32; 64],
/// This register is used to control the tune interface of the HS PHY, port #x.
pub USBPHYC_TUNE1: RWRegister<u32>,
_reserved3: [u32; 63],
/// This register is used to control the tune interface of the HS PHY, port #x.
pub USBPHYC_TUNE2: RWRegister<u32>,
_reserved4: [u32; 891],
/// This register defines the version of this IP.
pub USBPHYC_VERR: RORegister<u32>,
}
pub struct ResetValues {
pub USBPHYC_PLL: u32,
pub USBPHYC_MISC: u32,
pub USBPHYC_TUNE1: u32,
pub USBPHYC_TUNE2: u32,
pub USBPHYC_VERR: u32,
}
#[cfg(not(feature = "nosync"))]
pub struct Instance {
pub(crate) addr: u32,
pub(crate) _marker: PhantomData<*const RegisterBlock>,
}
#[cfg(not(feature = "nosync"))]
impl ::core::ops::Deref for Instance {
type Target = RegisterBlock;
#[inline(always)]
fn deref(&self) -> &RegisterBlock {
unsafe { &*(self.addr as *const _) }
}
}
#[cfg(feature = "rtic")]
unsafe impl Send for Instance {}
/// Access functions for the USBPHYC peripheral instance
pub mod USBPHYC {
use super::ResetValues;
#[cfg(not(feature = "nosync"))]
use super::Instance;
#[cfg(not(feature = "nosync"))]
const INSTANCE: Instance = Instance {
addr: 0x5a006000,
_marker: ::core::marker::PhantomData,
};
/// Reset values for each field in USBPHYC
pub const reset: ResetValues = ResetValues {
USBPHYC_PLL: 0xC0000000,
USBPHYC_MISC: 0x00000000,
USBPHYC_TUNE1: 0x04070004,
USBPHYC_TUNE2: 0x04070004,
USBPHYC_VERR: 0x00000010,
};
#[cfg(not(feature = "nosync"))]
#[allow(renamed_and_removed_lints)]
#[allow(private_no_mangle_statics)]
#[no_mangle]
static mut USBPHYC_TAKEN: bool = false;
/// Safe access to USBPHYC
///
/// This function returns `Some(Instance)` if this instance is not
/// currently taken, and `None` if it is. This ensures that if you
/// do get `Some(Instance)`, you are ensured unique access to
/// the peripheral and there cannot be data races (unless other
/// code uses `unsafe`, of course). You can then pass the
/// `Instance` around to other functions as required. When you're
/// done with it, you can call `release(instance)` to return it.
///
/// `Instance` itself dereferences to a `RegisterBlock`, which
/// provides access to the peripheral's registers.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn take() -> Option<Instance> {
external_cortex_m::interrupt::free(|_| unsafe {
if USBPHYC_TAKEN {
None
} else {
USBPHYC_TAKEN = true;
Some(INSTANCE)
}
})
}
/// Release exclusive access to USBPHYC
///
/// This function allows you to return an `Instance` so that it
/// is available to `take()` again. This function will panic if
/// you return a different `Instance` or if this instance is not
/// already taken.
#[cfg(not(feature = "nosync"))]
#[inline]
pub fn release(inst: Instance) {
external_cortex_m::interrupt::free(|_| unsafe {
if USBPHYC_TAKEN && inst.addr == INSTANCE.addr {
USBPHYC_TAKEN = false;
} else {
panic!("Released a peripheral which was not taken");
}
});
}
/// Unsafely steal USBPHYC
///
/// This function is similar to take() but forcibly takes the
    /// Instance, marking it as taken regardless of its previous
/// state.
#[cfg(not(feature = "nosync"))]
#[inline]
pub unsafe fn steal() -> Instance {
USBPHYC_TAKEN = true;
INSTANCE
}
}
/// Raw pointer to USBPHYC
///
/// Dereferencing this is unsafe because you are not ensured unique
/// access to the peripheral, so you may encounter data races with
/// other users of this peripheral. It is up to you to ensure you
/// will not cause data races.
///
/// This constant is provided for ease of use in unsafe code: you can
/// simply call for example `write_reg!(gpio, GPIOA, ODR, 1);`.
pub const USBPHYC: *const RegisterBlock = 0x5a006000 as *const _;
| 28.051447 | 87 | 0.527625 |
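// Every field module above exposes an `offset` and a `mask`. The usual way such
// constants are consumed is a read-modify-write: clear the field with the mask,
// then OR in the new value shifted by the offset. This sketch applies that pattern
// to a plain u32 using the PLLNDIV/PLLODF values from above, so it runs without
// the peripheral crate; the field values written are arbitrary examples.
const PLLNDIV_OFFSET: u32 = 0;
const PLLNDIV_MASK: u32 = 0x7f << PLLNDIV_OFFSET;
const PLLODF_OFFSET: u32 = 7;
const PLLODF_MASK: u32 = 0b111 << PLLODF_OFFSET;

/// Replace one field inside a register value, leaving the other bits untouched.
fn modify_field(reg: u32, mask: u32, offset: u32, value: u32) -> u32 {
    (reg & !mask) | ((value << offset) & mask)
}

fn main() {
    let mut pll = 0u32;
    pll = modify_field(pll, PLLNDIV_MASK, PLLNDIV_OFFSET, 25); // PLLNDIV = 25
    pll = modify_field(pll, PLLODF_MASK, PLLODF_OFFSET, 2); // PLLODF = 2
    assert_eq!(pll & PLLNDIV_MASK, 25);
    assert_eq!((pll & PLLODF_MASK) >> PLLODF_OFFSET, 2);
    println!("USBPHYC_PLL would be written as {:#010x}", pll);
}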
3aa94b2eb427972505771942d528623db2cfc89c | 1,843 | use net2::UdpBuilder;
#[cfg(not(windows))]
use net2::unix::UnixUdpBuilderExt;
use std::io;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr, UdpSocket};
use super::MDNS_PORT;
pub enum Inet {}
pub enum Inet6 {}
pub trait AddressFamily {
fn bind() -> io::Result<UdpSocket> {
let addr = SocketAddr::new(Self::any_addr(), MDNS_PORT);
let builder = Self::socket_builder()?;
builder.reuse_address(true)?;
#[cfg(not(windows))]
builder.reuse_port(true)?;
let socket = builder.bind(&addr)?;
Self::join_multicast(&socket)?;
Ok(socket)
}
fn socket_builder() -> io::Result<UdpBuilder>;
fn any_addr() -> IpAddr;
fn mdns_group() -> IpAddr;
fn join_multicast(socket: &UdpSocket) -> io::Result<()>;
fn v6() -> bool;
}
impl AddressFamily for Inet {
fn socket_builder() -> io::Result<UdpBuilder> {
UdpBuilder::new_v4()
}
fn any_addr() -> IpAddr {
IpAddr::V4(Ipv4Addr::new(0,0,0,0))
}
fn mdns_group() -> IpAddr {
IpAddr::V4(Ipv4Addr::new(224,0,0,251))
}
fn join_multicast(socket: &UdpSocket) -> io::Result<()> {
socket.join_multicast_v4(
&Ipv4Addr::new(224,0,0,251),
&Ipv4Addr::new(0,0,0,0),
)
}
fn v6() -> bool {
false
}
}
impl AddressFamily for Inet6 {
fn socket_builder() -> io::Result<UdpBuilder> {
UdpBuilder::new_v6()
}
fn any_addr() -> IpAddr {
IpAddr::V6(Ipv6Addr::new(0,0,0,0,0,0,0,0))
}
fn mdns_group() -> IpAddr {
IpAddr::V6(Ipv6Addr::new(0xff02,0,0,0,0,0,0,0xfb))
}
fn join_multicast(socket: &UdpSocket) -> io::Result<()> {
socket.join_multicast_v6(
&Ipv6Addr::new(0xff02,0,0,0,0,0,0,0xfb),
0
)
}
fn v6() -> bool {
true
}
}
| 25.957746 | 66 | 0.563212 |
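// The AddressFamily impls above boil down to binding a UDP socket and joining the
// mDNS multicast group (224.0.0.251 for IPv4, ff02::fb for IPv6). A std-only
// sketch of the IPv4 half follows; it skips the net2 reuse_address/reuse_port
// setup, so the bind will fail if port 5353 is already in use on the machine.
use std::net::{Ipv4Addr, UdpSocket};

const MDNS_PORT: u16 = 5353;

fn bind_mdns_v4() -> std::io::Result<UdpSocket> {
    let socket = UdpSocket::bind((Ipv4Addr::UNSPECIFIED, MDNS_PORT))?;
    // Join the well-known mDNS group on the default interface.
    socket.join_multicast_v4(&Ipv4Addr::new(224, 0, 0, 251), &Ipv4Addr::UNSPECIFIED)?;
    Ok(socket)
}

fn main() {
    match bind_mdns_v4() {
        Ok(_) => println!("joined the mDNS group on port {}", MDNS_PORT),
        Err(e) => eprintln!("could not bind the mDNS socket: {}", e),
    }
}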
39de484207ea972b4f79a4b4250d263617b542d3 | 13,474 | // This program does assorted benchmarking of rustls.
//
// Note: we don't use any of the standard 'cargo bench', 'test::Bencher',
// etc. because it's unstable at the time of writing.
use std::time::{Duration, Instant};
use std::sync::Arc;
use std::fs;
use std::io::{self, Write};
use std::env;
extern crate rustls;
use rustls::{ClientConfig, ClientSession};
use rustls::{ServerConfig, ServerSession};
use rustls::ServerSessionMemoryCache;
use rustls::ClientSessionMemoryCache;
use rustls::NoServerSessionStorage;
use rustls::NoClientSessionStorage;
use rustls::{NoClientAuth, RootCertStore, AllowAnyAuthenticatedClient};
use rustls::Session;
use rustls::Ticketer;
use rustls::internal::pemfile;
use rustls::internal::msgs::enums::SignatureAlgorithm;
extern crate webpki;
fn duration_nanos(d: Duration) -> f64 {
    // Note: despite the name, this converts the duration to fractional seconds.
    (d.as_secs() as f64) + f64::from(d.subsec_nanos()) / 1e9
}
fn _bench<Fsetup, Ftest, S>(count: usize, name: &'static str, f_setup: Fsetup, f_test: Ftest)
where Fsetup: Fn() -> S,
Ftest: Fn(S)
{
let mut times = Vec::new();
for _ in 0..count {
let state = f_setup();
let start = Instant::now();
f_test(state);
times.push(duration_nanos(Instant::now().duration_since(start)));
}
println!("{}", name);
println!("{:?}", times);
}
fn time<F>(mut f: F) -> f64
where F: FnMut()
{
let start = Instant::now();
f();
let end = Instant::now();
let dur = duration_nanos(end.duration_since(start));
f64::from(dur)
}
fn transfer(left: &mut Session, right: &mut Session) -> f64 {
let mut buf = [0u8; 262144];
let mut read_time = 0f64;
while left.wants_write() {
let sz = left.write_tls(&mut buf.as_mut()).unwrap();
if sz == 0 {
return read_time;
}
let mut offs = 0;
loop {
let start = Instant::now();
offs += right.read_tls(&mut buf[offs..sz].as_ref()).unwrap();
let end = Instant::now();
read_time += f64::from(duration_nanos(end.duration_since(start)));
if sz == offs {
break;
}
}
}
read_time
}
fn drain(d: &mut Session, expect_len: usize) {
let mut left = expect_len;
let mut buf = [0u8; 8192];
loop {
let sz = d.read(&mut buf).unwrap();
left -= sz;
if left == 0 {
break;
}
}
}
#[derive(PartialEq, Clone, Copy)]
enum ClientAuth {
No,
Yes,
}
#[derive(PartialEq, Clone, Copy)]
enum Resumption {
No,
SessionID,
Tickets,
}
impl Resumption {
fn label(&self) -> &'static str {
match *self {
Resumption::No => "no-resume",
Resumption::SessionID => "sessionid",
Resumption::Tickets => "tickets",
}
}
}
// copied from tests/api.rs
#[derive(PartialEq, Clone, Copy)]
enum KeyType {
RSA,
ECDSA
}
impl KeyType {
fn for_suite(suite: &'static rustls::SupportedCipherSuite) -> KeyType {
if suite.sign == SignatureAlgorithm::ECDSA {
KeyType::ECDSA
} else {
KeyType::RSA
}
}
fn path_for(&self, part: &str) -> String {
match self {
KeyType::RSA => format!("test-ca/rsa/{}", part),
KeyType::ECDSA => format!("test-ca/ecdsa/{}", part),
}
}
fn get_chain(&self) -> Vec<rustls::Certificate> {
pemfile::certs(&mut io::BufReader::new(fs::File::open(self.path_for("end.fullchain"))
.unwrap()))
.unwrap()
}
fn get_key(&self) -> rustls::PrivateKey {
pemfile::pkcs8_private_keys(&mut io::BufReader::new(fs::File::open(self.path_for("end.key"))
.unwrap()))
.unwrap()[0]
.clone()
}
fn get_client_chain(&self) -> Vec<rustls::Certificate> {
pemfile::certs(&mut io::BufReader::new(fs::File::open(self.path_for("client.fullchain"))
.unwrap()))
.unwrap()
}
fn get_client_key(&self) -> rustls::PrivateKey {
pemfile::pkcs8_private_keys(&mut io::BufReader::new(fs::File::open(self.path_for("client.key"))
.unwrap()))
.unwrap()[0]
.clone()
}
}
fn make_server_config(version: rustls::ProtocolVersion,
suite: &'static rustls::SupportedCipherSuite,
client_auth: ClientAuth,
resume: Resumption)
-> ServerConfig {
let kt = KeyType::for_suite(suite);
let client_auth = match client_auth {
ClientAuth::Yes => {
let roots = kt.get_chain();
let mut client_auth_roots = RootCertStore::empty();
for root in roots {
client_auth_roots.add(&root).unwrap();
}
AllowAnyAuthenticatedClient::new(client_auth_roots)
},
ClientAuth::No => {
NoClientAuth::new()
}
};
let mut cfg = ServerConfig::new(client_auth);
cfg.set_single_cert(kt.get_chain(), kt.get_key())
.expect("bad certs/private key?");
if resume == Resumption::SessionID {
cfg.set_persistence(ServerSessionMemoryCache::new(128));
} else if resume == Resumption::Tickets {
cfg.ticketer = Ticketer::new();
} else {
cfg.set_persistence(Arc::new(NoServerSessionStorage {}));
}
cfg.versions.clear();
cfg.versions.push(version);
cfg
}
fn make_client_config(version: rustls::ProtocolVersion,
suite: &'static rustls::SupportedCipherSuite,
clientauth: ClientAuth,
resume: Resumption)
-> ClientConfig {
let kt = KeyType::for_suite(suite);
let mut cfg = ClientConfig::new();
let mut rootbuf = io::BufReader::new(fs::File::open(kt.path_for("ca.cert")).unwrap());
cfg.root_store.add_pem_file(&mut rootbuf).unwrap();
cfg.ciphersuites.clear();
cfg.ciphersuites.push(suite);
cfg.versions.clear();
cfg.versions.push(version);
if clientauth == ClientAuth::Yes {
cfg.set_single_client_cert(kt.get_client_chain(), kt.get_client_key());
}
if resume != Resumption::No {
cfg.set_persistence(ClientSessionMemoryCache::new(128));
} else {
cfg.set_persistence(Arc::new(NoClientSessionStorage {}));
}
cfg
}
fn bench_handshake(version: rustls::ProtocolVersion,
suite: &'static rustls::SupportedCipherSuite,
clientauth: ClientAuth,
resume: Resumption) {
let client_config = Arc::new(make_client_config(version, suite, clientauth, resume));
let server_config = Arc::new(make_server_config(version, suite, clientauth, resume));
if !suite.usable_for_version(version) {
return;
}
let rounds = if resume == Resumption::No { 512 } else { 4096 };
let mut client_time = 0f64;
let mut server_time = 0f64;
for _ in 0..rounds {
let dns_name = webpki::DNSNameRef::try_from_ascii_str("localhost").unwrap();
let mut client = ClientSession::new(&client_config, dns_name);
let mut server = ServerSession::new(&server_config);
server_time += time(|| {
transfer(&mut client, &mut server);
server.process_new_packets().unwrap()
});
client_time += time(|| {
transfer(&mut server, &mut client);
client.process_new_packets().unwrap()
});
server_time += time(|| {
transfer(&mut client, &mut server);
server.process_new_packets().unwrap()
});
client_time += time(|| {
transfer(&mut server, &mut client);
client.process_new_packets().unwrap()
});
}
println!("handshakes\t{:?}\t{:?}\tclient\t{}\t{}\t{:.2}\thandshake/s",
version,
suite.suite,
if clientauth == ClientAuth::Yes {
"mutual"
} else {
"server-auth"
},
resume.label(),
f64::from(rounds) / client_time);
println!("handshakes\t{:?}\t{:?}\tserver\t{}\t{}\t{:.2}\thandshake/s",
version,
suite.suite,
if clientauth == ClientAuth::Yes {
"mutual"
} else {
"server-auth"
},
resume.label(),
f64::from(rounds) / server_time);
}
fn do_handshake(client: &mut ClientSession, server: &mut ServerSession) {
while server.is_handshaking() || client.is_handshaking() {
transfer(client, server);
server.process_new_packets().unwrap();
transfer(server, client);
client.process_new_packets().unwrap();
}
}
fn bench_bulk(version: rustls::ProtocolVersion, suite: &'static rustls::SupportedCipherSuite,
plaintext_size: u32) {
let client_config =
Arc::new(make_client_config(version, suite, ClientAuth::No, Resumption::No));
let server_config = Arc::new(make_server_config(version, suite, ClientAuth::No, Resumption::No));
if !suite.usable_for_version(version) {
return;
}
let dns_name = webpki::DNSNameRef::try_from_ascii_str("localhost").unwrap();
let mut client = ClientSession::new(&client_config, dns_name);
let mut server = ServerSession::new(&server_config);
do_handshake(&mut client, &mut server);
let mut buf = Vec::new();
buf.resize(plaintext_size as usize, 0u8);
let total_data = if plaintext_size < 8192 {
64 * 1024 * 1024
} else {
1024 * 1024 * 1024
};
let rounds = total_data / plaintext_size;
let mut time_send = 0f64;
let mut time_recv = 0f64;
for _ in 0..rounds {
time_send += time(|| {
server.write_all(&buf).unwrap();
()
});
time_recv += transfer(&mut server, &mut client);
time_recv += time(|| {
client.process_new_packets().unwrap()
});
drain(&mut client, buf.len());
}
let total_mbs = f64::from(plaintext_size * rounds) / (1024. * 1024.);
println!("bulk\t{:?}\t{:?}\tsend\t{:.2}\tMB/s",
version,
suite.suite,
total_mbs / time_send);
println!("bulk\t{:?}\t{:?}\trecv\t{:.2}\tMB/s",
version,
suite.suite,
total_mbs / time_recv);
}
fn lookup_suite(name: &str) -> &'static rustls::SupportedCipherSuite {
for suite in &rustls::ALL_CIPHERSUITES {
if format!("{:?}", suite.suite).to_lowercase() == name.to_lowercase() {
return suite;
}
}
panic!("unknown suite {:?}", name);
}
fn selected_tests(mut args: env::Args) {
let mode = args.next()
.expect("first argument must be mode");
match mode.as_ref() {
"bulk" => {
match args.next() {
Some(suite) => {
let len = args.next()
.map(|arg| arg.parse::<u32>()
.expect("3rd arg must be integer"))
.unwrap_or(1048576);
let suite = lookup_suite(&suite);
bench_bulk(rustls::ProtocolVersion::TLSv1_3, suite, len);
bench_bulk(rustls::ProtocolVersion::TLSv1_2, suite, len);
}
None => {
panic!("bulk needs ciphersuite argument");
}
}
}
"handshake" | "handshake-resume" | "handshake-ticket" => {
match args.next() {
Some(suite) => {
let suite = lookup_suite(&suite);
let resume = if mode == "handshake" {
Resumption::No
} else if mode == "handshake-resume" {
Resumption::SessionID
} else {
Resumption::Tickets
};
bench_handshake(rustls::ProtocolVersion::TLSv1_3, suite, ClientAuth::No, resume);
bench_handshake(rustls::ProtocolVersion::TLSv1_2, suite, ClientAuth::No, resume);
}
None => {
panic!("handshake* needs ciphersuite argument");
}
}
}
_ => {
panic!("unsupported mode {:?}", mode);
}
}
}
fn all_tests() {
for version in &[rustls::ProtocolVersion::TLSv1_3, rustls::ProtocolVersion::TLSv1_2] {
for suite in &rustls::ALL_CIPHERSUITES {
bench_bulk(*version, suite, 1024 * 1024);
bench_handshake(*version, suite, ClientAuth::No, Resumption::No);
bench_handshake(*version, suite, ClientAuth::Yes, Resumption::No);
bench_handshake(*version, suite, ClientAuth::No, Resumption::SessionID);
bench_handshake(*version, suite, ClientAuth::Yes, Resumption::SessionID);
bench_handshake(*version, suite, ClientAuth::No, Resumption::Tickets);
bench_handshake(*version, suite, ClientAuth::Yes, Resumption::Tickets);
}
}
}
fn main() {
let mut args = env::args();
if args.len() > 1 {
args.next();
selected_tests(args);
} else {
all_tests();
}
}
| 30.762557 | 103 | 0.547722 |
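// The benchmark above is built around a small closure-timing helper: record an
// Instant before and after the work, convert the elapsed Duration to fractional
// seconds, and divide the bytes moved by that time. A self-contained sketch of
// that measurement pattern, timing a dummy workload instead of TLS traffic:
use std::time::{Duration, Instant};

fn duration_secs(d: Duration) -> f64 {
    d.as_secs() as f64 + f64::from(d.subsec_nanos()) / 1e9
}

fn time<F: FnMut()>(mut f: F) -> f64 {
    let start = Instant::now();
    f();
    duration_secs(start.elapsed())
}

fn main() {
    let buf = vec![0u8; 1024 * 1024]; // stand-in for one plaintext record
    let rounds = 64u32;
    let mut checksum = 0u64;
    let secs = time(|| {
        for _ in 0..rounds {
            checksum = checksum.wrapping_add(buf.iter().map(|&b| b as u64).sum::<u64>());
        }
    });
    let total_mb = (buf.len() as f64 * f64::from(rounds)) / (1024.0 * 1024.0);
    println!("dummy bulk rate: {:.2} MB/s (checksum {})", total_mb / secs, checksum);
}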
b9e836e164ca7e257da582ecfecfa725c87b4b8b | 5,583 | // Copyright 2021 Datafuse Labs.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::sync::Arc;
use common_base::tokio;
use common_base::tokio::sync::mpsc;
use common_base::tokio::sync::RwLock;
use common_base::ProgressValues;
use common_base::TrySpawn;
use common_datablocks::DataBlock;
use common_datavalues::DataSchemaRef;
use common_exception::ErrorCode;
use common_exception::Result;
use futures::StreamExt;
use serde::Deserialize;
use serde::Serialize;
use crate::interpreters::InterpreterFactory;
use crate::sessions::DatabendQueryContextRef;
use crate::sessions::SessionManagerRef;
use crate::sessions::SessionRef;
use crate::sql::PlanParser;
#[derive(Deserialize, Debug)]
pub struct HttpQueryRequest {
pub sql: String,
}
#[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq)]
pub enum ExecuteStateName {
Running,
Failed,
Succeeded,
}
pub(crate) enum ExecuteState {
Running(ExecuteRunning),
Stopped(ExecuteStopped),
}
impl ExecuteState {
pub(crate) fn extract(&self) -> (ExecuteStateName, Option<ErrorCode>) {
match self {
ExecuteState::Running(_) => (ExecuteStateName::Running, None),
ExecuteState::Stopped(v) => match &v.reason {
Ok(_) => (ExecuteStateName::Succeeded, None),
Err(e) => (ExecuteStateName::Failed, Some(e.clone())),
},
}
}
}
use ExecuteState::*;
pub(crate) type ExecuteStateRef = Arc<RwLock<ExecuteStateWrapper>>;
pub(crate) struct ExecuteStopped {
progress: Option<ProgressValues>,
reason: Result<()>,
}
pub(crate) struct ExecuteStateWrapper {
pub(crate) state: ExecuteState,
}
impl ExecuteStateWrapper {
pub(crate) fn get_progress(&self) -> Option<ProgressValues> {
match &self.state {
Running(r) => Some(r.context.get_progress_value()),
Stopped(f) => f.progress.clone(),
}
}
}
pub struct HttpQueryHandle {
pub abort_sender: mpsc::Sender<()>,
}
impl HttpQueryHandle {
pub fn abort(&self) {
let sender = self.abort_sender.clone();
tokio::spawn(async move {
sender.send(()).await.ok();
});
}
}
pub(crate) struct ExecuteRunning {
// used to kill query
session: SessionRef,
// mainly used to get progress for now
context: DatabendQueryContextRef,
}
impl ExecuteState {
pub(crate) async fn try_create(
request: &HttpQueryRequest,
session_manager: &SessionManagerRef,
block_tx: mpsc::Sender<DataBlock>,
) -> Result<(ExecuteStateRef, DataSchemaRef)> {
let sql = &request.sql;
let session = session_manager.create_session("http-statement")?;
let context = session.create_context().await?;
context.attach_query_str(sql);
let plan = PlanParser::parse(sql, context.clone()).await?;
let schema = plan.schema();
let interpreter = InterpreterFactory::get(context.clone(), plan.clone())?;
let data_stream = interpreter.execute(None).await?;
let mut data_stream = context.try_create_abortable(data_stream)?;
let (abort_tx, mut abort_rx) = mpsc::channel(2);
context.attach_http_query(HttpQueryHandle {
abort_sender: abort_tx,
});
let running_state = ExecuteRunning {
session,
context: context.clone(),
};
let state = Arc::new(RwLock::new(ExecuteStateWrapper {
state: Running(running_state),
}));
let state_clone = state.clone();
context
.try_spawn(async move {
loop {
if let Some(block_r) = data_stream.next().await {
match block_r {
Ok(block) => tokio::select! {
_ = block_tx.send(block) => { },
_ = abort_rx.recv() => {
ExecuteState::stop(&state, Err(ErrorCode::AbortedQuery("query aborted")), true).await;
break;
},
},
Err(err) => {
ExecuteState::stop(&state, Err(err), false).await;
break
}
};
} else {
ExecuteState::stop(&state, Ok(()), false).await;
break;
}
}
log::debug!("drop block sender!");
})?;
Ok((state_clone, schema))
}
pub(crate) async fn stop(this: &ExecuteStateRef, reason: Result<()>, kill: bool) {
let mut guard = this.write().await;
if let Running(r) = &guard.state {
// release session
let progress = Some(r.context.get_progress_value());
if kill {
r.session.force_kill_query();
}
guard.state = Stopped(ExecuteStopped { progress, reason });
};
}
}
| 31.365169 | 122 | 0.583915 |
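// ExecuteState above is a small state machine behind an async RwLock: a query
// starts Running and is moved exactly once into Stopped, capturing the final
// progress and the success/failure reason. The sketch below models that shape
// with std::sync::RwLock and simplified placeholder fields (the real code uses
// tokio's lock, ProgressValues and ErrorCode).
use std::sync::{Arc, RwLock};

enum State {
    Running { rows_so_far: u64 },
    Stopped { rows_total: u64, reason: Result<(), String> },
}

fn stop(state: &Arc<RwLock<State>>, reason: Result<(), String>) {
    let mut guard = state.write().unwrap();
    // Capture the last progress value at the moment of the transition; a second
    // stop() finds the query already Stopped and becomes a no-op.
    let rows = match &*guard {
        State::Running { rows_so_far } => Some(*rows_so_far),
        State::Stopped { .. } => None,
    };
    if let Some(rows_total) = rows {
        *guard = State::Stopped { rows_total, reason };
    }
}

fn main() {
    let state = Arc::new(RwLock::new(State::Running { rows_so_far: 42 }));
    stop(&state, Ok(()));
    stop(&state, Err("a late abort is ignored".to_string()));
    match &*state.read().unwrap() {
        State::Stopped { rows_total, reason } => {
            println!("stopped after {} rows, success = {}", rows_total, reason.is_ok())
        }
        State::Running { .. } => unreachable!(),
    }
}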
488f2b775bf0b48d5aec7e1d228d29fcae48bc24 | 197 | // aux-build:empty-plugin.rs
// ignore-stage1
#![feature(plugin)]
#![plugin(empty_plugin(args))]
//~^ ERROR malformed `plugin` attribute
//~| WARNING compiler plugins are deprecated
fn main() {}
| 19.7 | 44 | 0.700508 |
1c015d7014713de0f1cb0e2e97574d1b75b9afeb | 7,535 | use crate::test::*;
use crate::public_interface::API;
use crate::public_interface::constants::*;
use crate::public_interface::sane_limits::*;
use crate::public_interface::decode_utils::*;
use crate::test::parsers::*;
use crate::test::pairings::mnt4::*;
use super::*;
#[derive(Clone, Debug)]
pub(crate) struct Mnt4Report {
pub(crate) modulus_limbs: usize,
pub(crate) num_pairs: usize,
pub(crate) group_order_limbs: usize,
pub(crate) x_is_negative: bool,
pub(crate) x_bit_length: usize,
pub(crate) x_hamming_weight: usize,
pub(crate) exp_w0_bit_length: usize,
pub(crate) exp_w0_hamming: usize,
pub(crate) exp_w0_is_negative: bool,
pub(crate) exp_w1_bit_length: usize,
pub(crate) exp_w1_hamming: usize,
pub(crate) run_microseconds: u64,
}
extern crate csv;
use std::path::Path;
use csv::{Writer};
use std::fs::File;
pub(crate) struct Mnt4ReportWriter {
writer: Writer<File>
}
impl Mnt4ReportWriter {
pub(crate) fn new_for_path<P: AsRef<Path>>(path: P) -> Self {
let mut writer = Writer::from_path(path).expect("must open a test file");
writer.write_record(&[
"modulus_limbs",
"group_limbs",
"num_pairs",
"x_is_negative",
"x_bit_length",
"x_hamming_weight",
"exp_w0_bit_length",
"exp_w0_hamming",
"exp_w0_is_negative",
"exp_w1_bit_length",
"exp_w1_hamming",
"run_microseconds"
]).expect("must write header");
writer.flush().expect("must finalize writing");
Self {
writer
}
}
pub fn write_report(&mut self, report: Mnt4Report) {
let x_is_negative = if report.x_is_negative {
"1"
} else {
"0"
};
let exp_w0_is_negative = if report.exp_w0_is_negative {
"1"
} else {
"0"
};
self.writer.write_record(&[
report.modulus_limbs.to_string(),
report.group_order_limbs.to_string(),
report.num_pairs.to_string(),
x_is_negative.to_owned(),
report.x_bit_length.to_string(),
report.x_hamming_weight.to_string(),
report.exp_w0_bit_length.to_string(),
report.exp_w0_hamming.to_string(),
exp_w0_is_negative.to_owned(),
report.exp_w1_bit_length.to_string(),
report.exp_w1_hamming.to_string(),
report.run_microseconds.to_string(),
]
).expect("must write a record");
self.writer.flush().expect("must write to disk");
}
}
pub(crate) fn process_for_curve_and_bit_sizes(
curve: JsonMnt4PairingCurveParameters,
bits: usize,
hamming: usize,
w_0_bits: usize,
w_0_hamming: usize,
w_1_bits: usize,
w_1_hamming: usize,
num_pairs: usize
) -> Vec<(Mnt4Report, Vec<u8>, Vec<u8>)> {
use std::time::Instant;
let mut reports = vec![];
let new_x = make_x_bit_length_and_hamming_weight(bits, hamming);
// println!("New x = {} for {} bits and {} hamming", new_x, bits, hamming);
let new_w0 = make_x_bit_length_and_hamming_weight(w_0_bits, w_0_hamming);
let new_w1 = make_x_bit_length_and_hamming_weight(w_1_bits, w_1_hamming);
// println!("New w1 = {} for {} bits and {} hamming", new_w1, w_1_bits, w_1_hamming);
let exp_w0_is_negative = true;
for x_is_negative in vec![true] {
// for x_is_negative in vec![false, true] {
let mut new_curve = curve.clone();
new_curve.x = (new_x.clone(), x_is_negative);
new_curve.exp_w0 = (new_w0.clone(), exp_w0_is_negative);
new_curve.exp_w1 = new_w1.clone();
let limbs = crate::test::calculate_num_limbs(&new_curve.q).expect("must work");
let group_order_limbs = crate::test::num_units_for_group_order(&new_curve.r).expect("must work");
let mut input_data = vec![OPERATION_PAIRING];
let calldata = assemble_single_curve_params(new_curve, num_pairs, false);
if calldata.is_err() {
continue
};
let calldata = calldata.unwrap();
input_data.extend(calldata);
let now = Instant::now();
let res = API::run(&input_data);
let elapsed = now.elapsed();
if let Ok(res_data) = res {
let report = Mnt4Report {
modulus_limbs: limbs,
group_order_limbs,
num_pairs: num_pairs,
x_is_negative: x_is_negative,
x_bit_length: bits,
x_hamming_weight: hamming,
exp_w0_bit_length: w_0_bits,
exp_w0_hamming: w_0_hamming,
exp_w0_is_negative: exp_w0_is_negative,
exp_w1_bit_length: w_1_bits,
exp_w1_hamming: w_1_hamming,
run_microseconds: elapsed.as_micros() as u64,
};
reports.push((report, res_data, input_data));
} else {
println!("MNT4 error {:?}", res.err().unwrap());
// println!("Data = {}", hex::encode(&input_data));
}
}
reports
}
// pub(crate) fn estimate_gas_meter_difference(
// curve: JsonMnt4PairingCurveParameters,
// bits: usize,
// hamming: usize,
// w_0_bits: usize,
// w_0_hamming: usize,
// w_1_bits: usize,
// w_1_hamming: usize,
// num_pairs: usize
// ) -> Vec<i64> {
// use std::time::Instant;
// let gas_factor = 15u64;
// let mut reports = vec![];
// let new_x = make_x_bit_length_and_hamming_weight(bits, hamming);
// let new_w0 = make_x_bit_length_and_hamming_weight(w_0_bits, w_0_hamming);
// let new_w1 = make_x_bit_length_and_hamming_weight(w_1_bits, w_1_hamming);
// let exp_w0_is_negative = true;
// for x_is_negative in vec![true] {
// // for x_is_negative in vec![false, true] {
// let mut new_curve = curve.clone();
// new_curve.x = (new_x.clone(), x_is_negative);
// new_curve.exp_w0 = (new_w0.clone(), exp_w0_is_negative);
// new_curve.exp_w1 = new_w1.clone();
// let mut input_data = vec![OPERATION_PAIRING];
// let calldata = assemble_single_curve_params(new_curve, num_pairs, false);
// if calldata.is_err() {
// continue
// };
// let calldata = calldata.unwrap();
// input_data.extend(calldata);
// let now = Instant::now();
// let res = API::run(&input_data);
// let elapsed = now.elapsed();
// if res.is_ok() {
// let gas_estimated = crate::gas_meter::GasMeter::meter(&input_data);
// if gas_estimated.is_ok() {
// let running_gas = (elapsed.as_micros() as u64) * gas_factor;
// let difference = (gas_estimated.unwrap() as i64) - (running_gas as i64);
// reports.push(difference);
// } else {
// println!("MNT4 gas estimation error {:?}", gas_estimated.err().unwrap());
// println!("Data = {}", hex::encode(&input_data));
// }
// } else {
// println!("MNT4 error {:?}", res.err().unwrap());
// // println!("Data = {}", hex::encode(&input_data));
// }
// }
// reports
// }
| 35.046512 | 105 | 0.570803 |
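// Mnt4ReportWriter above is a thin wrapper over csv::Writer: write the header row
// once, then one record per benchmark run, flushing after each write. A minimal
// sketch of that usage with a reduced, illustrative set of columns; the output
// path under the temp directory is an arbitrary example.
use csv::Writer;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let path = std::env::temp_dir().join("mnt4_report_example.csv");
    let mut writer = Writer::from_path(&path)?;
    writer.write_record(&["modulus_limbs", "num_pairs", "run_microseconds"])?;
    writer.write_record(&["4", "2", "1830"])?;
    writer.flush()?;
    println!("wrote example report to {}", path.display());
    Ok(())
}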
dd917060336d4a099ab115c5b44acac03df7a316 | 2,877 | use crate::aliases::{Archive, BridgeConfig, GrpcClient};
use crate::handler::common::{bad_request, extract_from_token, internal_server_error, parse_auth_header, JsonResult};
use daml_grpc::data::command::{DamlCommand, DamlCreateAndExerciseCommand};
use daml_grpc::data::DamlTransactionTree;
use daml_grpc::{CommandExecutor, DamlSimpleExecutorBuilder};
use daml_json::request::{DamlJsonCreateAndExerciseRequest, DamlJsonCreateAndExerciseResponse};
use daml_json::request_converter::JsonToGrpcRequestConverter;
use daml_json::response_converter::GrpcToJsonResponseConverter;
use daml_json::value_encode::JsonValueEncoder;
/// DOCME
pub struct CreateAndExerciseHandler {
config: BridgeConfig,
archive: Archive,
client: GrpcClient,
}
impl CreateAndExerciseHandler {
pub fn new(config: BridgeConfig, archive: Archive, client: GrpcClient) -> Self {
Self {
config,
archive,
client,
}
}
/// DOCME
pub async fn create_and_exercise(
&self,
create: DamlJsonCreateAndExerciseRequest,
auth_header: Option<&str>,
) -> JsonResult<DamlJsonCreateAndExerciseResponse> {
let create_command = self.make_command(&create).await?;
let transaction = self.execute_command(create_command, auth_header).await?;
self.make_response(&transaction)
}
async fn make_command(
&self,
create_and_exercise: &DamlJsonCreateAndExerciseRequest,
) -> JsonResult<DamlCreateAndExerciseCommand> {
let archive_locked = &self.archive.read().await;
let request_converter = JsonToGrpcRequestConverter::new(archive_locked);
request_converter.convert_create_and_exercise_request(create_and_exercise).map_err(bad_request)
}
async fn execute_command(
&self,
command: DamlCreateAndExerciseCommand,
auth_header: Option<&str>,
) -> JsonResult<DamlTransactionTree> {
let (token, parsed_token) = parse_auth_header(auth_header)?;
let (acting_party, _ledger_id, application_id) = extract_from_token(&parsed_token)?;
DamlSimpleExecutorBuilder::new(&self.client)
.application_id(application_id)
.act_as(acting_party)
.auth_token(token)
.build()
.map_err(internal_server_error)?
.execute_for_transaction_tree(DamlCommand::CreateAndExercise(command))
.await
.map_err(internal_server_error)
}
fn make_response(&self, transaction: &DamlTransactionTree) -> JsonResult<DamlJsonCreateAndExerciseResponse> {
GrpcToJsonResponseConverter::new(JsonValueEncoder::new(
self.config.encode_decimal_as_string(),
self.config.encode_int64_as_string(),
))
.convert_create_and_exercise_response(transaction)
.map_err(internal_server_error)
}
}
| 38.878378 | 116 | 0.707682 |
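// CreateAndExerciseHandler above follows a three-step shape: convert the JSON
// request into a command (a failure here maps to a 400), execute it against the
// ledger (a failure here maps to a 500), then convert the transaction back into a
// JSON response. The sketch below models only that shape; Request, Command and
// Response are hypothetical stand-ins, not the daml crates' types.
#[derive(Debug)]
struct Request(String);
#[derive(Debug)]
struct Command(String);
#[derive(Debug)]
struct Response(String);

fn make_command(req: &Request) -> Result<Command, String> {
    if req.0.is_empty() {
        return Err("bad request: empty template id".to_string()); // -> 400
    }
    Ok(Command(req.0.clone()))
}

fn execute(cmd: Command) -> Result<Response, String> {
    Ok(Response(format!("executed {}", cmd.0))) // a real failure here -> 500
}

fn handle(req: Request) -> Result<Response, String> {
    let cmd = make_command(&req)?;
    execute(cmd)
}

fn main() {
    println!("{:?}", handle(Request("Iou:Transfer".to_string())));
    println!("{:?}", handle(Request(String::new())));
}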
f98f16887317a915eac29213d69600c20fc108f1 | 10,242 | use crate::bucket_stats::BucketStats;
use crate::MaxSearch;
use memmap2::MmapMut;
use rand::{thread_rng, Rng};
use solana_measure::measure::Measure;
use std::fs::{remove_file, OpenOptions};
use std::io::Seek;
use std::io::SeekFrom;
use std::io::Write;
use std::path::PathBuf;
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
/*
1 2
2 4
3 8
4 16
5 32
6 64
7 128
8 256
9 512
10 1,024
11 2,048
12 4,096
13 8,192
14 16,384
23 8,388,608
24 16,777,216
*/
const DEFAULT_CAPACITY_POW2: u8 = 5;
#[repr(C)]
struct Header {
lock: AtomicU64,
}
/// A Header UID of 0 indicates that the header is unlocked
pub(crate) const UID_UNLOCKED: u64 = 0;
impl Header {
fn try_lock(&self, uid: u64) -> bool {
Ok(UID_UNLOCKED)
== self
.lock
.compare_exchange(UID_UNLOCKED, uid, Ordering::Acquire, Ordering::Relaxed)
}
fn unlock(&self) -> u64 {
self.lock.swap(UID_UNLOCKED, Ordering::Release)
}
fn uid(&self) -> u64 {
self.lock.load(Ordering::Relaxed)
}
}
pub struct BucketStorage {
drives: Arc<Vec<PathBuf>>,
path: PathBuf,
mmap: MmapMut,
pub cell_size: u64,
pub capacity_pow2: u8,
pub used: AtomicU64,
pub stats: Arc<BucketStats>,
pub max_search: MaxSearch,
}
#[derive(Debug)]
pub enum BucketStorageError {
AlreadyAllocated,
}
impl Drop for BucketStorage {
fn drop(&mut self) {
let _ = remove_file(&self.path);
}
}
impl BucketStorage {
pub fn new_with_capacity(
drives: Arc<Vec<PathBuf>>,
num_elems: u64,
elem_size: u64,
capacity_pow2: u8,
max_search: MaxSearch,
mut stats: Arc<BucketStats>,
) -> Self {
let cell_size = elem_size * num_elems + std::mem::size_of::<Header>() as u64;
let (mmap, path) = Self::new_map(&drives, cell_size as usize, capacity_pow2, &mut stats);
Self {
path,
mmap,
drives,
cell_size,
used: AtomicU64::new(0),
capacity_pow2,
stats,
max_search,
}
}
pub fn max_search(&self) -> u64 {
self.max_search as u64
}
pub fn new(
drives: Arc<Vec<PathBuf>>,
num_elems: u64,
elem_size: u64,
max_search: MaxSearch,
stats: Arc<BucketStats>,
) -> Self {
Self::new_with_capacity(
drives,
num_elems,
elem_size,
DEFAULT_CAPACITY_POW2,
max_search,
stats,
)
}
pub fn uid(&self, ix: u64) -> u64 {
if ix >= self.num_cells() {
panic!("bad index size");
}
let ix = (ix * self.cell_size) as usize;
let hdr_slice: &[u8] = &self.mmap[ix..ix + std::mem::size_of::<Header>()];
unsafe {
let hdr = hdr_slice.as_ptr() as *const Header;
return hdr.as_ref().unwrap().uid();
}
}
pub fn allocate(&self, ix: u64, uid: u64) -> Result<(), BucketStorageError> {
if ix >= self.num_cells() {
panic!("allocate: bad index size");
}
if UID_UNLOCKED == uid {
panic!("allocate: bad uid");
}
let mut e = Err(BucketStorageError::AlreadyAllocated);
let ix = (ix * self.cell_size) as usize;
//debug!("ALLOC {} {}", ix, uid);
let hdr_slice: &[u8] = &self.mmap[ix..ix + std::mem::size_of::<Header>()];
unsafe {
let hdr = hdr_slice.as_ptr() as *const Header;
if hdr.as_ref().unwrap().try_lock(uid) {
e = Ok(());
self.used.fetch_add(1, Ordering::Relaxed);
}
};
e
}
pub fn free(&self, ix: u64, uid: u64) {
if ix >= self.num_cells() {
panic!("free: bad index size");
}
if UID_UNLOCKED == uid {
panic!("free: bad uid");
}
let ix = (ix * self.cell_size) as usize;
//debug!("FREE {} {}", ix, uid);
let hdr_slice: &[u8] = &self.mmap[ix..ix + std::mem::size_of::<Header>()];
unsafe {
let hdr = hdr_slice.as_ptr() as *const Header;
//debug!("FREE uid: {}", hdr.as_ref().unwrap().uid());
let previous_uid = hdr.as_ref().unwrap().unlock();
assert_eq!(
previous_uid, uid,
"free: unlocked a header with a differet uid: {}",
previous_uid
);
self.used.fetch_sub(1, Ordering::Relaxed);
}
}
pub fn get<T: Sized>(&self, ix: u64) -> &T {
if ix >= self.num_cells() {
panic!("bad index size");
}
let start = (ix * self.cell_size) as usize + std::mem::size_of::<Header>();
let end = start + std::mem::size_of::<T>();
let item_slice: &[u8] = &self.mmap[start..end];
unsafe {
let item = item_slice.as_ptr() as *const T;
&*item
}
}
pub fn get_empty_cell_slice<T: Sized>(&self) -> &[T] {
let len = 0;
let item_slice: &[u8] = &self.mmap[0..0];
unsafe {
let item = item_slice.as_ptr() as *const T;
std::slice::from_raw_parts(item, len as usize)
}
}
pub fn get_cell_slice<T: Sized>(&self, ix: u64, len: u64) -> &[T] {
if ix >= self.num_cells() {
panic!("bad index size");
}
let ix = self.cell_size * ix;
let start = ix as usize + std::mem::size_of::<Header>();
let end = start + std::mem::size_of::<T>() * len as usize;
//debug!("GET slice {} {}", start, end);
let item_slice: &[u8] = &self.mmap[start..end];
unsafe {
let item = item_slice.as_ptr() as *const T;
std::slice::from_raw_parts(item, len as usize)
}
}
pub fn get_mut<T: Sized>(&self, ix: u64) -> &mut T {
if ix >= self.num_cells() {
panic!("bad index size");
}
let start = (ix * self.cell_size) as usize + std::mem::size_of::<Header>();
let end = start + std::mem::size_of::<T>();
let item_slice: &[u8] = &self.mmap[start..end];
unsafe {
let item = item_slice.as_ptr() as *mut T;
&mut *item
}
}
pub fn get_mut_cell_slice<T: Sized>(&self, ix: u64, len: u64) -> &mut [T] {
if ix >= self.num_cells() {
panic!("bad index size");
}
let ix = self.cell_size * ix;
let start = ix as usize + std::mem::size_of::<Header>();
let end = start + std::mem::size_of::<T>() * len as usize;
//debug!("GET mut slice {} {}", start, end);
let item_slice: &[u8] = &self.mmap[start..end];
unsafe {
let item = item_slice.as_ptr() as *mut T;
std::slice::from_raw_parts_mut(item, len as usize)
}
}
fn new_map(
drives: &[PathBuf],
cell_size: usize,
capacity_pow2: u8,
stats: &mut Arc<BucketStats>,
) -> (MmapMut, PathBuf) {
let mut m0 = Measure::start("");
let capacity = 1u64 << capacity_pow2;
let r = thread_rng().gen_range(0, drives.len());
let drive = &drives[r];
let pos = format!("{}", thread_rng().gen_range(0, u128::MAX),);
let file = drive.join(pos);
let mut data = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(file.clone())
.map_err(|e| {
panic!(
"Unable to create data file {} in current dir({:?}): {:?}",
file.display(),
std::env::current_dir(),
e
);
})
.unwrap();
// Theoretical performance optimization: write a zero to the end of
// the file so that we won't have to resize it later, which may be
// expensive.
//debug!("GROWING file {}", capacity * cell_size as u64);
data.seek(SeekFrom::Start(capacity * cell_size as u64 - 1))
.unwrap();
data.write_all(&[0]).unwrap();
data.seek(SeekFrom::Start(0)).unwrap();
m0.stop();
let mut m1 = Measure::start("");
data.flush().unwrap(); // can we skip this?
m1.stop();
let mut m2 = Measure::start("");
let res = (unsafe { MmapMut::map_mut(&data).unwrap() }, file);
m2.stop();
stats.new_file_us.fetch_add(m0.as_us(), Ordering::Relaxed);
        stats.flush_file_us.fetch_add(m1.as_us(), Ordering::Relaxed);
        stats.mmap_us.fetch_add(m2.as_us(), Ordering::Relaxed);
res
}
pub fn grow(&mut self) {
let mut m = Measure::start("grow");
let old_cap = self.num_cells();
let old_map = &self.mmap;
let old_file = self.path.clone();
let increment = 1;
let index_grow = 1 << increment;
let (new_map, new_file) = Self::new_map(
&self.drives,
self.cell_size as usize,
self.capacity_pow2 + increment,
&mut self.stats,
);
(0..old_cap as usize).into_iter().for_each(|i| {
let old_ix = i * self.cell_size as usize;
let new_ix = old_ix * index_grow;
let dst_slice: &[u8] = &new_map[new_ix..new_ix + self.cell_size as usize];
let src_slice: &[u8] = &old_map[old_ix..old_ix + self.cell_size as usize];
unsafe {
let dst = dst_slice.as_ptr() as *mut u8;
let src = src_slice.as_ptr() as *const u8;
std::ptr::copy_nonoverlapping(src, dst, self.cell_size as usize);
};
});
self.mmap = new_map;
self.path = new_file;
self.capacity_pow2 += increment;
remove_file(old_file).unwrap();
m.stop();
let sz = 1 << self.capacity_pow2;
{
let mut max = self.stats.max_size.lock().unwrap();
*max = std::cmp::max(*max, sz);
}
self.stats.resizes.fetch_add(1, Ordering::Relaxed);
self.stats.resize_us.fetch_add(m.as_us(), Ordering::Relaxed);
}
pub fn num_cells(&self) -> u64 {
1 << self.capacity_pow2
}
}
| 30.573134 | 97 | 0.519625 |
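// The Header above implements a tiny ownership lock: a cell is free while its
// AtomicU64 holds UID_UNLOCKED (0), and try_lock claims it for a non-zero uid with
// a single compare_exchange. A self-contained sketch of that protocol, using the
// same constant and orderings as the row above:
use std::sync::atomic::{AtomicU64, Ordering};

const UID_UNLOCKED: u64 = 0;

struct Header {
    lock: AtomicU64,
}

impl Header {
    fn try_lock(&self, uid: u64) -> bool {
        self.lock
            .compare_exchange(UID_UNLOCKED, uid, Ordering::Acquire, Ordering::Relaxed)
            .is_ok()
    }
    fn unlock(&self) -> u64 {
        self.lock.swap(UID_UNLOCKED, Ordering::Release)
    }
}

fn main() {
    let slot = Header { lock: AtomicU64::new(UID_UNLOCKED) };
    assert!(slot.try_lock(7)); // first claim succeeds
    assert!(!slot.try_lock(9)); // the slot is already owned by uid 7
    assert_eq!(slot.unlock(), 7); // unlock reports the previous owner
    assert!(slot.try_lock(9)); // now uid 9 can take it
    println!("lock protocol behaved as expected");
}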
f91d1ebcb1479dace65ac77946abc167c2410ab6 | 70 | pub mod api;
// fic remotec2
#[macro_use]
pub mod rpc;
pub mod types;
| 11.666667 | 15 | 0.7 |
8745b1f22a503c9fef0fed5f2865c8a7e8b06441 | 836 | //! All kinds of errors in this crate.
use crate::cells::{Coord, State};
use ca_rules::ParseRuleError;
use displaydoc::Display;
use thiserror::Error;
/// All kinds of errors in this crate.
#[derive(Clone, Debug, PartialEq, Eq, Display, Error)]
pub enum Error {
/// Unable to set cell at {0:?}.
SetCellError(Coord),
/// Invalid rule: {0:?}.
ParseRuleError(#[from] ParseRuleError),
/// B0S8 rules are not supported yet. Please use the inverted rule.
B0S8Error,
/// Symmetry or transformation requires the world to be square.
SquareWorldError,
/// Symmetry or transformation requires the world to have no diagonal width.
DiagonalWidthError,
/// Width / height / period should be positive.
NonPositiveError,
/// Cell at {0:?} has invalid state: {1:?}.
InvalidState(Coord, State),
}
| 32.153846 | 80 | 0.679426 |
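// The Error enum above gets its Display text from doc comments (displaydoc) and
// its From conversions from thiserror's #[from]. A smaller sketch with thiserror
// alone shows the same effect: the #[error] string drives Display, and #[from]
// lets `?` convert the inner error automatically. ParseIntError stands in for the
// crate's ParseRuleError.
use thiserror::Error;

#[derive(Debug, Error)]
enum Error {
    #[error("Unable to set cell at {0:?}.")]
    SetCellError((i32, i32)),
    #[error("Invalid rule: {0}.")]
    ParseRuleError(#[from] std::num::ParseIntError),
}

fn parse_width(s: &str) -> Result<i32, Error> {
    // The ParseIntError returned by str::parse is converted by the #[from] impl.
    Ok(s.parse::<i32>()?)
}

fn main() {
    println!("{}", Error::SetCellError((3, 4)));
    println!("{}", parse_width("not-a-number").unwrap_err());
}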
71904acae724d8c5b7d5984dfbf2b26afde3417d | 1,248 | // Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![feature(crate_in_paths)]
use crate::m::f;
use crate as root;
mod m {
pub fn f() -> u8 { 1 }
pub fn g() -> u8 { 2 }
pub fn h() -> u8 { 3 }
// OK, visibilities are implicitly absolute like imports
pub(in crate::m) struct S;
}
mod n {
use crate::m::f;
use crate as root;
pub fn check() {
assert_eq!(f(), 1);
assert_eq!(crate::m::g(), 2);
assert_eq!(root::m::h(), 3);
}
}
mod p {
use {super::f, crate::m::g, self::root::m::h};
use crate as root;
pub fn check() {
assert_eq!(f(), 1);
assert_eq!(g(), 2);
assert_eq!(h(), 3);
}
}
fn main() {
assert_eq!(f(), 1);
assert_eq!(crate::m::g(), 2);
assert_eq!(root::m::h(), 3);
n::check();
p::check();
}
| 23.54717 | 68 | 0.581731 |
16b92b30bdfb067ec12ae056b1a54ca8c07148fc | 538 | use std::os::raw::c_int;
use serde::{Deserialize, Serialize};
use tarantool::tuple::{AsTuple, FunctionArgs, FunctionCtx, Tuple};
#[derive(Serialize, Deserialize)]
struct Args {
pub fields: Vec<i32>,
}
impl AsTuple for Args {}
#[no_mangle]
pub extern "C" fn harder(_: FunctionCtx, args: FunctionArgs) -> c_int {
let args: Tuple = args.into();
let args = args.into_struct::<Args>().unwrap();
println!("field_count = {}", args.fields.len());
for val in args.fields {
println!("val={}", val);
}
0
}
| 20.692308 | 71 | 0.63197 |
cc9c3dfac53a633c20e8e76b598713b60164401d | 12,420 | // Copyright 2019 Parity Technologies (UK) Ltd.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
mod util;
use futures::{future, prelude::*};
use libp2p_core::identity;
use libp2p_core::multiaddr::multiaddr;
use libp2p_core::nodes::network::{Network, NetworkEvent, NetworkReachError, PeerState, UnknownPeerDialErr, IncomingError};
use libp2p_core::{PeerId, Transport, upgrade};
use libp2p_swarm::{
ProtocolsHandler,
KeepAlive,
SubstreamProtocol,
ProtocolsHandlerEvent,
ProtocolsHandlerUpgrErr,
protocols_handler::NodeHandlerWrapperBuilder
};
use rand::seq::SliceRandom;
use std::io;
// TODO: replace with DummyProtocolsHandler after https://github.com/servo/rust-smallvec/issues/139 ?
struct TestHandler<TSubstream>(std::marker::PhantomData<TSubstream>);
impl<TSubstream> Default for TestHandler<TSubstream> {
fn default() -> Self {
TestHandler(std::marker::PhantomData)
}
}
impl<TSubstream> ProtocolsHandler for TestHandler<TSubstream>
where
TSubstream: tokio_io::AsyncRead + tokio_io::AsyncWrite
{
type InEvent = (); // TODO: cannot be Void (https://github.com/servo/rust-smallvec/issues/139)
type OutEvent = (); // TODO: cannot be Void (https://github.com/servo/rust-smallvec/issues/139)
type Error = io::Error;
type Substream = TSubstream;
type InboundProtocol = upgrade::DeniedUpgrade;
type OutboundProtocol = upgrade::DeniedUpgrade;
type OutboundOpenInfo = (); // TODO: cannot be Void (https://github.com/servo/rust-smallvec/issues/139)
fn listen_protocol(&self) -> SubstreamProtocol<Self::InboundProtocol> {
SubstreamProtocol::new(upgrade::DeniedUpgrade)
}
fn inject_fully_negotiated_inbound(
&mut self,
_: <Self::InboundProtocol as upgrade::InboundUpgrade<Self::Substream>>::Output
) { panic!() }
fn inject_fully_negotiated_outbound(
&mut self,
_: <Self::OutboundProtocol as upgrade::OutboundUpgrade<Self::Substream>>::Output,
_: Self::OutboundOpenInfo
) { panic!() }
fn inject_event(&mut self, _: Self::InEvent) {
panic!()
}
fn inject_dial_upgrade_error(&mut self, _: Self::OutboundOpenInfo, _: ProtocolsHandlerUpgrErr<<Self::OutboundProtocol as upgrade::OutboundUpgrade<Self::Substream>>::Error>) {
}
fn connection_keep_alive(&self) -> KeepAlive { KeepAlive::No }
fn poll(&mut self) -> Poll<ProtocolsHandlerEvent<Self::OutboundProtocol, Self::OutboundOpenInfo, Self::OutEvent>, Self::Error> {
Ok(Async::NotReady)
}
}
#[test]
fn deny_incoming_connec() {
// Checks whether refusing an incoming connection on a swarm triggers the correct events.
let mut swarm1: Network<_, _, _, NodeHandlerWrapperBuilder<TestHandler<_>>, _> = {
let local_key = identity::Keypair::generate_ed25519();
let local_public_key = local_key.public();
let transport = libp2p_tcp::TcpConfig::new()
.upgrade()
.authenticate(libp2p_secio::SecioConfig::new(local_key))
.multiplex(libp2p_mplex::MplexConfig::new());
Network::new(transport, local_public_key.into())
};
let mut swarm2 = {
let local_key = identity::Keypair::generate_ed25519();
let local_public_key = local_key.public();
let transport = libp2p_tcp::TcpConfig::new()
.upgrade()
.authenticate(libp2p_secio::SecioConfig::new(local_key))
.multiplex(libp2p_mplex::MplexConfig::new());
Network::new(transport, local_public_key.into())
};
swarm1.listen_on("/ip4/127.0.0.1/tcp/0".parse().unwrap()).unwrap();
let address =
if let Async::Ready(NetworkEvent::NewListenerAddress { listen_addr, .. }) = swarm1.poll() {
listen_addr
} else {
panic!("Was expecting the listen address to be reported")
};
swarm2
.peer(swarm1.local_peer_id().clone())
.into_not_connected().unwrap()
.connect(address.clone(), TestHandler::default().into_node_handler_builder());
let future = future::poll_fn(|| -> Poll<(), io::Error> {
match swarm1.poll() {
Async::Ready(NetworkEvent::IncomingConnection(inc)) => drop(inc),
Async::Ready(_) => unreachable!(),
Async::NotReady => (),
}
match swarm2.poll() {
Async::Ready(NetworkEvent::DialError {
new_state: PeerState::NotConnected,
peer_id,
multiaddr,
error: NetworkReachError::Transport(_)
}) => {
assert_eq!(peer_id, *swarm1.local_peer_id());
assert_eq!(multiaddr, address);
return Ok(Async::Ready(()));
},
Async::Ready(_) => unreachable!(),
Async::NotReady => (),
}
Ok(Async::NotReady)
});
tokio::runtime::current_thread::Runtime::new().unwrap().block_on(future).unwrap();
}
#[test]
fn dial_self() {
// Check whether dialing ourselves correctly fails.
//
    // Dialing the same address we're listening on should result in three events:
//
// - The incoming connection notification (before we know the incoming peer ID).
// - The error about the incoming connection (once we've determined that it's our own ID).
// - The error about the dialing (once we've determined that it's our own ID).
//
// The last two items can happen in any order.
let mut swarm = {
let local_key = identity::Keypair::generate_ed25519();
let local_public_key = local_key.public();
let transport = libp2p_tcp::TcpConfig::new()
.upgrade()
.authenticate(libp2p_secio::SecioConfig::new(local_key))
.multiplex(libp2p_mplex::MplexConfig::new())
.and_then(|(peer, mplex), _| {
// Gracefully close the connection to allow protocol
// negotiation to complete.
util::CloseMuxer::new(mplex).map(move |mplex| (peer, mplex))
});
Network::new(transport, local_public_key.into())
};
swarm.listen_on("/ip4/127.0.0.1/tcp/0".parse().unwrap()).unwrap();
let (address, mut swarm) =
future::lazy(move || {
if let Async::Ready(NetworkEvent::NewListenerAddress { listen_addr, .. }) = swarm.poll() {
Ok::<_, void::Void>((listen_addr, swarm))
} else {
panic!("Was expecting the listen address to be reported")
}
})
.wait()
.unwrap();
swarm.dial(address.clone(), TestHandler::default().into_node_handler_builder()).unwrap();
let mut got_dial_err = false;
let mut got_inc_err = false;
let future = future::poll_fn(|| -> Poll<(), io::Error> {
loop {
match swarm.poll() {
Async::Ready(NetworkEvent::UnknownPeerDialError {
multiaddr,
error: UnknownPeerDialErr::FoundLocalPeerId,
handler: _
}) => {
assert_eq!(multiaddr, address);
assert!(!got_dial_err);
got_dial_err = true;
if got_inc_err {
return Ok(Async::Ready(()));
}
},
Async::Ready(NetworkEvent::IncomingConnectionError {
local_addr,
send_back_addr: _,
error: IncomingError::FoundLocalPeerId
}) => {
assert_eq!(address, local_addr);
assert!(!got_inc_err);
got_inc_err = true;
if got_dial_err {
return Ok(Async::Ready(()));
}
},
Async::Ready(NetworkEvent::IncomingConnection(inc)) => {
assert_eq!(*inc.local_addr(), address);
inc.accept(TestHandler::default().into_node_handler_builder());
},
Async::Ready(ev) => {
panic!("Unexpected event: {:?}", ev)
}
Async::NotReady => break Ok(Async::NotReady),
}
}
});
tokio::runtime::current_thread::Runtime::new().unwrap().block_on(future).unwrap();
}
#[test]
fn dial_self_by_id() {
// Trying to dial self by passing the same `PeerId` shouldn't even be possible in the first
// place.
let mut swarm: Network<_, _, _, NodeHandlerWrapperBuilder<TestHandler<_>>, _> = {
let local_key = identity::Keypair::generate_ed25519();
let local_public_key = local_key.public();
let transport = libp2p_tcp::TcpConfig::new()
.upgrade()
.authenticate(libp2p_secio::SecioConfig::new(local_key))
.multiplex(libp2p_mplex::MplexConfig::new());
Network::new(transport, local_public_key.into())
};
let peer_id = swarm.local_peer_id().clone();
assert!(swarm.peer(peer_id).into_not_connected().is_none());
}
#[test]
fn multiple_addresses_err() {
    // Tries dialing multiple addresses, and makes sure there's one dialing error per address.
let mut swarm = {
let local_key = identity::Keypair::generate_ed25519();
let local_public_key = local_key.public();
let transport = libp2p_tcp::TcpConfig::new()
.upgrade()
.authenticate(libp2p_secio::SecioConfig::new(local_key))
.multiplex(libp2p_mplex::MplexConfig::new());
Network::new(transport, local_public_key.into())
};
let mut addresses = Vec::new();
for _ in 0 .. 3 {
addresses.push(multiaddr![Ip4([0, 0, 0, 0]), Tcp(rand::random::<u16>())]);
}
for _ in 0 .. 5 {
addresses.push(multiaddr![Udp(rand::random::<u16>())]);
}
addresses.shuffle(&mut rand::thread_rng());
let target = PeerId::random();
swarm.peer(target.clone())
.into_not_connected().unwrap()
.connect_iter(addresses.clone(), TestHandler::default().into_node_handler_builder())
.unwrap();
let future = future::poll_fn(|| -> Poll<(), io::Error> {
loop {
match swarm.poll() {
Async::Ready(NetworkEvent::DialError {
new_state,
peer_id,
multiaddr,
error: NetworkReachError::Transport(_)
}) => {
assert_eq!(peer_id, target);
let expected = addresses.remove(0);
assert_eq!(multiaddr, expected);
if addresses.is_empty() {
assert_eq!(new_state, PeerState::NotConnected);
return Ok(Async::Ready(()));
} else {
match new_state {
PeerState::Dialing { num_pending_addresses } => {
assert_eq!(num_pending_addresses.get(), addresses.len());
},
_ => panic!()
}
}
},
Async::Ready(_) => unreachable!(),
Async::NotReady => break Ok(Async::NotReady),
}
}
});
tokio::runtime::current_thread::Runtime::new().unwrap().block_on(future).unwrap();
}
| 38.452012 | 178 | 0.592432 |
dec89735d42e24c31a97e2e30b21efdcecccea2b | 458 | // Copyright 2015-2016 Jarrod Petz.
// See the file at the top-level directory of this distribution
// <COPYRIGHT http://rust-lang.org/COPYRIGHT>
//
// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0>
// OR the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>
// at your option.
//
// This file may not be copied, modified, or distributed except according to
// those terms.
| 35.230769 | 76 | 0.727074 |
de1082c8d080dd1f3ab668b93b33f3e74e411e00 | 9,355 | use crate::data::StreamData;
use crate::state::State;
use async_pipe::{self, PipeReader, PipeWriter};
use bytes::Bytes;
use http::{HeaderMap, HeaderValue};
use http_body::{Body, SizeHint};
use pin_project_lite::pin_project;
use std::borrow::Cow;
use std::marker::Unpin;
use std::pin::Pin;
use std::sync::{Arc, Mutex};
use std::task::{Context, Poll};
use tokio::io::{self, AsyncRead, ReadBuf};
use glommio::Local;
const DEFAULT_BUF_SIZE: usize = 8 * 1024;
/// An [HttpBody](https://docs.rs/hyper/0.14.11/hyper/body/trait.HttpBody.html) implementation which handles data streaming in an efficient way.
///
/// It is similar to [Body](https://docs.rs/hyper/0.14.11/hyper/body/struct.Body.html).
pub struct StreamBody {
inner: Inner,
}
enum Inner {
Once(OnceInner),
Channel(ChannelInner),
}
struct OnceInner {
data: Option<Bytes>,
reached_eof: bool,
state: Arc<Mutex<State>>,
}
pin_project! {
struct ChannelInner {
#[pin]
reader: PipeReader,
buf: Box<[u8]>,
len: usize,
reached_eof: bool,
state: Arc<Mutex<State>>,
}
}
impl StreamBody {
/// Creates an empty body.
pub fn empty() -> StreamBody {
StreamBody {
inner: Inner::Once(OnceInner {
data: None,
reached_eof: true,
state: Arc::new(Mutex::new(State {
is_current_stream_data_consumed: true,
waker: None,
})),
}),
}
}
/// Creates a body stream with an associated writer half.
///
/// Useful when wanting to stream chunks from another thread.
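    ///
    /// A minimal usage sketch (illustrative only; it assumes the body is handed to a
    /// hyper `Response` and that the writer half is driven from another task):
    ///
    /// ```ignore
    /// let (mut writer, body) = StreamBody::channel();
    /// // Feed `writer` (an `AsyncWrite`) from another task, then drop it to end the body.
    /// let response = hyper::Response::new(body);
    /// ```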
pub fn channel() -> (PipeWriter, StreamBody) {
StreamBody::channel_with_capacity(DEFAULT_BUF_SIZE)
}
/// Creates a body stream with an associated writer half having a specific size of internal buffer.
///
/// Useful when wanting to stream chunks from another thread.
pub fn channel_with_capacity(capacity: usize) -> (PipeWriter, StreamBody) {
let (w, r) = async_pipe::pipe();
        // Zero-initialize the buffer; calling `set_len` on uninitialized memory is unsound.
        let buffer = vec![0u8; capacity];
let body = StreamBody {
inner: Inner::Channel(ChannelInner {
reader: r,
buf: buffer.into_boxed_slice(),
len: 0,
reached_eof: false,
state: Arc::new(Mutex::new(State {
is_current_stream_data_consumed: true,
waker: None,
})),
}),
};
(w, body)
}
/// A helper method to convert an [AsyncRead](https://docs.rs/tokio/0.2.16/tokio/io/trait.AsyncRead.html) to a `StreamBody`. If there is any error
/// thrown during the reading/writing, it will be logged via [log::error!](https://docs.rs/log/0.4.10/log/macro.error.html).
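    ///
    /// Illustrative sketch (assumes a tokio `File`, which implements `AsyncRead`):
    ///
    /// ```ignore
    /// let file = tokio::fs::File::open("large.bin").await?;
    /// let body = StreamBody::from_reader(file);
    /// let response = hyper::Response::new(body);
    /// ```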
pub fn from_reader<R: AsyncRead + Unpin + Send + 'static>(mut r: R) -> StreamBody {
let (mut w, body) = StreamBody::channel();
Local::local(async move {
if let Err(err) = io::copy(&mut r, &mut w).await {
log::error!(
"{}: StreamBody: Something went wrong while piping the provided reader to the body: {}",
env!("CARGO_PKG_NAME"),
err
)
}
}).detach();
body
}
}
impl Body for StreamBody {
type Data = StreamData;
type Error = io::Error;
fn poll_data(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Result<Self::Data, Self::Error>>> {
match self.inner {
Inner::Once(ref mut inner) => {
let mut state;
match inner.state.lock() {
Ok(s) => state = s,
Err(err) => {
return Poll::Ready(Some(Err(io::Error::new(
io::ErrorKind::Other,
format!(
"{}: StreamBody [Once Data]: Failed to lock the stream state on poll data: {}",
env!("CARGO_PKG_NAME"),
err
),
))));
}
}
if !state.is_current_stream_data_consumed {
state.waker = Some(cx.waker().clone());
return Poll::Pending;
}
if inner.reached_eof {
return Poll::Ready(None);
}
if let Some(ref bytes) = inner.data {
state.is_current_stream_data_consumed = false;
inner.reached_eof = true;
let data = StreamData::new(&bytes[..], Arc::clone(&inner.state));
return Poll::Ready(Some(Ok(data)));
}
return Poll::Ready(None);
}
Inner::Channel(ref mut inner) => {
let mut inner_me = Pin::new(inner).project();
let mut state;
match inner_me.state.lock() {
Ok(s) => state = s,
Err(err) => {
return Poll::Ready(Some(Err(io::Error::new(
io::ErrorKind::Other,
format!(
"{}: StreamBody [Channel Data]: Failed to lock the stream state on poll data: {}",
env!("CARGO_PKG_NAME"),
err
),
))));
}
}
if !state.is_current_stream_data_consumed {
state.waker = Some(cx.waker().clone());
return Poll::Pending;
}
if *inner_me.reached_eof {
return Poll::Ready(None);
}
let mut buf = ReadBuf::new(&mut inner_me.buf);
let poll_status = inner_me.reader.poll_read(cx, &mut buf);
match poll_status {
Poll::Pending => Poll::Pending,
Poll::Ready(result) => match result {
Ok(_) => {
                            if (buf.capacity() - buf.remaining()) > 0 {
                                state.is_current_stream_data_consumed = false;
                                let data = StreamData::new(buf.filled(), Arc::clone(&inner_me.state));
                                Poll::Ready(Some(Ok(data)))
                            } else {
                                *inner_me.reached_eof = true;
                                Poll::Ready(None)
                            }
}
Err(err) => Poll::Ready(Some(Err(err))),
}
}
}
}
}
fn poll_trailers(
self: Pin<&mut Self>,
_cx: &mut Context,
) -> Poll<Result<Option<HeaderMap<HeaderValue>>, Self::Error>> {
Poll::Ready(Ok(None))
}
fn is_end_stream(&self) -> bool {
match self.inner {
Inner::Once(ref inner) => inner.reached_eof,
Inner::Channel(ref inner) => inner.reached_eof,
}
}
fn size_hint(&self) -> SizeHint {
match self.inner {
Inner::Once(ref inner) => match inner.data {
Some(ref data) => SizeHint::with_exact(data.len() as u64),
None => SizeHint::with_exact(0),
},
Inner::Channel(_) => SizeHint::default(),
}
}
}
impl From<Bytes> for StreamBody {
#[inline]
fn from(chunk: Bytes) -> StreamBody {
if chunk.is_empty() {
StreamBody::empty()
} else {
StreamBody {
inner: Inner::Once(OnceInner {
data: Some(chunk),
reached_eof: false,
state: Arc::new(Mutex::new(State {
is_current_stream_data_consumed: true,
waker: None,
})),
}),
}
}
}
}
impl From<Vec<u8>> for StreamBody {
#[inline]
fn from(vec: Vec<u8>) -> StreamBody {
StreamBody::from(Bytes::from(vec))
}
}
impl From<&'static [u8]> for StreamBody {
#[inline]
fn from(slice: &'static [u8]) -> StreamBody {
StreamBody::from(Bytes::from(slice))
}
}
impl From<Cow<'static, [u8]>> for StreamBody {
#[inline]
fn from(cow: Cow<'static, [u8]>) -> StreamBody {
match cow {
Cow::Borrowed(b) => StreamBody::from(b),
Cow::Owned(o) => StreamBody::from(o),
}
}
}
impl From<String> for StreamBody {
#[inline]
fn from(s: String) -> StreamBody {
StreamBody::from(Bytes::from(s.into_bytes()))
}
}
impl From<&'static str> for StreamBody {
#[inline]
fn from(slice: &'static str) -> StreamBody {
StreamBody::from(Bytes::from(slice.as_bytes()))
}
}
impl From<Cow<'static, str>> for StreamBody {
#[inline]
fn from(cow: Cow<'static, str>) -> StreamBody {
match cow {
Cow::Borrowed(b) => StreamBody::from(b),
Cow::Owned(o) => StreamBody::from(o),
}
}
}
| 31.183333 | 150 | 0.480171 |
e64a5ff81c536a02d363d154eaa937498f454ae1 | 9,641 | // Generated from definition io.k8s.api.apps.v1beta2.StatefulSetStatus
/// StatefulSetStatus represents the current state of a StatefulSet.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct StatefulSetStatus {
/// collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
pub collision_count: Option<i32>,
/// Represents the latest available observations of a statefulset's current state.
pub conditions: Option<Vec<crate::v1_12::api::apps::v1beta2::StatefulSetCondition>>,
/// currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
pub current_replicas: Option<i32>,
/// currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence \[0,currentReplicas).
pub current_revision: Option<String>,
/// observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
pub observed_generation: Option<i64>,
/// readyReplicas is the number of Pods created by the StatefulSet controller that have a Ready Condition.
pub ready_replicas: Option<i32>,
/// replicas is the number of Pods created by the StatefulSet controller.
pub replicas: i32,
/// updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence \[replicas-updatedReplicas,replicas)
pub update_revision: Option<String>,
/// updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
pub updated_replicas: Option<i32>,
}
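// Illustrative sketch (not part of the generated code): the manual serde impls below
// allow round-tripping through any serde format, e.g. with `serde_json`:
//
//     let status: StatefulSetStatus =
//         serde_json::from_str(r#"{"replicas": 3, "readyReplicas": 3}"#)?;
//     assert_eq!(status.ready_replicas, Some(3));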
impl<'de> serde::Deserialize<'de> for StatefulSetStatus {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_collision_count,
Key_conditions,
Key_current_replicas,
Key_current_revision,
Key_observed_generation,
Key_ready_replicas,
Key_replicas,
Key_update_revision,
Key_updated_replicas,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"collisionCount" => Field::Key_collision_count,
"conditions" => Field::Key_conditions,
"currentReplicas" => Field::Key_current_replicas,
"currentRevision" => Field::Key_current_revision,
"observedGeneration" => Field::Key_observed_generation,
"readyReplicas" => Field::Key_ready_replicas,
"replicas" => Field::Key_replicas,
"updateRevision" => Field::Key_update_revision,
"updatedReplicas" => Field::Key_updated_replicas,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = StatefulSetStatus;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "struct StatefulSetStatus")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_collision_count: Option<i32> = None;
let mut value_conditions: Option<Vec<crate::v1_12::api::apps::v1beta2::StatefulSetCondition>> = None;
let mut value_current_replicas: Option<i32> = None;
let mut value_current_revision: Option<String> = None;
let mut value_observed_generation: Option<i64> = None;
let mut value_ready_replicas: Option<i32> = None;
let mut value_replicas: Option<i32> = None;
let mut value_update_revision: Option<String> = None;
let mut value_updated_replicas: Option<i32> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_collision_count => value_collision_count = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_conditions => value_conditions = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_current_replicas => value_current_replicas = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_current_revision => value_current_revision = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_observed_generation => value_observed_generation = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_ready_replicas => value_ready_replicas = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_replicas => value_replicas = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_update_revision => value_update_revision = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_updated_replicas => value_updated_replicas = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(StatefulSetStatus {
collision_count: value_collision_count,
conditions: value_conditions,
current_replicas: value_current_replicas,
current_revision: value_current_revision,
observed_generation: value_observed_generation,
ready_replicas: value_ready_replicas,
replicas: value_replicas.ok_or_else(|| serde::de::Error::missing_field("replicas"))?,
update_revision: value_update_revision,
updated_replicas: value_updated_replicas,
})
}
}
deserializer.deserialize_struct(
"StatefulSetStatus",
&[
"collisionCount",
"conditions",
"currentReplicas",
"currentRevision",
"observedGeneration",
"readyReplicas",
"replicas",
"updateRevision",
"updatedReplicas",
],
Visitor,
)
}
}
impl serde::Serialize for StatefulSetStatus {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
"StatefulSetStatus",
1 +
self.collision_count.as_ref().map_or(0, |_| 1) +
self.conditions.as_ref().map_or(0, |_| 1) +
self.current_replicas.as_ref().map_or(0, |_| 1) +
self.current_revision.as_ref().map_or(0, |_| 1) +
self.observed_generation.as_ref().map_or(0, |_| 1) +
self.ready_replicas.as_ref().map_or(0, |_| 1) +
self.update_revision.as_ref().map_or(0, |_| 1) +
self.updated_replicas.as_ref().map_or(0, |_| 1),
)?;
if let Some(value) = &self.collision_count {
serde::ser::SerializeStruct::serialize_field(&mut state, "collisionCount", value)?;
}
if let Some(value) = &self.conditions {
serde::ser::SerializeStruct::serialize_field(&mut state, "conditions", value)?;
}
if let Some(value) = &self.current_replicas {
serde::ser::SerializeStruct::serialize_field(&mut state, "currentReplicas", value)?;
}
if let Some(value) = &self.current_revision {
serde::ser::SerializeStruct::serialize_field(&mut state, "currentRevision", value)?;
}
if let Some(value) = &self.observed_generation {
serde::ser::SerializeStruct::serialize_field(&mut state, "observedGeneration", value)?;
}
if let Some(value) = &self.ready_replicas {
serde::ser::SerializeStruct::serialize_field(&mut state, "readyReplicas", value)?;
}
serde::ser::SerializeStruct::serialize_field(&mut state, "replicas", &self.replicas)?;
if let Some(value) = &self.update_revision {
serde::ser::SerializeStruct::serialize_field(&mut state, "updateRevision", value)?;
}
if let Some(value) = &self.updated_replicas {
serde::ser::SerializeStruct::serialize_field(&mut state, "updatedReplicas", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
| 50.742105 | 221 | 0.594129 |
2fa90677a476da8601add715d140ba05fd2b840b | 4,150 | use ast;
use scanner::Token;
/// Emits a flattened token stream for an AST node, appending the tokens to `into`.
trait Emitter {
    /// The token type produced by this emitter.
    type T;
    fn emit(&self, into: &mut Vec<Self::T>);
}
impl Emitter for ast::Program {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
for f in self.functions.iter() {
f.emit(into);
}
}
}
impl Emitter for ast::Function {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
self.return_type.emit(into);
into.push(Token::Symbol(self.name.clone()));
into.push(Token::OpenPar);
// TODO: parameters
into.push(Token::ClosePar);
self.block.emit(into);
}
}
impl Emitter for ast::Parameter {
    type T = Token;
    // Parameter emission is not implemented yet (see the TODO in `Function::emit`).
    fn emit(&self, _into: &mut Vec<Self::T>) {
        unimplemented!();
    }
}
impl Emitter for ast::Type {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
into.push(Token::Symbol(self.name.clone()));
}
}
impl Emitter for ast::Block {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
into.push(Token::OpenCur);
for stmt in self.statements.iter() {
stmt.emit(into);
}
into.push(Token::CloseCur);
}
}
impl Emitter for ast::Statement {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
match self {
&ast::Statement::Return(ref expr) => {
into.push(Token::Symbol("return".to_string()));
expr.emit(into);
into.push(Token::Semi);
}
}
}
}
impl Emitter for ast::Expr {
type T = Token;
fn emit(&self, into: &mut Vec<Self::T>) {
match self {
&ast::Expr::NumLiteral(ref s) => {
into.push(Token::NumLiteral(s.clone()));
}
}
}
}
fn tokens_to_string(tokens: &[Token]) -> String {
let mut s = String::new();
let mut last_token_num_or_sym = false;
for t in tokens {
match t {
Token::OpenPar => {
s.push_str("(");
last_token_num_or_sym = false;
}
Token::ClosePar => {
s.push_str(")");
last_token_num_or_sym = false;
}
Token::OpenCur => {
s.push_str("{");
last_token_num_or_sym = false;
}
Token::CloseCur => {
s.push_str("}");
last_token_num_or_sym = false;
}
Token::Semi => {
s.push_str(";");
last_token_num_or_sym = false;
}
Token::Symbol(ref name) => {
if last_token_num_or_sym {
s.push_str(" ");
}
s.push_str(name);
last_token_num_or_sym = true;
}
Token::NumLiteral(ref num) => {
if last_token_num_or_sym {
s.push_str(" ");
}
s.push_str(num);
last_token_num_or_sym = true;
}
}
}
return s;
}
#[test]
fn test_codegen_c() {
let ast = ast::Program {
functions: vec![
ast::Function {
return_type: ast::Type {
name: "int".to_string(),
},
name: "main".to_string(),
parameters: vec![],
block: ast::Block {
statements: vec![
ast::Statement::Return(Box::new(ast::Expr::NumLiteral("0".to_string()))),
],
},
},
],
};
let exp_tokens = vec![
Token::Symbol("int".to_string()),
Token::Symbol("main".to_string()),
Token::OpenPar,
Token::ClosePar,
Token::OpenCur,
Token::Symbol("return".to_string()),
Token::NumLiteral("0".to_string()),
Token::Semi,
Token::CloseCur,
];
let mut tokens = vec![];
ast.emit(&mut tokens);
assert_eq!(exp_tokens, tokens);
let exp_code = "int main(){return 0;}";
let code = tokens_to_string(&tokens[..]);
assert_eq!(exp_code, &code);
}
| 25.304878 | 97 | 0.466747 |
e203cc5e426cb5bf41f22c2c0afd36cdcd62abf4 | 942 | // Copyright 2017 Amagicom AB.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[macro_use]
extern crate error_chain;
use pfctl::PfCtl;
use std::env;
error_chain! {}
quick_main!(run);
fn run() -> Result<()> {
let mut pf = PfCtl::new().chain_err(|| "Unable to connect to PF")?;
for anchor_name in env::args().skip(1) {
pf.try_add_anchor(&anchor_name, pfctl::AnchorKind::Filter)
.chain_err(|| "Unable to add filter anchor")?;
pf.try_add_anchor(&anchor_name, pfctl::AnchorKind::Redirect)
.chain_err(|| "Unable to add redirect anchor")?;
println!("Added {} as both a redirect and filter anchor", anchor_name);
}
Ok(())
}
| 30.387097 | 79 | 0.663482 |
29b66d69cf03eed56fffa2abc31af04ecf1a4e5f | 24,210 | //! Builder-pattern objects for configuration various git operations.
use std::ffi::{CStr, CString};
use std::mem;
use std::path::Path;
use std::ptr;
use libc::{c_char, size_t, c_void, c_uint, c_int};
use {raw, panic, Error, Repository, FetchOptions, IntoCString};
use {CheckoutNotificationType, DiffFile, Remote};
use util::{self, Binding};
/// A builder struct which is used to build configuration for cloning a new git
/// repository.
pub struct RepoBuilder<'cb> {
bare: bool,
branch: Option<CString>,
local: bool,
hardlinks: bool,
checkout: Option<CheckoutBuilder<'cb>>,
fetch_opts: Option<FetchOptions<'cb>>,
clone_local: Option<CloneLocal>,
remote_create: Option<Box<RemoteCreate<'cb>>>,
}
/// Type of callback passed to `RepoBuilder::remote_create`.
///
/// The second and third arguments are the remote's name and the remote's url.
pub type RemoteCreate<'cb> = for<'a> FnMut(&'a Repository, &str, &str)
-> Result<Remote<'a>, Error> + 'cb;
/// A builder struct for configuring checkouts of a repository.
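///
/// Illustrative sketch (the repository path and file path are placeholders):
///
/// ```no_run
/// use git2::Repository;
/// use git2::build::CheckoutBuilder;
///
/// let repo = Repository::open("/path/to/repo").unwrap();
/// let mut checkout = CheckoutBuilder::new();
/// checkout.force().path("src/lib.rs");
/// repo.checkout_head(Some(&mut checkout)).unwrap();
/// ```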
pub struct CheckoutBuilder<'cb> {
their_label: Option<CString>,
our_label: Option<CString>,
ancestor_label: Option<CString>,
target_dir: Option<CString>,
paths: Vec<CString>,
path_ptrs: Vec<*const c_char>,
file_perm: Option<i32>,
dir_perm: Option<i32>,
disable_filters: bool,
checkout_opts: u32,
progress: Option<Box<Progress<'cb>>>,
notify: Option<Box<Notify<'cb>>>,
notify_flags: CheckoutNotificationType,
}
/// Checkout progress notification callback.
///
/// The first argument is the path for the notification, the next is the number
/// of completed steps so far, and the final is the total number of steps.
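///
/// Illustrative sketch of wiring a progress callback into a checkout:
///
/// ```no_run
/// use git2::build::CheckoutBuilder;
///
/// let mut checkout = CheckoutBuilder::new();
/// checkout.progress(|path, completed, total| {
///     println!("{:?}: {}/{}", path, completed, total);
/// });
/// ```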
pub type Progress<'a> = FnMut(Option<&Path>, usize, usize) + 'a;
/// Checkout notifications callback.
///
/// The first argument is the notification type, the next is the path for the
/// notification, followed by the baseline diff, target diff, and workdir diff.
///
/// The callback must return a bool specifying whether the checkout should
/// continue.
pub type Notify<'a> = FnMut(CheckoutNotificationType, Option<&Path>,
Option<DiffFile>, Option<DiffFile>,
Option<DiffFile>) -> bool + 'a;
impl<'cb> Default for RepoBuilder<'cb> {
fn default() -> Self {
Self::new()
}
}
/// Options that can be passed to `RepoBuilder::clone_local`.
#[derive(Clone, Copy)]
pub enum CloneLocal {
/// Auto-detect (default)
///
/// Here libgit2 will bypass the git-aware transport for local paths, but
/// use a normal fetch for `file://` urls.
Auto = raw::GIT_CLONE_LOCAL_AUTO as isize,
/// Bypass the git-aware transport even for `file://` urls.
Local = raw::GIT_CLONE_LOCAL as isize,
/// Never bypass the git-aware transport
None = raw::GIT_CLONE_NO_LOCAL as isize,
/// Bypass the git-aware transport, but don't try to use hardlinks.
NoLinks = raw::GIT_CLONE_LOCAL_NO_LINKS as isize,
#[doc(hidden)]
__Nonexhaustive = 0xff,
}
impl<'cb> RepoBuilder<'cb> {
/// Creates a new repository builder with all of the default configuration.
///
/// When ready, the `clone()` method can be used to clone a new repository
/// using this configuration.
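    ///
    /// Illustrative sketch (the URL and target path are placeholders):
    ///
    /// ```no_run
    /// use std::path::Path;
    /// use git2::build::RepoBuilder;
    ///
    /// let repo = RepoBuilder::new()
    ///     .bare(true)
    ///     .clone("https://example.com/some/repo.git", Path::new("/tmp/repo"))
    ///     .unwrap();
    /// ```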
pub fn new() -> RepoBuilder<'cb> {
::init();
RepoBuilder {
bare: false,
branch: None,
local: true,
clone_local: None,
hardlinks: true,
checkout: None,
fetch_opts: None,
remote_create: None,
}
}
/// Indicate whether the repository will be cloned as a bare repository or
/// not.
pub fn bare(&mut self, bare: bool) -> &mut RepoBuilder<'cb> {
self.bare = bare;
self
}
/// Specify the name of the branch to check out after the clone.
///
/// If not specified, the remote's default branch will be used.
pub fn branch(&mut self, branch: &str) -> &mut RepoBuilder<'cb> {
self.branch = Some(CString::new(branch).unwrap());
self
}
/// Configures options for bypassing the git-aware transport on clone.
///
    /// Bypassing it means that instead of performing a fetch, libgit2 will copy
    /// the object database directory directly, which is faster. If possible, it
    /// will hardlink the files to save space.
pub fn clone_local(&mut self, clone_local: CloneLocal) -> &mut RepoBuilder<'cb> {
self.clone_local = Some(clone_local);
self
}
/// Set the flag for bypassing the git aware transport mechanism for local
/// paths.
///
/// If `true`, the git-aware transport will be bypassed for local paths. If
/// `false`, the git-aware transport will not be bypassed.
#[deprecated(note = "use `clone_local` instead")]
#[doc(hidden)]
pub fn local(&mut self, local: bool) -> &mut RepoBuilder<'cb> {
self.local = local;
self
}
/// Set the flag for whether hardlinks are used when using a local git-aware
/// transport mechanism.
#[deprecated(note = "use `clone_local` instead")]
#[doc(hidden)]
pub fn hardlinks(&mut self, links: bool) -> &mut RepoBuilder<'cb> {
self.hardlinks = links;
self
}
/// Configure the checkout which will be performed by consuming a checkout
/// builder.
pub fn with_checkout(&mut self, checkout: CheckoutBuilder<'cb>)
-> &mut RepoBuilder<'cb> {
self.checkout = Some(checkout);
self
}
/// Options which control the fetch, including callbacks.
///
/// The callbacks are used for reporting fetch progress, and for acquiring
/// credentials in the event they are needed.
pub fn fetch_options(&mut self, fetch_opts: FetchOptions<'cb>)
-> &mut RepoBuilder<'cb> {
self.fetch_opts = Some(fetch_opts);
self
}
/// Configures a callback used to create the git remote, prior to its being
/// used to perform the clone operation.
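    ///
    /// Illustrative sketch: create the remote with the default refspec by
    /// delegating to `Repository::remote` (any fallible constructor works):
    ///
    /// ```no_run
    /// use git2::build::RepoBuilder;
    ///
    /// let mut builder = RepoBuilder::new();
    /// builder.remote_create(|repo, name, url| repo.remote(name, url));
    /// ```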
pub fn remote_create<F>(&mut self, f: F) -> &mut RepoBuilder<'cb>
where F: for<'a> FnMut(&'a Repository, &str, &str)
-> Result<Remote<'a>, Error> + 'cb,
{
self.remote_create = Some(Box::new(f));
self
}
/// Clone a remote repository.
///
/// This will use the options configured so far to clone the specified url
/// into the specified local path.
pub fn clone(&mut self, url: &str, into: &Path) -> Result<Repository, Error> {
let mut opts: raw::git_clone_options = unsafe { mem::zeroed() };
unsafe {
try_call!(raw::git_clone_init_options(&mut opts,
raw::GIT_CLONE_OPTIONS_VERSION));
}
opts.bare = self.bare as c_int;
opts.checkout_branch = self.branch.as_ref().map(|s| {
s.as_ptr()
}).unwrap_or(ptr::null());
if let Some(ref local) = self.clone_local {
opts.local = *local as raw::git_clone_local_t;
} else {
opts.local = match (self.local, self.hardlinks) {
(true, false) => raw::GIT_CLONE_LOCAL_NO_LINKS,
(false, _) => raw::GIT_CLONE_NO_LOCAL,
(true, _) => raw::GIT_CLONE_LOCAL_AUTO,
};
}
if let Some(ref mut cbs) = self.fetch_opts {
opts.fetch_opts = cbs.raw();
}
if let Some(ref mut c) = self.checkout {
unsafe {
c.configure(&mut opts.checkout_opts);
}
}
if let Some(ref mut callback) = self.remote_create {
opts.remote_cb = Some(remote_create_cb);
opts.remote_cb_payload = callback as *mut _ as *mut _;
}
let url = try!(CString::new(url));
let into = try!(into.into_c_string());
let mut raw = ptr::null_mut();
unsafe {
try_call!(raw::git_clone(&mut raw, url, into, &opts));
Ok(Binding::from_raw(raw))
}
}
}
extern fn remote_create_cb(out: *mut *mut raw::git_remote,
repo: *mut raw::git_repository,
name: *const c_char,
url: *const c_char,
payload: *mut c_void) -> c_int {
unsafe {
let repo = Repository::from_raw(repo);
let code = panic::wrap(|| {
let name = CStr::from_ptr(name).to_str().unwrap();
let url = CStr::from_ptr(url).to_str().unwrap();
let f = payload as *mut Box<RemoteCreate>;
match (*f)(&repo, name, url) {
Ok(remote) => {
*out = ::remote::remote_into_raw(remote);
0
}
Err(e) => e.raw_code(),
}
});
mem::forget(repo);
code.unwrap_or(-1)
}
}
impl<'cb> Default for CheckoutBuilder<'cb> {
fn default() -> Self {
Self::new()
}
}
impl<'cb> CheckoutBuilder<'cb> {
/// Creates a new builder for checkouts with all of its default
/// configuration.
pub fn new() -> CheckoutBuilder<'cb> {
::init();
CheckoutBuilder {
disable_filters: false,
dir_perm: None,
file_perm: None,
path_ptrs: Vec::new(),
paths: Vec::new(),
target_dir: None,
ancestor_label: None,
our_label: None,
their_label: None,
checkout_opts: raw::GIT_CHECKOUT_SAFE as u32,
progress: None,
notify: None,
notify_flags: CheckoutNotificationType::empty(),
}
}
/// Indicate that this checkout should perform a dry run by checking for
/// conflicts but not make any actual changes.
pub fn dry_run(&mut self) -> &mut CheckoutBuilder<'cb> {
self.checkout_opts &= !((1 << 4) - 1);
self.checkout_opts |= raw::GIT_CHECKOUT_NONE as u32;
self
}
/// Take any action necessary to get the working directory to match the
/// target including potentially discarding modified files.
pub fn force(&mut self) -> &mut CheckoutBuilder<'cb> {
self.checkout_opts &= !((1 << 4) - 1);
self.checkout_opts |= raw::GIT_CHECKOUT_FORCE as u32;
self
}
/// Indicate that the checkout should be performed safely, allowing new
    /// files to be created but not overwriting existing files or changes.
///
/// This is the default.
pub fn safe(&mut self) -> &mut CheckoutBuilder<'cb> {
self.checkout_opts &= !((1 << 4) - 1);
self.checkout_opts |= raw::GIT_CHECKOUT_SAFE as u32;
self
}
fn flag(&mut self, bit: raw::git_checkout_strategy_t,
on: bool) -> &mut CheckoutBuilder<'cb> {
if on {
self.checkout_opts |= bit as u32;
} else {
self.checkout_opts &= !(bit as u32);
}
self
}
/// In safe mode, create files that don't exist.
///
/// Defaults to false.
pub fn recreate_missing(&mut self, allow: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_RECREATE_MISSING, allow)
}
/// In safe mode, apply safe file updates even when there are conflicts
/// instead of canceling the checkout.
///
/// Defaults to false.
pub fn allow_conflicts(&mut self, allow: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_ALLOW_CONFLICTS, allow)
}
/// Remove untracked files from the working dir.
///
/// Defaults to false.
pub fn remove_untracked(&mut self, remove: bool)
-> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_REMOVE_UNTRACKED, remove)
}
/// Remove ignored files from the working dir.
///
/// Defaults to false.
pub fn remove_ignored(&mut self, remove: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_REMOVE_IGNORED, remove)
}
/// Only update the contents of files that already exist.
///
/// If set, files will not be created or deleted.
///
/// Defaults to false.
pub fn update_only(&mut self, update: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_UPDATE_ONLY, update)
}
/// Prevents checkout from writing the updated files' information to the
/// index.
///
/// Defaults to true.
pub fn update_index(&mut self, update: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_DONT_UPDATE_INDEX, !update)
}
/// Indicate whether the index and git attributes should be refreshed from
/// disk before any operations.
///
    /// Defaults to true.
pub fn refresh(&mut self, refresh: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_NO_REFRESH, !refresh)
}
/// Skip files with unmerged index entries.
///
/// Defaults to false.
pub fn skip_unmerged(&mut self, skip: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_SKIP_UNMERGED, skip)
}
/// Indicate whether the checkout should proceed on conflicts by using the
/// stage 2 version of the file ("ours").
///
/// Defaults to false.
pub fn use_ours(&mut self, ours: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_USE_OURS, ours)
}
/// Indicate whether the checkout should proceed on conflicts by using the
/// stage 3 version of the file ("theirs").
///
/// Defaults to false.
pub fn use_theirs(&mut self, theirs: bool) -> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_USE_THEIRS, theirs)
}
/// Indicate whether ignored files should be overwritten during the checkout.
///
/// Defaults to true.
pub fn overwrite_ignored(&mut self, overwrite: bool)
-> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_DONT_OVERWRITE_IGNORED, !overwrite)
}
/// Indicate whether a normal merge file should be written for conflicts.
///
/// Defaults to false.
pub fn conflict_style_merge(&mut self, on: bool)
-> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_CONFLICT_STYLE_MERGE, on)
}
/// Specify for which notification types to invoke the notification
/// callback.
///
/// Defaults to none.
pub fn notify_on(&mut self, notification_types: CheckoutNotificationType)
-> &mut CheckoutBuilder<'cb> {
self.notify_flags = notification_types;
self
}
/// Indicates whether to include common ancestor data in diff3 format files
/// for conflicts.
///
/// Defaults to false.
pub fn conflict_style_diff3(&mut self, on: bool)
-> &mut CheckoutBuilder<'cb> {
self.flag(raw::GIT_CHECKOUT_CONFLICT_STYLE_DIFF3, on)
}
/// Indicate whether to apply filters like CRLF conversion.
pub fn disable_filters(&mut self, disable: bool)
-> &mut CheckoutBuilder<'cb> {
self.disable_filters = disable;
self
}
/// Set the mode with which new directories are created.
///
/// Default is 0755
pub fn dir_perm(&mut self, perm: i32) -> &mut CheckoutBuilder<'cb> {
self.dir_perm = Some(perm);
self
}
/// Set the mode with which new files are created.
///
/// The default is 0644 or 0755 as dictated by the blob.
pub fn file_perm(&mut self, perm: i32) -> &mut CheckoutBuilder<'cb> {
self.file_perm = Some(perm);
self
}
/// Add a path to be checked out.
///
/// If no paths are specified, then all files are checked out. Otherwise
/// only these specified paths are checked out.
pub fn path<T: IntoCString>(&mut self, path: T)
-> &mut CheckoutBuilder<'cb> {
let path = path.into_c_string().unwrap();
self.path_ptrs.push(path.as_ptr());
self.paths.push(path);
self
}
/// Set the directory to check out to
pub fn target_dir(&mut self, dst: &Path) -> &mut CheckoutBuilder<'cb> {
self.target_dir = Some(dst.into_c_string().unwrap());
self
}
/// The name of the common ancestor side of conflicts
pub fn ancestor_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> {
self.ancestor_label = Some(CString::new(label).unwrap());
self
}
    /// The name of our side of conflicts
pub fn our_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> {
self.our_label = Some(CString::new(label).unwrap());
self
}
    /// The name of their side of conflicts
pub fn their_label(&mut self, label: &str) -> &mut CheckoutBuilder<'cb> {
self.their_label = Some(CString::new(label).unwrap());
self
}
/// Set a callback to receive notifications of checkout progress.
pub fn progress<F>(&mut self, cb: F) -> &mut CheckoutBuilder<'cb>
where F: FnMut(Option<&Path>, usize, usize) + 'cb {
self.progress = Some(Box::new(cb) as Box<Progress<'cb>>);
self
}
/// Set a callback to receive checkout notifications.
///
/// Callbacks are invoked prior to modifying any files on disk.
/// Returning `false` from the callback will cancel the checkout.
pub fn notify<F>(&mut self, cb: F) -> &mut CheckoutBuilder<'cb>
where F: FnMut(CheckoutNotificationType, Option<&Path>, Option<DiffFile>,
Option<DiffFile>, Option<DiffFile>) -> bool + 'cb
{
self.notify = Some(Box::new(cb) as Box<Notify<'cb>>);
self
}
/// Configure a raw checkout options based on this configuration.
///
/// This method is unsafe as there is no guarantee that this structure will
/// outlive the provided checkout options.
pub unsafe fn configure(&mut self, opts: &mut raw::git_checkout_options) {
opts.version = raw::GIT_CHECKOUT_OPTIONS_VERSION;
opts.disable_filters = self.disable_filters as c_int;
opts.dir_mode = self.dir_perm.unwrap_or(0) as c_uint;
opts.file_mode = self.file_perm.unwrap_or(0) as c_uint;
if !self.path_ptrs.is_empty() {
opts.paths.strings = self.path_ptrs.as_ptr() as *mut _;
opts.paths.count = self.path_ptrs.len() as size_t;
}
if let Some(ref c) = self.target_dir {
opts.target_directory = c.as_ptr();
}
if let Some(ref c) = self.ancestor_label {
opts.ancestor_label = c.as_ptr();
}
if let Some(ref c) = self.our_label {
opts.our_label = c.as_ptr();
}
if let Some(ref c) = self.their_label {
opts.their_label = c.as_ptr();
}
if self.progress.is_some() {
let f: raw::git_checkout_progress_cb = progress_cb;
opts.progress_cb = Some(f);
opts.progress_payload = self as *mut _ as *mut _;
}
if self.notify.is_some() {
let f: raw::git_checkout_notify_cb = notify_cb;
opts.notify_cb = Some(f);
opts.notify_payload = self as *mut _ as *mut _;
opts.notify_flags = self.notify_flags.bits() as c_uint;
}
opts.checkout_strategy = self.checkout_opts as c_uint;
}
}
extern fn progress_cb(path: *const c_char,
completed: size_t,
total: size_t,
data: *mut c_void) {
panic::wrap(|| unsafe {
let payload = &mut *(data as *mut CheckoutBuilder);
let callback = match payload.progress {
Some(ref mut c) => c,
None => return,
};
let path = if path.is_null() {
None
} else {
Some(util::bytes2path(CStr::from_ptr(path).to_bytes()))
};
callback(path, completed as usize, total as usize)
});
}
extern fn notify_cb(why: raw::git_checkout_notify_t,
path: *const c_char,
baseline: *const raw::git_diff_file,
target: *const raw::git_diff_file,
workdir: *const raw::git_diff_file,
data: *mut c_void) -> c_int {
    // Unpack the raw notification payload and forward it to the user-provided callback.
panic::wrap(|| unsafe {
let payload = &mut *(data as *mut CheckoutBuilder);
let callback = match payload.notify {
Some(ref mut c) => c,
None => return 0,
};
let path = if path.is_null() {
None
} else {
Some(util::bytes2path(CStr::from_ptr(path).to_bytes()))
};
let baseline = if baseline.is_null() {
None
} else {
Some(DiffFile::from_raw(baseline))
};
let target = if target.is_null() {
None
} else {
Some(DiffFile::from_raw(target))
};
let workdir = if workdir.is_null() {
None
} else {
Some(DiffFile::from_raw(workdir))
};
let why = CheckoutNotificationType::from_bits_truncate(why as u32);
let keep_going = callback(why, path, baseline, target, workdir);
if keep_going {0} else {1}
}).unwrap_or(2)
}
#[cfg(test)]
mod tests {
use std::fs;
use std::path::Path;
use tempdir::TempDir;
use super::{CheckoutBuilder, RepoBuilder};
use {CheckoutNotificationType, Repository};
#[test]
fn smoke() {
let r = RepoBuilder::new().clone("/path/to/nowhere", Path::new("foo"));
assert!(r.is_err());
}
#[test]
fn smoke2() {
let td = TempDir::new("test").unwrap();
Repository::init_bare(&td.path().join("bare")).unwrap();
let url = if cfg!(unix) {
format!("file://{}/bare", td.path().display())
} else {
format!("file:///{}/bare", td.path().display().to_string()
.replace("\\", "/"))
};
let dst = td.path().join("foo");
RepoBuilder::new().clone(&url, &dst).unwrap();
fs::remove_dir_all(&dst).unwrap();
assert!(RepoBuilder::new().branch("foo")
.clone(&url, &dst).is_err());
}
    /// Regression test for issue #365
#[test]
fn notify_callback() {
let td = TempDir::new("test").unwrap();
let cd = TempDir::new("external-checkout").unwrap();
{
let repo = Repository::init(&td.path()).unwrap();
let mut config = repo.config().unwrap();
config.set_str("user.name", "name").unwrap();
config.set_str("user.email", "email").unwrap();
let mut index = repo.index().unwrap();
let p = Path::new(td.path()).join("file");
println!("using path {:?}", p);
fs::File::create(&p).unwrap();
index.add_path(&Path::new("file")).unwrap();
let id = index.write_tree().unwrap();
let tree = repo.find_tree(id).unwrap();
let sig = repo.signature().unwrap();
repo.commit(Some("HEAD"), &sig, &sig, "initial",
&tree, &[]).unwrap();
}
let repo = Repository::open_bare(&td.path().join(".git")).unwrap();
let tree = repo
.revparse_single(&"master")
.unwrap()
.peel_to_tree()
.unwrap();
let mut index = repo.index().unwrap();
index.read_tree(&tree).unwrap();
let mut checkout_opts = CheckoutBuilder::new();
checkout_opts.target_dir(&cd.path());
checkout_opts.notify_on(CheckoutNotificationType::all());
checkout_opts.notify(|_notif, _path, baseline, target, workdir| {
assert!(baseline.is_none());
assert_eq!(target.unwrap().path(), Some(Path::new("file")));
assert!(workdir.is_none());
true
});
repo.checkout_index(Some(&mut index), Some(&mut checkout_opts))
.unwrap();
}
}
| 34.487179 | 85 | 0.579843 |
f8e02cee5ff565a759aabb16298539929e0c4221 | 810 |
use class::ClassFunctions;
use {Ability, ArmorProficiency, HitDice, Selections, Skill, WeaponProficiency};
pub struct Cleric;
impl ClassFunctions for Cleric {
fn hit_dice(&self) -> HitDice {
HitDice::D8
}
fn armors(&self) -> Vec<ArmorProficiency> {
vec![ArmorProficiency::Light,
ArmorProficiency::Medium,
ArmorProficiency::Shield]
}
fn weapons(&self) -> Vec<WeaponProficiency> {
WeaponProficiency::simple()
}
fn saving_throws(&self) -> [Ability; 2] {
[Ability::Charisma, Ability::Wisdom]
}
fn skills_choice(&self) -> Selections<Skill> {
Selections::new(vec![], 2, vec![
Skill::History, Skill::Insight, Skill::Medicine,
Skill::Persuasion, Skill::Religion])
}
}
| 25.3125 | 80 | 0.602469 |
721975840d2f412de2114dd661e9ae32fd0f935b | 965 | use napi::{CallContext, JsString, Module, Result};
#[js_function(1)]
fn concat_string(ctx: CallContext) -> Result<JsString> {
let in_string = ctx.get::<JsString>(0)?;
let out_string = format!("{} + Rust 🦀 string!", in_string.as_str()?);
ctx.env.create_string_from_std(out_string)
}
#[js_function(1)]
fn concat_latin1_string(ctx: CallContext) -> Result<JsString> {
let in_string = ctx.get::<JsString>(0)?;
let out_string = format!("{} + Rust 🦀 string!", in_string.as_latin1_string()?);
ctx.env.create_string_from_std(out_string)
}
#[js_function]
fn create_latin1(ctx: CallContext) -> Result<JsString> {
let bytes = vec![169, 191];
ctx.env.create_string_latin1(bytes.as_slice())
}
pub fn register_js(module: &mut Module) -> Result<()> {
module.create_named_method("concatString", concat_string)?;
module.create_named_method("concatLatin1String", concat_latin1_string)?;
module.create_named_method("createLatin1", create_latin1)?;
Ok(())
}
| 33.275862 | 81 | 0.720207 |
294464504d11d10c6f475c5c60142aa41ca82073 | 3,221 | mod install;
mod sessions;
#[cfg(test)]
mod tests;
use crate::install::populate_data_dir;
use sessions::{assert_session, assert_session_ne, get_active_session, list_sessions};
use std::process;
use zellij_client::{os_input_output::get_client_os_input, start_client, ClientInfo};
use zellij_server::{os_input_output::get_server_os_input, start_server};
use zellij_utils::{
cli::{CliArgs, Command, SessionCommand, Sessions},
consts::{ZELLIJ_TMP_DIR, ZELLIJ_TMP_LOG_DIR},
logging::*,
setup::{get_default_data_dir, Setup},
structopt::StructOpt,
};
pub fn main() {
configure_logger();
let opts = CliArgs::from_args();
if let Some(Command::Sessions(Sessions::ListSessions)) = opts.command {
list_sessions();
}
atomic_create_dir(&*ZELLIJ_TMP_DIR).unwrap();
atomic_create_dir(&*ZELLIJ_TMP_LOG_DIR).unwrap();
if let Some(path) = opts.server {
let os_input = match get_server_os_input() {
Ok(server_os_input) => server_os_input,
Err(e) => {
eprintln!("failed to open terminal:\n{}", e);
process::exit(1);
}
};
start_server(Box::new(os_input), path);
} else {
let (config, layout, config_options) = match Setup::from_options(&opts) {
Ok(results) => results,
Err(e) => {
eprintln!("{}", e);
process::exit(1);
}
};
let os_input = match get_client_os_input() {
Ok(os_input) => os_input,
Err(e) => {
eprintln!("failed to open terminal:\n{}", e);
process::exit(1);
}
};
if let Some(Command::Sessions(Sessions::Attach {
mut session_name,
force,
options,
})) = opts.command.clone()
{
if let Some(session) = session_name.as_ref() {
assert_session(session);
} else {
session_name = Some(get_active_session());
}
let config_options = match options {
Some(SessionCommand::Options(o)) => config_options.merge(o),
None => config_options,
};
start_client(
Box::new(os_input),
opts,
config,
config_options.clone(),
ClientInfo::Attach(session_name.unwrap(), force, config_options),
None,
);
} else {
let session_name = opts
.session
.clone()
.unwrap_or_else(|| names::Generator::default().next().unwrap());
assert_session_ne(&session_name);
// Determine and initialize the data directory
let data_dir = opts.data_dir.clone().unwrap_or_else(get_default_data_dir);
#[cfg(not(disable_automatic_asset_installation))]
populate_data_dir(&data_dir);
start_client(
Box::new(os_input),
opts,
config,
config_options,
ClientInfo::New(session_name),
layout,
);
}
}
}
| 31.578431 | 86 | 0.5371 |
0adcdec0b5f9446bb5d126065459f8fa321bc949 | 15,616 | #[cfg(feature = "tutorial5")]
mod tutorial5 {
use std::os::raw::c_void;
use std::process;
extern crate glib;
use self::glib::object::ObjectType;
use self::glib::*;
extern crate gdk;
use self::gdk::prelude::*;
extern crate gtk;
use self::gtk::*;
extern crate gstreamer as gst;
extern crate gstreamer_video as gst_video;
use self::gst_video::prelude::*;
use std::ops;
// Custom struct to keep our window reference alive
// and to store the timeout id so that we can remove
// it from the main context again later and drop the
// references it keeps inside its closures
struct AppWindow {
main_window: Window,
timeout_id: Option<glib::SourceId>,
}
impl ops::Deref for AppWindow {
type Target = Window;
fn deref(&self) -> &Window {
&self.main_window
}
}
impl Drop for AppWindow {
fn drop(&mut self) {
if let Some(source_id) = self.timeout_id.take() {
glib::source_remove(source_id);
}
}
}
// Extract tags from streams of @stype and add the info in the UI.
fn add_streams_info(playbin: &gst::Element, textbuf: >k::TextBuffer, stype: &str) {
let propname: &str = &format!("n-{}", stype);
let signame: &str = &format!("get-{}-tags", stype);
match playbin.get_property(propname).unwrap().get() {
Some(x) => {
for i in 0..x {
let tags = playbin.emit(signame, &[&i]).unwrap().unwrap();
if let Some(tags) = tags.get::<gst::TagList>() {
textbuf.insert_at_cursor(&format!("{} stream {}:\n ", stype, i));
if let Some(codec) = tags.get::<gst::tags::VideoCodec>() {
textbuf.insert_at_cursor(&format!(
" codec: {} \n",
codec.get().unwrap()
));
}
if let Some(codec) = tags.get::<gst::tags::AudioCodec>() {
textbuf.insert_at_cursor(&format!(
" codec: {} \n",
codec.get().unwrap()
));
}
if let Some(lang) = tags.get::<gst::tags::LanguageCode>() {
textbuf.insert_at_cursor(&format!(
" language: {} \n",
lang.get().unwrap()
));
}
if let Some(bitrate) = tags.get::<gst::tags::Bitrate>() {
textbuf.insert_at_cursor(&format!(
" bitrate: {} \n",
bitrate.get().unwrap()
));
}
}
}
}
None => {
eprintln!("Could not get {}!", propname);
}
}
}
// Extract metadata from all the streams and write it to the text widget in the GUI
fn analyze_streams(playbin: &gst::Element, textbuf: >k::TextBuffer) {
{
textbuf.set_text("");
}
add_streams_info(playbin, textbuf, "video");
add_streams_info(playbin, textbuf, "audio");
add_streams_info(playbin, textbuf, "text");
}
// This creates all the GTK+ widgets that compose our application, and registers the callbacks
fn create_ui(playbin: &gst::Element) -> AppWindow {
let main_window = Window::new(WindowType::Toplevel);
main_window.connect_delete_event(|_, _| {
gtk::main_quit();
Inhibit(false)
});
let play_button = gtk::Button::new_from_icon_name(
Some("media-playback-start"),
gtk::IconSize::SmallToolbar,
);
let pipeline = playbin.clone();
play_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state");
});
let pause_button = gtk::Button::new_from_icon_name(
Some("media-playback-pause"),
gtk::IconSize::SmallToolbar,
);
let pipeline = playbin.clone();
pause_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Paused)
.expect("Unable to set the pipeline to the `Paused` state");
});
let stop_button = gtk::Button::new_from_icon_name(
Some("media-playback-stop"),
gtk::IconSize::SmallToolbar,
);
let pipeline = playbin.clone();
stop_button.connect_clicked(move |_| {
let pipeline = &pipeline;
pipeline
.set_state(gst::State::Ready)
.expect("Unable to set the pipeline to the `Ready` state");
});
let slider = gtk::Scale::new_with_range(
gtk::Orientation::Horizontal,
0.0 as f64,
100.0 as f64,
1.0 as f64,
);
let pipeline = playbin.clone();
let slider_update_signal_id = slider.connect_value_changed(move |slider| {
let pipeline = &pipeline;
let value = slider.get_value() as u64;
if pipeline
.seek_simple(
gst::SeekFlags::FLUSH | gst::SeekFlags::KEY_UNIT,
value * gst::SECOND,
)
.is_err()
{
eprintln!("Seeking to {} failed", value);
}
});
slider.set_draw_value(false);
let pipeline = playbin.clone();
let lslider = slider.clone();
// Update the UI (seekbar) every second
let timeout_id = gtk::timeout_add_seconds(1, move || {
let pipeline = &pipeline;
let lslider = &lslider;
if let Some(dur) = pipeline.query_duration::<gst::ClockTime>() {
let seconds = dur / gst::SECOND;
lslider.set_range(0.0, seconds.map(|v| v as f64).unwrap_or(0.0));
}
if let Some(pos) = pipeline.query_position::<gst::ClockTime>() {
let seconds = pos / gst::SECOND;
lslider.block_signal(&slider_update_signal_id);
lslider.set_value(seconds.map(|v| v as f64).unwrap_or(0.0));
lslider.unblock_signal(&slider_update_signal_id);
}
Continue(true)
});
let controls = Box::new(Orientation::Horizontal, 0);
controls.pack_start(&play_button, false, false, 0);
controls.pack_start(&pause_button, false, false, 0);
controls.pack_start(&stop_button, false, false, 0);
controls.pack_start(&slider, true, true, 2);
let video_window = DrawingArea::new();
let video_overlay = playbin
.clone()
.dynamic_cast::<gst_video::VideoOverlay>()
.unwrap();
video_window.connect_realize(move |video_window| {
let video_overlay = &video_overlay;
let gdk_window = video_window.get_window().unwrap();
if !gdk_window.ensure_native() {
println!("Can't create native window for widget");
process::exit(-1);
}
let display_type_name = gdk_window.get_display().get_type().name();
#[cfg(all(target_os = "linux", feature = "tutorial5-x11"))]
{
// Check if we're using X11 or ...
if display_type_name == "GdkX11Display" {
extern "C" {
pub fn gdk_x11_window_get_xid(
window: *mut glib::object::GObject,
) -> *mut c_void;
}
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let xid = gdk_x11_window_get_xid(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(xid as usize);
}
} else {
println!("Add support for display type '{}'", display_type_name);
process::exit(-1);
}
}
#[cfg(all(target_os = "macos", feature = "tutorial5-quartz"))]
{
if display_type_name == "GdkQuartzDisplay" {
extern "C" {
pub fn gdk_quartz_window_get_nsview(
window: *mut glib::object::GObject,
) -> *mut c_void;
}
#[allow(clippy::cast_ptr_alignment)]
unsafe {
let window = gdk_quartz_window_get_nsview(gdk_window.as_ptr() as *mut _);
video_overlay.set_window_handle(window as usize);
}
} else {
println!(
"Unsupported display type '{}', compile with `--feature `",
display_type_name
);
process::exit(-1);
}
}
});
let streams_list = gtk::TextView::new();
streams_list.set_editable(false);
let pipeline_weak = playbin.downgrade();
let streams_list_weak = glib::SendWeakRef::from(streams_list.downgrade());
let bus = playbin.get_bus().unwrap();
#[allow(clippy::single_match)]
bus.connect_message(move |_, msg| match msg.view() {
gst::MessageView::Application(application) => {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
let streams_list = match streams_list_weak.upgrade() {
Some(streams_list) => streams_list,
None => return,
};
if application.get_structure().map(|s| s.get_name()) == Some("tags-changed") {
let textbuf = streams_list
.get_buffer()
.expect("Couldn't get buffer from text_view");
analyze_streams(&pipeline, &textbuf);
}
}
_ => (),
});
let vbox = Box::new(Orientation::Horizontal, 0);
vbox.pack_start(&video_window, true, true, 0);
vbox.pack_start(&streams_list, false, false, 2);
let main_box = Box::new(Orientation::Vertical, 0);
main_box.pack_start(&vbox, true, true, 0);
main_box.pack_start(&controls, false, false, 0);
main_window.add(&main_box);
main_window.set_default_size(640, 480);
main_window.show_all();
AppWindow {
main_window,
timeout_id: Some(timeout_id),
}
}
// We are possibly in a GStreamer working thread, so we notify the main
// thread of this event through a message in the bus
fn post_app_message(playbin: &gst::Element) {
let mbuilder = gst::Message::new_application(gst::Structure::new_empty("tags-changed"));
let _ = playbin.post_message(&mbuilder.build());
}
pub fn run() {
// Make sure the right features were activated
#[allow(clippy::eq_op)]
{
if !cfg!(feature = "tutorial5-x11") && !cfg!(feature = "tutorial5-quartz") {
eprintln!(
"No Gdk backend selected, compile with --features tutorial5[-x11][-quartz]."
);
return;
}
}
// Initialize GTK
if let Err(err) = gtk::init() {
eprintln!("Failed to initialize GTK: {}", err);
return;
}
// Initialize GStreamer
if let Err(err) = gst::init() {
eprintln!("Failed to initialize Gst: {}", err);
return;
}
let uri = "https://www.freedesktop.org/software/gstreamer-sdk/\
data/media/sintel_trailer-480p.webm";
let playbin = gst::ElementFactory::make("playbin", None).unwrap();
playbin.set_property("uri", &uri).unwrap();
playbin
.connect("video-tags-changed", false, |args| {
let pipeline = args[0].get::<gst::Element>().unwrap();
post_app_message(&pipeline);
None
})
.unwrap();
playbin
.connect("audio-tags-changed", false, |args| {
let pipeline = args[0].get::<gst::Element>().unwrap();
post_app_message(&pipeline);
None
})
.unwrap();
playbin
.connect("text-tags-changed", false, move |args| {
let pipeline = args[0].get::<gst::Element>().unwrap();
post_app_message(&pipeline);
None
})
.unwrap();
let window = create_ui(&playbin);
let bus = playbin.get_bus().unwrap();
bus.add_signal_watch();
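    // add_signal_watch() dispatches bus messages on the default main context,
    // so the closure below runs on the GTK main loop instead of a GStreamer
    // streaming thread.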
let pipeline_weak = playbin.downgrade();
bus.connect_message(move |_, msg| {
let pipeline = match pipeline_weak.upgrade() {
Some(pipeline) => pipeline,
None => return,
};
match msg.view() {
// This is called when an End-Of-Stream message is posted on the bus.
// We just set the pipeline to READY (which stops playback).
gst::MessageView::Eos(..) => {
println!("End-Of-Stream reached.");
pipeline
.set_state(gst::State::Ready)
.expect("Unable to set the pipeline to the `Ready` state");
}
// This is called when an error message is posted on the bus
gst::MessageView::Error(err) => {
println!(
"Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()),
err.get_error(),
err.get_debug()
);
}
// This is called when the pipeline changes states. We use it to
// keep track of the current state.
gst::MessageView::StateChanged(state_changed) => {
if state_changed
.get_src()
.map(|s| s == pipeline)
.unwrap_or(false)
{
println!("State set to {:?}", state_changed.get_current());
}
}
_ => (),
}
});
playbin
.set_state(gst::State::Playing)
.expect("Unable to set the playbin to the `Playing` state");
gtk::main();
window.hide();
playbin
.set_state(gst::State::Null)
.expect("Unable to set the playbin to the `Null` state");
bus.remove_signal_watch();
}
}
#[cfg(feature = "tutorial5")]
fn main() {
tutorial5::run();
}
#[cfg(not(feature = "tutorial5"))]
fn main() {
println!("Please compile with --features tutorial5[-x11][-quartz]");
}
| 35.571754 | 98 | 0.485656 |
146a74a4550e49ea5b1f38d2caf165e712246b37 | 665 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Linux-specific definitions
#![stable(feature = "raw_ext", since = "1.1.0")]
pub mod raw;
pub mod fs {
#![stable(feature = "raw_ext", since = "1.1.0")]
pub use sys::fs::MetadataExt;
}
| 31.666667 | 68 | 0.702256 |
5000abc33321d4ca370539dd9f279f1cc26e901a | 14,754 | use nu_protocol::hir::*;
use nu_source::{Span, Spanned, SpannedItem};
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LocationType {
Command,
Flag(String), // command name
Argument(Option<String>, Option<String>), // command name, argument name
Variable,
}
pub type CompletionLocation = Spanned<LocationType>;
// TODO The below is very similar to shapes / expression_to_flat_shape. Check back October 2020
// to see if we're close enough to just make use of those.
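// Walks a classified Block and flattens it into completion locations, tracking
// the current command and flag so that arguments can be attributed to them.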
struct Flatten<'s> {
line: &'s str,
command: Option<String>,
flag: Option<String>,
}
impl<'s> Flatten<'s> {
/// Converts a SpannedExpression into a completion location for use in NuCompleter
fn expression(&self, e: &SpannedExpression) -> Vec<CompletionLocation> {
match &e.expr {
Expression::Block(block) => self.completion_locations(block),
Expression::Invocation(block) => self.completion_locations(block),
Expression::List(exprs) => exprs.iter().flat_map(|v| self.expression(v)).collect(),
Expression::Table(headers, cells) => headers
.iter()
.flat_map(|v| self.expression(v))
.chain(
cells
.iter()
.flat_map(|v| v.iter().flat_map(|v| self.expression(v))),
)
.collect(),
Expression::Command => vec![LocationType::Command.spanned(e.span)],
Expression::FullColumnPath(path) => self.expression(&path.head),
Expression::Variable(_, _) => vec![LocationType::Variable.spanned(e.span)],
Expression::Boolean(_)
| Expression::FilePath(_)
| Expression::Literal(Literal::ColumnPath(_))
| Expression::Literal(Literal::GlobPattern(_))
| Expression::Literal(Literal::Number(_))
| Expression::Literal(Literal::Size(_, _))
| Expression::Literal(Literal::String(_)) => {
vec![
LocationType::Argument(self.command.clone(), self.flag.clone()).spanned(e.span),
]
}
Expression::Binary(binary) => {
let mut result = Vec::new();
result.append(&mut self.expression(&binary.left));
result.append(&mut self.expression(&binary.right));
result
}
Expression::Range(range) => {
let mut result = Vec::new();
if let Some(left) = &range.left {
result.append(&mut self.expression(left));
}
if let Some(right) = &range.right {
result.append(&mut self.expression(right));
}
result
}
Expression::ExternalWord
| Expression::ExternalCommand(_)
| Expression::Synthetic(_)
| Expression::Literal(Literal::Operator(_))
| Expression::Literal(Literal::Bare(_))
| Expression::Garbage => Vec::new(),
}
}
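    // Collects completion locations for a single internal command: the command
    // name itself, its positional arguments (including flag-like garbage such
    // as a lone `-x`), and any named flags together with their values.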
fn internal_command(&self, internal: &InternalCommand) -> Vec<CompletionLocation> {
let mut result = Vec::new();
match internal.args.head.expr {
Expression::Command => {
result.push(LocationType::Command.spanned(internal.name_span));
}
Expression::Literal(Literal::String(_)) => {
result.push(LocationType::Command.spanned(internal.name_span));
}
_ => (),
}
if let Some(positionals) = &internal.args.positional {
let mut positionals = positionals.iter();
if internal.name == "run_external" {
if let Some(external_command) = positionals.next() {
result.push(LocationType::Command.spanned(external_command.span));
}
}
result.extend(positionals.flat_map(|positional| match positional.expr {
Expression::Garbage => {
let garbage = positional.span.slice(self.line);
let location = if garbage.starts_with('-') {
LocationType::Flag(internal.name.clone())
} else {
// TODO we may be able to map this to the name of a positional,
// but we'll need a signature
LocationType::Argument(Some(internal.name.clone()), None)
};
vec![location.spanned(positional.span)]
}
_ => self.expression(positional),
}));
}
if let Some(named) = &internal.args.named {
for (name, kind) in &named.named {
match kind {
NamedValue::PresentSwitch(span) => {
result.push(LocationType::Flag(internal.name.clone()).spanned(*span));
}
NamedValue::Value(span, expr) => {
result.push(LocationType::Flag(internal.name.clone()).spanned(*span));
result.append(&mut self.with_flag(name.clone()).expression(expr));
}
_ => (),
}
}
}
result
}
fn pipeline(&self, pipeline: &Pipeline) -> Vec<CompletionLocation> {
let mut result = Vec::new();
for command in &pipeline.list {
match command {
ClassifiedCommand::Internal(internal) => {
let engine = self.with_command(internal.name.clone());
result.append(&mut engine.internal_command(internal));
}
ClassifiedCommand::Expr(expr) => result.append(&mut self.expression(expr)),
_ => (),
}
}
result
}
/// Flattens the block into a Vec of completion locations
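    ///
    /// For example (illustrative, in line with the tests below), a line like
    /// `echo $nu` yields a `Command` location spanning `echo` and a `Variable`
    /// location spanning `$nu`.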
pub fn completion_locations(&self, block: &Block) -> Vec<CompletionLocation> {
block
.block
.iter()
.flat_map(|g| g.pipelines.iter().flat_map(|v| self.pipeline(v)))
.collect()
}
pub fn new(line: &'s str) -> Flatten<'s> {
Flatten {
line,
command: None,
flag: None,
}
}
pub fn with_command(&self, command: String) -> Flatten<'s> {
Flatten {
line: self.line,
command: Some(command),
flag: None,
}
}
pub fn with_flag(&self, flag: String) -> Flatten<'s> {
Flatten {
line: self.line,
command: self.command.clone(),
flag: Some(flag),
}
}
}
/// Characters that precede a command name
const BEFORE_COMMAND_CHARS: &[char] = &['|', '(', ';'];
/// Determines the completion location for a given block at the given cursor position
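///
/// Roughly mirroring the tests below: with `echo 1 | echo 2` and the cursor on
/// the second `echo`, this resolves to a `Command` location, while `cd ` with
/// the cursor at the end resolves to `Argument(Some("cd"), None)`.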
pub fn completion_location(line: &str, block: &Block, pos: usize) -> Vec<CompletionLocation> {
let completion_engine = Flatten::new(line);
let locations = completion_engine.completion_locations(block);
if locations.is_empty() {
vec![LocationType::Command.spanned(Span::unknown())]
} else {
let mut command = None;
let mut prev = None;
for loc in locations {
// We don't use span.contains because we want to include the end. This handles the case
// where the cursor is just after the text (i.e., no space between cursor and text)
if loc.span.start() <= pos && pos <= loc.span.end() {
// The parser sees the "-" in `cmd -` as an argument, but the user is likely
// expecting a flag.
return match loc.item {
LocationType::Argument(ref cmd, _) => {
if loc.span.slice(line) == "-" {
let cmd = cmd.clone();
let span = loc.span;
vec![
loc,
LocationType::Flag(cmd.unwrap_or_default()).spanned(span),
]
} else {
vec![loc]
}
}
_ => vec![loc],
};
} else if pos < loc.span.start() {
break;
}
if let LocationType::Command = loc.item {
command = Some(String::from(loc.span.slice(line)));
}
prev = Some(loc);
}
if let Some(prev) = prev {
// Cursor is between locations (or at the end). Look at the line to see if the cursor
// is after some character that would imply we're in the command position.
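            // (e.g. `echo 1 | ` with the cursor right after the pipe should
            // complete a command, not another argument to `echo`).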
let start = prev.span.end();
if line[start..pos].contains(BEFORE_COMMAND_CHARS) {
vec![LocationType::Command.spanned(Span::new(pos, pos))]
} else {
// TODO this should be able to be mapped to a command
vec![LocationType::Argument(command, None).spanned(Span::new(pos, pos))]
}
} else {
// Cursor is before any possible completion location, so must be a command
vec![LocationType::Command.spanned(Span::unknown())]
}
}
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::*;
use nu_parser::{classify_block, lex, parse_block, ParserScope};
use nu_protocol::{Signature, SyntaxShape};
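    // A minimal ParserScope backed by a plain Vec of signatures; just enough
    // scope information to drive the completion tests below.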
#[derive(Clone, Debug)]
struct VecRegistry(Vec<Signature>);
impl From<Vec<Signature>> for VecRegistry {
fn from(v: Vec<Signature>) -> Self {
VecRegistry(v)
}
}
impl ParserScope for VecRegistry {
fn has_signature(&self, name: &str) -> bool {
self.0.iter().any(|v| v.name == name)
}
fn get_signature(&self, name: &str) -> Option<nu_protocol::Signature> {
self.0.iter().find(|v| v.name == name).map(Clone::clone)
}
fn get_alias(&self, _name: &str) -> Option<Vec<Spanned<String>>> {
None
}
fn add_alias(&self, _name: &str, _replacement: Vec<Spanned<String>>) {
todo!()
}
fn add_definition(&self, _block: Arc<Block>) {}
fn get_definitions(&self) -> Vec<Arc<Block>> {
vec![]
}
fn enter_scope(&self) {}
fn exit_scope(&self) {}
}
mod completion_location {
use super::*;
use nu_parser::ParserScope;
fn completion_location(
line: &str,
scope: &dyn ParserScope,
pos: usize,
) -> Vec<LocationType> {
let (tokens, _) = lex(line, 0);
let (lite_block, _) = parse_block(tokens);
scope.enter_scope();
let (block, _) = classify_block(&lite_block, scope);
scope.exit_scope();
super::completion_location(line, &block, pos)
.into_iter()
.map(|v| v.item)
.collect()
}
#[test]
fn completes_internal_command_names() {
let registry: VecRegistry =
vec![Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")].into();
let line = "echo 1 | echo 2";
assert_eq!(
completion_location(line, ®istry, 10),
vec![LocationType::Command],
);
}
#[test]
fn completes_external_command_names() {
let registry: VecRegistry = Vec::new().into();
let line = "echo 1 | echo 2";
assert_eq!(
completion_location(line, ®istry, 10),
vec![LocationType::Command],
);
}
#[test]
fn completes_command_names_when_cursor_immediately_after_command_name() {
let registry: VecRegistry = Vec::new().into();
let line = "echo 1 | echo 2";
assert_eq!(
completion_location(line, ®istry, 4),
vec![LocationType::Command],
);
}
#[test]
fn completes_variables() {
let registry: VecRegistry = Vec::new().into();
let line = "echo $nu.env.";
assert_eq!(
completion_location(line, ®istry, 13),
vec![LocationType::Variable],
);
}
#[test]
fn completes_flags() {
let registry: VecRegistry = vec![Signature::build("du")
.switch("recursive", "the values to echo", None)
.rest(SyntaxShape::Any, "blah")]
.into();
let line = "du --recurs";
assert_eq!(
completion_location(line, ®istry, 7),
vec![LocationType::Flag("du".to_string())],
);
}
#[test]
fn completes_incomplete_nested_structure() {
let registry: VecRegistry = vec![Signature::build("sys")].into();
let line = "echo (sy";
assert_eq!(
completion_location(line, ®istry, 8),
vec![LocationType::Command],
);
}
#[test]
fn has_correct_command_name_for_argument() {
let registry: VecRegistry = vec![Signature::build("cd")].into();
let line = "cd ";
assert_eq!(
completion_location(line, ®istry, 3),
vec![LocationType::Argument(Some("cd".to_string()), None)],
);
}
#[test]
fn completes_flags_with_just_a_single_hyphen() {
let registry: VecRegistry = vec![Signature::build("du")
.switch("recursive", "the values to echo", None)
.rest(SyntaxShape::Any, "blah")]
.into();
let line = "du -";
assert_eq!(
completion_location(line, ®istry, 3),
vec![
LocationType::Argument(Some("du".to_string()), None),
LocationType::Flag("du".to_string()),
],
);
}
#[test]
fn completes_arguments() {
let registry: VecRegistry =
vec![Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")].into();
let line = "echo 1 | echo 2";
assert_eq!(
completion_location(line, ®istry, 6),
vec![LocationType::Argument(Some("echo".to_string()), None)],
);
}
}
}
| 33.762014 | 100 | 0.502643 |
edfe9d8855de6a35ed62f7d49edcbc13c49ce3a9 | 1,575 | extern crate libmount;
extern crate argparse;
extern crate env_logger;
#[macro_use] extern crate log;
use std::path::PathBuf;
use std::process::exit;
use argparse::{ArgumentParser, Parse, StoreOption};
fn main() {
env_logger::init();
let mut target = PathBuf::new();
let mut size = None::<usize>;
let mut mode = None::<String>;
let mut uid = None::<u32>;
let mut gid = None::<u32>;
{
let mut ap = ArgumentParser::new();
        ap.set_description("Tmpfs mount utility. Similar to `mount -t tmpfs`");
ap.refer(&mut target).add_argument("target", Parse,
"Target directory to mount tmpfs to").required();
ap.refer(&mut size).add_option(&["--size"], StoreOption,
"Set size of the filesystem");
ap.refer(&mut mode).add_option(&["--mode"], StoreOption,
"Set mode of the root directory");
ap.refer(&mut uid).add_option(&["--uid"], StoreOption,
"Set uid of the directory");
ap.refer(&mut gid).add_option(&["--gid"], StoreOption,
"Set gid of the directory");
ap.parse_args_or_exit();
}
let mut mnt = libmount::Tmpfs::new(target);
if let Some(x) = size { mnt = mnt.size_bytes(x); };
if let Some(ref x) = mode {
mnt = mnt.mode(u32::from_str_radix(x, 8).expect("valid octal mode"));
}
if let Some(x) = uid { mnt = mnt.uid(x); }
if let Some(x) = gid { mnt = mnt.gid(x); }
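    // Perform the mount; on failure log the error and exit with a non-zero code.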
match mnt.mount() {
Ok(()) => {}
Err(e) => {
error!("{}", e);
exit(1);
}
}
}
| 32.142857 | 78 | 0.563175 |
f58ddb2f27893025e01b72693da4f880189f1500 | 316 | use crate::spec::abi::Abi;
// All the calling conventions trigger an assertion ("Unsupported calling convention") in LLVM on ARM
pub fn abi_blacklist() -> Vec<Abi> {
vec![
Abi::Stdcall,
Abi::Fastcall,
Abi::Vectorcall,
Abi::Thiscall,
Abi::Win64,
Abi::SysV64,
]
}
| 22.571429 | 98 | 0.594937 |
56badf8bd0f88f93e7dcdee786fb41a93e94b6cd | 25,556 | #![doc = "generated by AutoRust"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::de::{value, Deserializer, IntoDeserializer};
use serde::{Deserialize, Serialize, Serializer};
use std::str::FromStr;
#[doc = "Error details."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponse {
#[doc = "The error object."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<error_response::Error>,
}
impl azure_core::Continuable for ErrorResponse {
fn continuation(&self) -> Option<String> {
None
}
}
impl ErrorResponse {
pub fn new() -> Self {
Self::default()
}
}
pub mod error_response {
use super::*;
#[doc = "The error object."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Error {
#[doc = "The error code."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[doc = "The error message."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[doc = "The error details."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub details: Option<String>,
}
impl Error {
pub fn new() -> Self {
Self::default()
}
}
}
#[doc = "Common fields that are returned in the response for all Azure Resource Manager resources"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Resource {
#[doc = "Fully qualified resource ID for the resource. Ex - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[doc = "The name of the resource"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "The type of the resource. E.g. \"Microsoft.Compute/virtualMachines\" or \"Microsoft.Storage/storageAccounts\""]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
}
impl Resource {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "availabilityStatus of a resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct AvailabilityStatus {
#[doc = "Azure Resource Manager Identity for the availabilityStatuses resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[doc = "current."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "Microsoft.ResourceHealth/AvailabilityStatuses."]
#[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
pub type_: Option<String>,
#[doc = "Azure Resource Manager geo location of the resource."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub location: Option<String>,
#[doc = "Properties of availability state."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<availability_status::Properties>,
}
impl AvailabilityStatus {
pub fn new() -> Self {
Self::default()
}
}
pub mod availability_status {
use super::*;
#[doc = "Properties of availability state."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Properties {
#[doc = "Availability status of the resource. When it is null, this availabilityStatus object represents an availability impacting event"]
#[serde(rename = "availabilityState", default, skip_serializing_if = "Option::is_none")]
pub availability_state: Option<properties::AvailabilityState>,
#[doc = "Title description of the availability status."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
#[doc = "Summary description of the availability status."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub summary: Option<String>,
#[doc = "Details of the availability status."]
#[serde(rename = "detailedStatus", default, skip_serializing_if = "Option::is_none")]
pub detailed_status: Option<String>,
#[doc = "When the resource's availabilityState is Unavailable, it describes where the health impacting event was originated. Examples are planned, unplanned, user initiated or an outage etc."]
#[serde(rename = "reasonType", default, skip_serializing_if = "Option::is_none")]
pub reason_type: Option<String>,
#[doc = "When the resource's availabilityState is Unavailable, it provides the Timestamp for when the health impacting event was received."]
#[serde(rename = "rootCauseAttributionTime", default, skip_serializing_if = "Option::is_none")]
pub root_cause_attribution_time: Option<String>,
#[doc = "In case of an availability impacting event, it describes when the health impacting event was originated. Examples are Lifecycle, Downtime, Fault Analysis etc."]
#[serde(rename = "healthEventType", default, skip_serializing_if = "Option::is_none")]
pub health_event_type: Option<String>,
#[doc = "In case of an availability impacting event, it describes where the health impacting event was originated. Examples are PlatformInitiated, UserInitiated etc."]
#[serde(rename = "healthEventCause", default, skip_serializing_if = "Option::is_none")]
pub health_event_cause: Option<String>,
#[doc = "In case of an availability impacting event, it describes the category of a PlatformInitiated health impacting event. Examples are Planned, Unplanned etc."]
#[serde(rename = "healthEventCategory", default, skip_serializing_if = "Option::is_none")]
pub health_event_category: Option<String>,
#[doc = "It is a unique Id that identifies the event"]
#[serde(rename = "healthEventId", default, skip_serializing_if = "Option::is_none")]
pub health_event_id: Option<String>,
#[doc = "When the resource's availabilityState is Unavailable and the reasonType is not User Initiated, it provides the date and time for when the issue is expected to be resolved."]
#[serde(rename = "resolutionETA", default, skip_serializing_if = "Option::is_none")]
pub resolution_eta: Option<String>,
#[doc = "Timestamp for when last change in health status occurred."]
#[serde(rename = "occurredTime", default, skip_serializing_if = "Option::is_none")]
pub occurred_time: Option<String>,
#[doc = "Chronicity of the availability transition."]
#[serde(rename = "reasonChronicity", default, skip_serializing_if = "Option::is_none")]
pub reason_chronicity: Option<properties::ReasonChronicity>,
#[doc = "Timestamp for when the health was last checked. "]
#[serde(rename = "reportedTime", default, skip_serializing_if = "Option::is_none")]
pub reported_time: Option<String>,
#[doc = "An annotation describing a change in the availabilityState to Available from Unavailable with a reasonType of type Unplanned"]
#[serde(rename = "recentlyResolved", default, skip_serializing_if = "Option::is_none")]
pub recently_resolved: Option<properties::RecentlyResolved>,
#[doc = "Lists actions the user can take based on the current availabilityState of the resource."]
#[serde(rename = "recommendedActions", default, skip_serializing_if = "Vec::is_empty")]
pub recommended_actions: Vec<RecommendedAction>,
#[doc = "Lists the service impacting events that may be affecting the health of the resource."]
#[serde(rename = "serviceImpactingEvents", default, skip_serializing_if = "Vec::is_empty")]
pub service_impacting_events: Vec<ServiceImpactingEvent>,
}
impl Properties {
pub fn new() -> Self {
Self::default()
}
}
pub mod properties {
use super::*;
#[doc = "Availability status of the resource. When it is null, this availabilityStatus object represents an availability impacting event"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "AvailabilityState")]
pub enum AvailabilityState {
Available,
Unavailable,
Degraded,
Unknown,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for AvailabilityState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for AvailabilityState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for AvailabilityState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Available => serializer.serialize_unit_variant("AvailabilityState", 0u32, "Available"),
Self::Unavailable => serializer.serialize_unit_variant("AvailabilityState", 1u32, "Unavailable"),
Self::Degraded => serializer.serialize_unit_variant("AvailabilityState", 2u32, "Degraded"),
Self::Unknown => serializer.serialize_unit_variant("AvailabilityState", 3u32, "Unknown"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "Chronicity of the availability transition."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ReasonChronicity")]
pub enum ReasonChronicity {
Transient,
Persistent,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ReasonChronicity {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ReasonChronicity {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ReasonChronicity {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Transient => serializer.serialize_unit_variant("ReasonChronicity", 0u32, "Transient"),
Self::Persistent => serializer.serialize_unit_variant("ReasonChronicity", 1u32, "Persistent"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "An annotation describing a change in the availabilityState to Available from Unavailable with a reasonType of type Unplanned"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct RecentlyResolved {
#[doc = "Timestamp for when the availabilityState changed to Unavailable"]
#[serde(rename = "unavailableOccurredTime", default, skip_serializing_if = "Option::is_none")]
pub unavailable_occurred_time: Option<String>,
#[doc = "Timestamp when the availabilityState changes to Available."]
#[serde(rename = "resolvedTime", default, skip_serializing_if = "Option::is_none")]
pub resolved_time: Option<String>,
#[doc = "Brief description of cause of the resource becoming unavailable."]
#[serde(rename = "unavailabilitySummary", default, skip_serializing_if = "Option::is_none")]
pub unavailability_summary: Option<String>,
}
impl RecentlyResolved {
pub fn new() -> Self {
Self::default()
}
}
}
}
#[doc = "The List availabilityStatus operation response."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AvailabilityStatusListResult {
#[doc = "The list of availabilityStatuses."]
pub value: Vec<AvailabilityStatus>,
#[doc = "The URI to fetch the next page of availabilityStatuses. Call ListNext() with this URI to fetch the next page of availabilityStatuses."]
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl azure_core::Continuable for AvailabilityStatusListResult {
fn continuation(&self) -> Option<String> {
self.next_link.clone()
}
}
impl AvailabilityStatusListResult {
pub fn new(value: Vec<AvailabilityStatus>) -> Self {
Self { value, next_link: None }
}
}
#[doc = "Object of impacted region."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ImpactedRegion {
#[doc = "The impacted region id."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[doc = "The impacted region name."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
}
impl ImpactedRegion {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "impactedResource with health status"]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ImpactedResourceStatus {
#[serde(flatten)]
pub resource: Resource,
#[doc = "Properties of impacted resource status."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<impacted_resource_status::Properties>,
}
impl ImpactedResourceStatus {
pub fn new() -> Self {
Self::default()
}
}
pub mod impacted_resource_status {
use super::*;
#[doc = "Properties of impacted resource status."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Properties {
#[doc = "Impacted resource status of the resource."]
#[serde(rename = "availabilityState", default, skip_serializing_if = "Option::is_none")]
pub availability_state: Option<properties::AvailabilityState>,
#[doc = "Title description of the impacted resource status."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
#[doc = "Summary description of the impacted resource status."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub summary: Option<String>,
#[doc = "When the resource's availabilityState is Unavailable, it describes where the health impacting event was originated."]
#[serde(rename = "reasonType", default, skip_serializing_if = "Option::is_none")]
pub reason_type: Option<properties::ReasonType>,
#[doc = "Timestamp for when last change in health status occurred."]
#[serde(rename = "occurredTime", default, skip_serializing_if = "Option::is_none")]
pub occurred_time: Option<String>,
}
impl Properties {
pub fn new() -> Self {
Self::default()
}
}
pub mod properties {
use super::*;
#[doc = "Impacted resource status of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "AvailabilityState")]
pub enum AvailabilityState {
Available,
Unavailable,
Degraded,
Unknown,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for AvailabilityState {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for AvailabilityState {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for AvailabilityState {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Available => serializer.serialize_unit_variant("AvailabilityState", 0u32, "Available"),
Self::Unavailable => serializer.serialize_unit_variant("AvailabilityState", 1u32, "Unavailable"),
Self::Degraded => serializer.serialize_unit_variant("AvailabilityState", 2u32, "Degraded"),
Self::Unknown => serializer.serialize_unit_variant("AvailabilityState", 3u32, "Unknown"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
#[doc = "When the resource's availabilityState is Unavailable, it describes where the health impacting event was originated."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[serde(remote = "ReasonType")]
pub enum ReasonType {
Unplanned,
Planned,
UserInitiated,
#[serde(skip_deserializing)]
UnknownValue(String),
}
impl FromStr for ReasonType {
type Err = value::Error;
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
Self::deserialize(s.into_deserializer())
}
}
impl<'de> Deserialize<'de> for ReasonType {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
let deserialized = Self::from_str(&s).unwrap_or(Self::UnknownValue(s));
Ok(deserialized)
}
}
impl Serialize for ReasonType {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Self::Unplanned => serializer.serialize_unit_variant("ReasonType", 0u32, "Unplanned"),
Self::Planned => serializer.serialize_unit_variant("ReasonType", 1u32, "Planned"),
Self::UserInitiated => serializer.serialize_unit_variant("ReasonType", 2u32, "UserInitiated"),
Self::UnknownValue(s) => serializer.serialize_str(s.as_str()),
}
}
}
}
}
#[doc = "Operation available in the resourcehealth resource provider."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Operation {
#[doc = "Name of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub name: Option<String>,
#[doc = "Properties of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub display: Option<operation::Display>,
}
impl Operation {
pub fn new() -> Self {
Self::default()
}
}
pub mod operation {
use super::*;
#[doc = "Properties of the operation."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Display {
#[doc = "Provider name."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub provider: Option<String>,
#[doc = "Resource name."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub resource: Option<String>,
#[doc = "Operation name."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub operation: Option<String>,
#[doc = "Description of the operation."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub description: Option<String>,
}
impl Display {
pub fn new() -> Self {
Self::default()
}
}
}
#[doc = "Lists the operations response."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct OperationListResult {
#[doc = "List of operations available in the resourcehealth resource provider."]
pub value: Vec<Operation>,
}
impl OperationListResult {
pub fn new(value: Vec<Operation>) -> Self {
Self { value }
}
}
#[doc = "Lists actions the user can take based on the current availabilityState of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct RecommendedAction {
#[doc = "Recommended action."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub action: Option<String>,
#[doc = "Link to the action"]
#[serde(rename = "actionUrl", default, skip_serializing_if = "Option::is_none")]
pub action_url: Option<String>,
#[doc = "Substring of action, it describes which text should host the action url."]
#[serde(rename = "actionUrlText", default, skip_serializing_if = "Option::is_none")]
pub action_url_text: Option<String>,
}
impl RecommendedAction {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Lists the service impacting events that may be affecting the health of the resource."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ServiceImpactingEvent {
#[doc = "Timestamp for when the event started."]
#[serde(rename = "eventStartTime", default, skip_serializing_if = "Option::is_none")]
pub event_start_time: Option<String>,
#[doc = "Timestamp for when event was submitted/detected."]
#[serde(rename = "eventStatusLastModifiedTime", default, skip_serializing_if = "Option::is_none")]
pub event_status_last_modified_time: Option<String>,
#[doc = "Correlation id for the event"]
#[serde(rename = "correlationId", default, skip_serializing_if = "Option::is_none")]
pub correlation_id: Option<String>,
#[doc = "Status of the service impacting event."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<service_impacting_event::Status>,
#[doc = "Properties of the service impacting event."]
#[serde(rename = "incidentProperties", default, skip_serializing_if = "Option::is_none")]
pub incident_properties: Option<service_impacting_event::IncidentProperties>,
}
impl ServiceImpactingEvent {
pub fn new() -> Self {
Self::default()
}
}
pub mod service_impacting_event {
use super::*;
#[doc = "Status of the service impacting event."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Status {
#[doc = "Current status of the event"]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub value: Option<String>,
}
impl Status {
pub fn new() -> Self {
Self::default()
}
}
#[doc = "Properties of the service impacting event."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct IncidentProperties {
#[doc = "Title of the incident."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
#[doc = "Service impacted by the event."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub service: Option<String>,
#[doc = "Region impacted by the event."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub region: Option<String>,
#[doc = "Type of Event."]
#[serde(rename = "incidentType", default, skip_serializing_if = "Option::is_none")]
pub incident_type: Option<String>,
}
impl IncidentProperties {
pub fn new() -> Self {
Self::default()
}
}
}
#[doc = "Banner type of emerging issue."]
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct StatusBanner {
#[doc = "The banner title."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
#[doc = "The details of banner."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[doc = "The cloud type of this banner."]
#[serde(default, skip_serializing_if = "Option::is_none")]
pub cloud: Option<String>,
#[doc = "The last time modified on this banner."]
#[serde(rename = "lastModifiedTime", default, skip_serializing_if = "Option::is_none")]
pub last_modified_time: Option<String>,
}
impl StatusBanner {
pub fn new() -> Self {
Self::default()
}
}
| 45.964029 | 200 | 0.631476 |
338479813b60a69bdd898fdfd9e36e1b4805a9d5 | 459 | mod config;
mod healthcheck;
mod request;
mod request_builder;
mod retry;
mod service;
mod sink;
mod integration_tests;
use self::config::CloudwatchLogsSinkConfig;
use crate::{config::SinkDescription, internal_events::TemplateRenderingError};
inventory::submit! {
SinkDescription::new::<CloudwatchLogsSinkConfig>("aws_cloudwatch_logs")
}
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub struct CloudwatchKey {
group: String,
stream: String,
}
| 19.956522 | 78 | 0.764706 |
1a195d071a4994d3cfe42d39b509e13c33a2e518 | 9,254 | // Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0.
use collections::HashMap;
use futures::executor::block_on;
use kvproto::kvrpcpb::{Context, GetRequest, LockInfo};
use raftstore::coprocessor::RegionInfoProvider;
use raftstore::router::RaftStoreBlackHole;
use std::sync::{atomic::AtomicU64, Arc};
use tikv::server::gc_worker::{AutoGcConfig, GcConfig, GcSafePointProvider, GcWorker};
use tikv::storage::config::Config;
use tikv::storage::kv::RocksEngine;
use tikv::storage::lock_manager::DummyLockManager;
use tikv::storage::{
txn::commands, Engine, PerfStatisticsDelta, PrewriteResult, Result, Statistics, Storage,
TestEngineBuilder, TestStorageBuilder, TxnStatus,
};
use txn_types::{Key, KvPair, Mutation, TimeStamp, Value};
/// A builder to build a `SyncTestStorage`.
///
/// Only used for test purposes.
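///
/// A minimal usage sketch (assuming the default `RocksEngine` test setup):
///
/// ```ignore
/// let storage = SyncTestStorageBuilder::new().build().unwrap();
/// let engine = storage.get_engine();
/// ```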
pub struct SyncTestStorageBuilder<E: Engine> {
engine: E,
config: Option<Config>,
gc_config: Option<GcConfig>,
}
impl SyncTestStorageBuilder<RocksEngine> {
pub fn new() -> Self {
Self {
engine: TestEngineBuilder::new().build().unwrap(),
config: None,
gc_config: None,
}
}
}
impl<E: Engine> SyncTestStorageBuilder<E> {
pub fn from_engine(engine: E) -> Self {
Self {
engine,
config: None,
gc_config: None,
}
}
pub fn config(mut self, config: Config) -> Self {
self.config = Some(config);
self
}
pub fn gc_config(mut self, gc_config: GcConfig) -> Self {
self.gc_config = Some(gc_config);
self
}
pub fn build(mut self) -> Result<SyncTestStorage<E>> {
let mut builder =
TestStorageBuilder::from_engine_and_lock_mgr(self.engine.clone(), DummyLockManager {});
if let Some(config) = self.config.take() {
builder = builder.config(config);
}
let mut gc_worker = GcWorker::new(
self.engine,
RaftStoreBlackHole,
self.gc_config.unwrap_or_default(),
Default::default(),
);
gc_worker.start()?;
Ok(SyncTestStorage {
store: builder.build()?,
gc_worker,
})
}
}
/// A `Storage`-like structure with a sync API.
///
/// Only used for test purposes.
#[derive(Clone)]
pub struct SyncTestStorage<E: Engine> {
gc_worker: GcWorker<E, RaftStoreBlackHole>,
store: Storage<E, DummyLockManager>,
}
impl<E: Engine> SyncTestStorage<E> {
pub fn start_auto_gc<S: GcSafePointProvider, R: RegionInfoProvider>(
&mut self,
cfg: AutoGcConfig<S, R>,
) {
self.gc_worker
.start_auto_gc(cfg, Arc::new(AtomicU64::new(0)))
.unwrap();
}
pub fn get_storage(&self) -> Storage<E, DummyLockManager> {
self.store.clone()
}
pub fn get_engine(&self) -> E {
self.store.get_engine()
}
pub fn get(
&self,
ctx: Context,
key: &Key,
start_ts: impl Into<TimeStamp>,
) -> Result<(Option<Value>, Statistics, PerfStatisticsDelta)> {
block_on(self.store.get(ctx, key.to_owned(), start_ts.into()))
}
#[allow(dead_code)]
pub fn batch_get(
&self,
ctx: Context,
keys: &[Key],
start_ts: impl Into<TimeStamp>,
) -> Result<(Vec<Result<KvPair>>, Statistics, PerfStatisticsDelta)> {
block_on(self.store.batch_get(ctx, keys.to_owned(), start_ts.into()))
}
#[allow(clippy::type_complexity)]
pub fn batch_get_command(
&self,
ctx: Context,
keys: &[&[u8]],
start_ts: u64,
) -> Result<Vec<(Option<Vec<u8>>, Statistics, PerfStatisticsDelta)>> {
let requests: Vec<GetRequest> = keys
.to_owned()
.into_iter()
.map(|key| {
let mut req = GetRequest::default();
req.set_context(ctx.clone());
req.set_key(key.to_owned());
req.set_version(start_ts);
req
})
.collect();
let resp = block_on(self.store.batch_get_command(requests))?;
let mut values = vec![];
for value in resp.into_iter() {
values.push(value?);
}
Ok(values)
}
pub fn scan(
&self,
ctx: Context,
start_key: Key,
end_key: Option<Key>,
limit: usize,
key_only: bool,
start_ts: impl Into<TimeStamp>,
) -> Result<Vec<Result<KvPair>>> {
block_on(self.store.scan(
ctx,
start_key,
end_key,
limit,
0,
start_ts.into(),
key_only,
false,
))
}
pub fn reverse_scan(
&self,
ctx: Context,
start_key: Key,
end_key: Option<Key>,
limit: usize,
key_only: bool,
start_ts: impl Into<TimeStamp>,
) -> Result<Vec<Result<KvPair>>> {
block_on(self.store.scan(
ctx,
start_key,
end_key,
limit,
0,
start_ts.into(),
key_only,
true,
))
}
pub fn prewrite(
&self,
ctx: Context,
mutations: Vec<Mutation>,
primary: Vec<u8>,
start_ts: impl Into<TimeStamp>,
) -> Result<PrewriteResult> {
wait_op!(|cb| self.store.sched_txn_command(
commands::Prewrite::with_context(mutations, primary, start_ts.into(), ctx),
cb,
))
.unwrap()
}
pub fn commit(
&self,
ctx: Context,
keys: Vec<Key>,
start_ts: impl Into<TimeStamp>,
commit_ts: impl Into<TimeStamp>,
) -> Result<TxnStatus> {
wait_op!(|cb| self.store.sched_txn_command(
commands::Commit::new(keys, start_ts.into(), commit_ts.into(), ctx),
cb,
))
.unwrap()
}
pub fn cleanup(
&self,
ctx: Context,
key: Key,
start_ts: impl Into<TimeStamp>,
current_ts: impl Into<TimeStamp>,
) -> Result<()> {
wait_op!(|cb| self.store.sched_txn_command(
commands::Cleanup::new(key, start_ts.into(), current_ts.into(), ctx),
cb,
))
.unwrap()
}
pub fn rollback(
&self,
ctx: Context,
keys: Vec<Key>,
start_ts: impl Into<TimeStamp>,
) -> Result<()> {
wait_op!(|cb| self
.store
.sched_txn_command(commands::Rollback::new(keys, start_ts.into(), ctx), cb))
.unwrap()
}
pub fn scan_locks(
&self,
ctx: Context,
max_ts: impl Into<TimeStamp>,
start_key: Option<Key>,
limit: usize,
) -> Result<Vec<LockInfo>> {
wait_op!(|cb| self.store.sched_txn_command(
commands::ScanLock::new(max_ts.into(), start_key, limit, ctx),
cb,
))
.unwrap()
}
pub fn resolve_lock(
&self,
ctx: Context,
start_ts: impl Into<TimeStamp>,
commit_ts: Option<impl Into<TimeStamp>>,
) -> Result<()> {
let mut txn_status = HashMap::default();
txn_status.insert(
start_ts.into(),
commit_ts.map(Into::into).unwrap_or_else(TimeStamp::zero),
);
wait_op!(|cb| self.store.sched_txn_command(
commands::ResolveLockReadPhase::new(txn_status, None, ctx),
cb,
))
.unwrap()
}
pub fn resolve_lock_batch(
&self,
ctx: Context,
txns: Vec<(TimeStamp, TimeStamp)>,
) -> Result<()> {
let txn_status: HashMap<TimeStamp, TimeStamp> = txns.into_iter().collect();
wait_op!(|cb| self.store.sched_txn_command(
commands::ResolveLockReadPhase::new(txn_status, None, ctx),
cb,
))
.unwrap()
}
pub fn gc(&self, _: Context, safe_point: impl Into<TimeStamp>) -> Result<()> {
wait_op!(|cb| self.gc_worker.gc(safe_point.into(), cb)).unwrap()
}
pub fn raw_get(&self, ctx: Context, cf: String, key: Vec<u8>) -> Result<Option<Vec<u8>>> {
block_on(self.store.raw_get(ctx, cf, key))
}
pub fn raw_put(&self, ctx: Context, cf: String, key: Vec<u8>, value: Vec<u8>) -> Result<()> {
wait_op!(|cb| self.store.raw_put(ctx, cf, key, value, cb)).unwrap()
}
pub fn raw_delete(&self, ctx: Context, cf: String, key: Vec<u8>) -> Result<()> {
wait_op!(|cb| self.store.raw_delete(ctx, cf, key, cb)).unwrap()
}
pub fn raw_scan(
&self,
ctx: Context,
cf: String,
start_key: Vec<u8>,
end_key: Option<Vec<u8>>,
limit: usize,
) -> Result<Vec<Result<KvPair>>> {
block_on(
self.store
.raw_scan(ctx, cf, start_key, end_key, limit, false, false),
)
}
pub fn reverse_raw_scan(
&self,
ctx: Context,
cf: String,
start_key: Vec<u8>,
end_key: Option<Vec<u8>>,
limit: usize,
) -> Result<Vec<Result<KvPair>>> {
block_on(
self.store
.raw_scan(ctx, cf, start_key, end_key, limit, false, true),
)
}
}
| 27.541667 | 99 | 0.545818 |
90c155c93f66826665a3e0edea962402ffc7c517 | 20,937 | // This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use crate::Accessible;
use crate::AccessibleRole;
use crate::Align;
use crate::Buildable;
use crate::ConstraintTarget;
use crate::LayoutManager;
use crate::Overflow;
use crate::Widget;
use glib::object::Cast;
use glib::object::IsA;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use glib::StaticType;
use glib::ToValue;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct Picture(Object<ffi::GtkPicture, ffi::GtkPictureClass>) @extends Widget, @implements Accessible, Buildable, ConstraintTarget;
match fn {
get_type => || ffi::gtk_picture_get_type(),
}
}
impl Picture {
#[doc(alias = "gtk_picture_new")]
pub fn new() -> Picture {
assert_initialized_main_thread!();
unsafe { Widget::from_glib_none(ffi::gtk_picture_new()).unsafe_cast() }
}
#[doc(alias = "gtk_picture_new_for_file")]
pub fn new_for_file<P: IsA<gio::File>>(file: Option<&P>) -> Picture {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_picture_new_for_file(
file.map(|p| p.as_ref()).to_glib_none().0,
))
.unsafe_cast()
}
}
#[doc(alias = "gtk_picture_new_for_filename")]
pub fn new_for_filename<P: AsRef<std::path::Path>>(filename: P) -> Picture {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_picture_new_for_filename(
filename.as_ref().to_glib_none().0,
))
.unsafe_cast()
}
}
#[doc(alias = "gtk_picture_new_for_paintable")]
pub fn new_for_paintable<P: IsA<gdk::Paintable>>(paintable: Option<&P>) -> Picture {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_picture_new_for_paintable(
paintable.map(|p| p.as_ref()).to_glib_none().0,
))
.unsafe_cast()
}
}
#[doc(alias = "gtk_picture_new_for_pixbuf")]
pub fn new_for_pixbuf(pixbuf: Option<&gdk_pixbuf::Pixbuf>) -> Picture {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_picture_new_for_pixbuf(pixbuf.to_glib_none().0))
.unsafe_cast()
}
}
#[doc(alias = "gtk_picture_new_for_resource")]
pub fn new_for_resource(resource_path: Option<&str>) -> Picture {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_picture_new_for_resource(
resource_path.to_glib_none().0,
))
.unsafe_cast()
}
}
#[doc(alias = "gtk_picture_get_alternative_text")]
pub fn get_alternative_text(&self) -> Option<glib::GString> {
unsafe { from_glib_none(ffi::gtk_picture_get_alternative_text(self.to_glib_none().0)) }
}
#[doc(alias = "gtk_picture_get_can_shrink")]
pub fn get_can_shrink(&self) -> bool {
unsafe { from_glib(ffi::gtk_picture_get_can_shrink(self.to_glib_none().0)) }
}
#[doc(alias = "gtk_picture_get_file")]
pub fn get_file(&self) -> Option<gio::File> {
unsafe { from_glib_none(ffi::gtk_picture_get_file(self.to_glib_none().0)) }
}
#[doc(alias = "gtk_picture_get_keep_aspect_ratio")]
pub fn get_keep_aspect_ratio(&self) -> bool {
unsafe {
from_glib(ffi::gtk_picture_get_keep_aspect_ratio(
self.to_glib_none().0,
))
}
}
#[doc(alias = "gtk_picture_get_paintable")]
pub fn get_paintable(&self) -> Option<gdk::Paintable> {
unsafe { from_glib_none(ffi::gtk_picture_get_paintable(self.to_glib_none().0)) }
}
#[doc(alias = "gtk_picture_set_alternative_text")]
pub fn set_alternative_text(&self, alternative_text: Option<&str>) {
unsafe {
ffi::gtk_picture_set_alternative_text(
self.to_glib_none().0,
alternative_text.to_glib_none().0,
);
}
}
#[doc(alias = "gtk_picture_set_can_shrink")]
pub fn set_can_shrink(&self, can_shrink: bool) {
unsafe {
ffi::gtk_picture_set_can_shrink(self.to_glib_none().0, can_shrink.to_glib());
}
}
#[doc(alias = "gtk_picture_set_file")]
pub fn set_file<P: IsA<gio::File>>(&self, file: Option<&P>) {
unsafe {
ffi::gtk_picture_set_file(
self.to_glib_none().0,
file.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
#[doc(alias = "gtk_picture_set_filename")]
pub fn set_filename(&self, filename: Option<&str>) {
unsafe {
ffi::gtk_picture_set_filename(self.to_glib_none().0, filename.to_glib_none().0);
}
}
#[doc(alias = "gtk_picture_set_keep_aspect_ratio")]
pub fn set_keep_aspect_ratio(&self, keep_aspect_ratio: bool) {
unsafe {
ffi::gtk_picture_set_keep_aspect_ratio(
self.to_glib_none().0,
keep_aspect_ratio.to_glib(),
);
}
}
#[doc(alias = "gtk_picture_set_paintable")]
pub fn set_paintable<P: IsA<gdk::Paintable>>(&self, paintable: Option<&P>) {
unsafe {
ffi::gtk_picture_set_paintable(
self.to_glib_none().0,
paintable.map(|p| p.as_ref()).to_glib_none().0,
);
}
}
#[doc(alias = "gtk_picture_set_pixbuf")]
pub fn set_pixbuf(&self, pixbuf: Option<&gdk_pixbuf::Pixbuf>) {
unsafe {
ffi::gtk_picture_set_pixbuf(self.to_glib_none().0, pixbuf.to_glib_none().0);
}
}
#[doc(alias = "gtk_picture_set_resource")]
pub fn set_resource(&self, resource_path: Option<&str>) {
unsafe {
ffi::gtk_picture_set_resource(self.to_glib_none().0, resource_path.to_glib_none().0);
}
}
pub fn connect_property_alternative_text_notify<F: Fn(&Picture) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_alternative_text_trampoline<F: Fn(&Picture) + 'static>(
this: *mut ffi::GtkPicture,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::alternative-text\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_alternative_text_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_property_can_shrink_notify<F: Fn(&Picture) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_can_shrink_trampoline<F: Fn(&Picture) + 'static>(
this: *mut ffi::GtkPicture,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::can-shrink\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_can_shrink_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_property_file_notify<F: Fn(&Picture) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_file_trampoline<F: Fn(&Picture) + 'static>(
this: *mut ffi::GtkPicture,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::file\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_file_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_property_keep_aspect_ratio_notify<F: Fn(&Picture) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_keep_aspect_ratio_trampoline<F: Fn(&Picture) + 'static>(
this: *mut ffi::GtkPicture,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::keep-aspect-ratio\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_keep_aspect_ratio_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
pub fn connect_property_paintable_notify<F: Fn(&Picture) + 'static>(
&self,
f: F,
) -> SignalHandlerId {
unsafe extern "C" fn notify_paintable_trampoline<F: Fn(&Picture) + 'static>(
this: *mut ffi::GtkPicture,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) {
let f: &F = &*(f as *const F);
f(&from_glib_borrow(this))
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::paintable\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_paintable_trampoline::<F> as *const (),
)),
Box_::into_raw(f),
)
}
}
}
impl Default for Picture {
fn default() -> Self {
Self::new()
}
}
#[derive(Clone, Default)]
pub struct PictureBuilder {
alternative_text: Option<String>,
can_shrink: Option<bool>,
file: Option<gio::File>,
keep_aspect_ratio: Option<bool>,
paintable: Option<gdk::Paintable>,
can_focus: Option<bool>,
can_target: Option<bool>,
css_classes: Option<Vec<String>>,
css_name: Option<String>,
cursor: Option<gdk::Cursor>,
focus_on_click: Option<bool>,
focusable: Option<bool>,
halign: Option<Align>,
has_tooltip: Option<bool>,
height_request: Option<i32>,
hexpand: Option<bool>,
hexpand_set: Option<bool>,
layout_manager: Option<LayoutManager>,
margin_bottom: Option<i32>,
margin_end: Option<i32>,
margin_start: Option<i32>,
margin_top: Option<i32>,
name: Option<String>,
opacity: Option<f64>,
overflow: Option<Overflow>,
receives_default: Option<bool>,
sensitive: Option<bool>,
tooltip_markup: Option<String>,
tooltip_text: Option<String>,
valign: Option<Align>,
vexpand: Option<bool>,
vexpand_set: Option<bool>,
visible: Option<bool>,
width_request: Option<i32>,
accessible_role: Option<AccessibleRole>,
}
impl PictureBuilder {
pub fn new() -> Self {
Self::default()
}
pub fn build(self) -> Picture {
let mut properties: Vec<(&str, &dyn ToValue)> = vec![];
if let Some(ref alternative_text) = self.alternative_text {
properties.push(("alternative-text", alternative_text));
}
if let Some(ref can_shrink) = self.can_shrink {
properties.push(("can-shrink", can_shrink));
}
if let Some(ref file) = self.file {
properties.push(("file", file));
}
if let Some(ref keep_aspect_ratio) = self.keep_aspect_ratio {
properties.push(("keep-aspect-ratio", keep_aspect_ratio));
}
if let Some(ref paintable) = self.paintable {
properties.push(("paintable", paintable));
}
if let Some(ref can_focus) = self.can_focus {
properties.push(("can-focus", can_focus));
}
if let Some(ref can_target) = self.can_target {
properties.push(("can-target", can_target));
}
if let Some(ref css_classes) = self.css_classes {
properties.push(("css-classes", css_classes));
}
if let Some(ref css_name) = self.css_name {
properties.push(("css-name", css_name));
}
if let Some(ref cursor) = self.cursor {
properties.push(("cursor", cursor));
}
if let Some(ref focus_on_click) = self.focus_on_click {
properties.push(("focus-on-click", focus_on_click));
}
if let Some(ref focusable) = self.focusable {
properties.push(("focusable", focusable));
}
if let Some(ref halign) = self.halign {
properties.push(("halign", halign));
}
if let Some(ref has_tooltip) = self.has_tooltip {
properties.push(("has-tooltip", has_tooltip));
}
if let Some(ref height_request) = self.height_request {
properties.push(("height-request", height_request));
}
if let Some(ref hexpand) = self.hexpand {
properties.push(("hexpand", hexpand));
}
if let Some(ref hexpand_set) = self.hexpand_set {
properties.push(("hexpand-set", hexpand_set));
}
if let Some(ref layout_manager) = self.layout_manager {
properties.push(("layout-manager", layout_manager));
}
if let Some(ref margin_bottom) = self.margin_bottom {
properties.push(("margin-bottom", margin_bottom));
}
if let Some(ref margin_end) = self.margin_end {
properties.push(("margin-end", margin_end));
}
if let Some(ref margin_start) = self.margin_start {
properties.push(("margin-start", margin_start));
}
if let Some(ref margin_top) = self.margin_top {
properties.push(("margin-top", margin_top));
}
if let Some(ref name) = self.name {
properties.push(("name", name));
}
if let Some(ref opacity) = self.opacity {
properties.push(("opacity", opacity));
}
if let Some(ref overflow) = self.overflow {
properties.push(("overflow", overflow));
}
if let Some(ref receives_default) = self.receives_default {
properties.push(("receives-default", receives_default));
}
if let Some(ref sensitive) = self.sensitive {
properties.push(("sensitive", sensitive));
}
if let Some(ref tooltip_markup) = self.tooltip_markup {
properties.push(("tooltip-markup", tooltip_markup));
}
if let Some(ref tooltip_text) = self.tooltip_text {
properties.push(("tooltip-text", tooltip_text));
}
if let Some(ref valign) = self.valign {
properties.push(("valign", valign));
}
if let Some(ref vexpand) = self.vexpand {
properties.push(("vexpand", vexpand));
}
if let Some(ref vexpand_set) = self.vexpand_set {
properties.push(("vexpand-set", vexpand_set));
}
if let Some(ref visible) = self.visible {
properties.push(("visible", visible));
}
if let Some(ref width_request) = self.width_request {
properties.push(("width-request", width_request));
}
if let Some(ref accessible_role) = self.accessible_role {
properties.push(("accessible-role", accessible_role));
}
        glib::Object::new::<Picture>(&properties).expect("object new")
}
pub fn alternative_text(mut self, alternative_text: &str) -> Self {
self.alternative_text = Some(alternative_text.to_string());
self
}
pub fn can_shrink(mut self, can_shrink: bool) -> Self {
self.can_shrink = Some(can_shrink);
self
}
pub fn file<P: IsA<gio::File>>(mut self, file: &P) -> Self {
self.file = Some(file.clone().upcast());
self
}
pub fn keep_aspect_ratio(mut self, keep_aspect_ratio: bool) -> Self {
self.keep_aspect_ratio = Some(keep_aspect_ratio);
self
}
pub fn paintable<P: IsA<gdk::Paintable>>(mut self, paintable: &P) -> Self {
self.paintable = Some(paintable.clone().upcast());
self
}
pub fn can_focus(mut self, can_focus: bool) -> Self {
self.can_focus = Some(can_focus);
self
}
pub fn can_target(mut self, can_target: bool) -> Self {
self.can_target = Some(can_target);
self
}
pub fn css_classes(mut self, css_classes: Vec<String>) -> Self {
self.css_classes = Some(css_classes);
self
}
pub fn css_name(mut self, css_name: &str) -> Self {
self.css_name = Some(css_name.to_string());
self
}
pub fn cursor(mut self, cursor: &gdk::Cursor) -> Self {
self.cursor = Some(cursor.clone());
self
}
pub fn focus_on_click(mut self, focus_on_click: bool) -> Self {
self.focus_on_click = Some(focus_on_click);
self
}
pub fn focusable(mut self, focusable: bool) -> Self {
self.focusable = Some(focusable);
self
}
pub fn halign(mut self, halign: Align) -> Self {
self.halign = Some(halign);
self
}
pub fn has_tooltip(mut self, has_tooltip: bool) -> Self {
self.has_tooltip = Some(has_tooltip);
self
}
pub fn height_request(mut self, height_request: i32) -> Self {
self.height_request = Some(height_request);
self
}
pub fn hexpand(mut self, hexpand: bool) -> Self {
self.hexpand = Some(hexpand);
self
}
pub fn hexpand_set(mut self, hexpand_set: bool) -> Self {
self.hexpand_set = Some(hexpand_set);
self
}
pub fn layout_manager<P: IsA<LayoutManager>>(mut self, layout_manager: &P) -> Self {
self.layout_manager = Some(layout_manager.clone().upcast());
self
}
pub fn margin_bottom(mut self, margin_bottom: i32) -> Self {
self.margin_bottom = Some(margin_bottom);
self
}
pub fn margin_end(mut self, margin_end: i32) -> Self {
self.margin_end = Some(margin_end);
self
}
pub fn margin_start(mut self, margin_start: i32) -> Self {
self.margin_start = Some(margin_start);
self
}
pub fn margin_top(mut self, margin_top: i32) -> Self {
self.margin_top = Some(margin_top);
self
}
pub fn name(mut self, name: &str) -> Self {
self.name = Some(name.to_string());
self
}
pub fn opacity(mut self, opacity: f64) -> Self {
self.opacity = Some(opacity);
self
}
pub fn overflow(mut self, overflow: Overflow) -> Self {
self.overflow = Some(overflow);
self
}
pub fn receives_default(mut self, receives_default: bool) -> Self {
self.receives_default = Some(receives_default);
self
}
pub fn sensitive(mut self, sensitive: bool) -> Self {
self.sensitive = Some(sensitive);
self
}
pub fn tooltip_markup(mut self, tooltip_markup: &str) -> Self {
self.tooltip_markup = Some(tooltip_markup.to_string());
self
}
pub fn tooltip_text(mut self, tooltip_text: &str) -> Self {
self.tooltip_text = Some(tooltip_text.to_string());
self
}
pub fn valign(mut self, valign: Align) -> Self {
self.valign = Some(valign);
self
}
pub fn vexpand(mut self, vexpand: bool) -> Self {
self.vexpand = Some(vexpand);
self
}
pub fn vexpand_set(mut self, vexpand_set: bool) -> Self {
self.vexpand_set = Some(vexpand_set);
self
}
pub fn visible(mut self, visible: bool) -> Self {
self.visible = Some(visible);
self
}
pub fn width_request(mut self, width_request: i32) -> Self {
self.width_request = Some(width_request);
self
}
pub fn accessible_role(mut self, accessible_role: AccessibleRole) -> Self {
self.accessible_role = Some(accessible_role);
self
}
}
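// A rough usage sketch of this builder (assuming it is exposed under the conventional
// gtk-rs name `PictureBuilder`; only setters defined above are used):
//
//     let picture = PictureBuilder::new()
//         .can_shrink(true)
//         .keep_aspect_ratio(true)
//         .halign(Align::Center)
//         .build();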
impl fmt::Display for Picture {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("Picture")
}
}
| 31.964885 | 139 | 0.570043 |
e802bc1f44cdd4500f97a88956235f18f3275722 | 703 | use language::operations::{Operation, ParamInfo};
pub struct MultiplayerIsServerOp;
const DOC: &str = "Checks that the code is running on a multiplayer server. The operation will fail on client machines or in singleplayer mode.";
pub const OP_CODE: u32 = 417;
pub const IDENT: &str = "multiplayer_is_server";
impl Operation for MultiplayerIsServerOp {
fn op_code(&self) -> u32 {
OP_CODE
}
fn documentation(&self) -> &'static str {
DOC
}
fn identifier(&self) -> &'static str {
IDENT
}
fn param_info(&self) -> ParamInfo {
ParamInfo {
num_required: 0,
num_optional: 0,
param_docs: vec![],
}
}
}
| 21.96875 | 140 | 0.611664 |
8fa20cdf4b7a1638be90d539582b6be86f04a006 | 1,248 | // run-pass
#![allow(dead_code)]
#![allow(unused_mut)]
#![allow(unused_imports)]
// Test how resolving a projection interacts with inference. In this
// case, we were eagerly unifying the type variable for the iterator
// type with `I` from the where clause, ignoring the in-scope `impl`
// for `ByRef`. The right answer was to consider the result ambiguous
// until more type information was available.
#![feature(lang_items)]
#![no_implicit_prelude]
use std::marker::Sized;
use std::option::Option::{None, Some, self};
trait Iterator {
type Item;
fn next(&mut self) -> Option<Self::Item>;
}
trait IteratorExt: Iterator + Sized {
fn by_ref(&mut self) -> ByRef<Self> {
ByRef(self)
}
}
impl<I> IteratorExt for I where I: Iterator {}
struct ByRef<'a, I: 'a + Iterator>(&'a mut I);
impl<'a, I: Iterator> Iterator for ByRef<'a, I> {
type Item = I::Item;
fn next(&mut self) -> Option< <I as Iterator>::Item > {
self.0.next()
}
}
fn is_iterator_of<A, I: Iterator<Item=A>>(_: &I) {}
fn test<A, I: Iterator<Item=A>>(mut it: I) {
is_iterator_of::<A, _>(&it.by_ref());
}
fn test2<A, I1: Iterator<Item=A>, I2: Iterator<Item=I1::Item>>(mut it: I2) {
is_iterator_of::<A, _>(&it)
}
fn main() { }
| 24 | 76 | 0.642628 |
50b3414478c5732637c7408cfd67b910594cd559 | 32,836 | #![macro_use]
//! Async UART
//!
//! Async UART is provided in two flavors - this one and also [crate::buffered_uarte::BufferedUarte].
//! The [Uarte] here is useful for those use-cases where reading the UARTE peripheral is
//! exclusively awaited on. If the [Uarte] is required to be awaited on with some other future,
//! for example when using `futures_util::future::select`, then you should consider
//! [crate::buffered_uarte::BufferedUarte] so that reads may continue while processing these
//! other futures. If you do not then you may lose data between reads.
//!
//! An advantage the [Uarte] has over [crate::buffered_uarte::BufferedUarte] is that less
//! memory may be used given that buffers are passed in directly to its read and write
//! methods.
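//!
//! A rough usage sketch (the peripheral, pin and interrupt names below are assumptions
//! for a typical nRF52 target and are not defined in this module):
//!
//! ```ignore
//! let mut config = Config::default();
//! config.baudrate = Baudrate::BAUD115200;
//! let irq = interrupt::take!(UARTE0_UART0);
//! let mut uart = Uarte::new(p.UARTE0, irq, p.P0_08, p.P0_06, config);
//!
//! uart.write(b"hello").await.unwrap();
//! let mut buf = [0u8; 4];
//! uart.read(&mut buf).await.unwrap();
//! ```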
use core::marker::PhantomData;
use core::sync::atomic::{compiler_fence, Ordering};
use core::task::Poll;
use embassy_hal_common::drop::OnDrop;
use embassy_hal_common::unborrow;
use futures::future::poll_fn;
// Re-export SVD variants to allow user to directly set values.
pub use pac::uarte0::{baudrate::BAUDRATE_A as Baudrate, config::PARITY_A as Parity};
use crate::chip::{EASY_DMA_SIZE, FORCE_COPY_BUFFER_SIZE};
use crate::gpio::sealed::Pin as _;
use crate::gpio::{self, AnyPin, Pin as GpioPin, PselBits};
use crate::interrupt::{Interrupt, InterruptExt};
use crate::ppi::{AnyConfigurableChannel, ConfigurableChannel, Event, Ppi, Task};
use crate::timer::{Frequency, Instance as TimerInstance, Timer};
use crate::util::slice_in_ram_or;
use crate::{pac, Unborrow};
#[non_exhaustive]
pub struct Config {
pub parity: Parity,
pub baudrate: Baudrate,
}
impl Default for Config {
fn default() -> Self {
Self {
parity: Parity::EXCLUDED,
baudrate: Baudrate::BAUD115200,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
#[non_exhaustive]
pub enum Error {
BufferTooLong,
BufferZeroLength,
DMABufferNotInDataMemory,
// TODO: add other error variants.
}
/// Interface to the UARTE peripheral using EasyDMA to offload the transmission and reception workload.
///
/// For more details about EasyDMA, consult the module documentation.
pub struct Uarte<'d, T: Instance> {
phantom: PhantomData<&'d mut T>,
tx: UarteTx<'d, T>,
rx: UarteRx<'d, T>,
}
/// Transmitter interface to the UARTE peripheral obtained
/// via [Uarte]::split.
pub struct UarteTx<'d, T: Instance> {
phantom: PhantomData<&'d mut T>,
}
/// Receiver interface to the UARTE peripheral obtained
/// via [Uarte]::split.
pub struct UarteRx<'d, T: Instance> {
phantom: PhantomData<&'d mut T>,
}
impl<'d, T: Instance> Uarte<'d, T> {
/// Create a new UARTE without hardware flow control
pub fn new(
uarte: impl Unborrow<Target = T> + 'd,
irq: impl Unborrow<Target = T::Interrupt> + 'd,
rxd: impl Unborrow<Target = impl GpioPin> + 'd,
txd: impl Unborrow<Target = impl GpioPin> + 'd,
config: Config,
) -> Self {
unborrow!(rxd, txd);
Self::new_inner(uarte, irq, rxd.degrade(), txd.degrade(), None, None, config)
}
/// Create a new UARTE with hardware flow control (RTS/CTS)
pub fn new_with_rtscts(
uarte: impl Unborrow<Target = T> + 'd,
irq: impl Unborrow<Target = T::Interrupt> + 'd,
rxd: impl Unborrow<Target = impl GpioPin> + 'd,
txd: impl Unborrow<Target = impl GpioPin> + 'd,
cts: impl Unborrow<Target = impl GpioPin> + 'd,
rts: impl Unborrow<Target = impl GpioPin> + 'd,
config: Config,
) -> Self {
unborrow!(rxd, txd, cts, rts);
Self::new_inner(
uarte,
irq,
rxd.degrade(),
txd.degrade(),
Some(cts.degrade()),
Some(rts.degrade()),
config,
)
}
fn new_inner(
_uarte: impl Unborrow<Target = T> + 'd,
irq: impl Unborrow<Target = T::Interrupt> + 'd,
rxd: AnyPin,
txd: AnyPin,
cts: Option<AnyPin>,
rts: Option<AnyPin>,
config: Config,
) -> Self {
unborrow!(irq);
let r = T::regs();
rxd.conf().write(|w| w.input().connect().drive().h0h1());
r.psel.rxd.write(|w| unsafe { w.bits(rxd.psel_bits()) });
txd.set_high();
txd.conf().write(|w| w.dir().output().drive().h0h1());
r.psel.txd.write(|w| unsafe { w.bits(txd.psel_bits()) });
if let Some(pin) = &cts {
pin.conf().write(|w| w.input().connect().drive().h0h1());
}
r.psel.cts.write(|w| unsafe { w.bits(cts.psel_bits()) });
if let Some(pin) = &rts {
pin.set_high();
pin.conf().write(|w| w.dir().output().drive().h0h1());
}
r.psel.rts.write(|w| unsafe { w.bits(rts.psel_bits()) });
// Configure
let hardware_flow_control = match (rts.is_some(), cts.is_some()) {
(false, false) => false,
(true, true) => true,
            _ => panic!("RTS and CTS pins must be either both set or both unset."),
};
r.config.write(|w| {
w.hwfc().bit(hardware_flow_control);
w.parity().variant(config.parity);
w
});
r.baudrate.write(|w| w.baudrate().variant(config.baudrate));
// Disable all interrupts
r.intenclr.write(|w| unsafe { w.bits(0xFFFF_FFFF) });
// Reset rxstarted, txstarted. These are used by drop to know whether a transfer was
// stopped midway or not.
r.events_rxstarted.reset();
r.events_txstarted.reset();
irq.set_handler(Self::on_interrupt);
irq.unpend();
irq.enable();
// Enable
apply_workaround_for_enable_anomaly(&r);
r.enable.write(|w| w.enable().enabled());
let s = T::state();
s.tx_rx_refcount.store(2, Ordering::Relaxed);
Self {
phantom: PhantomData,
tx: UarteTx { phantom: PhantomData },
rx: UarteRx { phantom: PhantomData },
}
}
/// Split the Uarte into a transmitter and receiver, which is
    /// particularly useful when having two tasks correlating to
/// transmitting and receiving.
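    ///
    /// A minimal sketch (task setup and error handling are omitted):
    ///
    /// ```ignore
    /// let (mut tx, mut rx) = uart.split();
    /// // `tx` can be moved into a task that only writes, `rx` into one that only reads.
    /// tx.write(b"ping").await.unwrap();
    /// let mut buf = [0u8; 1];
    /// rx.read(&mut buf).await.unwrap();
    /// ```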
pub fn split(self) -> (UarteTx<'d, T>, UarteRx<'d, T>) {
(self.tx, self.rx)
}
/// Return the endtx event for use with PPI
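    ///
    /// A rough sketch (the PPI channel and the task to trigger are assumptions, not
    /// defined here):
    ///
    /// ```ignore
    /// let mut ch = Ppi::new_one_to_one(ppi_ch.degrade(), uart.event_endtx(), some_task);
    /// ch.enable();
    /// ```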
pub fn event_endtx(&self) -> Event {
let r = T::regs();
Event::from_reg(&r.events_endtx)
}
fn on_interrupt(_: *mut ()) {
let r = T::regs();
let s = T::state();
if r.events_endrx.read().bits() != 0 {
s.endrx_waker.wake();
r.intenclr.write(|w| w.endrx().clear());
}
if r.events_endtx.read().bits() != 0 {
s.endtx_waker.wake();
r.intenclr.write(|w| w.endtx().clear());
}
}
pub async fn read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.rx.read(buffer).await
}
pub async fn write(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.write(buffer).await
}
/// Same as [`write`](Uarte::write) but will fail instead of copying data into RAM. Consult the module level documentation to learn more.
pub async fn write_from_ram(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.write_from_ram(buffer).await
}
pub fn blocking_read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.rx.blocking_read(buffer)
}
pub fn blocking_write(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.blocking_write(buffer)
}
/// Same as [`blocking_write`](Uarte::blocking_write) but will fail instead of copying data into RAM. Consult the module level documentation to learn more.
pub fn blocking_write_from_ram(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.blocking_write_from_ram(buffer)
}
}
impl<'d, T: Instance> UarteTx<'d, T> {
pub async fn write(&mut self, buffer: &[u8]) -> Result<(), Error> {
match self.write_from_ram(buffer).await {
Ok(_) => Ok(()),
Err(Error::DMABufferNotInDataMemory) => {
trace!("Copying UARTE tx buffer into RAM for DMA");
let ram_buf = &mut [0; FORCE_COPY_BUFFER_SIZE][..buffer.len()];
ram_buf.copy_from_slice(buffer);
self.write_from_ram(&ram_buf).await
}
Err(error) => Err(error),
}
}
pub async fn write_from_ram(&mut self, buffer: &[u8]) -> Result<(), Error> {
slice_in_ram_or(buffer, Error::DMABufferNotInDataMemory)?;
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = T::regs();
let s = T::state();
let drop = OnDrop::new(move || {
trace!("write drop: stopping");
r.intenclr.write(|w| w.endtx().clear());
r.events_txstopped.reset();
r.tasks_stoptx.write(|w| unsafe { w.bits(1) });
// TX is stopped almost instantly, spinning is fine.
while r.events_endtx.read().bits() == 0 {}
trace!("write drop: stopped");
});
r.txd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.txd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endtx.reset();
r.intenset.write(|w| w.endtx().set());
compiler_fence(Ordering::SeqCst);
trace!("starttx");
r.tasks_starttx.write(|w| unsafe { w.bits(1) });
poll_fn(|cx| {
s.endtx_waker.register(cx.waker());
if r.events_endtx.read().bits() != 0 {
return Poll::Ready(());
}
Poll::Pending
})
.await;
compiler_fence(Ordering::SeqCst);
r.events_txstarted.reset();
drop.defuse();
Ok(())
}
pub fn blocking_write(&mut self, buffer: &[u8]) -> Result<(), Error> {
match self.blocking_write_from_ram(buffer) {
Ok(_) => Ok(()),
Err(Error::DMABufferNotInDataMemory) => {
trace!("Copying UARTE tx buffer into RAM for DMA");
let ram_buf = &mut [0; FORCE_COPY_BUFFER_SIZE][..buffer.len()];
ram_buf.copy_from_slice(buffer);
self.blocking_write_from_ram(&ram_buf)
}
Err(error) => Err(error),
}
}
pub fn blocking_write_from_ram(&mut self, buffer: &[u8]) -> Result<(), Error> {
slice_in_ram_or(buffer, Error::DMABufferNotInDataMemory)?;
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = T::regs();
r.txd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.txd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endtx.reset();
r.intenclr.write(|w| w.endtx().clear());
compiler_fence(Ordering::SeqCst);
trace!("starttx");
r.tasks_starttx.write(|w| unsafe { w.bits(1) });
while r.events_endtx.read().bits() == 0 {}
compiler_fence(Ordering::SeqCst);
r.events_txstarted.reset();
Ok(())
}
}
impl<'a, T: Instance> Drop for UarteTx<'a, T> {
fn drop(&mut self) {
trace!("uarte tx drop");
let r = T::regs();
let did_stoptx = r.events_txstarted.read().bits() != 0;
trace!("did_stoptx {}", did_stoptx);
// Wait for txstopped, if needed.
while did_stoptx && r.events_txstopped.read().bits() == 0 {}
let s = T::state();
drop_tx_rx(&r, &s);
}
}
impl<'d, T: Instance> UarteRx<'d, T> {
pub async fn read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = T::regs();
let s = T::state();
let drop = OnDrop::new(move || {
trace!("read drop: stopping");
r.intenclr.write(|w| w.endrx().clear());
r.events_rxto.reset();
r.tasks_stoprx.write(|w| unsafe { w.bits(1) });
while r.events_endrx.read().bits() == 0 {}
trace!("read drop: stopped");
});
r.rxd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.rxd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endrx.reset();
r.intenset.write(|w| w.endrx().set());
compiler_fence(Ordering::SeqCst);
trace!("startrx");
r.tasks_startrx.write(|w| unsafe { w.bits(1) });
poll_fn(|cx| {
s.endrx_waker.register(cx.waker());
if r.events_endrx.read().bits() != 0 {
return Poll::Ready(());
}
Poll::Pending
})
.await;
compiler_fence(Ordering::SeqCst);
r.events_rxstarted.reset();
drop.defuse();
Ok(())
}
pub fn blocking_read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = T::regs();
r.rxd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.rxd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endrx.reset();
r.intenclr.write(|w| w.endrx().clear());
compiler_fence(Ordering::SeqCst);
trace!("startrx");
r.tasks_startrx.write(|w| unsafe { w.bits(1) });
while r.events_endrx.read().bits() == 0 {}
compiler_fence(Ordering::SeqCst);
r.events_rxstarted.reset();
Ok(())
}
}
impl<'a, T: Instance> Drop for UarteRx<'a, T> {
fn drop(&mut self) {
trace!("uarte rx drop");
let r = T::regs();
let did_stoprx = r.events_rxstarted.read().bits() != 0;
trace!("did_stoprx {}", did_stoprx);
// Wait for rxto, if needed.
while did_stoprx && r.events_rxto.read().bits() == 0 {}
let s = T::state();
drop_tx_rx(&r, &s);
}
}
#[cfg(not(any(feature = "_nrf9160", feature = "nrf5340")))]
pub(in crate) fn apply_workaround_for_enable_anomaly(_r: &crate::pac::uarte0::RegisterBlock) {
// Do nothing
}
#[cfg(any(feature = "_nrf9160", feature = "nrf5340"))]
pub(in crate) fn apply_workaround_for_enable_anomaly(r: &crate::pac::uarte0::RegisterBlock) {
use core::ops::Deref;
// Apply workaround for anomalies:
// - nRF9160 - anomaly 23
// - nRF5340 - anomaly 44
let rxenable_reg: *const u32 = ((r.deref() as *const _ as usize) + 0x564) as *const u32;
let txenable_reg: *const u32 = ((r.deref() as *const _ as usize) + 0x568) as *const u32;
// NB Safety: This is taken from Nordic's driver -
// https://github.com/NordicSemiconductor/nrfx/blob/master/drivers/src/nrfx_uarte.c#L197
if unsafe { core::ptr::read_volatile(txenable_reg) } == 1 {
r.tasks_stoptx.write(|w| unsafe { w.bits(1) });
}
// NB Safety: This is taken from Nordic's driver -
// https://github.com/NordicSemiconductor/nrfx/blob/master/drivers/src/nrfx_uarte.c#L197
if unsafe { core::ptr::read_volatile(rxenable_reg) } == 1 {
r.enable.write(|w| w.enable().enabled());
r.tasks_stoprx.write(|w| unsafe { w.bits(1) });
        let mut workaround_succeeded = false;
// The UARTE is able to receive up to four bytes after the STOPRX task has been triggered.
// On lowest supported baud rate (1200 baud), with parity bit and two stop bits configured
// (resulting in 12 bits per data byte sent), this may take up to 40 ms.
for _ in 0..40000 {
// NB Safety: This is taken from Nordic's driver -
// https://github.com/NordicSemiconductor/nrfx/blob/master/drivers/src/nrfx_uarte.c#L197
if unsafe { core::ptr::read_volatile(rxenable_reg) } == 0 {
                workaround_succeeded = true;
break;
} else {
// Need to sleep for 1us here
}
}
        if !workaround_succeeded {
panic!("Failed to apply workaround for UART");
}
let errors = r.errorsrc.read().bits();
// NB Safety: safe to write back the bits we just read to clear them
r.errorsrc.write(|w| unsafe { w.bits(errors) });
r.enable.write(|w| w.enable().disabled());
}
}
pub(in crate) fn drop_tx_rx(r: &pac::uarte0::RegisterBlock, s: &sealed::State) {
if s.tx_rx_refcount.fetch_sub(1, Ordering::Relaxed) == 1 {
// Finally we can disable, and we do so for the peripheral
// i.e. not just rx concerns.
r.enable.write(|w| w.enable().disabled());
gpio::deconfigure_pin(r.psel.rxd.read().bits());
gpio::deconfigure_pin(r.psel.txd.read().bits());
gpio::deconfigure_pin(r.psel.rts.read().bits());
gpio::deconfigure_pin(r.psel.cts.read().bits());
trace!("uarte tx and rx drop: done");
}
}
/// Interface to a UARTE peripheral that uses an additional timer and two PPI channels,
/// allowing it to implement the ReadUntilIdle trait.
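///
/// A rough usage sketch (the peripherals, PPI channels, pins and interrupt below are
/// assumptions for a typical nRF52 target):
///
/// ```ignore
/// let mut uart = UarteWithIdle::new(
///     p.UARTE0, p.TIMER0, p.PPI_CH0, p.PPI_CH1, irq, p.P0_08, p.P0_06, config,
/// );
/// let mut buf = [0u8; 64];
/// // Resolves once the line has been idle for roughly two byte times.
/// let n = uart.read_until_idle(&mut buf).await.unwrap();
/// ```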
pub struct UarteWithIdle<'d, U: Instance, T: TimerInstance> {
tx: UarteTx<'d, U>,
rx: UarteRxWithIdle<'d, U, T>,
}
impl<'d, U: Instance, T: TimerInstance> UarteWithIdle<'d, U, T> {
/// Create a new UARTE without hardware flow control
pub fn new(
uarte: impl Unborrow<Target = U> + 'd,
timer: impl Unborrow<Target = T> + 'd,
ppi_ch1: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
ppi_ch2: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
irq: impl Unborrow<Target = U::Interrupt> + 'd,
rxd: impl Unborrow<Target = impl GpioPin> + 'd,
txd: impl Unborrow<Target = impl GpioPin> + 'd,
config: Config,
) -> Self {
unborrow!(rxd, txd);
Self::new_inner(
uarte,
timer,
ppi_ch1,
ppi_ch2,
irq,
rxd.degrade(),
txd.degrade(),
None,
None,
config,
)
}
/// Create a new UARTE with hardware flow control (RTS/CTS)
pub fn new_with_rtscts(
uarte: impl Unborrow<Target = U> + 'd,
timer: impl Unborrow<Target = T> + 'd,
ppi_ch1: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
ppi_ch2: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
irq: impl Unborrow<Target = U::Interrupt> + 'd,
rxd: impl Unborrow<Target = impl GpioPin> + 'd,
txd: impl Unborrow<Target = impl GpioPin> + 'd,
cts: impl Unborrow<Target = impl GpioPin> + 'd,
rts: impl Unborrow<Target = impl GpioPin> + 'd,
config: Config,
) -> Self {
unborrow!(rxd, txd, cts, rts);
Self::new_inner(
uarte,
timer,
ppi_ch1,
ppi_ch2,
irq,
rxd.degrade(),
txd.degrade(),
Some(cts.degrade()),
Some(rts.degrade()),
config,
)
}
fn new_inner(
uarte: impl Unborrow<Target = U> + 'd,
timer: impl Unborrow<Target = T> + 'd,
ppi_ch1: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
ppi_ch2: impl Unborrow<Target = impl ConfigurableChannel + 'd> + 'd,
irq: impl Unborrow<Target = U::Interrupt> + 'd,
rxd: AnyPin,
txd: AnyPin,
cts: Option<AnyPin>,
rts: Option<AnyPin>,
config: Config,
) -> Self {
let baudrate = config.baudrate;
let (tx, rx) = Uarte::new_inner(uarte, irq, rxd, txd, cts, rts, config).split();
let mut timer = Timer::new(timer);
unborrow!(ppi_ch1, ppi_ch2);
let r = U::regs();
// BAUDRATE register values are `baudrate * 2^32 / 16000000`
// source: https://devzone.nordicsemi.com/f/nordic-q-a/391/uart-baudrate-register-values
//
// We want to stop RX if line is idle for 2 bytes worth of time
// That is 20 bits (each byte is 1 start bit + 8 data bits + 1 stop bit)
        // This gives us the number of 16 MHz timer ticks for 20 bits.
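        //
        // Worked example: at 115200 baud the register value is roughly
        // 115200 * 2^32 / 16000000 ~= 30.9e6, so
        //   timeout ~= 0x8000_0000 / (30.9e6 / 40) ~= 2778 ticks of the 16 MHz timer,
        // i.e. about 174 us, which is indeed 20 bit times at 115200 baud.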
let timeout = 0x8000_0000 / (baudrate as u32 / 40);
timer.set_frequency(Frequency::F16MHz);
timer.cc(0).write(timeout);
timer.cc(0).short_compare_clear();
timer.cc(0).short_compare_stop();
let mut ppi_ch1 = Ppi::new_one_to_two(
ppi_ch1.degrade(),
Event::from_reg(&r.events_rxdrdy),
timer.task_clear(),
timer.task_start(),
);
ppi_ch1.enable();
let mut ppi_ch2 = Ppi::new_one_to_one(
ppi_ch2.degrade(),
timer.cc(0).event_compare(),
Task::from_reg(&r.tasks_stoprx),
);
ppi_ch2.enable();
Self {
tx,
rx: UarteRxWithIdle {
rx,
timer,
ppi_ch1: ppi_ch1,
_ppi_ch2: ppi_ch2,
},
}
}
/// Split the Uarte into a transmitter and receiver, which is
    /// particularly useful when having two tasks correlating to
/// transmitting and receiving.
pub fn split(self) -> (UarteTx<'d, U>, UarteRxWithIdle<'d, U, T>) {
(self.tx, self.rx)
}
pub async fn read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.rx.read(buffer).await
}
pub async fn write(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.write(buffer).await
}
pub fn blocking_read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.rx.blocking_read(buffer)
}
pub fn blocking_write(&mut self, buffer: &[u8]) -> Result<(), Error> {
self.tx.blocking_write(buffer)
}
pub async fn read_until_idle(&mut self, buffer: &mut [u8]) -> Result<usize, Error> {
self.rx.read_until_idle(buffer).await
}
pub fn blocking_read_until_idle(&mut self, buffer: &mut [u8]) -> Result<usize, Error> {
self.rx.blocking_read_until_idle(buffer)
}
}
pub struct UarteRxWithIdle<'d, U: Instance, T: TimerInstance> {
rx: UarteRx<'d, U>,
timer: Timer<'d, T>,
ppi_ch1: Ppi<'d, AnyConfigurableChannel, 1, 2>,
_ppi_ch2: Ppi<'d, AnyConfigurableChannel, 1, 1>,
}
impl<'d, U: Instance, T: TimerInstance> UarteRxWithIdle<'d, U, T> {
pub async fn read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.ppi_ch1.disable();
self.rx.read(buffer).await
}
pub fn blocking_read(&mut self, buffer: &mut [u8]) -> Result<(), Error> {
self.ppi_ch1.disable();
self.rx.blocking_read(buffer)
}
pub async fn read_until_idle(&mut self, buffer: &mut [u8]) -> Result<usize, Error> {
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = U::regs();
let s = U::state();
self.ppi_ch1.enable();
let drop = OnDrop::new(|| {
self.timer.stop();
r.intenclr.write(|w| w.endrx().clear());
r.events_rxto.reset();
r.tasks_stoprx.write(|w| unsafe { w.bits(1) });
while r.events_endrx.read().bits() == 0 {}
});
r.rxd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.rxd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endrx.reset();
r.intenset.write(|w| w.endrx().set());
compiler_fence(Ordering::SeqCst);
r.tasks_startrx.write(|w| unsafe { w.bits(1) });
poll_fn(|cx| {
s.endrx_waker.register(cx.waker());
if r.events_endrx.read().bits() != 0 {
return Poll::Ready(());
}
Poll::Pending
})
.await;
compiler_fence(Ordering::SeqCst);
let n = r.rxd.amount.read().amount().bits() as usize;
self.timer.stop();
r.events_rxstarted.reset();
drop.defuse();
Ok(n)
}
pub fn blocking_read_until_idle(&mut self, buffer: &mut [u8]) -> Result<usize, Error> {
if buffer.len() == 0 {
return Err(Error::BufferZeroLength);
}
if buffer.len() > EASY_DMA_SIZE {
return Err(Error::BufferTooLong);
}
let ptr = buffer.as_ptr();
let len = buffer.len();
let r = U::regs();
self.ppi_ch1.enable();
r.rxd.ptr.write(|w| unsafe { w.ptr().bits(ptr as u32) });
r.rxd.maxcnt.write(|w| unsafe { w.maxcnt().bits(len as _) });
r.events_endrx.reset();
r.intenclr.write(|w| w.endrx().clear());
compiler_fence(Ordering::SeqCst);
r.tasks_startrx.write(|w| unsafe { w.bits(1) });
while r.events_endrx.read().bits() == 0 {}
compiler_fence(Ordering::SeqCst);
let n = r.rxd.amount.read().amount().bits() as usize;
self.timer.stop();
r.events_rxstarted.reset();
Ok(n)
}
}
pub(crate) mod sealed {
use core::sync::atomic::AtomicU8;
use embassy::waitqueue::AtomicWaker;
use super::*;
pub struct State {
pub endrx_waker: AtomicWaker,
pub endtx_waker: AtomicWaker,
pub tx_rx_refcount: AtomicU8,
}
impl State {
pub const fn new() -> Self {
Self {
endrx_waker: AtomicWaker::new(),
endtx_waker: AtomicWaker::new(),
tx_rx_refcount: AtomicU8::new(0),
}
}
}
pub trait Instance {
fn regs() -> &'static pac::uarte0::RegisterBlock;
fn state() -> &'static State;
}
}
pub trait Instance: Unborrow<Target = Self> + sealed::Instance + 'static + Send {
type Interrupt: Interrupt;
}
macro_rules! impl_uarte {
($type:ident, $pac_type:ident, $irq:ident) => {
impl crate::uarte::sealed::Instance for peripherals::$type {
fn regs() -> &'static pac::uarte0::RegisterBlock {
unsafe { &*pac::$pac_type::ptr() }
}
fn state() -> &'static crate::uarte::sealed::State {
static STATE: crate::uarte::sealed::State = crate::uarte::sealed::State::new();
&STATE
}
}
impl crate::uarte::Instance for peripherals::$type {
type Interrupt = crate::interrupt::$irq;
}
};
}
// ====================
mod eh02 {
use super::*;
impl<'d, T: Instance> embedded_hal_02::blocking::serial::Write<u8> for Uarte<'d, T> {
type Error = Error;
fn bwrite_all(&mut self, buffer: &[u8]) -> Result<(), Self::Error> {
self.blocking_write(buffer)
}
fn bflush(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<'d, T: Instance> embedded_hal_02::blocking::serial::Write<u8> for UarteTx<'d, T> {
type Error = Error;
fn bwrite_all(&mut self, buffer: &[u8]) -> Result<(), Self::Error> {
self.blocking_write(buffer)
}
fn bflush(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<'d, U: Instance, T: TimerInstance> embedded_hal_02::blocking::serial::Write<u8> for UarteWithIdle<'d, U, T> {
type Error = Error;
fn bwrite_all(&mut self, buffer: &[u8]) -> Result<(), Self::Error> {
self.blocking_write(buffer)
}
fn bflush(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
}
#[cfg(feature = "unstable-traits")]
mod eh1 {
use super::*;
impl embedded_hal_1::serial::Error for Error {
fn kind(&self) -> embedded_hal_1::serial::ErrorKind {
match *self {
Self::BufferTooLong => embedded_hal_1::serial::ErrorKind::Other,
Self::BufferZeroLength => embedded_hal_1::serial::ErrorKind::Other,
Self::DMABufferNotInDataMemory => embedded_hal_1::serial::ErrorKind::Other,
}
}
}
// =====================
impl<'d, T: Instance> embedded_hal_1::serial::ErrorType for Uarte<'d, T> {
type Error = Error;
}
impl<'d, T: Instance> embedded_hal_1::serial::blocking::Write for Uarte<'d, T> {
fn write(&mut self, buffer: &[u8]) -> Result<(), Self::Error> {
self.blocking_write(buffer)
}
fn flush(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<'d, T: Instance> embedded_hal_1::serial::ErrorType for UarteTx<'d, T> {
type Error = Error;
}
impl<'d, T: Instance> embedded_hal_1::serial::blocking::Write for UarteTx<'d, T> {
fn write(&mut self, buffer: &[u8]) -> Result<(), Self::Error> {
self.blocking_write(buffer)
}
fn flush(&mut self) -> Result<(), Self::Error> {
Ok(())
}
}
impl<'d, T: Instance> embedded_hal_1::serial::ErrorType for UarteRx<'d, T> {
type Error = Error;
}
impl<'d, U: Instance, T: TimerInstance> embedded_hal_1::serial::ErrorType for UarteWithIdle<'d, U, T> {
type Error = Error;
}
}
cfg_if::cfg_if! {
if #[cfg(all(feature = "unstable-traits", feature = "nightly", feature = "_todo_embedded_hal_serial"))] {
use core::future::Future;
impl<'d, T: Instance> embedded_hal_async::serial::Read for Uarte<'d, T> {
type ReadFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn read<'a>(&'a mut self, buffer: &'a mut [u8]) -> Self::ReadFuture<'a> {
self.read(buffer)
}
}
impl<'d, T: Instance> embedded_hal_async::serial::Write for Uarte<'d, T> {
type WriteFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn write<'a>(&'a mut self, buffer: &'a [u8]) -> Self::WriteFuture<'a> {
self.write(buffer)
}
type FlushFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn flush<'a>(&'a mut self) -> Self::FlushFuture<'a> {
async move { Ok(()) }
}
}
impl<'d, T: Instance> embedded_hal_async::serial::Write for UarteTx<'d, T> {
type WriteFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn write<'a>(&'a mut self, buffer: &'a [u8]) -> Self::WriteFuture<'a> {
self.write(buffer)
}
type FlushFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn flush<'a>(&'a mut self) -> Self::FlushFuture<'a> {
async move { Ok(()) }
}
}
impl<'d, T: Instance> embedded_hal_async::serial::Read for UarteRx<'d, T> {
type ReadFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn read<'a>(&'a mut self, buffer: &'a mut [u8]) -> Self::ReadFuture<'a> {
self.read(buffer)
}
}
impl<'d, U: Instance, T: TimerInstance> embedded_hal_async::serial::Read
for UarteWithIdle<'d, U, T>
{
type ReadFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn read<'a>(&'a mut self, buffer: &'a mut [u8]) -> Self::ReadFuture<'a> {
self.read(buffer)
}
}
impl<'d, U: Instance, T: TimerInstance> embedded_hal_async::serial::Write
for UarteWithIdle<'d, U, T>
{
type WriteFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn write<'a>(&'a mut self, buffer: &'a [u8]) -> Self::WriteFuture<'a> {
self.write(buffer)
}
type FlushFuture<'a> = impl Future<Output = Result<(), Self::Error>> + 'a where Self: 'a;
fn flush<'a>(&'a mut self) -> Self::FlushFuture<'a> {
async move { Ok(()) }
}
}
}
}
| 31.879612 | 159 | 0.551986 |
d5000ac9c18669a3940bbea0a4629eaa6645d614 | 56,518 | // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// The Rust HIR.
pub use self::BindingMode::*;
pub use self::BinOp_::*;
pub use self::BlockCheckMode::*;
pub use self::CaptureClause::*;
pub use self::Decl_::*;
pub use self::Expr_::*;
pub use self::FunctionRetTy::*;
pub use self::ForeignItem_::*;
pub use self::Item_::*;
pub use self::Mutability::*;
pub use self::PrimTy::*;
pub use self::Stmt_::*;
pub use self::Ty_::*;
pub use self::TyParamBound::*;
pub use self::UnOp::*;
pub use self::UnsafeSource::*;
pub use self::Visibility::{Public, Inherited};
pub use self::PathParameters::*;
use hir::def::Def;
use hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX};
use util::nodemap::{NodeMap, FxHashSet};
use syntax_pos::{Span, DUMMY_SP};
use syntax::codemap::{self, Spanned};
use syntax::abi::Abi;
use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
use syntax::ext::hygiene::SyntaxContext;
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::tokenstream::TokenStream;
use syntax::util::ThinVec;
use rustc_data_structures::indexed_vec;
use std::collections::BTreeMap;
use std::fmt;
/// HIR doesn't commit to a concrete storage type and have its own alias for a vector.
/// It can be `Vec`, `P<[T]>` or potentially `Box<[T]>`, or some other container with similar
/// behavior. Unlike AST, HIR is mostly a static structure, so we can use an owned slice instead
/// of `Vec` to avoid keeping extra capacity.
pub type HirVec<T> = P<[T]>;
macro_rules! hir_vec {
($elem:expr; $n:expr) => (
$crate::hir::HirVec::from(vec![$elem; $n])
);
($($x:expr),*) => (
$crate::hir::HirVec::from(vec![$($x),*])
);
($($x:expr,)*) => (hir_vec![$($x),*])
}
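// For example, `hir_vec![a, b, c]` and `hir_vec![x; 3]` build a `HirVec<T>` (that is,
// a `P<[T]>`) with the same syntax as the standard `vec!` macro.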
pub mod check_attr;
pub mod def;
pub mod def_id;
pub mod intravisit;
pub mod itemlikevisit;
pub mod lowering;
pub mod map;
pub mod pat_util;
pub mod print;
pub mod svh;
/// A HirId uniquely identifies a node in the HIR of the current crate. It is
/// composed of the `owner`, which is the DefIndex of the directly enclosing
/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e. the closest "item-like"),
/// and the `local_id` which is unique within the given owner.
///
/// This two-level structure makes for more stable values: One can move an item
/// around within the source code, or add or remove stuff before it, without
/// the local_id part of the HirId changing, which is a very useful property
/// incremental compilation where we have to persist things through changes to
/// the code base.
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug,
RustcEncodable, RustcDecodable)]
pub struct HirId {
pub owner: DefIndex,
pub local_id: ItemLocalId,
}
/// An `ItemLocalId` uniquely identifies something within a given "item-like",
/// that is within a hir::Item, hir::TraitItem, or hir::ImplItem. There is no
/// guarantee that the numerical value of a given `ItemLocalId` corresponds to
/// the node's position within the owning item in any way, but there is a
/// guarantee that the `ItemLocalId`s within an owner occupy a dense range of
/// integers starting at zero, so a mapping that maps all or most nodes within
/// an "item-like" to something else can be implement by a `Vec` instead of a
/// tree or hash map.
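///
/// A rough sketch of why the dense range matters (the side table below is hypothetical,
/// not compiler code):
///
/// ```ignore
/// // A per-owner side table can be a plain `Vec` indexed by the local id:
/// let entry = &per_owner_table[hir_id.local_id.as_usize()];
/// ```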
#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug,
RustcEncodable, RustcDecodable)]
pub struct ItemLocalId(pub u32);
impl ItemLocalId {
pub fn as_usize(&self) -> usize {
self.0 as usize
}
}
impl indexed_vec::Idx for ItemLocalId {
fn new(idx: usize) -> Self {
debug_assert!((idx as u32) as usize == idx);
ItemLocalId(idx as u32)
}
fn index(self) -> usize {
self.0 as usize
}
}
/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX
pub const CRATE_HIR_ID: HirId = HirId {
owner: CRATE_DEF_INDEX,
local_id: ItemLocalId(0)
};
pub const DUMMY_HIR_ID: HirId = HirId {
owner: CRATE_DEF_INDEX,
local_id: ItemLocalId(!0)
};
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
pub span: Span,
/// Either "'a", referring to a named lifetime definition,
/// or "" (aka keywords::Invalid), for elision placeholders.
///
/// HIR lowering inserts these placeholders in type paths that
/// refer to type definitions needing lifetime parameters,
/// `&T` and `&mut T`, and trait objects without `... + 'a`.
pub name: Name,
}
impl fmt::Debug for Lifetime {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"lifetime({}: {})",
self.id,
print::to_string(print::NO_ANN, |s| s.print_lifetime(self)))
}
}
impl Lifetime {
pub fn is_elided(&self) -> bool {
self.name == keywords::Invalid.name()
}
pub fn is_static(&self) -> bool {
self.name == keywords::StaticLifetime.name()
}
}
/// A lifetime definition, eg `'a: 'b+'c+'d`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct LifetimeDef {
pub lifetime: Lifetime,
pub bounds: HirVec<Lifetime>,
pub pure_wrt_drop: bool,
}
/// A "Path" is essentially Rust's notion of a name; for instance:
/// std::cmp::PartialEq. It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Path {
pub span: Span,
/// The definition that the path resolved to.
pub def: Def,
/// The segments in the path: the things separated by `::`.
pub segments: HirVec<PathSegment>,
}
impl Path {
pub fn is_global(&self) -> bool {
!self.segments.is_empty() && self.segments[0].name == keywords::CrateRoot.name()
}
}
impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})",
print::to_string(print::NO_ANN, |s| s.print_path(self, false)))
}
}
/// A segment of a path: an identifier, an optional lifetime, and a set of
/// types.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
pub name: Name,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
/// this is more than just simple syntactic sugar; the use of
/// parens affects the region binding rules, so we preserve the
/// distinction.
pub parameters: PathParameters,
}
impl PathSegment {
/// Convert an identifier to the corresponding segment.
pub fn from_name(name: Name) -> PathSegment {
PathSegment {
name: name,
parameters: PathParameters::none()
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PathParameters {
/// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
AngleBracketedParameters(AngleBracketedParameterData),
/// The `(A,B)` and `C` in `Foo(A,B) -> C`
ParenthesizedParameters(ParenthesizedParameterData),
}
impl PathParameters {
pub fn none() -> PathParameters {
AngleBracketedParameters(AngleBracketedParameterData {
lifetimes: HirVec::new(),
types: HirVec::new(),
infer_types: true,
bindings: HirVec::new(),
})
}
/// Returns the types that the user wrote. Note that these do not necessarily map to the type
/// parameters in the parenthesized case.
pub fn types(&self) -> HirVec<&P<Ty>> {
match *self {
AngleBracketedParameters(ref data) => {
data.types.iter().collect()
}
ParenthesizedParameters(ref data) => {
data.inputs
.iter()
.chain(data.output.iter())
.collect()
}
}
}
pub fn lifetimes(&self) -> HirVec<&Lifetime> {
match *self {
AngleBracketedParameters(ref data) => {
data.lifetimes.iter().collect()
}
ParenthesizedParameters(_) => {
HirVec::new()
}
}
}
pub fn bindings(&self) -> HirVec<&TypeBinding> {
match *self {
AngleBracketedParameters(ref data) => {
data.bindings.iter().collect()
}
ParenthesizedParameters(_) => {
HirVec::new()
}
}
}
}
/// A path like `Foo<'a, T>`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct AngleBracketedParameterData {
/// The lifetime parameters for this path segment.
pub lifetimes: HirVec<Lifetime>,
/// The type parameters for this path segment, if present.
pub types: HirVec<P<Ty>>,
/// Whether to infer remaining type parameters, if any.
/// This only applies to expression and pattern paths, and
/// out of those only the segments with no type parameters
/// to begin with, e.g. `Vec::new` is `<Vec<..>>::new::<..>`.
pub infer_types: bool,
/// Bindings (equality constraints) on associated types, if present.
/// E.g., `Foo<A=Bar>`.
pub bindings: HirVec<TypeBinding>,
}
/// A path like `Foo(A,B) -> C`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ParenthesizedParameterData {
/// Overall span
pub span: Span,
/// `(A,B)`
pub inputs: HirVec<P<Ty>>,
/// `C`
pub output: Option<P<Ty>>,
}
/// The AST represents all type param bounds as types.
/// typeck::collect::compute_bounds matches these against
/// the "special" built-in traits (see middle::lang_items) and
/// detects Copy, Send and Sync.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TyParamBound {
TraitTyParamBound(PolyTraitRef, TraitBoundModifier),
RegionTyParamBound(Lifetime),
}
/// A modifier on a bound, currently this is only used for `?Sized`, where the
/// modifier is `Maybe`. Negative bounds should also be handled here.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitBoundModifier {
None,
Maybe,
}
pub type TyParamBounds = HirVec<TyParamBound>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TyParam {
pub name: Name,
pub id: NodeId,
pub bounds: TyParamBounds,
pub default: Option<P<Ty>>,
pub span: Span,
pub pure_wrt_drop: bool,
}
/// Represents lifetimes and type parameters attached to a declaration
/// of a function, enum, trait, etc.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Generics {
pub lifetimes: HirVec<LifetimeDef>,
pub ty_params: HirVec<TyParam>,
pub where_clause: WhereClause,
pub span: Span,
}
impl Generics {
pub fn empty() -> Generics {
Generics {
lifetimes: HirVec::new(),
ty_params: HirVec::new(),
where_clause: WhereClause {
id: DUMMY_NODE_ID,
predicates: HirVec::new(),
},
span: DUMMY_SP,
}
}
pub fn is_lt_parameterized(&self) -> bool {
!self.lifetimes.is_empty()
}
pub fn is_type_parameterized(&self) -> bool {
!self.ty_params.is_empty()
}
pub fn is_parameterized(&self) -> bool {
self.is_lt_parameterized() || self.is_type_parameterized()
}
}
pub enum UnsafeGeneric {
Region(LifetimeDef, &'static str),
Type(TyParam, &'static str),
}
impl UnsafeGeneric {
pub fn attr_name(&self) -> &'static str {
match *self {
UnsafeGeneric::Region(_, s) => s,
UnsafeGeneric::Type(_, s) => s,
}
}
}
impl Generics {
pub fn carries_unsafe_attr(&self) -> Option<UnsafeGeneric> {
for r in &self.lifetimes {
if r.pure_wrt_drop {
return Some(UnsafeGeneric::Region(r.clone(), "may_dangle"));
}
}
for t in &self.ty_params {
if t.pure_wrt_drop {
return Some(UnsafeGeneric::Type(t.clone(), "may_dangle"));
}
}
return None;
}
}
/// A `where` clause in a definition
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereClause {
pub id: NodeId,
pub predicates: HirVec<WherePredicate>,
}
/// A single predicate in a `where` clause
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum WherePredicate {
    /// A type bound, eg `for<'c> Foo: Send+Clone+'c`
BoundPredicate(WhereBoundPredicate),
/// A lifetime predicate, e.g. `'a: 'b+'c`
RegionPredicate(WhereRegionPredicate),
/// An equality predicate (unsupported)
EqPredicate(WhereEqPredicate),
}
/// A type bound, eg `for<'c> Foo: Send+Clone+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
/// Any lifetimes from a `for` binding
pub bound_lifetimes: HirVec<LifetimeDef>,
/// The type being bounded
pub bounded_ty: P<Ty>,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub bounds: TyParamBounds,
}
/// A lifetime predicate, e.g. `'a: 'b+'c`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereRegionPredicate {
pub span: Span,
pub lifetime: Lifetime,
pub bounds: HirVec<Lifetime>,
}
/// An equality predicate (unsupported), e.g. `T=int`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct WhereEqPredicate {
pub id: NodeId,
pub span: Span,
pub lhs_ty: P<Ty>,
pub rhs_ty: P<Ty>,
}
pub type CrateConfig = HirVec<P<MetaItem>>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {
pub module: Mod,
pub attrs: HirVec<Attribute>,
pub span: Span,
pub exported_macros: HirVec<MacroDef>,
// NB: We use a BTreeMap here so that `visit_all_items` iterates
// over the ids in increasing order. In principle it should not
// matter what order we visit things in, but in *practice* it
// does, because it can affect the order in which errors are
// detected, which in turn can make compile-fail tests yield
// slightly different results.
pub items: BTreeMap<NodeId, Item>,
pub trait_items: BTreeMap<TraitItemId, TraitItem>,
pub impl_items: BTreeMap<ImplItemId, ImplItem>,
pub bodies: BTreeMap<BodyId, Body>,
pub trait_impls: BTreeMap<DefId, Vec<NodeId>>,
pub trait_default_impl: BTreeMap<DefId, NodeId>,
/// A list of the body ids written out in the order in which they
/// appear in the crate. If you're going to process all the bodies
/// in the crate, you should iterate over this list rather than the keys
/// of bodies.
pub body_ids: Vec<BodyId>,
}
impl Crate {
pub fn item(&self, id: NodeId) -> &Item {
&self.items[&id]
}
pub fn trait_item(&self, id: TraitItemId) -> &TraitItem {
&self.trait_items[&id]
}
pub fn impl_item(&self, id: ImplItemId) -> &ImplItem {
&self.impl_items[&id]
}
    /// Visits all items in the crate in some deterministic (but
/// unspecified) order. If you just need to process every item,
/// but don't care about nesting, this method is the best choice.
///
/// If you do care about nesting -- usually because your algorithm
/// follows lexical scoping rules -- then you want a different
/// approach. You should override `visit_nested_item` in your
/// visitor and then call `intravisit::walk_crate` instead.
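    ///
    /// A minimal sketch of a flat visitor (the `Counter` type is hypothetical):
    ///
    /// ```ignore
    /// struct Counter { items: usize }
    ///
    /// impl<'hir> itemlikevisit::ItemLikeVisitor<'hir> for Counter {
    ///     fn visit_item(&mut self, _: &'hir Item) { self.items += 1; }
    ///     fn visit_trait_item(&mut self, _: &'hir TraitItem) {}
    ///     fn visit_impl_item(&mut self, _: &'hir ImplItem) {}
    /// }
    ///
    /// krate.visit_all_item_likes(&mut Counter { items: 0 });
    /// ```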
pub fn visit_all_item_likes<'hir, V>(&'hir self, visitor: &mut V)
where V: itemlikevisit::ItemLikeVisitor<'hir>
{
for (_, item) in &self.items {
visitor.visit_item(item);
}
for (_, trait_item) in &self.trait_items {
visitor.visit_trait_item(trait_item);
}
for (_, impl_item) in &self.impl_items {
visitor.visit_impl_item(impl_item);
}
}
pub fn body(&self, id: BodyId) -> &Body {
&self.bodies[&id]
}
}
/// A macro definition, in this crate or imported from another.
///
/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MacroDef {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub id: NodeId,
pub span: Span,
pub body: TokenStream,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Block {
/// Statements in a block
pub stmts: HirVec<Stmt>,
/// An expression at the end of the block
/// without a semicolon, if any
pub expr: Option<P<Expr>>,
pub id: NodeId,
/// Distinguishes between `unsafe { ... }` and `{ ... }`
pub rules: BlockCheckMode,
pub span: Span,
/// If true, then there may exist `break 'a` values that aim to
/// break out of this block early. As of this writing, this is not
/// currently permitted in Rust itself, but it is generated as
/// part of `catch` statements.
pub targeted_by_break: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Pat {
pub id: NodeId,
pub node: PatKind,
pub span: Span,
}
impl fmt::Debug for Pat {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "pat({}: {})", self.id,
print::to_string(print::NO_ANN, |s| s.print_pat(self)))
}
}
impl Pat {
// FIXME(#19596) this is a workaround, but there should be a better way
fn walk_<G>(&self, it: &mut G) -> bool
where G: FnMut(&Pat) -> bool
{
if !it(self) {
return false;
}
match self.node {
PatKind::Binding(.., Some(ref p)) => p.walk_(it),
PatKind::Struct(_, ref fields, _) => {
fields.iter().all(|field| field.node.pat.walk_(it))
}
PatKind::TupleStruct(_, ref s, _) | PatKind::Tuple(ref s, _) => {
s.iter().all(|p| p.walk_(it))
}
PatKind::Box(ref s) | PatKind::Ref(ref s, _) => {
s.walk_(it)
}
PatKind::Slice(ref before, ref slice, ref after) => {
before.iter().all(|p| p.walk_(it)) &&
slice.iter().all(|p| p.walk_(it)) &&
after.iter().all(|p| p.walk_(it))
}
PatKind::Wild |
PatKind::Lit(_) |
PatKind::Range(..) |
PatKind::Binding(..) |
PatKind::Path(_) => {
true
}
}
}
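    /// Walks the pattern and all of its sub-patterns depth-first, calling `it` for
    /// each one; returning `false` from the closure stops the walk early.
    ///
    /// A small sketch (the counter is hypothetical):
    ///
    /// ```ignore
    /// let mut bindings = 0;
    /// pat.walk(|p| {
    ///     if let PatKind::Binding(..) = p.node {
    ///         bindings += 1;
    ///     }
    ///     true // keep walking
    /// });
    /// ```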
pub fn walk<F>(&self, mut it: F) -> bool
where F: FnMut(&Pat) -> bool
{
self.walk_(&mut it)
}
}
/// A single field in a struct pattern
///
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
/// are treated the same as `x: x, y: ref y, z: ref mut z`,
/// except that `is_shorthand` is true
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FieldPat {
/// The identifier for the field
pub name: Name,
/// The pattern the field is destructured to
pub pat: P<Pat>,
pub is_shorthand: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BindingMode {
BindByRef(Mutability),
BindByValue(Mutability),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum RangeEnd {
Included,
Excluded,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum PatKind {
/// Represents a wildcard pattern (`_`)
Wild,
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
/// The `DefId` is for the definition of the variable being bound.
Binding(BindingMode, DefId, Spanned<Name>, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
Path(QPath),
/// A tuple pattern `(a, b)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
/// 0 <= position <= subpats.len()
Tuple(HirVec<P<Pat>>, Option<usize>),
/// A `box` pattern
Box(P<Pat>),
/// A reference pattern, e.g. `&mut (a, b)`
Ref(P<Pat>, Mutability),
/// A literal
Lit(P<Expr>),
/// A range pattern, e.g. `1...2` or `1..2`
Range(P<Expr>, P<Expr>, RangeEnd),
/// `[a, b, ..i, y, z]` is represented as:
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum Mutability {
MutMutable,
MutImmutable,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BinOp_ {
/// The `+` operator (addition)
BiAdd,
/// The `-` operator (subtraction)
BiSub,
/// The `*` operator (multiplication)
BiMul,
/// The `/` operator (division)
BiDiv,
/// The `%` operator (modulus)
BiRem,
/// The `&&` operator (logical and)
BiAnd,
/// The `||` operator (logical or)
BiOr,
/// The `^` operator (bitwise xor)
BiBitXor,
/// The `&` operator (bitwise and)
BiBitAnd,
/// The `|` operator (bitwise or)
BiBitOr,
/// The `<<` operator (shift left)
BiShl,
/// The `>>` operator (shift right)
BiShr,
/// The `==` operator (equality)
BiEq,
/// The `<` operator (less than)
BiLt,
/// The `<=` operator (less than or equal to)
BiLe,
/// The `!=` operator (not equal to)
BiNe,
/// The `>=` operator (greater than or equal to)
BiGe,
/// The `>` operator (greater than)
BiGt,
}
impl BinOp_ {
pub fn as_str(self) -> &'static str {
match self {
BiAdd => "+",
BiSub => "-",
BiMul => "*",
BiDiv => "/",
BiRem => "%",
BiAnd => "&&",
BiOr => "||",
BiBitXor => "^",
BiBitAnd => "&",
BiBitOr => "|",
BiShl => "<<",
BiShr => ">>",
BiEq => "==",
BiLt => "<",
BiLe => "<=",
BiNe => "!=",
BiGe => ">=",
BiGt => ">",
}
}
pub fn is_lazy(self) -> bool {
match self {
BiAnd | BiOr => true,
_ => false,
}
}
pub fn is_shift(self) -> bool {
match self {
BiShl | BiShr => true,
_ => false,
}
}
pub fn is_comparison(self) -> bool {
match self {
BiEq | BiLt | BiLe | BiNe | BiGt | BiGe => true,
BiAnd |
BiOr |
BiAdd |
BiSub |
BiMul |
BiDiv |
BiRem |
BiBitXor |
BiBitAnd |
BiBitOr |
BiShl |
BiShr => false,
}
}
/// Returns `true` if the binary operator takes its arguments by value
pub fn is_by_value(self) -> bool {
!self.is_comparison()
}
}
pub type BinOp = Spanned<BinOp_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnOp {
/// The `*` operator for dereferencing
UnDeref,
/// The `!` operator for logical inversion
UnNot,
/// The `-` operator for negation
UnNeg,
}
impl UnOp {
pub fn as_str(self) -> &'static str {
match self {
UnDeref => "*",
UnNot => "!",
UnNeg => "-",
}
}
/// Returns `true` if the unary operator takes its argument by value
pub fn is_by_value(self) -> bool {
match self {
UnNeg | UnNot => true,
_ => false,
}
}
}
/// A statement
pub type Stmt = Spanned<Stmt_>;
impl fmt::Debug for Stmt_ {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// Sadness.
let spanned = codemap::dummy_spanned(self.clone());
write!(f,
"stmt({}: {})",
spanned.node.id(),
print::to_string(print::NO_ANN, |s| s.print_stmt(&spanned)))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum Stmt_ {
/// Could be an item or a local (let) binding:
StmtDecl(P<Decl>, NodeId),
/// Expr without trailing semi-colon (must have unit type):
StmtExpr(P<Expr>, NodeId),
/// Expr with trailing semi-colon (may have any type):
StmtSemi(P<Expr>, NodeId),
}
impl Stmt_ {
pub fn attrs(&self) -> &[Attribute] {
match *self {
StmtDecl(ref d, _) => d.node.attrs(),
StmtExpr(ref e, _) |
StmtSemi(ref e, _) => &e.attrs,
}
}
pub fn id(&self) -> NodeId {
match *self {
StmtDecl(_, id) => id,
StmtExpr(_, id) => id,
StmtSemi(_, id) => id,
}
}
}
// FIXME (pending discussion of #1697, #2178...): local should really be
// a refinement on pat.
/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Local {
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
/// Initializer expression to set the value, if any
pub init: Option<P<Expr>>,
pub id: NodeId,
pub span: Span,
pub attrs: ThinVec<Attribute>,
}
pub type Decl = Spanned<Decl_>;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Decl_ {
/// A local (let) binding:
DeclLocal(P<Local>),
/// An item binding:
DeclItem(ItemId),
}
impl Decl_ {
pub fn attrs(&self) -> &[Attribute] {
match *self {
DeclLocal(ref l) => &l.attrs,
DeclItem(_) => &[]
}
}
}
/// represents one arm of a 'match'
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arm {
pub attrs: HirVec<Attribute>,
pub pats: HirVec<P<Pat>>,
pub guard: Option<P<Expr>>,
pub body: P<Expr>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Field {
pub name: Spanned<Name>,
pub expr: P<Expr>,
pub span: Span,
pub is_shorthand: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BlockCheckMode {
DefaultBlock,
UnsafeBlock(UnsafeSource),
PushUnsafeBlock(UnsafeSource),
PopUnsafeBlock(UnsafeSource),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum UnsafeSource {
CompilerGenerated,
UserProvided,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BodyId {
pub node_id: NodeId,
}
/// The body of a function or constant value.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Body {
pub arguments: HirVec<Arg>,
pub value: Expr
}
impl Body {
pub fn id(&self) -> BodyId {
BodyId {
node_id: self.value.id
}
}
}
/// An expression
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Expr {
pub id: NodeId,
pub span: Span,
pub node: Expr_,
pub attrs: ThinVec<Attribute>,
}
impl fmt::Debug for Expr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "expr({}: {})", self.id,
print::to_string(print::NO_ANN, |s| s.print_expr(self)))
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Expr_ {
/// A `box x` expression.
ExprBox(P<Expr>),
/// An array (`[a, b, c, d]`)
ExprArray(HirVec<Expr>),
/// A function call
///
/// The first field resolves to the function itself (usually an `ExprPath`),
/// and the second field is the list of arguments
ExprCall(P<Expr>, HirVec<Expr>),
/// A method call (`x.foo::<Bar, Baz>(a, b, c, d)`)
///
/// The `Spanned<Name>` is the identifier for the method name.
/// The vector of `Ty`s are the ascripted type parameters for the method
/// (within the angle brackets).
///
/// The first element of the vector of `Expr`s is the expression that
/// evaluates to the object on which the method is being called on (the
/// receiver), and the remaining elements are the rest of the arguments.
///
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprMethodCall(foo, [Bar, Baz], [x, a, b, c, d])`.
ExprMethodCall(Spanned<Name>, HirVec<P<Ty>>, HirVec<Expr>),
/// A tuple (`(a, b, c ,d)`)
ExprTup(HirVec<Expr>),
/// A binary operation (For example: `a + b`, `a * b`)
ExprBinary(BinOp, P<Expr>, P<Expr>),
/// A unary operation (For example: `!x`, `*x`)
ExprUnary(UnOp, P<Expr>),
/// A literal (For example: `1`, `"foo"`)
ExprLit(P<Lit>),
/// A cast (`foo as f64`)
ExprCast(P<Expr>, P<Ty>),
ExprType(P<Expr>, P<Ty>),
/// An `if` block, with an optional else block
///
/// `if expr { expr } else { expr }`
ExprIf(P<Expr>, P<Expr>, Option<P<Expr>>),
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>),
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
/// A closure (for example, `move |a, b, c| {a + b + c}`).
///
/// The final span is the span of the argument block `|...|`
ExprClosure(CaptureClause, P<FnDecl>, BodyId, Span),
/// A block (`{ ... }`)
ExprBlock(P<Block>),
/// An assignment (`a = foo()`)
ExprAssign(P<Expr>, P<Expr>),
/// An assignment with an operator
///
/// For example, `a += 1`.
ExprAssignOp(BinOp, P<Expr>, P<Expr>),
/// Access of a named struct field (`obj.foo`)
ExprField(P<Expr>, Spanned<Name>),
/// Access of an unnamed field of a struct or tuple-struct
///
/// For example, `foo.0`.
ExprTupField(P<Expr>, Spanned<usize>),
/// An indexing operation (`foo[2]`)
ExprIndex(P<Expr>, P<Expr>),
/// Path to a definition, possibly containing lifetime or type parameters.
ExprPath(QPath),
/// A referencing operation (`&a` or `&mut a`)
ExprAddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break
ExprBreak(Destination, Option<P<Expr>>),
/// A `continue`, with an optional label
ExprAgain(Destination),
/// A `return`, with an optional value to be returned
ExprRet(Option<P<Expr>>),
/// Inline assembly (from `asm!`), with its outputs and inputs.
ExprInlineAsm(P<InlineAsm>, HirVec<Expr>, HirVec<Expr>),
/// A struct or struct-like variant literal expression.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
ExprStruct(QPath, HirVec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
/// For example, `[1; 5]`. The first expression is the element
/// to be repeated; the second is the number of times to repeat it.
ExprRepeat(P<Expr>, BodyId),
}
/// Optionally `Self`-qualified value/type path or associated extension.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum QPath {
/// Path to a definition, optionally "fully-qualified" with a `Self`
/// type, if the path points to an associated item in a trait.
///
/// E.g. an unqualified path like `Clone::clone` has `None` for `Self`,
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
/// even though they both have the same two-segment `Clone::clone` `Path`.
Resolved(Option<P<Ty>>, P<Path>),
/// Type-related paths, e.g. `<T>::default` or `<T>::Output`.
/// Will be resolved by type-checking to an associated item.
///
/// UFCS source paths can desugar into this, with `Vec::new` turning into
/// `<Vec>::new`, and `T::X::Y::method` into `<<<T>::X>::Y>::method`,
/// the `X` and `Y` nodes each being a `TyPath(QPath::TypeRelative(..))`.
TypeRelative(P<Ty>, P<PathSegment>)
}
/// Hints at the original code for a `match _ { .. }`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum MatchSource {
/// A `match _ { .. }`
Normal,
/// An `if let _ = _ { .. }` (optionally with `else { .. }`)
IfLetDesugar {
contains_else_clause: bool,
},
/// A `while let _ = _ { .. }` (which was desugared to a
/// `loop { match _ { .. } }`)
WhileLetDesugar,
/// A desugared `for _ in _ { .. }` loop
ForLoopDesugar,
/// A desugared `?` operator
TryDesugar,
}
/// The loop type that yielded an ExprLoop
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopSource {
/// A `loop { .. }` loop
Loop,
/// A `while let _ = _ { .. }` loop
WhileLet,
/// A `for _ in _ { .. }` loop
ForLoop,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdError {
OutsideLoopScope,
UnlabeledCfInWhileCondition,
UnresolvedLabel,
}
impl fmt::Display for LoopIdError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(match *self {
LoopIdError::OutsideLoopScope => "not inside loop scope",
LoopIdError::UnlabeledCfInWhileCondition =>
"unlabeled control flow (break or continue) in while condition",
LoopIdError::UnresolvedLabel => "label not found",
}, f)
}
}
// FIXME(cramertj) this should use `Result` once master compiles w/ a version of Rust where
// `Result` implements `Encodable`/`Decodable`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum LoopIdResult {
Ok(NodeId),
Err(LoopIdError),
}
impl Into<Result<NodeId, LoopIdError>> for LoopIdResult {
fn into(self) -> Result<NodeId, LoopIdError> {
match self {
LoopIdResult::Ok(ok) => Ok(ok),
LoopIdResult::Err(err) => Err(err),
}
}
}
impl From<Result<NodeId, LoopIdError>> for LoopIdResult {
fn from(res: Result<NodeId, LoopIdError>) -> Self {
match res {
Ok(ok) => LoopIdResult::Ok(ok),
Err(err) => LoopIdResult::Err(err),
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum ScopeTarget {
Block(NodeId),
Loop(LoopIdResult),
}
impl ScopeTarget {
pub fn opt_id(self) -> Option<NodeId> {
match self {
ScopeTarget::Block(node_id) |
ScopeTarget::Loop(LoopIdResult::Ok(node_id)) => Some(node_id),
ScopeTarget::Loop(LoopIdResult::Err(_)) => None,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Destination {
    // This is `Some(_)` iff there is an explicit user-specified `label`.
pub ident: Option<Spanned<Ident>>,
// These errors are caught and then reported during the diagnostics pass in
// librustc_passes/loops.rs
pub target_id: ScopeTarget,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum CaptureClause {
CaptureByValue,
CaptureByRef,
}
// NB: If you change this, you'll probably want to change the corresponding
// type structure in middle/ty.rs as well.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MutTy {
pub ty: P<Ty>,
pub mutbl: Mutability,
}
/// Represents a method's signature in a trait declaration or implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MethodSig {
pub unsafety: Unsafety,
pub constness: Constness,
pub abi: Abi,
pub decl: P<FnDecl>,
pub generics: Generics,
}
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the node-id of the item
// so it can be fetched later.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemId {
pub node_id: NodeId,
}
/// Represents an item declaration within a trait declaration,
/// possibly including a default implementation. A trait item is
/// either required (meaning it doesn't have an implementation, just a
/// signature) or provided (meaning it has a default implementation).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItem {
pub id: NodeId,
pub name: Name,
pub attrs: HirVec<Attribute>,
pub node: TraitItemKind,
pub span: Span,
}
/// A trait method's body (or just argument names).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitMethod {
/// No default body in the trait, just a signature.
Required(HirVec<Spanned<Name>>),
/// Both signature and body are provided in the trait.
Provided(BodyId),
}
/// Represents a trait method or associated constant or type
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitItemKind {
/// An associated constant with an optional value (otherwise `impl`s
/// must contain a value)
Const(P<Ty>, Option<BodyId>),
/// A method with an optional body
Method(MethodSig, TraitMethod),
/// An associated type with (possibly empty) bounds and optional concrete
/// type
Type(TyParamBounds, Option<P<Ty>>),
}
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the node-id of the item
// so it can be fetched later.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemId {
pub node_id: NodeId,
}
/// Represents anything within an `impl` block
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItem {
pub id: NodeId,
pub name: Name,
pub vis: Visibility,
pub defaultness: Defaultness,
pub attrs: HirVec<Attribute>,
pub node: ImplItemKind,
pub span: Span,
}
/// Represents different contents within `impl`s
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ImplItemKind {
/// An associated constant of the given type, set to the constant result
/// of the expression
Const(P<Ty>, BodyId),
/// A method implementation with the given signature and body
Method(MethodSig, BodyId),
/// An associated type
Type(P<Ty>),
}
// Bind a type to an associated type: `A=Foo`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeBinding {
pub id: NodeId,
pub name: Name,
pub ty: P<Ty>,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Ty {
pub id: NodeId,
pub node: Ty_,
pub span: Span,
}
impl fmt::Debug for Ty {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "type({})",
print::to_string(print::NO_ANN, |s| s.print_type(self)))
}
}
/// Not represented directly in the AST, referred to by name through a ty_path.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PrimTy {
TyInt(IntTy),
TyUint(UintTy),
TyFloat(FloatTy),
TyStr,
TyBool,
TyChar,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BareFnTy {
pub unsafety: Unsafety,
pub abi: Abi,
pub lifetimes: HirVec<LifetimeDef>,
pub decl: P<FnDecl>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The different kinds of types recognized by the compiler
pub enum Ty_ {
/// A variable length slice (`[T]`)
TySlice(P<Ty>),
/// A fixed length array (`[T; n]`)
TyArray(P<Ty>, BodyId),
/// A raw pointer (`*const T` or `*mut T`)
TyPtr(MutTy),
/// A reference (`&'a T` or `&'a mut T`)
TyRptr(Lifetime, MutTy),
/// A bare function (e.g. `fn(usize) -> bool`)
TyBareFn(P<BareFnTy>),
/// The never type (`!`)
TyNever,
/// A tuple (`(A, B, C, D,...)`)
TyTup(HirVec<P<Ty>>),
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
///
/// Type parameters may be stored in each `PathSegment`.
TyPath(QPath),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TyTraitObject(HirVec<PolyTraitRef>, Lifetime),
/// An `impl Bound1 + Bound2 + Bound3` type
/// where `Bound` is a trait or a lifetime.
TyImplTrait(TyParamBounds),
/// Unused for now
TyTypeof(BodyId),
/// TyInfer means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
TyInfer,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct InlineAsmOutput {
pub constraint: Symbol,
pub is_rw: bool,
pub is_indirect: bool,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct InlineAsm {
pub asm: Symbol,
pub asm_str_style: StrStyle,
pub outputs: HirVec<InlineAsmOutput>,
pub inputs: HirVec<Symbol>,
pub clobbers: HirVec<Symbol>,
pub volatile: bool,
pub alignstack: bool,
pub dialect: AsmDialect,
pub ctxt: SyntaxContext,
}
/// represents an argument in a function header
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Arg {
pub pat: P<Pat>,
pub id: NodeId,
}
/// Represents the header (not the body) of a function declaration
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FnDecl {
pub inputs: HirVec<P<Ty>>,
pub output: FunctionRetTy,
pub variadic: bool,
    /// True if this function has a `self`, `&self` or `&mut self` receiver
/// (but not a `self: Xxx` one).
pub has_implicit_self: bool,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Unsafety {
Unsafe,
Normal,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Constness {
Const,
NotConst,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Defaultness {
Default { has_value: bool },
Final,
}
impl Defaultness {
pub fn has_value(&self) -> bool {
match *self {
Defaultness::Default { has_value, .. } => has_value,
Defaultness::Final => true,
}
}
pub fn is_final(&self) -> bool {
*self == Defaultness::Final
}
pub fn is_default(&self) -> bool {
match *self {
Defaultness::Default { .. } => true,
_ => false,
}
}
}
impl fmt::Display for Unsafety {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(match *self {
Unsafety::Normal => "normal",
Unsafety::Unsafe => "unsafe",
},
f)
}
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub enum ImplPolarity {
/// `impl Trait for Type`
Positive,
/// `impl !Trait for Type`
Negative,
}
impl fmt::Debug for ImplPolarity {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ImplPolarity::Positive => "positive".fmt(f),
ImplPolarity::Negative => "negative".fmt(f),
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum FunctionRetTy {
/// Return type is not specified.
///
/// Functions default to `()` and
/// closures default to inference. Span points to where return
/// type would be inserted.
DefaultReturn(Span),
/// Everything else
Return(P<Ty>),
}
impl FunctionRetTy {
pub fn span(&self) -> Span {
match *self {
DefaultReturn(span) => span,
Return(ref ty) => ty.span,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Mod {
    /// A span from the first token past `{` to the last token before `}`.
/// For `mod foo;`, the inner span ranges from the first token
/// to the last token in the external file.
pub inner: Span,
pub item_ids: HirVec<ItemId>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignMod {
pub abi: Abi,
pub items: HirVec<ForeignItem>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct EnumDef {
pub variants: HirVec<Variant>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Variant_ {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub data: VariantData,
    /// Explicit discriminant, e.g. `Foo = 1`
pub disr_expr: Option<BodyId>,
}
pub type Variant = Spanned<Variant_>;
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum UseKind {
/// One import, e.g. `use foo::bar` or `use foo::bar as baz`.
/// Also produced for each element of a list `use`, e.g.
    /// `use foo::{a, b}` lowers to `use foo::a; use foo::b;`.
Single,
/// Glob import, e.g. `use foo::*`.
Glob,
/// Degenerate list import, e.g. `use foo::{a, b}` produces
/// an additional `use foo::{}` for performing checks such as
/// unstable feature gating. May be removed in the future.
ListStem,
}
/// `TraitRef`s appear in impls.
///
/// resolve maps each TraitRef's ref_id to its defining trait; that's all
/// that the ref_id is for. Note that ref_id's value is not the NodeId of the
/// trait being referred to but just a unique NodeId that serves as a key
/// within the DefMap.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitRef {
pub path: Path,
pub ref_id: NodeId,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PolyTraitRef {
/// The `'a` in `<'a> Foo<&'a T>`
pub bound_lifetimes: HirVec<LifetimeDef>,
/// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
pub trait_ref: TraitRef,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Visibility {
Public,
Crate,
Restricted { path: P<Path>, id: NodeId },
Inherited,
}
impl Visibility {
pub fn is_pub_restricted(&self) -> bool {
use self::Visibility::*;
match self {
&Public |
&Inherited => false,
&Crate |
&Restricted { .. } => true,
}
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct StructField {
pub span: Span,
pub name: Name,
pub vis: Visibility,
pub id: NodeId,
pub ty: P<Ty>,
pub attrs: HirVec<Attribute>,
}
impl StructField {
    // Still necessary in a couple of places
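    // Positional (tuple-struct / tuple-variant) fields are named "0", "1", ...,
    // so a leading ASCII digit identifies a positional field.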
pub fn is_positional(&self) -> bool {
let first = self.name.as_str().as_bytes()[0];
first >= b'0' && first <= b'9'
}
}
/// Fields and Ids of enum variants and structs
///
/// For enum variants: `NodeId` represents both an Id of the variant itself (relevant for all
/// variant kinds) and an Id of the variant's constructor (not relevant for `Struct`-variants).
/// One shared Id can be successfully used for these two purposes.
/// Id of the whole enum lives in `Item`.
///
/// For structs: `NodeId` represents an Id of the structure's constructor, so it is not actually
/// used for `Struct`-structs (but is still present). Structures don't have an analogue of
/// the variant itself" from enum variants.
/// Id of the whole struct lives in `Item`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum VariantData {
Struct(HirVec<StructField>, NodeId),
Tuple(HirVec<StructField>, NodeId),
Unit(NodeId),
}
impl VariantData {
pub fn fields(&self) -> &[StructField] {
match *self {
VariantData::Struct(ref fields, _) | VariantData::Tuple(ref fields, _) => fields,
_ => &[],
}
}
pub fn id(&self) -> NodeId {
match *self {
VariantData::Struct(_, id) | VariantData::Tuple(_, id) | VariantData::Unit(id) => id,
}
}
pub fn is_struct(&self) -> bool {
if let VariantData::Struct(..) = *self {
true
} else {
false
}
}
pub fn is_tuple(&self) -> bool {
if let VariantData::Tuple(..) = *self {
true
} else {
false
}
}
pub fn is_unit(&self) -> bool {
if let VariantData::Unit(..) = *self {
true
} else {
false
}
}
}
// The bodies for items are stored "out of line", in a separate
// hashmap in the `Crate`. Here we just record the node-id of the item
// so it can be fetched later.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ItemId {
pub id: NodeId,
}
/// An item
///
/// The name might be a dummy name in case of anonymous items
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Item {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub id: NodeId,
pub node: Item_,
pub vis: Visibility,
pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum Item_ {
    /// An `extern crate` item, with optional original crate name,
///
/// e.g. `extern crate foo` or `extern crate foo_bar as foo`
ItemExternCrate(Option<Name>),
/// `use foo::bar::*;` or `use foo::bar::baz as quux;`
///
/// or just
///
/// `use foo::bar::baz;` (with `as baz` implicitly on the right)
ItemUse(P<Path>, UseKind),
/// A `static` item
ItemStatic(P<Ty>, Mutability, BodyId),
/// A `const` item
ItemConst(P<Ty>, BodyId),
/// A function declaration
ItemFn(P<FnDecl>, Unsafety, Constness, Abi, Generics, BodyId),
/// A module
ItemMod(Mod),
/// An external module
ItemForeignMod(ForeignMod),
/// A type alias, e.g. `type Foo = Bar<u8>`
ItemTy(P<Ty>, Generics),
/// An enum definition, e.g. `enum Foo<A, B> {C<A>, D<B>}`
ItemEnum(EnumDef, Generics),
/// A struct definition, e.g. `struct Foo<A> {x: A}`
ItemStruct(VariantData, Generics),
/// A union definition, e.g. `union Foo<A, B> {x: A, y: B}`
ItemUnion(VariantData, Generics),
/// Represents a Trait Declaration
ItemTrait(Unsafety, Generics, TyParamBounds, HirVec<TraitItemRef>),
    /// Default trait implementations
///
/// `impl Trait for .. {}`
ItemDefaultImpl(Unsafety, TraitRef),
    /// An implementation, e.g. `impl<A> Trait for Foo { .. }`
ItemImpl(Unsafety,
ImplPolarity,
Generics,
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
HirVec<ImplItemRef>),
}
impl Item_ {
pub fn descriptive_variant(&self) -> &str {
match *self {
ItemExternCrate(..) => "extern crate",
ItemUse(..) => "use",
ItemStatic(..) => "static item",
ItemConst(..) => "constant item",
ItemFn(..) => "function",
ItemMod(..) => "module",
ItemForeignMod(..) => "foreign module",
ItemTy(..) => "type alias",
ItemEnum(..) => "enum",
ItemStruct(..) => "struct",
ItemUnion(..) => "union",
ItemTrait(..) => "trait",
ItemImpl(..) |
ItemDefaultImpl(..) => "item",
}
}
}
/// A reference from a trait to one of its associated items. This
/// contains the item's id, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
/// passes to find the impl they want without loading the id (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemRef {
pub id: TraitItemId,
pub name: Name,
pub kind: AssociatedItemKind,
pub span: Span,
pub defaultness: Defaultness,
}
/// A reference from an impl to one of its associated items. This
/// contains the item's id, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
/// passes to find the impl they want without loading the id (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemRef {
pub id: ImplItemId,
pub name: Name,
pub kind: AssociatedItemKind,
pub span: Span,
pub vis: Visibility,
pub defaultness: Defaultness,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum AssociatedItemKind {
Const,
Method { has_self: bool },
Type,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ForeignItem {
pub name: Name,
pub attrs: HirVec<Attribute>,
pub node: ForeignItem_,
pub id: NodeId,
pub span: Span,
pub vis: Visibility,
}
/// An item within an `extern` block
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ForeignItem_ {
/// A foreign function
ForeignItemFn(P<FnDecl>, HirVec<Spanned<Name>>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
ForeignItemStatic(P<Ty>, bool),
}
impl ForeignItem_ {
pub fn descriptive_variant(&self) -> &str {
match *self {
ForeignItemFn(..) => "foreign function",
ForeignItemStatic(..) => "foreign static item",
}
}
}
/// A free variable referred to in a function.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct Freevar {
/// The variable being accessed free.
pub def: Def,
// First span where it is accessed (there can be multiple).
pub span: Span
}
pub type FreevarMap = NodeMap<Vec<Freevar>>;
pub type CaptureModeMap = NodeMap<CaptureClause>;
#[derive(Clone, Debug)]
pub struct TraitCandidate {
pub def_id: DefId,
pub import_id: Option<NodeId>,
}
// Trait method resolution
pub type TraitMap = NodeMap<Vec<TraitCandidate>>;
// Map from the NodeId of a glob import to a list of items which are actually
// imported.
pub type GlobMap = NodeMap<FxHashSet<Name>>;
| 30.968767 | 99 | 0.616441 |
75d099e3937fc979caeb4b0565926374b46b280d | 371 | pub mod builtin_directives;
pub mod default_negative;
pub mod default_positive;
pub mod id_negative;
pub mod id_positive;
pub mod index;
pub mod relations_basic;
pub mod relations_consistency;
pub mod relations_legacy;
pub mod relations_negative;
pub mod relations_positive;
pub mod unique;
pub mod updated_at_negative;
pub mod updated_at_positive;
pub mod arg_parsing;
| 21.823529 | 30 | 0.83558 |
d99d3e734aa2872f7ad6c9498d8a195539fc6ff2 | 2,777 | use bevy::{
prelude::*,
render::{mesh::Indices, render_resource::PrimitiveTopology},
};
/// A cone shape.
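///
/// A minimal usage sketch (the surrounding Bevy setup, e.g. a system with
/// `meshes: ResMut<Assets<Mesh>>`, is assumed and not part of this file):
///
/// ```ignore
/// let handle = meshes.add(Mesh::from(Cone {
///     radius: 0.5,
///     height: 2.0,
///     subdivisions: 16,
/// }));
/// ```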
#[derive(Debug, Clone, Copy)]
pub struct Cone {
pub radius: f32,
pub height: f32,
pub subdivisions: usize,
}
impl Default for Cone {
fn default() -> Self {
Cone {
radius: 1.0,
height: 1.0,
subdivisions: 32,
}
}
}
impl From<Cone> for Mesh {
fn from(cone: Cone) -> Self {
// code adapted from http://apparat-engine.blogspot.com/2013/04/procedural-meshes-torus.html
// (source code at https://github.com/SEilers/Apparat)
let n_vertices = cone.subdivisions + 2;
let mut positions: Vec<[f32; 3]> = Vec::with_capacity(n_vertices);
let mut normals: Vec<[f32; 3]> = Vec::with_capacity(n_vertices);
let mut uvs: Vec<[f32; 2]> = Vec::with_capacity(n_vertices);
let side_stride = 2.0 * std::f32::consts::PI / cone.subdivisions as f32;
// Cone tip
positions.push([0.0, cone.height, 0.0]);
normals.push(Vec3::Y.into());
uvs.push([0.0, 1.0]);
// Bottom center
positions.push([0.0, 0.0, 0.0]);
normals.push(Vec3::new(0.0, -1.0, 0.0).into());
uvs.push([0.0, -1.0]);
for side in 0..=cone.subdivisions {
let phi = side_stride * side as f32;
let x = phi.cos() * cone.radius;
let y = 0.0;
let z = phi.sin() * cone.radius;
let vertex = Vec3::new(x, y, z);
let tangent = vertex.normalize().cross(Vec3::Y).normalize();
let edge = (Vec3::Y - vertex).normalize();
let normal = edge.cross(tangent).normalize();
positions.push([x, y, z]);
normals.push(normal.into());
uvs.push([side as f32 / cone.subdivisions as f32, 0.0]);
}
let n_triangles = cone.subdivisions * 2;
let n_indices = n_triangles * 3;
let mut indices: Vec<u32> = Vec::with_capacity(n_indices);
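        // Index layout emitted by the loop below: vertex 0 is the tip, vertex 1
        // is the bottom center, and ring vertices start at index 2. Each side
        // yields one lateral triangle (tip, left, right) and one base-cap
        // triangle (bottom, right, left).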
for point in 2..cone.subdivisions + 2 {
let top = 0;
let bottom = 1;
let left = point + 1;
let right = point;
indices.push(top as u32);
indices.push(left as u32);
indices.push(right as u32);
indices.push(bottom as u32);
indices.push(right as u32);
indices.push(left as u32);
}
let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);
mesh.set_indices(Some(Indices::U32(indices)));
mesh.insert_attribute(Mesh::ATTRIBUTE_POSITION, positions);
mesh.insert_attribute(Mesh::ATTRIBUTE_NORMAL, normals);
mesh.insert_attribute(Mesh::ATTRIBUTE_UV_0, uvs);
mesh
}
}
| 31.202247 | 100 | 0.552395 |
de568922b41f8b5934166e94eaf521a8333a05fb | 3,446 | use physics::{ElevatorState, MAX_JERK, MAX_ACCELERATION, MAX_VELOCITY};
use buildings::{Building, getCumulativeFloorHeight};
pub trait MotionController
{
fn init(&mut self, esp: Box<Building>, est: ElevatorState);
fn adjust(&mut self, est: &ElevatorState, dst: u64) -> f64;
}
pub struct SmoothMotionController
{
pub esp: Box<Building>,
pub timestamp: f64
}
impl MotionController for SmoothMotionController
{
fn init(&mut self, esp: Box<Building>, est: ElevatorState)
{
self.esp = esp;
self.timestamp = est.timestamp;
}
fn adjust(&mut self, est: &ElevatorState, dst: u64) -> f64
{
//5.3. Adjust motor control to process next floor request
      //time to reach max acceleration at max jerk, and max velocity at max acceleration
let t_accel = MAX_ACCELERATION / MAX_JERK;
let t_veloc = MAX_VELOCITY / MAX_ACCELERATION;
//it may take up to d meters to decelerate from current
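      //interpreting the estimate below (an assumption about the intent, not
      //spelled out here): |a|/J is the time to ramp acceleration back to zero,
      //|v|/(A/2) sheds the velocity at roughly half of max deceleration, and
      //2*A/J pads for the jerk ramps; d = |v| * decel_t then overestimates the
      //stopping distance, which errs on the safe side.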
let decel_t = if (est.velocity>0.0) == (est.acceleration>0.0) {
//this case deliberately overestimates d to prevent "back up"
(est.acceleration.abs() / MAX_JERK) +
(est.velocity.abs() / (MAX_ACCELERATION / 2.0)) +
2.0 * (MAX_ACCELERATION / MAX_JERK)
} else {
//without the MAX_JERK, this approaches infinity and decelerates way too soon
//MAX_JERK * 1s = acceleration in m/s^2
est.velocity.abs() / (MAX_JERK + est.acceleration.abs())
};
let d = est.velocity.abs() * decel_t;
let dst_height = getCumulativeFloorHeight(self.esp.get_floor_heights(), dst);
//l = distance to next floor
let l = (est.location - dst_height).abs();
let target_acceleration = {
//are we going up?
let going_up = est.location < dst_height;
//time elapsed since last poll
let dt = est.timestamp - self.timestamp;
self.timestamp = est.timestamp;
//Do not exceed maximum acceleration
if est.acceleration.abs() >= MAX_ACCELERATION {
if est.acceleration > 0.0 {
est.acceleration - (dt * MAX_JERK)
} else {
est.acceleration + (dt * MAX_JERK)
}
//Do not exceed maximum velocity
} else if est.velocity.abs() >= MAX_VELOCITY
|| (est.velocity + est.acceleration * (est.acceleration.abs() / MAX_JERK)).abs() >= MAX_VELOCITY {
if est.velocity > 0.0 {
est.acceleration - (dt * MAX_JERK)
} else {
est.acceleration + (dt * MAX_JERK)
}
//if within comfortable deceleration range and moving in right direction, decelerate
} else if l < d && (est.velocity>0.0) == going_up {
if going_up {
est.acceleration - (dt * MAX_JERK)
} else {
est.acceleration + (dt * MAX_JERK)
}
//else if not at peak velocity, accelerate smoothly
} else {
if going_up {
est.acceleration + (dt * MAX_JERK)
} else {
est.acceleration - (dt * MAX_JERK)
}
}
};
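      //counteract gravity: commanding force F = m * (a + 9.8) yields the target
      //net acceleration; get_carriage_weight() is treated as a mass in kilograms
      //(an assumption; the unit is not stated here)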
let gravity_adjusted_acceleration = target_acceleration + 9.8;
let target_force = gravity_adjusted_acceleration * self.esp.get_carriage_weight();
if !target_force.is_finite() {
//divide by zero etc.
//may happen if time delta underflows
0.0
} else {
         target_force
}
}
}
| 32.819048 | 110 | 0.588508 |
d73bf3dd731571d092b2de49cf24f018f904310e | 749 | // Copyright (C) 2016 Symtern Project Contributors
//
// Licensed under the Apache License, Version 2.0 <LICENSE-Apache
// or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
// license <LICENSE-MIT or http://opensource.org/licenses/MIT>,
// at your option. This file may not be copied, modified, or
// distributed except according to those terms.
//! Example for lib.rs to demonstrate that symbol types implement Copy.
extern crate symtern;
use symtern::prelude::*;
use symtern::Pool;
/// Take ownership of a value, consuming it.
fn consume<T>(_: T) {}
fn main() {
let mut pool = Pool::<str, u32>::new();
let sym = pool.intern("xyz").unwrap();
consume(sym);
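    // `sym` was moved into `consume` by value above; because symbol types are
    // Copy, it is still usable here.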
println!("The symbol is still valid: {:?}", pool.resolve(sym));
}
| 34.045455 | 71 | 0.691589 |
d7253e36978017e9aacbfd5b5b0d8b22c711ce87 | 2,832 | use log::info;
use swayipc::Connection;
mod algorithm;
use algorithm::{EdgeMode, Kind, Target};
mod tree;
#[derive(Debug)]
enum FocusError {
Args,
Command,
SwayIPC(swayipc::Error),
}
fn main() {
match task() {
Err(e) => {
match e {
FocusError::Args => eprint!("{}", include_str!("../usage.md")),
FocusError::Command => eprintln!("error: no valid focus command"),
FocusError::SwayIPC(e) => eprintln!("swayipc error: {e}"),
};
std::process::exit(1);
}
Ok(()) => (),
}
}
fn task() -> Result<(), FocusError> {
env_logger::init();
info!("Parsing arguments");
let args: Box<[String]> = std::env::args().collect();
let targets = parse_args(&args).ok_or(FocusError::Args)?;
info!("Starting connection");
let mut c = Connection::new().map_err(FocusError::SwayIPC)?;
info!("Retrieving tree");
let tree = c.get_tree().map_err(FocusError::SwayIPC)?;
info!("Pre-processing tree");
let tree = tree::preprocess(tree);
info!("Searching for neighbor");
let neighbor = algorithm::neighbor(&tree, &targets);
if let Some(neighbor) = neighbor {
let focus_cmd = tree::focus_command(neighbor).ok_or(FocusError::Command)?;
info!("Running focus command: '{focus_cmd}'");
c.run_command(focus_cmd).map_err(FocusError::SwayIPC)?;
} else {
info!("No neighbor found");
}
Ok(())
}
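/// Parses focus targets of the form `<kind>-<direction><edge mode>`, e.g.
/// `split-rs` or `output-lw` (examples derived from the match arms below; see
/// usage.md for the authoritative syntax). Kinds: `split`, `group`, `float`,
/// `workspace`, `output`; directions: `r`/`l`/`d`/`u`; edge modes: `s`/`w`/`t`/`i`.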
fn parse_args(args: &[String]) -> Option<Box<[Target]>> {
if args.len() < 2 {
return None;
}
args[1..]
.iter()
.map(|arg| {
let (target_name, mode_chars) = arg.split_once('-')?;
let kind = match target_name {
"split" => Some(Kind::Split),
"group" => Some(Kind::Group),
"float" => Some(Kind::Float),
"workspace" => Some(Kind::Workspace),
"output" => Some(Kind::Output),
_ => None,
}?;
let mut mode_chars = mode_chars.chars();
let (backward, vertical) = match mode_chars.next()? {
'r' => Some((false, false)),
'l' => Some((true, false)),
'd' => Some((false, true)),
'u' => Some((true, true)),
_ => None,
}?;
let edge_mode = match mode_chars.next()? {
's' => Some(EdgeMode::Stop),
'w' => Some(EdgeMode::Wrap),
't' => Some(EdgeMode::Traverse),
'i' => Some(EdgeMode::Inactive),
_ => None,
}?;
Some(Target {
kind,
backward,
vertical,
edge_mode,
})
})
.collect()
}
| 29.810526 | 82 | 0.486582 |
e2b98ca1d2561597b2a1543cda396a5edb1e0a7e | 2,296 | use jinya_ui::widgets::form::input::*;
use jinya_ui::layout::button_row::*;
use jinya_ui::layout::page::*;
use jinya_ui::widgets::button::*;
use jinya_ui::widgets::dialog::content::*;
use yew::prelude::*;
use yew::services::ConsoleService;
pub struct ContentDialogs {
link: ComponentLink<Self>,
dialog_open: bool,
value: String,
}
pub enum Msg {
Open,
Primary,
Secondary,
DefaultInput(String),
}
impl Component for ContentDialogs {
type Message = Msg;
type Properties = ();
fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {
ContentDialogs {
link,
dialog_open: false,
value: "".to_string(),
}
}
fn update(&mut self, msg: Self::Message) -> bool {
match msg {
Msg::Open => self.dialog_open = true,
Msg::Primary => {
                ConsoleService::log("Primary clicked");
self.dialog_open = false
}
Msg::Secondary => {
                ConsoleService::log("Secondary clicked");
self.dialog_open = false
}
Msg::DefaultInput(value) => self.value = value
}
true
}
fn change(&mut self, _props: Self::Properties) -> bool {
false
}
fn view(&self) -> Html {
html! {
<Page>
                <h1>{"Content Dialogs"}</h1>
<ButtonRow alignment=ButtonRowAlignment::Start>
<Button label="Open Primary" button_type=ButtonType::Primary on_click=self.link.callback(|_| Msg::Open) />
</ButtonRow>
<ContentDialog
title="Content dialog"
secondary_label="Discard changes"
primary_label="Save changes"
on_primary=self.link.callback(|_| Msg::Primary)
on_secondary=self.link.callback(|_| Msg::Secondary)
is_open=&self.dialog_open
>
<div>
<Input placeholder="Default placeholder" label="Default input" on_input=self.link.callback(|value| Msg::DefaultInput(value)) value=&self.value />
</div>
</ContentDialog>
</Page>
}
}
} | 29.818182 | 169 | 0.526568 |
acccfba8d554a66c5e8c93f86c77a32073927296 | 2,548 | use super::sync::Sync;
use core_network::{DiagnosticEvent, NetworkServiceHandle};
use protocol::{
async_trait,
traits::{Context, MessageHandler, PeerTrust, TrustFeedback},
};
use serde_derive::{Deserialize, Serialize};
use std::ops::Deref;
pub const GOSSIP_TRUST_NEW_INTERVAL: &str = "/gossip/diagnostic/trust_new_interval";
pub const GOSSIP_TRUST_TWIN_EVENT: &str = "/gossip/diagnostic/trust_twin_event";
#[derive(Debug, Serialize, Deserialize)]
pub struct TrustNewIntervalReq(pub u8);
pub struct TrustNewIntervalHandler {
pub sync: Sync,
pub network: NetworkServiceHandle,
}
impl TrustNewIntervalHandler {
pub fn new(sync: Sync, network: NetworkServiceHandle) -> Self {
TrustNewIntervalHandler { sync, network }
}
}
#[async_trait]
impl MessageHandler for TrustNewIntervalHandler {
type Message = TrustNewIntervalReq;
async fn process(&self, ctx: Context, _msg: Self::Message) -> TrustFeedback {
let session_id = ctx
.get::<usize>("session_id")
.cloned()
.expect("impossible, session id not found");
let report = self
.network
.diagnostic
.new_trust_interval(session_id.into())
.expect("failed to enter new trust interval");
self.sync.emit(DiagnosticEvent::TrustNewInterval { report });
TrustFeedback::Neutral
}
}
#[repr(u8)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Hash)]
pub enum TwinEvent {
Good = 0,
Bad = 1,
Worse = 2,
Both = 3,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct TrustTwinEventReq(pub TwinEvent);
pub struct TrustTwinEventHandler(pub NetworkServiceHandle);
#[async_trait]
impl MessageHandler for TrustTwinEventHandler {
type Message = TrustTwinEventReq;
async fn process(&self, ctx: Context, msg: Self::Message) -> TrustFeedback {
match msg.0 {
TwinEvent::Good => self.report(ctx, TrustFeedback::Good),
TwinEvent::Bad => self.report(ctx, TrustFeedback::Bad("twin bad".to_owned())),
TwinEvent::Worse => self.report(ctx, TrustFeedback::Worse("twin worse".to_owned())),
TwinEvent::Both => {
self.report(ctx.clone(), TrustFeedback::Good);
self.report(ctx, TrustFeedback::Bad("twin bad".to_owned()));
}
}
TrustFeedback::Neutral
}
}
impl Deref for TrustTwinEventHandler {
type Target = NetworkServiceHandle;
fn deref(&self) -> &Self::Target {
&self.0
}
}
| 28.311111 | 96 | 0.656593 |
e214321b7883b338c2adee94cc620b23a4004b31 | 40,483 | use crate::io::util::flush::{flush, Flush};
use crate::io::util::shutdown::{shutdown, Shutdown};
use crate::io::util::write::{write, Write};
use crate::io::util::write_all::{write_all, WriteAll};
use crate::io::util::write_all_buf::{write_all_buf, WriteAllBuf};
use crate::io::util::write_buf::{write_buf, WriteBuf};
use crate::io::util::write_int::{
WriteI128, WriteI128Le, WriteI16, WriteI16Le, WriteI32, WriteI32Le, WriteI64, WriteI64Le,
WriteI8,
};
use crate::io::util::write_int::{
WriteU128, WriteU128Le, WriteU16, WriteU16Le, WriteU32, WriteU32Le, WriteU64, WriteU64Le,
WriteU8,
};
use crate::io::util::write_vectored::{write_vectored, WriteVectored};
use crate::io::AsyncWrite;
use std::io::IoSlice;
use bytes::Buf;
cfg_io_util! {
    /// Defines numeric writer methods; each returns the corresponding write future.
macro_rules! write_impl {
(
$(
$(#[$outer:meta])*
fn $name:ident(&mut self, n: $ty:ty) -> $($fut:ident)*;
)*
) => {
$(
$(#[$outer])*
fn $name<'a>(&'a mut self, n: $ty) -> $($fut)*<&'a mut Self> where Self: Unpin {
$($fut)*::new(self, n)
}
)*
}
}
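    // For illustration (not part of the original source): the `write_u8` entry
    // in the `write_impl!` invocation further below expands to roughly:
    //
    //     fn write_u8<'a>(&'a mut self, n: u8) -> WriteU8<&'a mut Self>
    //     where
    //         Self: Unpin,
    //     {
    //         WriteU8::new(self, n)
    //     }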
/// Writes bytes to a sink.
///
/// Implemented as an extension trait, adding utility methods to all
/// [`AsyncWrite`] types. Callers will tend to import this trait instead of
/// [`AsyncWrite`].
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let data = b"some bytes";
///
/// let mut pos = 0;
/// let mut buffer = File::create("foo.txt").await?;
///
/// while pos < data.len() {
/// let bytes_written = buffer.write(&data[pos..]).await?;
/// pos += bytes_written;
/// }
///
/// Ok(())
/// }
/// ```
///
/// See [module][crate::io] documentation for more details.
///
/// [`AsyncWrite`]: AsyncWrite
pub trait AsyncWriteExt: AsyncWrite {
/// Writes a buffer into this writer, returning how many bytes were
/// written.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write(&mut self, buf: &[u8]) -> io::Result<usize>;
/// ```
///
/// This function will attempt to write the entire contents of `buf`, but
/// the entire write may not succeed, or the write may also generate an
/// error. A call to `write` represents *at most one* attempt to write to
/// any wrapped object.
///
/// # Return
///
/// If the return value is `Ok(n)` then it must be guaranteed that `n <=
/// buf.len()`. A return value of `0` typically means that the
/// underlying object is no longer able to accept bytes and will likely
/// not be able to in the future as well, or that the buffer provided is
/// empty.
///
/// # Errors
///
/// Each call to `write` may generate an I/O error indicating that the
/// operation could not be completed. If an error is returned then no bytes
/// in the buffer were written to this writer.
///
/// It is **not** considered an error if the entire buffer could not be
/// written to this writer.
///
/// # Cancel safety
///
/// This method is cancellation safe in the sense that if it is used as
/// the event in a [`tokio::select!`](crate::select) statement and some
/// other branch completes first, then it is guaranteed that no data was
/// written to this `AsyncWrite`.
///
/// # Examples
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
///
/// // Writes some prefix of the byte string, not necessarily all of it.
/// file.write(b"some bytes").await?;
/// Ok(())
/// }
/// ```
fn write<'a>(&'a mut self, src: &'a [u8]) -> Write<'a, Self>
where
Self: Unpin,
{
write(self, src)
}
/// Like [`write`], except that it writes from a slice of buffers.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize>;
/// ```
///
/// See [`AsyncWrite::poll_write_vectored`] for more details.
///
/// # Cancel safety
///
/// This method is cancellation safe in the sense that if it is used as
/// the event in a [`tokio::select!`](crate::select) statement and some
/// other branch completes first, then it is guaranteed that no data was
/// written to this `AsyncWrite`.
///
/// # Examples
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
/// use std::io::IoSlice;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
///
/// let bufs: &[_] = &[
/// IoSlice::new(b"hello"),
/// IoSlice::new(b" "),
/// IoSlice::new(b"world"),
/// ];
///
/// file.write_vectored(&bufs).await?;
///
/// Ok(())
/// }
/// ```
///
/// [`write`]: AsyncWriteExt::write
fn write_vectored<'a, 'b>(&'a mut self, bufs: &'a [IoSlice<'b>]) -> WriteVectored<'a, 'b, Self>
where
Self: Unpin,
{
write_vectored(self, bufs)
}
/// Writes a buffer into this writer, advancing the buffer's internal
/// cursor.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_buf<B: Buf>(&mut self, buf: &mut B) -> io::Result<usize>;
/// ```
///
/// This function will attempt to write the entire contents of `buf`, but
/// the entire write may not succeed, or the write may also generate an
/// error. After the operation completes, the buffer's
/// internal cursor is advanced by the number of bytes written. A
/// subsequent call to `write_buf` using the **same** `buf` value will
/// resume from the point that the first call to `write_buf` completed.
/// A call to `write_buf` represents *at most one* attempt to write to any
/// wrapped object.
///
/// # Return
///
/// If the return value is `Ok(n)` then it must be guaranteed that `n <=
/// buf.len()`. A return value of `0` typically means that the
/// underlying object is no longer able to accept bytes and will likely
/// not be able to in the future as well, or that the buffer provided is
/// empty.
///
/// # Errors
///
/// Each call to `write` may generate an I/O error indicating that the
/// operation could not be completed. If an error is returned then no bytes
/// in the buffer were written to this writer.
///
/// It is **not** considered an error if the entire buffer could not be
/// written to this writer.
///
/// # Cancel safety
///
/// This method is cancellation safe in the sense that if it is used as
/// the event in a [`tokio::select!`](crate::select) statement and some
/// other branch completes first, then it is guaranteed that no data was
/// written to this `AsyncWrite`.
///
/// # Examples
///
/// [`File`] implements [`AsyncWrite`] and [`Cursor`]`<&[u8]>` implements [`Buf`]:
///
/// [`File`]: crate::fs::File
/// [`Buf`]: bytes::Buf
/// [`Cursor`]: std::io::Cursor
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
///
/// use bytes::Buf;
/// use std::io::Cursor;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
/// let mut buffer = Cursor::new(b"data to write");
///
/// // Loop until the entire contents of the buffer are written to
/// // the file.
/// while buffer.has_remaining() {
/// // Writes some prefix of the byte string, not necessarily
/// // all of it.
/// file.write_buf(&mut buffer).await?;
/// }
///
/// Ok(())
/// }
/// ```
fn write_buf<'a, B>(&'a mut self, src: &'a mut B) -> WriteBuf<'a, Self, B>
where
Self: Sized + Unpin,
B: Buf,
{
write_buf(self, src)
}
/// Attempts to write an entire buffer into this writer
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_all_buf(&mut self, buf: impl Buf) -> Result<(), io::Error> {
/// while buf.has_remaining() {
/// self.write_buf(&mut buf).await?;
/// }
/// Ok(())
/// }
/// ```
///
/// This method will continuously call [`write`] until
/// [`buf.has_remaining()`](bytes::Buf::has_remaining) returns false. This method will not
/// return until the entire buffer has been successfully written or an error occurs. The
/// first error generated will be returned.
///
/// The buffer is advanced after each chunk is successfully written. After failure,
/// `src.chunk()` will return the chunk that failed to write.
///
/// # Cancel safety
///
/// If `write_all_buf` is used as the event in a
/// [`tokio::select!`](crate::select) statement and some other branch
/// completes first, then the data in the provided buffer may have been
/// partially written. However, it is guaranteed that the provided
/// buffer has been [advanced] by the amount of bytes that have been
/// partially written.
///
/// # Examples
///
/// [`File`] implements [`AsyncWrite`] and [`Cursor`]`<&[u8]>` implements [`Buf`]:
///
/// [`File`]: crate::fs::File
/// [`Buf`]: bytes::Buf
/// [`Cursor`]: std::io::Cursor
/// [advanced]: bytes::Buf::advance
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
///
/// use std::io::Cursor;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut file = File::create("foo.txt").await?;
/// let mut buffer = Cursor::new(b"data to write");
///
/// file.write_all_buf(&mut buffer).await?;
/// Ok(())
/// }
/// ```
///
/// [`write`]: AsyncWriteExt::write
fn write_all_buf<'a, B>(&'a mut self, src: &'a mut B) -> WriteAllBuf<'a, Self, B>
where
Self: Sized + Unpin,
B: Buf,
{
write_all_buf(self, src)
}
/// Attempts to write an entire buffer into this writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_all(&mut self, buf: &[u8]) -> io::Result<()>;
/// ```
///
/// This method will continuously call [`write`] until there is no more data
/// to be written. This method will not return until the entire buffer
/// has been successfully written or such an error occurs. The first
/// error generated from this method will be returned.
///
/// # Cancel safety
///
/// This method is not cancellation safe. If it is used as the event
/// in a [`tokio::select!`](crate::select) statement and some other
/// branch completes first, then the provided buffer may have been
/// partially written, but future calls to `write_all` will start over
/// from the beginning of the buffer.
///
/// # Errors
///
/// This function will return the first error that [`write`] returns.
///
/// # Examples
///
/// ```no_run
/// use tokio::io::{self, AsyncWriteExt};
/// use tokio::fs::File;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut buffer = File::create("foo.txt").await?;
///
/// buffer.write_all(b"some bytes").await?;
/// Ok(())
/// }
/// ```
///
/// [`write`]: AsyncWriteExt::write
fn write_all<'a>(&'a mut self, src: &'a [u8]) -> WriteAll<'a, Self>
where
Self: Unpin,
{
write_all(self, src)
}
write_impl! {
/// Writes an unsigned 8-bit integer to the underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u8(&mut self, n: u8) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write unsigned 8-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u8(2).await?;
/// writer.write_u8(5).await?;
///
/// assert_eq!(writer, b"\x02\x05");
/// Ok(())
/// }
/// ```
fn write_u8(&mut self, n: u8) -> WriteU8;
        /// Writes a signed 8-bit integer to the underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i8(&mut self, n: i8) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write signed 8-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
        ///     writer.write_i8(2).await?;
        ///     writer.write_i8(5).await?;
///
/// assert_eq!(writer, b"\x02\x05");
/// Ok(())
/// }
/// ```
fn write_i8(&mut self, n: i8) -> WriteI8;
/// Writes an unsigned 16-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u16(&mut self, n: u16) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write unsigned 16-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u16(517).await?;
/// writer.write_u16(768).await?;
///
/// assert_eq!(writer, b"\x02\x05\x03\x00");
/// Ok(())
/// }
/// ```
fn write_u16(&mut self, n: u16) -> WriteU16;
/// Writes a signed 16-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i16(&mut self, n: i16) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write signed 16-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i16(193).await?;
/// writer.write_i16(-132).await?;
///
/// assert_eq!(writer, b"\x00\xc1\xff\x7c");
/// Ok(())
/// }
/// ```
fn write_i16(&mut self, n: i16) -> WriteI16;
/// Writes an unsigned 32-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u32(&mut self, n: u32) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write unsigned 32-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u32(267).await?;
/// writer.write_u32(1205419366).await?;
///
/// assert_eq!(writer, b"\x00\x00\x01\x0b\x47\xd9\x3d\x66");
/// Ok(())
/// }
/// ```
fn write_u32(&mut self, n: u32) -> WriteU32;
/// Writes a signed 32-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i32(&mut self, n: i32) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write signed 32-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i32(267).await?;
/// writer.write_i32(1205419366).await?;
///
/// assert_eq!(writer, b"\x00\x00\x01\x0b\x47\xd9\x3d\x66");
/// Ok(())
/// }
/// ```
fn write_i32(&mut self, n: i32) -> WriteI32;
/// Writes an unsigned 64-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u64(&mut self, n: u64) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write unsigned 64-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u64(918733457491587).await?;
/// writer.write_u64(143).await?;
///
/// assert_eq!(writer, b"\x00\x03\x43\x95\x4d\x60\x86\x83\x00\x00\x00\x00\x00\x00\x00\x8f");
/// Ok(())
/// }
/// ```
fn write_u64(&mut self, n: u64) -> WriteU64;
        /// Writes a signed 64-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i64(&mut self, n: i64) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write signed 64-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i64(i64::MIN).await?;
/// writer.write_i64(i64::MAX).await?;
///
/// assert_eq!(writer, b"\x80\x00\x00\x00\x00\x00\x00\x00\x7f\xff\xff\xff\xff\xff\xff\xff");
/// Ok(())
/// }
/// ```
fn write_i64(&mut self, n: i64) -> WriteI64;
/// Writes an unsigned 128-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u128(&mut self, n: u128) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
        /// Write unsigned 128-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u128(16947640962301618749969007319746179).await?;
///
/// assert_eq!(writer, vec![
/// 0x00, 0x03, 0x43, 0x95, 0x4d, 0x60, 0x86, 0x83,
/// 0x00, 0x03, 0x43, 0x95, 0x4d, 0x60, 0x86, 0x83
/// ]);
/// Ok(())
/// }
/// ```
fn write_u128(&mut self, n: u128) -> WriteU128;
        /// Writes a signed 128-bit integer in big-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i128(&mut self, n: i128) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write signed 128-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i128(i128::MIN).await?;
///
/// assert_eq!(writer, vec![
/// 0x80, 0, 0, 0, 0, 0, 0, 0,
/// 0, 0, 0, 0, 0, 0, 0, 0
/// ]);
/// Ok(())
/// }
/// ```
fn write_i128(&mut self, n: i128) -> WriteI128;
/// Writes an unsigned 16-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u16_le(&mut self, n: u16) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write unsigned 16-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u16_le(517).await?;
/// writer.write_u16_le(768).await?;
///
/// assert_eq!(writer, b"\x05\x02\x00\x03");
/// Ok(())
/// }
/// ```
fn write_u16_le(&mut self, n: u16) -> WriteU16Le;
/// Writes a signed 16-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i16_le(&mut self, n: i16) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write signed 16-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i16_le(193).await?;
/// writer.write_i16_le(-132).await?;
///
/// assert_eq!(writer, b"\xc1\x00\x7c\xff");
/// Ok(())
/// }
/// ```
fn write_i16_le(&mut self, n: i16) -> WriteI16Le;
/// Writes an unsigned 32-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u32_le(&mut self, n: u32) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write unsigned 32-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u32_le(267).await?;
/// writer.write_u32_le(1205419366).await?;
///
/// assert_eq!(writer, b"\x0b\x01\x00\x00\x66\x3d\xd9\x47");
/// Ok(())
/// }
/// ```
fn write_u32_le(&mut self, n: u32) -> WriteU32Le;
/// Writes a signed 32-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i32_le(&mut self, n: i32) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write signed 32-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i32_le(267).await?;
/// writer.write_i32_le(1205419366).await?;
///
/// assert_eq!(writer, b"\x0b\x01\x00\x00\x66\x3d\xd9\x47");
/// Ok(())
/// }
/// ```
fn write_i32_le(&mut self, n: i32) -> WriteI32Le;
/// Writes an unsigned 64-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u64_le(&mut self, n: u64) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write unsigned 64-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u64_le(918733457491587).await?;
/// writer.write_u64_le(143).await?;
///
/// assert_eq!(writer, b"\x83\x86\x60\x4d\x95\x43\x03\x00\x8f\x00\x00\x00\x00\x00\x00\x00");
/// Ok(())
/// }
/// ```
fn write_u64_le(&mut self, n: u64) -> WriteU64Le;
    /// Writes a signed 64-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i64_le(&mut self, n: i64) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write signed 64-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i64_le(i64::MIN).await?;
/// writer.write_i64_le(i64::MAX).await?;
///
/// assert_eq!(writer, b"\x00\x00\x00\x00\x00\x00\x00\x80\xff\xff\xff\xff\xff\xff\xff\x7f");
/// Ok(())
/// }
/// ```
fn write_i64_le(&mut self, n: i64) -> WriteI64Le;
/// Writes an unsigned 128-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_u128_le(&mut self, n: u128) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write unsigned 128-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_u128_le(16947640962301618749969007319746179).await?;
///
/// assert_eq!(writer, vec![
/// 0x83, 0x86, 0x60, 0x4d, 0x95, 0x43, 0x03, 0x00,
/// 0x83, 0x86, 0x60, 0x4d, 0x95, 0x43, 0x03, 0x00,
/// ]);
/// Ok(())
/// }
/// ```
fn write_u128_le(&mut self, n: u128) -> WriteU128Le;
    /// Writes a signed 128-bit integer in little-endian order to the
/// underlying writer.
///
/// Equivalent to:
///
/// ```ignore
/// async fn write_i128_le(&mut self, n: i128) -> io::Result<()>;
/// ```
///
/// It is recommended to use a buffered writer to avoid excessive
/// syscalls.
///
/// # Errors
///
/// This method returns the same errors as [`AsyncWriteExt::write_all`].
///
/// [`AsyncWriteExt::write_all`]: AsyncWriteExt::write_all
///
/// # Examples
///
    /// Write signed 128-bit integers to an `AsyncWrite`:
///
/// ```rust
/// use tokio::io::{self, AsyncWriteExt};
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let mut writer = Vec::new();
///
/// writer.write_i128_le(i128::MIN).await?;
///
/// assert_eq!(writer, vec![
    ///         0, 0, 0, 0, 0, 0, 0, 0,
    ///         0, 0, 0, 0, 0, 0, 0, 0x80
/// ]);
/// Ok(())
/// }
/// ```
fn write_i128_le(&mut self, n: i128) -> WriteI128Le;
}
/// Flushes this output stream, ensuring that all intermediately buffered
/// contents reach their destination.
///
/// Equivalent to:
///
/// ```ignore
/// async fn flush(&mut self) -> io::Result<()>;
/// ```
///
/// # Errors
///
/// It is considered an error if not all bytes could be written due to
/// I/O errors or EOF being reached.
///
/// # Examples
///
/// ```no_run
/// use tokio::io::{self, BufWriter, AsyncWriteExt};
/// use tokio::fs::File;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let f = File::create("foo.txt").await?;
/// let mut buffer = BufWriter::new(f);
///
/// buffer.write_all(b"some bytes").await?;
/// buffer.flush().await?;
/// Ok(())
/// }
/// ```
fn flush(&mut self) -> Flush<'_, Self>
where
Self: Unpin,
{
flush(self)
}
/// Shuts down the output stream, ensuring that the value can be dropped
/// cleanly.
///
/// Equivalent to:
///
/// ```ignore
/// async fn shutdown(&mut self) -> io::Result<()>;
/// ```
///
    /// Similar to [`flush`], all intermediately buffered data is written to the
/// underlying stream. Once the operation completes, the caller should
/// no longer attempt to write to the stream. For example, the
/// `TcpStream` implementation will issue a `shutdown(Write)` sys call.
///
/// [`flush`]: fn@crate::io::AsyncWriteExt::flush
///
/// # Examples
///
/// ```no_run
/// use tokio::io::{self, BufWriter, AsyncWriteExt};
/// use tokio::fs::File;
///
/// #[tokio::main]
/// async fn main() -> io::Result<()> {
/// let f = File::create("foo.txt").await?;
/// let mut buffer = BufWriter::new(f);
///
/// buffer.write_all(b"some bytes").await?;
/// buffer.shutdown().await?;
/// Ok(())
/// }
/// ```
fn shutdown(&mut self) -> Shutdown<'_, Self>
where
Self: Unpin,
{
shutdown(self)
}
}
}
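// Blanket implementation: every type that implements `AsyncWrite` automatically
// gets all of the `AsyncWriteExt` extension methods defined above.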
impl<W: AsyncWrite + ?Sized> AsyncWriteExt for W {}
| 35.449212 | 108 | 0.428032 |
ff32051bcdcafeb71ac4b5160ad900cf310c3080 | 4,394 | #![warn(clippy::pedantic, clippy::nursery)]
use std::env;
use std::fs::File;
use std::io::BufReader;
use fancy_regex::Regex;
use once_cell::sync::OnceCell;
use serde::{self, Deserialize};
use serenity::{
async_trait,
model::{
channel::Message,
gateway::Ready,
id::{ChannelId, GuildId, RoleId},
},
prelude::*,
utils::{Colour, MessageBuilder},
};
mod parse_channel_id;
mod parse_guild_id;
mod parse_regexp;
mod parse_role_id;
#[derive(Debug, Deserialize)]
struct Config {
token: String,
rules: Vec<Filter>,
guilds: Vec<GuildConfig>,
}
#[derive(Debug, Deserialize)]
struct GuildConfig {
#[serde(with = "parse_channel_id")]
report_channel: ChannelId,
#[serde(with = "parse_guild_id")]
guild: GuildId,
#[serde(with = "parse_role_id")]
role: RoleId,
}
#[derive(Debug, Deserialize)]
struct Filter {
#[serde(with = "parse_regexp")]
pattern: Regex,
note: String,
}
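// The YAML file loaded in `main` (path taken from the CONFIG environment variable)
// is expected to follow roughly this shape. This is only a sketch: the exact value
// formats for the ID and pattern fields depend on the `parse_*` helper modules
// declared above, which are not shown here.
//
// token: "discord-bot-token"
// rules:
//   - pattern: "(?i)free nitro"
//     note: "phishing keyword"
// guilds:
//   - report_channel: "123456789012345678"
//     guild: "234567890123456789"
//     role: "345678901234567890"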
static CONFIG: OnceCell<Config> = OnceCell::new();
struct Handler;
#[async_trait]
impl EventHandler for Handler {
async fn message(&self, ctx: Context, msg: Message) {
if msg.author.bot {
return;
}
let config = CONFIG.get().unwrap();
let guild_config =
match (&msg.guild_id).and_then(|id| config.guilds.iter().find(|v| v.guild == id)) {
Some(c) => c,
None => return,
};
let notes: Vec<&str> = (&config.rules)
.iter()
.filter(|s| s.pattern.is_match(&msg.content).unwrap_or(false))
.map(|s| s.note.as_str())
.collect();
if notes.is_empty() {
return;
}
let notes = notes
.iter()
.map(|s| format!("- {}", s))
.collect::<Vec<String>>()
.join("\n");
        // NOTE:
        // If an extremely long spam message is sent, the report embeds the original
        // message itself, so the report may fail to send.
#[allow(clippy::unreadable_literal)]
let msg_s = (&guild_config.report_channel)
.send_message(&ctx.http, |m| {
m.embed(|e| {
e.title(":x: violation detected")
.colour(Colour::from_rgb(238, 0, 0))
.field(
"user",
MessageBuilder::new().mention(&msg.author.id).build(),
true,
)
.field(
"in",
MessageBuilder::new().mention(&msg.channel_id).build(),
true,
)
.field(
"violation(s)",
MessageBuilder::new().push_codeblock_safe(¬es, None),
false,
)
.field(
"original message",
MessageBuilder::new().push_codeblock_safe(&msg.content, None),
false,
)
})
})
.await;
if let Err(why) = msg_s {
println!("Error sending message: {:?}", why);
}
if let Err(why) = msg.delete(&ctx.http).await {
println!("Error deleting message: {:?}", why);
};
let mut member = guild_config
.guild
.member(&ctx.http, &msg.author.id)
.await
.unwrap();
if let Err(why) = member.add_role(&ctx.http, &guild_config.role).await {
println!("Error adding a role: {:?}", why);
};
}
async fn ready(&self, _: Context, ready: Ready) {
println!("{} is connected!", ready.user.name);
}
}
#[tokio::main]
async fn main() {
CONFIG
.set(
serde_yaml::from_reader(BufReader::new(
File::open(env::var("CONFIG").expect("Failed to lookup CONFIG environment"))
.expect("Failed to open CONFIG"),
))
.expect("Failed to parse CONFIG"),
)
.unwrap();
let mut client = Client::builder(&CONFIG.get().unwrap().token)
.event_handler(Handler)
.await
.expect("Failed to create client");
if let Err(why) = client.start().await {
println!("Client error: {:?}", why);
}
}
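// To run (sketch): `CONFIG=config.yaml cargo run`; the configuration path is read
// from the CONFIG environment variable in `main` above.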
| 27.291925 | 95 | 0.481566 |
76da000ae7a31231ea5cb104d8f10809b4db7a3b | 1,086 | /*
An array of length n, originally sorted in ascending order, has been rotated between 1 and n
times to produce the input array. For example, the original array nums = [0,1,4,4,5,6,7] may become:
[4,5,6,7,0,1,4] after being rotated 4 times, or
[0,1,4,4,5,6,7] after being rotated 7 times.
Note that rotating the array [a[0], a[1], a[2], ..., a[n-1]] once yields the array
[a[n-1], a[0], a[1], a[2], ..., a[n-2]].
Given the array nums, which may contain duplicates and was sorted in ascending order before
being rotated as described above, find and return the minimum element of the array.

Example 1:
Input: nums = [1,3,5]
Output: 1
Example 2:
Input: nums = [2,2,2,0,1]
Output: 0

Constraints:
n == nums.length
1 <= n <= 5000
-5000 <= nums[i] <= 5000
nums was originally sorted in ascending order and has been rotated between 1 and n times.

Follow-up:
This problem is an extension of "Find Minimum in Rotated Sorted Array".
Does allowing duplicates affect the time complexity of the algorithm? How, and why?

Source: LeetCode
Link: https://leetcode-cn.com/problems/find-minimum-in-rotated-sorted-array-ii
All rights reserved by LeetCode (LingKou Network). For commercial reuse please contact them for
official authorization; for non-commercial reuse please credit the source.
*/
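// On LeetCode the `Solution` struct is supplied by the judge's scaffold; it is
// declared here so the snippet compiles on its own. For instance,
// `Solution::find_min(vec![2, 2, 2, 0, 1])` returns 0 (Example 2 above).
struct Solution;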
impl Solution {
    pub fn find_min(nums: Vec<i32>) -> i32 {
        // Binary search: compare the middle element with the rightmost one to
        // decide which half must contain the minimum.
        let (mut left, mut right) = (0, nums.len() - 1);
        while left < right {
            let mid = left + ((right - left) >> 1);
            if nums[mid] < nums[right] {
                // The minimum is at mid or somewhere to its left.
                right = mid;
            } else if nums[mid] > nums[right] {
                // The minimum is strictly to the right of mid.
                left = mid + 1;
            } else {
                // nums[mid] == nums[right]: we cannot tell which side the minimum
                // is on, so shrink the window by one. These duplicates are what
                // degrade the worst case from O(log n) to O(n).
                right -= 1;
            }
        }
        nums[left]
}
} | 20.111111 | 91 | 0.555249 |
90ac67e8389b4eac672c046012c95c313b7c24a7 | 1,918 | // Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
//! SHA-256, a cryptographic hash algorithm.
/// A 256-bit message digest.
///
/// See [`Hasher`](trait.Hasher.html).
pub type Digest = [u8; 32];
/// Convenience type alias for simplifying signatures involving a `Builder`.
pub type Error<H> = <H as Hasher>::Error;
/// A builder for creating new [`Hasher`]s.
///
/// A value of a type implementing this trait already contains everything it
/// needs (such as OS handles) to start creating hashers.
///
/// [`Hasher`]: trait.Hasher.html
pub trait Builder {
/// The concrete [`Hasher`] generated by this trait.
///
/// [`Hasher`]: trait.Hasher.html
type Hasher: Hasher;
/// Begins a new hashing operation, returning a new [`Hasher`] to manage
/// the computation.
///
/// [`Hasher`]: trait.Hasher.html
fn new_hasher(&self) -> Result<Self::Hasher, Error<Self::Hasher>>;
/// Convenience function for hashing a contiguous buffer without having
/// to deal with a hasher directly.
    fn hash_contiguous(
&self,
bytes: &[u8],
out: &mut Digest,
) -> Result<(), Error<Self::Hasher>> {
let mut hasher = self.new_hasher()?;
hasher.write(bytes)?;
hasher.finish(out)
}
}
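// Usage sketch (illustrative only; `Sha256Builder` stands in for any concrete
// implementation of `Builder`):
//
//     let builder = Sha256Builder::new();
//     let mut digest: Digest = [0; 32];
//     builder.hash_contiguous(b"message", &mut digest)?;
//
// Streaming input instead goes through `new_hasher()`, repeated `write()` calls,
// and a final `finish()`.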
/// A particular hashing operation in progress.
///
/// Compare Rust's [`Hasher`] trait.
///
/// [`Hasher`]: https://doc.rust-lang.org/std/hash/trait.Hasher.html
pub trait Hasher {
/// The error returned when a hashing operation fails.
type Error;
/// Feeds more data into the current hashing operation.
fn write(&mut self, bytes: &[u8]) -> Result<(), Self::Error>;
/// Finishes the current hashing operation, writing the result to the given
/// buffer.
fn finish(self, out: &mut Digest) -> Result<(), Self::Error>;
}
| 30.935484 | 79 | 0.642857 |
f5a9aded8455e5a40a5bd8ec9ac60fa88e521324 | 119,912 | use crate::ast::{self, BlockCheckMode, PatKind, RangeEnd, RangeSyntax};
use crate::ast::{SelfKind, GenericBound, TraitBoundModifier};
use crate::ast::{Attribute, MacDelimiter, GenericArg};
use crate::util::parser::{self, AssocOp, Fixity};
use crate::attr;
use crate::source_map::{self, SourceMap, Spanned};
use crate::parse::token::{self, BinOpToken, Nonterminal, Token};
use crate::parse::lexer::comments;
use crate::parse::{self, ParseSess};
use crate::print::pp::{self, Breaks};
use crate::print::pp::Breaks::{Consistent, Inconsistent};
use crate::ptr::P;
use crate::std_inject;
use crate::symbol::keywords;
use crate::tokenstream::{self, TokenStream, TokenTree};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{self, BytePos};
use syntax_pos::{DUMMY_SP, FileName};
use std::ascii;
use std::borrow::Cow;
use std::io::{self, Write, Read};
use std::iter::Peekable;
use std::vec;
pub enum AnnNode<'a> {
Ident(&'a ast::Ident),
Name(&'a ast::Name),
Block(&'a ast::Block),
Item(&'a ast::Item),
SubItem(ast::NodeId),
Expr(&'a ast::Expr),
Pat(&'a ast::Pat),
}
pub trait PpAnn {
fn pre(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
fn post(&self, _state: &mut State<'_>, _node: AnnNode<'_>) -> io::Result<()> { Ok(()) }
}
#[derive(Copy, Clone)]
pub struct NoAnn;
impl PpAnn for NoAnn {}
pub struct State<'a> {
pub s: pp::Printer<'a>,
cm: Option<&'a SourceMap>,
    comments: Option<Vec<comments::Comment>>,
literals: Peekable<vec::IntoIter<comments::Literal>>,
cur_cmnt: usize,
boxes: Vec<pp::Breaks>,
ann: &'a (dyn PpAnn+'a),
is_expanded: bool
}
fn rust_printer<'a>(writer: Box<dyn Write+'a>, ann: &'a dyn PpAnn) -> State<'a> {
State {
s: pp::mk_printer(writer, DEFAULT_COLUMNS),
cm: None,
comments: None,
literals: vec![].into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
ann,
is_expanded: false
}
}
pub const INDENT_UNIT: usize = 4;
pub const DEFAULT_COLUMNS: usize = 78;
/// Requires you to pass an input filename and reader so that
/// it can scan the input text for comments and literals to
/// copy forward.
pub fn print_crate<'a>(cm: &'a SourceMap,
sess: &ParseSess,
krate: &ast::Crate,
filename: FileName,
input: &mut dyn Read,
out: Box<dyn Write+'a>,
ann: &'a dyn PpAnn,
is_expanded: bool) -> io::Result<()> {
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
if is_expanded && std_inject::injected_crate_name().is_some() {
// We need to print `#![no_std]` (and its feature gate) so that
// compiling pretty-printed source won't inject libstd again.
// However we don't want these attributes in the AST because
// of the feature gate, so we fake them up here.
// #![feature(prelude_import)]
let pi_nested = attr::mk_nested_word_item(ast::Ident::from_str("prelude_import"));
let list = attr::mk_list_item(DUMMY_SP, ast::Ident::from_str("feature"), vec![pi_nested]);
let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), list);
s.print_attribute(&fake_attr)?;
// #![no_std]
let no_std_meta = attr::mk_word_item(ast::Ident::from_str("no_std"));
let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), no_std_meta);
s.print_attribute(&fake_attr)?;
}
s.print_mod(&krate.module, &krate.attrs)?;
s.print_remaining_comments()?;
s.s.eof()
}
impl<'a> State<'a> {
pub fn new_from_input(cm: &'a SourceMap,
sess: &ParseSess,
filename: FileName,
input: &mut dyn Read,
out: Box<dyn Write+'a>,
ann: &'a dyn PpAnn,
is_expanded: bool) -> State<'a> {
let (cmnts, lits) = comments::gather_comments_and_literals(sess, filename, input);
State::new(
cm,
out,
ann,
Some(cmnts),
// If the code is post expansion, don't use the table of
// literals, since it doesn't correspond with the literals
// in the AST anymore.
if is_expanded { None } else { Some(lits) },
is_expanded
)
}
pub fn new(cm: &'a SourceMap,
out: Box<dyn Write+'a>,
ann: &'a dyn PpAnn,
comments: Option<Vec<comments::Comment>>,
literals: Option<Vec<comments::Literal>>,
is_expanded: bool) -> State<'a> {
State {
s: pp::mk_printer(out, DEFAULT_COLUMNS),
cm: Some(cm),
comments,
literals: literals.unwrap_or_default().into_iter().peekable(),
cur_cmnt: 0,
boxes: Vec::new(),
ann,
            is_expanded,
}
}
}
pub fn to_string<F>(f: F) -> String where
F: FnOnce(&mut State<'_>) -> io::Result<()>,
{
let mut wr = Vec::new();
{
let ann = NoAnn;
let mut printer = rust_printer(Box::new(&mut wr), &ann);
f(&mut printer).unwrap();
printer.s.eof().unwrap();
}
String::from_utf8(wr).unwrap()
}
fn binop_to_string(op: BinOpToken) -> &'static str {
match op {
token::Plus => "+",
token::Minus => "-",
token::Star => "*",
token::Slash => "/",
token::Percent => "%",
token::Caret => "^",
token::And => "&",
token::Or => "|",
token::Shl => "<<",
token::Shr => ">>",
}
}
pub fn token_to_string(tok: &Token) -> String {
match *tok {
token::Eq => "=".to_string(),
token::Lt => "<".to_string(),
token::Le => "<=".to_string(),
token::EqEq => "==".to_string(),
token::Ne => "!=".to_string(),
token::Ge => ">=".to_string(),
token::Gt => ">".to_string(),
token::Not => "!".to_string(),
token::Tilde => "~".to_string(),
token::OrOr => "||".to_string(),
token::AndAnd => "&&".to_string(),
token::BinOp(op) => binop_to_string(op).to_string(),
token::BinOpEq(op) => format!("{}=", binop_to_string(op)),
/* Structural symbols */
token::At => "@".to_string(),
token::Dot => ".".to_string(),
token::DotDot => "..".to_string(),
token::DotDotDot => "...".to_string(),
token::DotDotEq => "..=".to_string(),
token::Comma => ",".to_string(),
token::Semi => ";".to_string(),
token::Colon => ":".to_string(),
token::ModSep => "::".to_string(),
token::RArrow => "->".to_string(),
token::LArrow => "<-".to_string(),
token::FatArrow => "=>".to_string(),
token::OpenDelim(token::Paren) => "(".to_string(),
token::CloseDelim(token::Paren) => ")".to_string(),
token::OpenDelim(token::Bracket) => "[".to_string(),
token::CloseDelim(token::Bracket) => "]".to_string(),
token::OpenDelim(token::Brace) => "{".to_string(),
token::CloseDelim(token::Brace) => "}".to_string(),
token::OpenDelim(token::NoDelim) |
token::CloseDelim(token::NoDelim) => " ".to_string(),
token::Pound => "#".to_string(),
token::Dollar => "$".to_string(),
token::Question => "?".to_string(),
token::SingleQuote => "'".to_string(),
/* Literals */
token::Literal(lit, suf) => {
let mut out = match lit {
token::Byte(b) => format!("b'{}'", b),
token::Char(c) => format!("'{}'", c),
token::Err(c) => format!("'{}'", c),
token::Float(c) |
token::Integer(c) => c.to_string(),
token::Str_(s) => format!("\"{}\"", s),
token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n as usize),
string=s),
token::ByteStr(v) => format!("b\"{}\"", v),
token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
delim="#".repeat(n as usize),
string=s),
};
if let Some(s) = suf {
out.push_str(&s.as_str())
}
out
}
/* Name components */
token::Ident(s, false) => s.to_string(),
token::Ident(s, true) => format!("r#{}", s),
token::Lifetime(s) => s.to_string(),
/* Other */
token::DocComment(s) => s.to_string(),
token::Eof => "<eof>".to_string(),
token::Whitespace => " ".to_string(),
token::Comment => "/* */".to_string(),
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => nonterminal_to_string(nt),
}
}
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
match *nt {
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),
token::NtPath(ref e) => path_to_string(e),
token::NtItem(ref e) => item_to_string(e),
token::NtBlock(ref e) => block_to_string(e),
token::NtStmt(ref e) => stmt_to_string(e),
token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(e, false) => ident_to_string(e),
token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)),
token::NtLifetime(e) => ident_to_string(e),
token::NtLiteral(ref e) => expr_to_string(e),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(e),
token::NtImplItem(ref e) => impl_item_to_string(e),
token::NtTraitItem(ref e) => trait_item_to_string(e),
token::NtGenerics(ref e) => generic_params_to_string(&e.params),
token::NtWhereClause(ref e) => where_clause_to_string(e),
token::NtArg(ref e) => arg_to_string(e),
token::NtVis(ref e) => vis_to_string(e),
token::NtForeignItem(ref e) => foreign_item_to_string(e),
}
}
pub fn ty_to_string(ty: &ast::Ty) -> String {
to_string(|s| s.print_type(ty))
}
pub fn bounds_to_string(bounds: &[ast::GenericBound]) -> String {
to_string(|s| s.print_type_bounds("", bounds))
}
pub fn pat_to_string(pat: &ast::Pat) -> String {
to_string(|s| s.print_pat(pat))
}
pub fn arm_to_string(arm: &ast::Arm) -> String {
to_string(|s| s.print_arm(arm))
}
pub fn expr_to_string(e: &ast::Expr) -> String {
to_string(|s| s.print_expr(e))
}
pub fn lifetime_to_string(lt: &ast::Lifetime) -> String {
to_string(|s| s.print_lifetime(*lt))
}
pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
to_string(|s| s.print_tt(tt))
}
pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
to_string(|s| s.print_tts(tts.iter().cloned().collect()))
}
pub fn tokens_to_string(tokens: TokenStream) -> String {
to_string(|s| s.print_tts(tokens))
}
pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
to_string(|s| s.print_stmt(stmt))
}
pub fn attr_to_string(attr: &ast::Attribute) -> String {
to_string(|s| s.print_attribute(attr))
}
pub fn item_to_string(i: &ast::Item) -> String {
to_string(|s| s.print_item(i))
}
pub fn impl_item_to_string(i: &ast::ImplItem) -> String {
to_string(|s| s.print_impl_item(i))
}
pub fn trait_item_to_string(i: &ast::TraitItem) -> String {
to_string(|s| s.print_trait_item(i))
}
pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
to_string(|s| s.print_generic_params(generic_params))
}
pub fn where_clause_to_string(i: &ast::WhereClause) -> String {
to_string(|s| s.print_where_clause(i))
}
pub fn fn_block_to_string(p: &ast::FnDecl) -> String {
to_string(|s| s.print_fn_block_args(p))
}
pub fn path_to_string(p: &ast::Path) -> String {
to_string(|s| s.print_path(p, false, 0))
}
pub fn path_segment_to_string(p: &ast::PathSegment) -> String {
to_string(|s| s.print_path_segment(p, false))
}
pub fn ident_to_string(id: ast::Ident) -> String {
to_string(|s| s.print_ident(id))
}
pub fn vis_to_string(v: &ast::Visibility) -> String {
to_string(|s| s.print_visibility(v))
}
pub fn fun_to_string(decl: &ast::FnDecl,
header: ast::FnHeader,
name: ast::Ident,
generics: &ast::Generics)
-> String {
to_string(|s| {
s.head("")?;
s.print_fn(decl, header, Some(name),
generics, &source_map::dummy_spanned(ast::VisibilityKind::Inherited))?;
s.end()?; // Close the head box
s.end() // Close the outer box
})
}
pub fn block_to_string(blk: &ast::Block) -> String {
to_string(|s| {
// containing cbox, will be closed by print-block at }
s.cbox(INDENT_UNIT)?;
// head-ibox, will be closed by print-block after {
s.ibox(0)?;
s.print_block(blk)
})
}
pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
to_string(|s| s.print_meta_list_item(li))
}
pub fn meta_item_to_string(mi: &ast::MetaItem) -> String {
to_string(|s| s.print_meta_item(mi))
}
pub fn attribute_to_string(attr: &ast::Attribute) -> String {
to_string(|s| s.print_attribute(attr))
}
pub fn lit_to_string(l: &ast::Lit) -> String {
to_string(|s| s.print_literal(l))
}
pub fn variant_to_string(var: &ast::Variant) -> String {
to_string(|s| s.print_variant(var))
}
pub fn arg_to_string(arg: &ast::Arg) -> String {
to_string(|s| s.print_arg(arg, false))
}
pub fn mac_to_string(arg: &ast::Mac) -> String {
to_string(|s| s.print_mac(arg))
}
pub fn foreign_item_to_string(arg: &ast::ForeignItem) -> String {
to_string(|s| s.print_foreign_item(arg))
}
pub fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String {
format!("{}{}", to_string(|s| s.print_visibility(vis)), s)
}
pub trait PrintState<'a> {
fn writer(&mut self) -> &mut pp::Printer<'a>;
fn boxes(&mut self) -> &mut Vec<pp::Breaks>;
fn comments(&mut self) -> &mut Option<Vec<comments::Comment>>;
fn cur_cmnt(&mut self) -> &mut usize;
fn cur_lit(&mut self) -> Option<&comments::Literal>;
fn bump_lit(&mut self) -> Option<comments::Literal>;
fn word_space<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
self.writer().word(w)?;
self.writer().space()
}
fn popen(&mut self) -> io::Result<()> { self.writer().word("(") }
fn pclose(&mut self) -> io::Result<()> { self.writer().word(")") }
fn is_begin(&mut self) -> bool {
match self.writer().last_token() {
pp::Token::Begin(_) => true,
_ => false,
}
}
fn is_end(&mut self) -> bool {
match self.writer().last_token() {
pp::Token::End => true,
_ => false,
}
}
// is this the beginning of a line?
fn is_bol(&mut self) -> bool {
self.writer().last_token().is_eof() || self.writer().last_token().is_hardbreak_tok()
}
fn hardbreak_if_not_bol(&mut self) -> io::Result<()> {
if !self.is_bol() {
self.writer().hardbreak()?
}
Ok(())
}
// "raw box"
fn rbox(&mut self, u: usize, b: pp::Breaks) -> io::Result<()> {
self.boxes().push(b);
self.writer().rbox(u, b)
}
fn ibox(&mut self, u: usize) -> io::Result<()> {
self.boxes().push(pp::Breaks::Inconsistent);
self.writer().ibox(u)
}
fn end(&mut self) -> io::Result<()> {
self.boxes().pop().unwrap();
self.writer().end()
}
fn commasep<T, F>(&mut self, b: Breaks, elts: &[T], mut op: F) -> io::Result<()>
where F: FnMut(&mut Self, &T) -> io::Result<()>,
{
self.rbox(0, b)?;
let mut first = true;
for elt in elts {
if first { first = false; } else { self.word_space(",")?; }
op(self, elt)?;
}
self.end()
}
fn next_lit(&mut self, pos: BytePos) -> Option<comments::Literal> {
while let Some(ltrl) = self.cur_lit().cloned() {
if ltrl.pos > pos { break; }
// we don't need the value here since we're forced to clone cur_lit
// due to lack of NLL.
self.bump_lit();
if ltrl.pos == pos {
return Some(ltrl);
}
}
None
}
fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> {
while let Some(ref cmnt) = self.next_comment() {
if cmnt.pos < pos {
self.print_comment(cmnt)?;
} else {
break
}
}
Ok(())
}
fn print_comment(&mut self,
cmnt: &comments::Comment) -> io::Result<()> {
let r = match cmnt.style {
comments::Mixed => {
assert_eq!(cmnt.lines.len(), 1);
self.writer().zerobreak()?;
self.writer().word(cmnt.lines[0].clone())?;
self.writer().zerobreak()
}
comments::Isolated => {
self.hardbreak_if_not_bol()?;
for line in &cmnt.lines {
// Don't print empty lines because they will end up as trailing
// whitespace
if !line.is_empty() {
self.writer().word(line.clone())?;
}
self.writer().hardbreak()?;
}
Ok(())
}
comments::Trailing => {
if !self.is_bol() {
self.writer().word(" ")?;
}
if cmnt.lines.len() == 1 {
self.writer().word(cmnt.lines[0].clone())?;
self.writer().hardbreak()
} else {
self.ibox(0)?;
for line in &cmnt.lines {
if !line.is_empty() {
self.writer().word(line.clone())?;
}
self.writer().hardbreak()?;
}
self.end()
}
}
comments::BlankLine => {
// We need to do at least one, possibly two hardbreaks.
let is_semi = match self.writer().last_token() {
pp::Token::String(s, _) => ";" == s,
_ => false
};
if is_semi || self.is_begin() || self.is_end() {
self.writer().hardbreak()?;
}
self.writer().hardbreak()
}
};
match r {
Ok(()) => {
*self.cur_cmnt() = *self.cur_cmnt() + 1;
Ok(())
}
Err(e) => Err(e),
}
}
fn next_comment(&mut self) -> Option<comments::Comment> {
let cur_cmnt = *self.cur_cmnt();
match *self.comments() {
Some(ref cmnts) => {
if cur_cmnt < cmnts.len() {
Some(cmnts[cur_cmnt].clone())
} else {
None
}
}
_ => None
}
}
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
if let Some(ltrl) = self.next_lit(lit.span.lo()) {
return self.writer().word(ltrl.lit.clone());
}
match lit.node {
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
ast::LitKind::Err(st) => {
let st = st.as_str().escape_debug().to_string();
let mut res = String::with_capacity(st.len() + 2);
res.push('\'');
res.push_str(&st);
res.push('\'');
self.writer().word(res)
}
ast::LitKind::Byte(byte) => {
let mut res = String::from("b'");
res.extend(ascii::escape_default(byte).map(|c| c as char));
res.push('\'');
self.writer().word(res)
}
ast::LitKind::Char(ch) => {
let mut res = String::from("'");
res.extend(ch.escape_default());
res.push('\'');
self.writer().word(res)
}
ast::LitKind::Int(i, t) => {
match t {
ast::LitIntType::Signed(st) => {
self.writer().word(st.val_to_string(i as i128))
}
ast::LitIntType::Unsigned(ut) => {
self.writer().word(ut.val_to_string(i))
}
ast::LitIntType::Unsuffixed => {
self.writer().word(i.to_string())
}
}
}
ast::LitKind::Float(ref f, t) => {
self.writer().word(format!("{}{}", &f, t.ty_to_string()))
}
ast::LitKind::FloatUnsuffixed(ref f) => self.writer().word(f.as_str().get()),
ast::LitKind::Bool(val) => {
if val { self.writer().word("true") } else { self.writer().word("false") }
}
ast::LitKind::ByteStr(ref v) => {
let mut escaped: String = String::new();
for &ch in v.iter() {
escaped.extend(ascii::escape_default(ch)
.map(|c| c as char));
}
self.writer().word(format!("b\"{}\"", escaped))
}
}
}
fn print_string(&mut self, st: &str,
style: ast::StrStyle) -> io::Result<()> {
let st = match style {
ast::StrStyle::Cooked => {
(format!("\"{}\"", st.escape_debug()))
}
ast::StrStyle::Raw(n) => {
(format!("r{delim}\"{string}\"{delim}",
delim="#".repeat(n as usize),
string=st))
}
};
self.writer().word(st)
}
fn print_inner_attributes(&mut self,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true)
}
fn print_inner_attributes_no_trailing_hardbreak(&mut self,
attrs: &[ast::Attribute])
-> io::Result<()> {
self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, false)
}
fn print_outer_attributes(&mut self,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true)
}
fn print_inner_attributes_inline(&mut self,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_either_attributes(attrs, ast::AttrStyle::Inner, true, true)
}
fn print_outer_attributes_inline(&mut self,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_either_attributes(attrs, ast::AttrStyle::Outer, true, true)
}
fn print_either_attributes(&mut self,
attrs: &[ast::Attribute],
kind: ast::AttrStyle,
is_inline: bool,
trailing_hardbreak: bool) -> io::Result<()> {
let mut count = 0;
for attr in attrs {
if attr.style == kind {
self.print_attribute_inline(attr, is_inline)?;
if is_inline {
self.nbsp()?;
}
count += 1;
}
}
if count > 0 && trailing_hardbreak && !is_inline {
self.hardbreak_if_not_bol()?;
}
Ok(())
}
fn print_attribute_path(&mut self, path: &ast::Path) -> io::Result<()> {
for (i, segment) in path.segments.iter().enumerate() {
if i > 0 {
self.writer().word("::")?
}
if segment.ident.name != keywords::PathRoot.name() {
if segment.ident.name == keywords::DollarCrate.name() {
self.print_dollar_crate(segment.ident)?;
} else {
self.writer().word(segment.ident.as_str().get())?;
}
}
}
Ok(())
}
fn print_attribute(&mut self, attr: &ast::Attribute) -> io::Result<()> {
self.print_attribute_inline(attr, false)
}
fn print_attribute_inline(&mut self, attr: &ast::Attribute,
is_inline: bool) -> io::Result<()> {
if !is_inline {
self.hardbreak_if_not_bol()?;
}
self.maybe_print_comment(attr.span.lo())?;
if attr.is_sugared_doc {
self.writer().word(attr.value_str().unwrap().as_str().get())?;
self.writer().hardbreak()
} else {
match attr.style {
ast::AttrStyle::Inner => self.writer().word("#![")?,
ast::AttrStyle::Outer => self.writer().word("#[")?,
}
if let Some(mi) = attr.meta() {
self.print_meta_item(&mi)?
} else {
self.print_attribute_path(&attr.path)?;
self.writer().space()?;
self.print_tts(attr.tokens.clone())?;
}
self.writer().word("]")
}
}
fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) -> io::Result<()> {
match item {
ast::NestedMetaItem::MetaItem(ref mi) => {
self.print_meta_item(mi)
},
ast::NestedMetaItem::Literal(ref lit) => {
self.print_literal(lit)
}
}
}
fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> {
self.ibox(INDENT_UNIT)?;
match item.node {
ast::MetaItemKind::Word => self.print_attribute_path(&item.path)?,
ast::MetaItemKind::NameValue(ref value) => {
self.print_attribute_path(&item.path)?;
self.writer().space()?;
self.word_space("=")?;
self.print_literal(value)?;
}
ast::MetaItemKind::List(ref items) => {
self.print_attribute_path(&item.path)?;
self.popen()?;
self.commasep(Consistent,
&items[..],
|s, i| s.print_meta_list_item(i))?;
self.pclose()?;
}
}
self.end()
}
/// This doesn't deserve to be called "pretty" printing, but it should be
/// meaning-preserving. A quick hack that might help would be to look at the
/// spans embedded in the TTs to decide where to put spaces and newlines.
/// But it'd be better to parse these according to the grammar of the
/// appropriate macro, transcribe back into the grammar we just parsed from,
/// and then pretty-print the resulting AST nodes (so, e.g., we print
/// expression arguments as expressions). It can be done! I think.
fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
match tt {
TokenTree::Token(_, ref tk) => {
self.writer().word(token_to_string(tk))?;
match *tk {
parse::token::DocComment(..) => {
self.writer().hardbreak()
}
_ => Ok(())
}
}
TokenTree::Delimited(_, delim, tts) => {
self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
self.print_tts(tts)?;
self.writer().space()?;
self.writer().word(token_to_string(&token::CloseDelim(delim)))
},
}
}
fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> {
self.ibox(0)?;
for (i, tt) in tts.into_trees().enumerate() {
if i != 0 {
self.writer().space()?;
}
self.print_tt(tt)?;
}
self.end()
}
fn space_if_not_bol(&mut self) -> io::Result<()> {
if !self.is_bol() { self.writer().space()?; }
Ok(())
}
fn nbsp(&mut self) -> io::Result<()> { self.writer().word(" ") }
// AST pretty-printer is used as a fallback for turning AST structures into token streams for
// proc macros. Additionally, proc macros may stringify their input and expect it survive the
// stringification (especially true for proc macro derives written between Rust 1.15 and 1.30).
// So we need to somehow pretty-print `$crate` in paths in a way preserving at least some of
// its hygiene data, most importantly name of the crate it refers to.
// As a result we print `$crate` as `crate` if it refers to the local crate
// and as `::other_crate_name` if it refers to some other crate.
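    // For example (crate names illustrative): a macro-expanded `$crate::cmp::max`
    // is printed as `crate::cmp::max` inside the defining crate, and as
    // `::defining_crate_name::cmp::max` when printed from any other crate.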
fn print_dollar_crate(&mut self, ident: ast::Ident) -> io::Result<()> {
let name = ident.span.ctxt().dollar_crate_name();
if !ast::Ident::with_empty_ctxt(name).is_path_segment_keyword() {
self.writer().word("::")?;
}
self.writer().word(name.as_str().get())
}
}
impl<'a> PrintState<'a> for State<'a> {
fn writer(&mut self) -> &mut pp::Printer<'a> {
&mut self.s
}
fn boxes(&mut self) -> &mut Vec<pp::Breaks> {
&mut self.boxes
}
fn comments(&mut self) -> &mut Option<Vec<comments::Comment>> {
&mut self.comments
}
fn cur_cmnt(&mut self) -> &mut usize {
&mut self.cur_cmnt
}
fn cur_lit(&mut self) -> Option<&comments::Literal> {
self.literals.peek()
}
fn bump_lit(&mut self) -> Option<comments::Literal> {
self.literals.next()
}
}
impl<'a> State<'a> {
pub fn cbox(&mut self, u: usize) -> io::Result<()> {
self.boxes.push(pp::Breaks::Consistent);
self.s.cbox(u)
}
pub fn word_nbsp<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
self.s.word(w)?;
self.nbsp()
}
pub fn head<S: Into<Cow<'static, str>>>(&mut self, w: S) -> io::Result<()> {
let w = w.into();
// outer-box is consistent
self.cbox(INDENT_UNIT)?;
// head-box is inconsistent
self.ibox(w.len() + 1)?;
// keyword that starts the head
if !w.is_empty() {
self.word_nbsp(w)?;
}
Ok(())
}
pub fn bopen(&mut self) -> io::Result<()> {
self.s.word("{")?;
self.end() // close the head-box
}
pub fn bclose_(&mut self, span: syntax_pos::Span,
indented: usize) -> io::Result<()> {
self.bclose_maybe_open(span, indented, true)
}
pub fn bclose_maybe_open(&mut self, span: syntax_pos::Span,
indented: usize, close_box: bool) -> io::Result<()> {
self.maybe_print_comment(span.hi())?;
self.break_offset_if_not_bol(1, -(indented as isize))?;
self.s.word("}")?;
if close_box {
self.end()?; // close the outer-box
}
Ok(())
}
pub fn bclose(&mut self, span: syntax_pos::Span) -> io::Result<()> {
self.bclose_(span, INDENT_UNIT)
}
pub fn in_cbox(&self) -> bool {
match self.boxes.last() {
Some(&last_box) => last_box == pp::Breaks::Consistent,
None => false
}
}
pub fn break_offset_if_not_bol(&mut self, n: usize,
off: isize) -> io::Result<()> {
if !self.is_bol() {
self.s.break_offset(n, off)
} else {
if off != 0 && self.s.last_token().is_hardbreak_tok() {
// We do something pretty sketchy here: tuck the nonzero
// offset-adjustment we were going to deposit along with the
// break into the previous hardbreak.
self.s.replace_last_token(pp::Printer::hardbreak_tok_offset(off));
}
Ok(())
}
}
// Synthesizes a comment that was not textually present in the original source
// file.
pub fn synth_comment(&mut self, text: String) -> io::Result<()> {
self.s.word("/*")?;
self.s.space()?;
self.s.word(text)?;
self.s.space()?;
self.s.word("*/")
}
pub fn commasep_cmnt<T, F, G>(&mut self,
b: Breaks,
elts: &[T],
mut op: F,
mut get_span: G) -> io::Result<()> where
F: FnMut(&mut State<'_>, &T) -> io::Result<()>,
G: FnMut(&T) -> syntax_pos::Span,
{
self.rbox(0, b)?;
let len = elts.len();
let mut i = 0;
for elt in elts {
self.maybe_print_comment(get_span(elt).hi())?;
op(self, elt)?;
i += 1;
if i < len {
self.s.word(",")?;
self.maybe_print_trailing_comment(get_span(elt),
Some(get_span(&elts[i]).hi()))?;
self.space_if_not_bol()?;
}
}
self.end()
}
pub fn commasep_exprs(&mut self, b: Breaks,
exprs: &[P<ast::Expr>]) -> io::Result<()> {
self.commasep_cmnt(b, exprs, |s, e| s.print_expr(e), |e| e.span)
}
pub fn print_mod(&mut self, _mod: &ast::Mod,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_inner_attributes(attrs)?;
for item in &_mod.items {
self.print_item(item)?;
}
Ok(())
}
pub fn print_foreign_mod(&mut self, nmod: &ast::ForeignMod,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_inner_attributes(attrs)?;
for item in &nmod.items {
self.print_foreign_item(item)?;
}
Ok(())
}
pub fn print_opt_lifetime(&mut self, lifetime: &Option<ast::Lifetime>) -> io::Result<()> {
if let Some(lt) = *lifetime {
self.print_lifetime(lt)?;
self.nbsp()?;
}
Ok(())
}
pub fn print_generic_arg(&mut self, generic_arg: &GenericArg) -> io::Result<()> {
match generic_arg {
GenericArg::Lifetime(lt) => self.print_lifetime(*lt),
GenericArg::Type(ty) => self.print_type(ty),
GenericArg::Const(ct) => self.print_expr(&ct.value),
}
}
pub fn print_type(&mut self, ty: &ast::Ty) -> io::Result<()> {
self.maybe_print_comment(ty.span.lo())?;
self.ibox(0)?;
match ty.node {
ast::TyKind::Slice(ref ty) => {
self.s.word("[")?;
self.print_type(ty)?;
self.s.word("]")?;
}
ast::TyKind::Ptr(ref mt) => {
self.s.word("*")?;
match mt.mutbl {
ast::Mutability::Mutable => self.word_nbsp("mut")?,
ast::Mutability::Immutable => self.word_nbsp("const")?,
}
self.print_type(&mt.ty)?;
}
ast::TyKind::Rptr(ref lifetime, ref mt) => {
self.s.word("&")?;
self.print_opt_lifetime(lifetime)?;
self.print_mt(mt)?;
}
ast::TyKind::Never => {
self.s.word("!")?;
},
ast::TyKind::Tup(ref elts) => {
self.popen()?;
self.commasep(Inconsistent, &elts[..],
|s, ty| s.print_type(ty))?;
if elts.len() == 1 {
self.s.word(",")?;
}
self.pclose()?;
}
ast::TyKind::Paren(ref typ) => {
self.popen()?;
self.print_type(typ)?;
self.pclose()?;
}
ast::TyKind::BareFn(ref f) => {
self.print_ty_fn(f.abi,
f.unsafety,
&f.decl,
None,
&f.generic_params)?;
}
ast::TyKind::Path(None, ref path) => {
self.print_path(path, false, 0)?;
}
ast::TyKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, false)?
}
ast::TyKind::TraitObject(ref bounds, syntax) => {
let prefix = if syntax == ast::TraitObjectSyntax::Dyn { "dyn" } else { "" };
self.print_type_bounds(prefix, &bounds[..])?;
}
ast::TyKind::ImplTrait(_, ref bounds) => {
self.print_type_bounds("impl", &bounds[..])?;
}
ast::TyKind::Array(ref ty, ref length) => {
self.s.word("[")?;
self.print_type(ty)?;
self.s.word("; ")?;
self.print_expr(&length.value)?;
self.s.word("]")?;
}
ast::TyKind::Typeof(ref e) => {
self.s.word("typeof(")?;
self.print_expr(&e.value)?;
self.s.word(")")?;
}
ast::TyKind::Infer => {
self.s.word("_")?;
}
ast::TyKind::Err => {
self.popen()?;
self.s.word("/*ERROR*/")?;
self.pclose()?;
}
ast::TyKind::ImplicitSelf => {
self.s.word("Self")?;
}
ast::TyKind::Mac(ref m) => {
self.print_mac(m)?;
}
ast::TyKind::CVarArgs => {
self.s.word("...")?;
}
}
self.end()
}
pub fn print_foreign_item(&mut self,
item: &ast::ForeignItem) -> io::Result<()> {
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(item.span.lo())?;
self.print_outer_attributes(&item.attrs)?;
match item.node {
ast::ForeignItemKind::Fn(ref decl, ref generics) => {
self.head("")?;
self.print_fn(decl, ast::FnHeader::default(),
Some(item.ident),
generics, &item.vis)?;
self.end()?; // end head-ibox
self.s.word(";")?;
self.end() // end the outer fn box
}
ast::ForeignItemKind::Static(ref t, m) => {
self.head(visibility_qualified(&item.vis, "static"))?;
if m {
self.word_space("mut")?;
}
self.print_ident(item.ident)?;
self.word_space(":")?;
self.print_type(t)?;
self.s.word(";")?;
self.end()?; // end the head-ibox
self.end() // end the outer cbox
}
ast::ForeignItemKind::Ty => {
self.head(visibility_qualified(&item.vis, "type"))?;
self.print_ident(item.ident)?;
self.s.word(";")?;
self.end()?; // end the head-ibox
self.end() // end the outer cbox
}
ast::ForeignItemKind::Macro(ref m) => {
self.print_mac(m)?;
match m.node.delim {
MacDelimiter::Brace => Ok(()),
_ => self.s.word(";")
}
}
}
}
fn print_associated_const(&mut self,
ident: ast::Ident,
ty: &ast::Ty,
default: Option<&ast::Expr>,
vis: &ast::Visibility)
-> io::Result<()>
{
self.s.word(visibility_qualified(vis, ""))?;
self.word_space("const")?;
self.print_ident(ident)?;
self.word_space(":")?;
self.print_type(ty)?;
if let Some(expr) = default {
self.s.space()?;
self.word_space("=")?;
self.print_expr(expr)?;
}
self.s.word(";")
}
fn print_associated_type(&mut self,
ident: ast::Ident,
bounds: Option<&ast::GenericBounds>,
ty: Option<&ast::Ty>)
-> io::Result<()> {
self.word_space("type")?;
self.print_ident(ident)?;
if let Some(bounds) = bounds {
self.print_type_bounds(":", bounds)?;
}
if let Some(ty) = ty {
self.s.space()?;
self.word_space("=")?;
self.print_type(ty)?;
}
self.s.word(";")
}
/// Pretty-print an item
pub fn print_item(&mut self, item: &ast::Item) -> io::Result<()> {
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(item.span.lo())?;
self.print_outer_attributes(&item.attrs)?;
self.ann.pre(self, AnnNode::Item(item))?;
match item.node {
ast::ItemKind::ExternCrate(orig_name) => {
self.head(visibility_qualified(&item.vis, "extern crate"))?;
if let Some(orig_name) = orig_name {
self.print_name(orig_name)?;
self.s.space()?;
self.s.word("as")?;
self.s.space()?;
}
self.print_ident(item.ident)?;
self.s.word(";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
ast::ItemKind::Use(ref tree) => {
self.head(visibility_qualified(&item.vis, "use"))?;
self.print_use_tree(tree)?;
self.s.word(";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
ast::ItemKind::Static(ref ty, m, ref expr) => {
self.head(visibility_qualified(&item.vis, "static"))?;
if m == ast::Mutability::Mutable {
self.word_space("mut")?;
}
self.print_ident(item.ident)?;
self.word_space(":")?;
self.print_type(ty)?;
self.s.space()?;
self.end()?; // end the head-ibox
self.word_space("=")?;
self.print_expr(expr)?;
self.s.word(";")?;
self.end()?; // end the outer cbox
}
ast::ItemKind::Const(ref ty, ref expr) => {
self.head(visibility_qualified(&item.vis, "const"))?;
self.print_ident(item.ident)?;
self.word_space(":")?;
self.print_type(ty)?;
self.s.space()?;
self.end()?; // end the head-ibox
self.word_space("=")?;
self.print_expr(expr)?;
self.s.word(";")?;
self.end()?; // end the outer cbox
}
ast::ItemKind::Fn(ref decl, header, ref param_names, ref body) => {
self.head("")?;
self.print_fn(
decl,
header,
Some(item.ident),
param_names,
&item.vis
)?;
self.s.word(" ")?;
self.print_block_with_attrs(body, &item.attrs)?;
}
ast::ItemKind::Mod(ref _mod) => {
self.head(visibility_qualified(&item.vis, "mod"))?;
self.print_ident(item.ident)?;
if _mod.inline || self.is_expanded {
self.nbsp()?;
self.bopen()?;
self.print_mod(_mod, &item.attrs)?;
self.bclose(item.span)?;
} else {
self.s.word(";")?;
self.end()?; // end inner head-block
self.end()?; // end outer head-block
}
}
ast::ItemKind::ForeignMod(ref nmod) => {
self.head("extern")?;
self.word_nbsp(nmod.abi.to_string())?;
self.bopen()?;
self.print_foreign_mod(nmod, &item.attrs)?;
self.bclose(item.span)?;
}
ast::ItemKind::GlobalAsm(ref ga) => {
self.head(visibility_qualified(&item.vis, "global_asm!"))?;
self.s.word(ga.asm.as_str().get())?;
self.end()?;
}
ast::ItemKind::Ty(ref ty, ref generics) => {
self.head(visibility_qualified(&item.vis, "type"))?;
self.print_ident(item.ident)?;
self.print_generic_params(&generics.params)?;
self.end()?; // end the inner ibox
self.print_where_clause(&generics.where_clause)?;
self.s.space()?;
self.word_space("=")?;
self.print_type(ty)?;
self.s.word(";")?;
self.end()?; // end the outer ibox
}
ast::ItemKind::Existential(ref bounds, ref generics) => {
self.head(visibility_qualified(&item.vis, "existential type"))?;
self.print_ident(item.ident)?;
self.print_generic_params(&generics.params)?;
self.end()?; // end the inner ibox
self.print_where_clause(&generics.where_clause)?;
self.s.space()?;
self.print_type_bounds(":", bounds)?;
self.s.word(";")?;
self.end()?; // end the outer ibox
}
ast::ItemKind::Enum(ref enum_definition, ref params) => {
self.print_enum_def(
enum_definition,
params,
item.ident,
item.span,
&item.vis
)?;
}
ast::ItemKind::Struct(ref struct_def, ref generics) => {
self.head(visibility_qualified(&item.vis, "struct"))?;
self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::Union(ref struct_def, ref generics) => {
self.head(visibility_qualified(&item.vis, "union"))?;
self.print_struct(struct_def, generics, item.ident, item.span, true)?;
}
ast::ItemKind::Impl(unsafety,
polarity,
defaultness,
ref generics,
ref opt_trait,
ref ty,
ref impl_items) => {
self.head("")?;
self.print_visibility(&item.vis)?;
self.print_defaultness(defaultness)?;
self.print_unsafety(unsafety)?;
self.word_nbsp("impl")?;
if !generics.params.is_empty() {
self.print_generic_params(&generics.params)?;
self.s.space()?;
}
if polarity == ast::ImplPolarity::Negative {
self.s.word("!")?;
}
if let Some(ref t) = *opt_trait {
self.print_trait_ref(t)?;
self.s.space()?;
self.word_space("for")?;
}
self.print_type(ty)?;
self.print_where_clause(&generics.where_clause)?;
self.s.space()?;
self.bopen()?;
self.print_inner_attributes(&item.attrs)?;
for impl_item in impl_items {
self.print_impl_item(impl_item)?;
}
self.bclose(item.span)?;
}
ast::ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref trait_items) => {
self.head("")?;
self.print_visibility(&item.vis)?;
self.print_unsafety(unsafety)?;
self.print_is_auto(is_auto)?;
self.word_nbsp("trait")?;
self.print_ident(item.ident)?;
self.print_generic_params(&generics.params)?;
let mut real_bounds = Vec::with_capacity(bounds.len());
for b in bounds.iter() {
if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
self.s.space()?;
self.word_space("for ?")?;
self.print_trait_ref(&ptr.trait_ref)?;
} else {
real_bounds.push(b.clone());
}
}
self.print_type_bounds(":", &real_bounds[..])?;
self.print_where_clause(&generics.where_clause)?;
self.s.word(" ")?;
self.bopen()?;
for trait_item in trait_items {
self.print_trait_item(trait_item)?;
}
self.bclose(item.span)?;
}
ast::ItemKind::TraitAlias(ref generics, ref bounds) => {
self.head("")?;
self.print_visibility(&item.vis)?;
self.word_nbsp("trait")?;
self.print_ident(item.ident)?;
self.print_generic_params(&generics.params)?;
let mut real_bounds = Vec::with_capacity(bounds.len());
// FIXME(durka) this seems to be some quite outdated syntax
for b in bounds.iter() {
if let GenericBound::Trait(ref ptr, ast::TraitBoundModifier::Maybe) = *b {
self.s.space()?;
self.word_space("for ?")?;
self.print_trait_ref(&ptr.trait_ref)?;
} else {
real_bounds.push(b.clone());
}
}
self.nbsp()?;
self.print_type_bounds("=", &real_bounds[..])?;
self.print_where_clause(&generics.where_clause)?;
self.s.word(";")?;
}
ast::ItemKind::Mac(ref mac) => {
if item.ident.name == keywords::Invalid.name() {
self.print_mac(mac)?;
match mac.node.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";")?,
}
} else {
self.print_path(&mac.node.path, false, 0)?;
self.s.word("! ")?;
self.print_ident(item.ident)?;
self.cbox(INDENT_UNIT)?;
self.popen()?;
self.print_tts(mac.node.stream())?;
self.pclose()?;
self.s.word(";")?;
self.end()?;
}
}
ast::ItemKind::MacroDef(ref tts) => {
self.s.word("macro_rules! ")?;
self.print_ident(item.ident)?;
self.cbox(INDENT_UNIT)?;
self.popen()?;
self.print_tts(tts.stream())?;
self.pclose()?;
self.s.word(";")?;
self.end()?;
}
}
self.ann.post(self, AnnNode::Item(item))
}
fn print_trait_ref(&mut self, t: &ast::TraitRef) -> io::Result<()> {
self.print_path(&t.path, false, 0)
}
fn print_formal_generic_params(
&mut self,
generic_params: &[ast::GenericParam]
) -> io::Result<()> {
if !generic_params.is_empty() {
self.s.word("for")?;
self.print_generic_params(generic_params)?;
self.nbsp()?;
}
Ok(())
}
fn print_poly_trait_ref(&mut self, t: &ast::PolyTraitRef) -> io::Result<()> {
self.print_formal_generic_params(&t.bound_generic_params)?;
self.print_trait_ref(&t.trait_ref)
}
pub fn print_enum_def(&mut self, enum_definition: &ast::EnumDef,
generics: &ast::Generics, ident: ast::Ident,
span: syntax_pos::Span,
visibility: &ast::Visibility) -> io::Result<()> {
self.head(visibility_qualified(visibility, "enum"))?;
self.print_ident(ident)?;
self.print_generic_params(&generics.params)?;
self.print_where_clause(&generics.where_clause)?;
self.s.space()?;
self.print_variants(&enum_definition.variants, span)
}
pub fn print_variants(&mut self,
variants: &[ast::Variant],
span: syntax_pos::Span) -> io::Result<()> {
self.bopen()?;
for v in variants {
self.space_if_not_bol()?;
self.maybe_print_comment(v.span.lo())?;
self.print_outer_attributes(&v.node.attrs)?;
self.ibox(INDENT_UNIT)?;
self.print_variant(v)?;
self.s.word(",")?;
self.end()?;
self.maybe_print_trailing_comment(v.span, None)?;
}
self.bclose(span)
}
pub fn print_visibility(&mut self, vis: &ast::Visibility) -> io::Result<()> {
match vis.node {
ast::VisibilityKind::Public => self.word_nbsp("pub"),
ast::VisibilityKind::Crate(sugar) => match sugar {
ast::CrateSugar::PubCrate => self.word_nbsp("pub(crate)"),
ast::CrateSugar::JustCrate => self.word_nbsp("crate")
}
ast::VisibilityKind::Restricted { ref path, .. } => {
let path = to_string(|s| s.print_path(path, false, 0));
if path == "self" || path == "super" {
self.word_nbsp(format!("pub({})", path))
} else {
self.word_nbsp(format!("pub(in {})", path))
}
}
ast::VisibilityKind::Inherited => Ok(())
}
}
pub fn print_defaultness(&mut self, defaultness: ast::Defaultness) -> io::Result<()> {
if let ast::Defaultness::Default = defaultness {
self.word_nbsp("default")?;
}
Ok(())
}
pub fn print_struct(&mut self,
struct_def: &ast::VariantData,
generics: &ast::Generics,
ident: ast::Ident,
span: syntax_pos::Span,
print_finalizer: bool) -> io::Result<()> {
self.print_ident(ident)?;
self.print_generic_params(&generics.params)?;
match struct_def {
ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
if let ast::VariantData::Tuple(..) = struct_def {
self.popen()?;
self.commasep(
Inconsistent, struct_def.fields(),
|s, field| {
s.maybe_print_comment(field.span.lo())?;
s.print_outer_attributes(&field.attrs)?;
s.print_visibility(&field.vis)?;
s.print_type(&field.ty)
}
)?;
self.pclose()?;
}
self.print_where_clause(&generics.where_clause)?;
if print_finalizer {
self.s.word(";")?;
}
self.end()?;
self.end() // close the outer-box
}
ast::VariantData::Struct(..) => {
self.print_where_clause(&generics.where_clause)?;
self.nbsp()?;
self.bopen()?;
self.hardbreak_if_not_bol()?;
for field in struct_def.fields() {
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(field.span.lo())?;
self.print_outer_attributes(&field.attrs)?;
self.print_visibility(&field.vis)?;
self.print_ident(field.ident.unwrap())?;
self.word_nbsp(":")?;
self.print_type(&field.ty)?;
self.s.word(",")?;
}
self.bclose(span)
}
}
}
pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
self.head("")?;
let generics = ast::Generics::default();
self.print_struct(&v.node.data, &generics, v.node.ident, v.span, false)?;
match v.node.disr_expr {
Some(ref d) => {
self.s.space()?;
self.word_space("=")?;
self.print_expr(&d.value)
}
_ => Ok(())
}
}
pub fn print_method_sig(&mut self,
ident: ast::Ident,
generics: &ast::Generics,
m: &ast::MethodSig,
vis: &ast::Visibility)
-> io::Result<()> {
self.print_fn(&m.decl,
m.header,
Some(ident),
&generics,
vis)
}
pub fn print_trait_item(&mut self, ti: &ast::TraitItem)
-> io::Result<()> {
self.ann.pre(self, AnnNode::SubItem(ti.id))?;
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(ti.span.lo())?;
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
ast::TraitItemKind::Const(ref ty, ref default) => {
self.print_associated_const(
ti.ident,
ty,
default.as_ref().map(|expr| &**expr),
&source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited),
)?;
}
ast::TraitItemKind::Method(ref sig, ref body) => {
if body.is_some() {
self.head("")?;
}
self.print_method_sig(
ti.ident,
&ti.generics,
sig,
&source_map::respan(ti.span.shrink_to_lo(), ast::VisibilityKind::Inherited),
)?;
if let Some(ref body) = *body {
self.nbsp()?;
self.print_block_with_attrs(body, &ti.attrs)?;
} else {
self.s.word(";")?;
}
}
ast::TraitItemKind::Type(ref bounds, ref default) => {
self.print_associated_type(ti.ident, Some(bounds),
default.as_ref().map(|ty| &**ty))?;
}
ast::TraitItemKind::Macro(ref mac) => {
self.print_mac(mac)?;
match mac.node.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";")?,
}
}
}
self.ann.post(self, AnnNode::SubItem(ti.id))
}
pub fn print_impl_item(&mut self, ii: &ast::ImplItem) -> io::Result<()> {
self.ann.pre(self, AnnNode::SubItem(ii.id))?;
self.hardbreak_if_not_bol()?;
self.maybe_print_comment(ii.span.lo())?;
self.print_outer_attributes(&ii.attrs)?;
self.print_defaultness(ii.defaultness)?;
match ii.node {
ast::ImplItemKind::Const(ref ty, ref expr) => {
self.print_associated_const(ii.ident, ty, Some(expr), &ii.vis)?;
}
ast::ImplItemKind::Method(ref sig, ref body) => {
self.head("")?;
self.print_method_sig(ii.ident, &ii.generics, sig, &ii.vis)?;
self.nbsp()?;
self.print_block_with_attrs(body, &ii.attrs)?;
}
ast::ImplItemKind::Type(ref ty) => {
self.print_associated_type(ii.ident, None, Some(ty))?;
}
ast::ImplItemKind::Existential(ref bounds) => {
self.word_space("existential")?;
self.print_associated_type(ii.ident, Some(bounds), None)?;
}
ast::ImplItemKind::Macro(ref mac) => {
self.print_mac(mac)?;
match mac.node.delim {
MacDelimiter::Brace => {}
_ => self.s.word(";")?,
}
}
}
self.ann.post(self, AnnNode::SubItem(ii.id))
}
pub fn print_stmt(&mut self, st: &ast::Stmt) -> io::Result<()> {
self.maybe_print_comment(st.span.lo())?;
match st.node {
ast::StmtKind::Local(ref loc) => {
self.print_outer_attributes(&loc.attrs)?;
self.space_if_not_bol()?;
self.ibox(INDENT_UNIT)?;
self.word_nbsp("let")?;
self.ibox(INDENT_UNIT)?;
self.print_local_decl(loc)?;
self.end()?;
if let Some(ref init) = loc.init {
self.nbsp()?;
self.word_space("=")?;
self.print_expr(init)?;
}
self.s.word(";")?;
self.end()?;
}
ast::StmtKind::Item(ref item) => self.print_item(item)?,
ast::StmtKind::Expr(ref expr) => {
self.space_if_not_bol()?;
self.print_expr_outer_attr_style(expr, false)?;
if parse::classify::expr_requires_semi_to_be_stmt(expr) {
self.s.word(";")?;
}
}
ast::StmtKind::Semi(ref expr) => {
self.space_if_not_bol()?;
self.print_expr_outer_attr_style(expr, false)?;
self.s.word(";")?;
}
ast::StmtKind::Mac(ref mac) => {
let (ref mac, style, ref attrs) = **mac;
self.space_if_not_bol()?;
self.print_outer_attributes(attrs)?;
self.print_mac(mac)?;
if style == ast::MacStmtStyle::Semicolon {
self.s.word(";")?;
}
}
}
self.maybe_print_trailing_comment(st.span, None)
}
pub fn print_block(&mut self, blk: &ast::Block) -> io::Result<()> {
self.print_block_with_attrs(blk, &[])
}
pub fn print_block_unclosed(&mut self, blk: &ast::Block) -> io::Result<()> {
self.print_block_unclosed_indent(blk, INDENT_UNIT)
}
pub fn print_block_unclosed_with_attrs(&mut self, blk: &ast::Block,
attrs: &[ast::Attribute])
-> io::Result<()> {
self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, false)
}
pub fn print_block_unclosed_indent(&mut self, blk: &ast::Block,
indented: usize) -> io::Result<()> {
self.print_block_maybe_unclosed(blk, indented, &[], false)
}
pub fn print_block_with_attrs(&mut self,
blk: &ast::Block,
attrs: &[ast::Attribute]) -> io::Result<()> {
self.print_block_maybe_unclosed(blk, INDENT_UNIT, attrs, true)
}
pub fn print_block_maybe_unclosed(&mut self,
blk: &ast::Block,
indented: usize,
attrs: &[ast::Attribute],
close_box: bool) -> io::Result<()> {
match blk.rules {
BlockCheckMode::Unsafe(..) => self.word_space("unsafe")?,
BlockCheckMode::Default => ()
}
self.maybe_print_comment(blk.span.lo())?;
self.ann.pre(self, AnnNode::Block(blk))?;
self.bopen()?;
self.print_inner_attributes(attrs)?;
for (i, st) in blk.stmts.iter().enumerate() {
match st.node {
ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => {
self.maybe_print_comment(st.span.lo())?;
self.space_if_not_bol()?;
self.print_expr_outer_attr_style(expr, false)?;
self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi()))?;
}
_ => self.print_stmt(st)?,
}
}
self.bclose_maybe_open(blk.span, indented, close_box)?;
self.ann.post(self, AnnNode::Block(blk))
}
fn print_else(&mut self, els: Option<&ast::Expr>) -> io::Result<()> {
match els {
Some(_else) => {
match _else.node {
// "another else-if"
ast::ExprKind::If(ref i, ref then, ref e) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
self.s.word(" else if ")?;
self.print_expr_as_cond(i)?;
self.s.space()?;
self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "another else-if-let"
ast::ExprKind::IfLet(ref pats, ref expr, ref then, ref e) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
self.s.word(" else if let ")?;
self.print_pats(pats)?;
self.s.space()?;
self.word_space("=")?;
self.print_expr_as_cond(expr)?;
self.s.space()?;
self.print_block(then)?;
self.print_else(e.as_ref().map(|e| &**e))
}
// "final else"
ast::ExprKind::Block(ref b, _) => {
self.cbox(INDENT_UNIT - 1)?;
self.ibox(0)?;
self.s.word(" else ")?;
self.print_block(b)
}
// BLEAH, constraints would be great here
_ => {
panic!("print_if saw if with weird alternative");
}
}
}
_ => Ok(())
}
}
pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block,
elseopt: Option<&ast::Expr>) -> io::Result<()> {
self.head("if")?;
self.print_expr_as_cond(test)?;
self.s.space()?;
self.print_block(blk)?;
self.print_else(elseopt)
}
pub fn print_if_let(&mut self, pats: &[P<ast::Pat>], expr: &ast::Expr, blk: &ast::Block,
elseopt: Option<&ast::Expr>) -> io::Result<()> {
self.head("if let")?;
self.print_pats(pats)?;
self.s.space()?;
self.word_space("=")?;
self.print_expr_as_cond(expr)?;
self.s.space()?;
self.print_block(blk)?;
self.print_else(elseopt)
}
pub fn print_mac(&mut self, m: &ast::Mac) -> io::Result<()> {
self.print_path(&m.node.path, false, 0)?;
self.s.word("!")?;
match m.node.delim {
MacDelimiter::Parenthesis => self.popen()?,
MacDelimiter::Bracket => self.s.word("[")?,
MacDelimiter::Brace => {
self.head("")?;
self.bopen()?;
}
}
self.print_tts(m.node.stream())?;
match m.node.delim {
MacDelimiter::Parenthesis => self.pclose(),
MacDelimiter::Bracket => self.s.word("]"),
MacDelimiter::Brace => self.bclose(m.span),
}
}
fn print_call_post(&mut self, args: &[P<ast::Expr>]) -> io::Result<()> {
self.popen()?;
self.commasep_exprs(Inconsistent, args)?;
self.pclose()
}
pub fn print_expr_maybe_paren(&mut self, expr: &ast::Expr, prec: i8) -> io::Result<()> {
let needs_par = expr.precedence().order() < prec;
if needs_par {
self.popen()?;
}
self.print_expr(expr)?;
if needs_par {
self.pclose()?;
}
Ok(())
}
/// Print an expr using syntax that's acceptable in a condition position, such as the `cond` in
/// `if cond { ... }`.
pub fn print_expr_as_cond(&mut self, expr: &ast::Expr) -> io::Result<()> {
let needs_par = match expr.node {
// These cases need parens due to the parse error observed in #26461: `if return {}`
// parses as the erroneous construct `if (return {})`, not `if (return) {}`.
ast::ExprKind::Closure(..) |
ast::ExprKind::Ret(..) |
ast::ExprKind::Break(..) => true,
_ => parser::contains_exterior_struct_lit(expr),
};
if needs_par {
self.popen()?;
}
self.print_expr(expr)?;
if needs_par {
self.pclose()?;
}
Ok(())
}
fn print_expr_vec(&mut self, exprs: &[P<ast::Expr>],
attrs: &[Attribute]) -> io::Result<()> {
self.ibox(INDENT_UNIT)?;
self.s.word("[")?;
self.print_inner_attributes_inline(attrs)?;
self.commasep_exprs(Inconsistent, &exprs[..])?;
self.s.word("]")?;
self.end()
}
fn print_expr_repeat(&mut self,
element: &ast::Expr,
count: &ast::AnonConst,
attrs: &[Attribute]) -> io::Result<()> {
self.ibox(INDENT_UNIT)?;
self.s.word("[")?;
self.print_inner_attributes_inline(attrs)?;
self.print_expr(element)?;
self.word_space(";")?;
self.print_expr(&count.value)?;
self.s.word("]")?;
self.end()
}
fn print_expr_struct(&mut self,
path: &ast::Path,
fields: &[ast::Field],
wth: &Option<P<ast::Expr>>,
attrs: &[Attribute]) -> io::Result<()> {
self.print_path(path, true, 0)?;
self.s.word("{")?;
self.print_inner_attributes_inline(attrs)?;
self.commasep_cmnt(
Consistent,
&fields[..],
|s, field| {
s.ibox(INDENT_UNIT)?;
if !field.is_shorthand {
s.print_ident(field.ident)?;
s.word_space(":")?;
}
s.print_expr(&field.expr)?;
s.end()
},
|f| f.span)?;
match *wth {
Some(ref expr) => {
self.ibox(INDENT_UNIT)?;
if !fields.is_empty() {
self.s.word(",")?;
self.s.space()?;
}
self.s.word("..")?;
self.print_expr(expr)?;
self.end()?;
}
_ => if !fields.is_empty() {
self.s.word(",")?
}
}
self.s.word("}")?;
Ok(())
}
fn print_expr_tup(&mut self, exprs: &[P<ast::Expr>],
attrs: &[Attribute]) -> io::Result<()> {
self.popen()?;
self.print_inner_attributes_inline(attrs)?;
self.commasep_exprs(Inconsistent, &exprs[..])?;
if exprs.len() == 1 {
self.s.word(",")?;
}
self.pclose()
}
fn print_expr_call(&mut self,
func: &ast::Expr,
args: &[P<ast::Expr>]) -> io::Result<()> {
let prec =
match func.node {
ast::ExprKind::Field(..) => parser::PREC_FORCE_PAREN,
_ => parser::PREC_POSTFIX,
};
self.print_expr_maybe_paren(func, prec)?;
self.print_call_post(args)
}
fn print_expr_method_call(&mut self,
segment: &ast::PathSegment,
args: &[P<ast::Expr>]) -> io::Result<()> {
let base_args = &args[1..];
self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?;
self.s.word(".")?;
self.print_ident(segment.ident)?;
if let Some(ref args) = segment.args {
self.print_generic_args(args, true)?;
}
self.print_call_post(base_args)
}
fn print_expr_binary(&mut self,
op: ast::BinOp,
lhs: &ast::Expr,
rhs: &ast::Expr) -> io::Result<()> {
let assoc_op = AssocOp::from_ast_binop(op.node);
let prec = assoc_op.precedence() as i8;
let fixity = assoc_op.fixity();
let (left_prec, right_prec) = match fixity {
Fixity::Left => (prec, prec + 1),
Fixity::Right => (prec + 1, prec),
Fixity::None => (prec + 1, prec + 1),
};
let left_prec = match (&lhs.node, op.node) {
// These cases need parens: `x as i32 < y` has the parser thinking that `i32 < y` is
// the beginning of a path type. It starts trying to parse `x as (i32 < y ...` instead
// of `(x as i32) < ...`. We need to convince it _not_ to do that.
(&ast::ExprKind::Cast { .. }, ast::BinOpKind::Lt) |
(&ast::ExprKind::Cast { .. }, ast::BinOpKind::Shl) => parser::PREC_FORCE_PAREN,
_ => left_prec,
};
self.print_expr_maybe_paren(lhs, left_prec)?;
self.s.space()?;
self.word_space(op.node.to_string())?;
self.print_expr_maybe_paren(rhs, right_prec)
}
fn print_expr_unary(&mut self,
op: ast::UnOp,
expr: &ast::Expr) -> io::Result<()> {
self.s.word(ast::UnOp::to_string(op))?;
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
}
fn print_expr_addr_of(&mut self,
mutability: ast::Mutability,
expr: &ast::Expr) -> io::Result<()> {
self.s.word("&")?;
self.print_mutability(mutability)?;
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
}
pub fn print_expr(&mut self, expr: &ast::Expr) -> io::Result<()> {
self.print_expr_outer_attr_style(expr, true)
}
fn print_expr_outer_attr_style(&mut self,
expr: &ast::Expr,
is_inline: bool) -> io::Result<()> {
self.maybe_print_comment(expr.span.lo())?;
let attrs = &expr.attrs;
if is_inline {
self.print_outer_attributes_inline(attrs)?;
} else {
self.print_outer_attributes(attrs)?;
}
self.ibox(INDENT_UNIT)?;
self.ann.pre(self, AnnNode::Expr(expr))?;
match expr.node {
ast::ExprKind::Box(ref expr) => {
self.word_space("box")?;
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)?;
}
ast::ExprKind::ObsoleteInPlace(ref place, ref expr) => {
let prec = AssocOp::ObsoleteInPlace.precedence() as i8;
self.print_expr_maybe_paren(place, prec + 1)?;
self.s.space()?;
self.word_space("<-")?;
self.print_expr_maybe_paren(expr, prec)?;
}
ast::ExprKind::Array(ref exprs) => {
self.print_expr_vec(&exprs[..], attrs)?;
}
ast::ExprKind::Repeat(ref element, ref count) => {
self.print_expr_repeat(element, count, attrs)?;
}
ast::ExprKind::Struct(ref path, ref fields, ref wth) => {
self.print_expr_struct(path, &fields[..], wth, attrs)?;
}
ast::ExprKind::Tup(ref exprs) => {
self.print_expr_tup(&exprs[..], attrs)?;
}
ast::ExprKind::Call(ref func, ref args) => {
self.print_expr_call(func, &args[..])?;
}
ast::ExprKind::MethodCall(ref segment, ref args) => {
self.print_expr_method_call(segment, &args[..])?;
}
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
self.print_expr_binary(op, lhs, rhs)?;
}
ast::ExprKind::Unary(op, ref expr) => {
self.print_expr_unary(op, expr)?;
}
ast::ExprKind::AddrOf(m, ref expr) => {
self.print_expr_addr_of(m, expr)?;
}
ast::ExprKind::Lit(ref lit) => {
self.print_literal(lit)?;
}
ast::ExprKind::Cast(ref expr, ref ty) => {
let prec = AssocOp::As.precedence() as i8;
self.print_expr_maybe_paren(expr, prec)?;
self.s.space()?;
self.word_space("as")?;
self.print_type(ty)?;
}
ast::ExprKind::Type(ref expr, ref ty) => {
let prec = AssocOp::Colon.precedence() as i8;
self.print_expr_maybe_paren(expr, prec)?;
self.word_space(":")?;
self.print_type(ty)?;
}
ast::ExprKind::If(ref test, ref blk, ref elseopt) => {
self.print_if(test, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::IfLet(ref pats, ref expr, ref blk, ref elseopt) => {
self.print_if_let(pats, expr, blk, elseopt.as_ref().map(|e| &**e))?;
}
ast::ExprKind::While(ref test, ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while")?;
self.print_expr_as_cond(test)?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::WhileLet(ref pats, ref expr, ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while let")?;
self.print_pats(pats)?;
self.s.space()?;
self.word_space("=")?;
self.print_expr_as_cond(expr)?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("for")?;
self.print_pat(pat)?;
self.s.space()?;
self.word_space("in")?;
self.print_expr_as_cond(iter)?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Loop(ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("loop")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Match(ref expr, ref arms) => {
self.cbox(INDENT_UNIT)?;
self.ibox(4)?;
self.word_nbsp("match")?;
self.print_expr_as_cond(expr)?;
self.s.space()?;
self.bopen()?;
self.print_inner_attributes_no_trailing_hardbreak(attrs)?;
for arm in arms {
self.print_arm(arm)?;
}
self.bclose_(expr.span, INDENT_UNIT)?;
}
ast::ExprKind::Closure(
capture_clause, asyncness, movability, ref decl, ref body, _) => {
self.print_movability(movability)?;
self.print_asyncness(asyncness)?;
self.print_capture_clause(capture_clause)?;
self.print_fn_block_args(decl)?;
self.s.space()?;
self.print_expr(body)?;
self.end()?; // need to close a box
// a box will be closed by print_expr, but we didn't want an overall
// wrapper so we closed the corresponding opening. so create an
// empty box to satisfy the close.
self.ibox(0)?;
}
ast::ExprKind::Block(ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
// containing cbox, will be closed by print-block at }
self.cbox(INDENT_UNIT)?;
// head-box, will be closed by print-block after {
self.ibox(0)?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Async(capture_clause, _, ref blk) => {
self.word_nbsp("async")?;
self.print_capture_clause(capture_clause)?;
self.s.space()?;
// cbox/ibox in analogy to the `ExprKind::Block` arm above
self.cbox(INDENT_UNIT)?;
self.ibox(0)?;
self.print_block_with_attrs(blk, attrs)?;
}
ast::ExprKind::Assign(ref lhs, ref rhs) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1)?;
self.s.space()?;
self.word_space("=")?;
self.print_expr_maybe_paren(rhs, prec)?;
}
ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1)?;
self.s.space()?;
self.s.word(op.node.to_string())?;
self.word_space("=")?;
self.print_expr_maybe_paren(rhs, prec)?;
}
ast::ExprKind::Field(ref expr, ident) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?;
self.s.word(".")?;
self.print_ident(ident)?;
}
ast::ExprKind::Index(ref expr, ref index) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?;
self.s.word("[")?;
self.print_expr(index)?;
self.s.word("]")?;
}
ast::ExprKind::Range(ref start, ref end, limits) => {
// Special case for `Range`. `AssocOp` claims that `Range` has higher precedence
// than `Assign`, but `x .. x = x` gives a parse error instead of `x .. (x = x)`.
// Here we use a fake precedence value so that any child with lower precedence than
// a "normal" binop gets parenthesized. (`LOr` is the lowest-precedence binop.)
let fake_prec = AssocOp::LOr.precedence() as i8;
if let Some(ref e) = *start {
self.print_expr_maybe_paren(e, fake_prec)?;
}
if limits == ast::RangeLimits::HalfOpen {
self.s.word("..")?;
} else {
self.s.word("..=")?;
}
if let Some(ref e) = *end {
self.print_expr_maybe_paren(e, fake_prec)?;
}
}
ast::ExprKind::Path(None, ref path) => {
self.print_path(path, true, 0)?
}
ast::ExprKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, true)?
}
ast::ExprKind::Break(opt_label, ref opt_expr) => {
self.s.word("break")?;
self.s.space()?;
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?;
self.s.space()?;
}
}
ast::ExprKind::Continue(opt_label) => {
self.s.word("continue")?;
self.s.space()?;
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.s.space()?
}
}
ast::ExprKind::Ret(ref result) => {
self.s.word("return")?;
if let Some(ref expr) = *result {
self.s.word(" ")?;
self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?;
}
}
ast::ExprKind::InlineAsm(ref a) => {
self.s.word("asm!")?;
self.popen()?;
self.print_string(&a.asm.as_str(), a.asm_str_style)?;
self.word_space(":")?;
self.commasep(Inconsistent, &a.outputs, |s, out| {
let constraint = out.constraint.as_str();
let mut ch = constraint.chars();
match ch.next() {
Some('=') if out.is_rw => {
s.print_string(&format!("+{}", ch.as_str()),
ast::StrStyle::Cooked)?
}
_ => s.print_string(&constraint, ast::StrStyle::Cooked)?
}
s.popen()?;
s.print_expr(&out.expr)?;
s.pclose()?;
Ok(())
})?;
self.s.space()?;
self.word_space(":")?;
self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| {
s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
s.popen()?;
s.print_expr(o)?;
s.pclose()?;
Ok(())
})?;
self.s.space()?;
self.word_space(":")?;
self.commasep(Inconsistent, &a.clobbers,
|s, co| {
s.print_string(&co.as_str(), ast::StrStyle::Cooked)?;
Ok(())
})?;
let mut options = vec![];
if a.volatile {
options.push("volatile");
}
if a.alignstack {
options.push("alignstack");
}
if a.dialect == ast::AsmDialect::Intel {
options.push("intel");
}
if !options.is_empty() {
self.s.space()?;
self.word_space(":")?;
self.commasep(Inconsistent, &options,
|s, &co| {
s.print_string(co, ast::StrStyle::Cooked)?;
Ok(())
})?;
}
self.pclose()?;
}
ast::ExprKind::Mac(ref m) => self.print_mac(m)?,
ast::ExprKind::Paren(ref e) => {
self.popen()?;
self.print_inner_attributes_inline(attrs)?;
self.print_expr(e)?;
self.pclose()?;
},
ast::ExprKind::Yield(ref e) => {
self.s.word("yield")?;
match *e {
Some(ref expr) => {
self.s.space()?;
self.print_expr_maybe_paren(expr, parser::PREC_JUMP)?;
}
_ => ()
}
}
ast::ExprKind::Try(ref e) => {
self.print_expr_maybe_paren(e, parser::PREC_POSTFIX)?;
self.s.word("?")?
}
ast::ExprKind::TryBlock(ref blk) => {
self.head("try")?;
self.s.space()?;
self.print_block_with_attrs(blk, attrs)?
}
ast::ExprKind::Err => {
self.popen()?;
self.s.word("/*ERROR*/")?;
self.pclose()?
}
}
self.ann.post(self, AnnNode::Expr(expr))?;
self.end()
}
pub fn print_local_decl(&mut self, loc: &ast::Local) -> io::Result<()> {
self.print_pat(&loc.pat)?;
if let Some(ref ty) = loc.ty {
self.word_space(":")?;
self.print_type(ty)?;
}
Ok(())
}
pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
if ident.is_raw_guess() {
self.s.word(format!("r#{}", ident))?;
} else {
self.s.word(ident.as_str().get())?;
}
self.ann.post(self, AnnNode::Ident(&ident))
}
pub fn print_usize(&mut self, i: usize) -> io::Result<()> {
self.s.word(i.to_string())
}
pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
self.s.word(name.as_str().get())?;
self.ann.post(self, AnnNode::Name(&name))
}
pub fn print_for_decl(&mut self, loc: &ast::Local,
coll: &ast::Expr) -> io::Result<()> {
self.print_local_decl(loc)?;
self.s.space()?;
self.word_space("in")?;
self.print_expr(coll)
}
fn print_path(&mut self,
path: &ast::Path,
colons_before_params: bool,
depth: usize)
-> io::Result<()>
{
self.maybe_print_comment(path.span.lo())?;
for (i, segment) in path.segments[..path.segments.len() - depth].iter().enumerate() {
if i > 0 {
self.s.word("::")?
}
self.print_path_segment(segment, colons_before_params)?;
}
Ok(())
}
fn print_path_segment(&mut self,
segment: &ast::PathSegment,
colons_before_params: bool)
-> io::Result<()>
{
if segment.ident.name != keywords::PathRoot.name() {
if segment.ident.name == keywords::DollarCrate.name() {
self.print_dollar_crate(segment.ident)?;
} else {
self.print_ident(segment.ident)?;
}
if let Some(ref args) = segment.args {
self.print_generic_args(args, colons_before_params)?;
}
}
Ok(())
}
fn print_qpath(&mut self,
path: &ast::Path,
qself: &ast::QSelf,
colons_before_params: bool)
-> io::Result<()>
{
self.s.word("<")?;
self.print_type(&qself.ty)?;
if qself.position > 0 {
self.s.space()?;
self.word_space("as")?;
let depth = path.segments.len() - qself.position;
self.print_path(path, false, depth)?;
}
self.s.word(">")?;
self.s.word("::")?;
let item_segment = path.segments.last().unwrap();
self.print_ident(item_segment.ident)?;
match item_segment.args {
Some(ref args) => self.print_generic_args(args, colons_before_params),
None => Ok(()),
}
}
fn print_generic_args(&mut self,
args: &ast::GenericArgs,
colons_before_params: bool)
-> io::Result<()>
{
if colons_before_params {
self.s.word("::")?
}
match *args {
ast::GenericArgs::AngleBracketed(ref data) => {
self.s.word("<")?;
self.commasep(Inconsistent, &data.args, |s, generic_arg| {
s.print_generic_arg(generic_arg)
})?;
let mut comma = data.args.len() != 0;
for binding in data.bindings.iter() {
if comma {
self.word_space(",")?
}
self.print_ident(binding.ident)?;
self.s.space()?;
self.word_space("=")?;
self.print_type(&binding.ty)?;
comma = true;
}
self.s.word(">")?
}
ast::GenericArgs::Parenthesized(ref data) => {
self.s.word("(")?;
self.commasep(
Inconsistent,
&data.inputs,
|s, ty| s.print_type(ty))?;
self.s.word(")")?;
if let Some(ref ty) = data.output {
self.space_if_not_bol()?;
self.word_space("->")?;
self.print_type(ty)?;
}
}
}
Ok(())
}
pub fn print_pat(&mut self, pat: &ast::Pat) -> io::Result<()> {
self.maybe_print_comment(pat.span.lo())?;
self.ann.pre(self, AnnNode::Pat(pat))?;
/* Pat isn't normalized, but the beauty of it
is that it doesn't matter */
match pat.node {
PatKind::Wild => self.s.word("_")?,
PatKind::Ident(binding_mode, ident, ref sub) => {
match binding_mode {
ast::BindingMode::ByRef(mutbl) => {
self.word_nbsp("ref")?;
self.print_mutability(mutbl)?;
}
ast::BindingMode::ByValue(ast::Mutability::Immutable) => {}
ast::BindingMode::ByValue(ast::Mutability::Mutable) => {
self.word_nbsp("mut")?;
}
}
self.print_ident(ident)?;
if let Some(ref p) = *sub {
self.s.word("@")?;
self.print_pat(p)?;
}
}
PatKind::TupleStruct(ref path, ref elts, ddpos) => {
self.print_path(path, true, 0)?;
self.popen()?;
if let Some(ddpos) = ddpos {
self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
self.s.word("..")?;
if ddpos != elts.len() {
self.s.word(",")?;
self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
}
self.pclose()?;
}
PatKind::Path(None, ref path) => {
self.print_path(path, true, 0)?;
}
PatKind::Path(Some(ref qself), ref path) => {
self.print_qpath(path, qself, false)?;
}
PatKind::Struct(ref path, ref fields, etc) => {
self.print_path(path, true, 0)?;
self.nbsp()?;
self.word_space("{")?;
self.commasep_cmnt(
Consistent, &fields[..],
|s, f| {
s.cbox(INDENT_UNIT)?;
if !f.node.is_shorthand {
s.print_ident(f.node.ident)?;
s.word_nbsp(":")?;
}
s.print_pat(&f.node.pat)?;
s.end()
},
|f| f.node.pat.span)?;
if etc {
if !fields.is_empty() { self.word_space(",")?; }
self.s.word("..")?;
}
self.s.space()?;
self.s.word("}")?;
}
PatKind::Tuple(ref elts, ddpos) => {
self.popen()?;
if let Some(ddpos) = ddpos {
self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p))?;
if ddpos != 0 {
self.word_space(",")?;
}
self.s.word("..")?;
if ddpos != elts.len() {
self.s.word(",")?;
self.commasep(Inconsistent, &elts[ddpos..], |s, p| s.print_pat(p))?;
}
} else {
self.commasep(Inconsistent, &elts[..], |s, p| s.print_pat(p))?;
if elts.len() == 1 {
self.s.word(",")?;
}
}
self.pclose()?;
}
PatKind::Box(ref inner) => {
self.s.word("box ")?;
self.print_pat(inner)?;
}
PatKind::Ref(ref inner, mutbl) => {
self.s.word("&")?;
if mutbl == ast::Mutability::Mutable {
self.s.word("mut ")?;
}
self.print_pat(inner)?;
}
PatKind::Lit(ref e) => self.print_expr(&**e)?,
PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
self.print_expr(begin)?;
self.s.space()?;
match *end_kind {
RangeEnd::Included(RangeSyntax::DotDotDot) => self.s.word("...")?,
RangeEnd::Included(RangeSyntax::DotDotEq) => self.s.word("..=")?,
RangeEnd::Excluded => self.s.word("..")?,
}
self.print_expr(end)?;
}
PatKind::Slice(ref before, ref slice, ref after) => {
self.s.word("[")?;
self.commasep(Inconsistent,
&before[..],
|s, p| s.print_pat(p))?;
if let Some(ref p) = *slice {
if !before.is_empty() { self.word_space(",")?; }
if let PatKind::Wild = p.node {
// Print nothing
} else {
self.print_pat(p)?;
}
self.s.word("..")?;
if !after.is_empty() { self.word_space(",")?; }
}
self.commasep(Inconsistent,
&after[..],
|s, p| s.print_pat(p))?;
self.s.word("]")?;
}
PatKind::Paren(ref inner) => {
self.popen()?;
self.print_pat(inner)?;
self.pclose()?;
}
PatKind::Mac(ref m) => self.print_mac(m)?,
}
self.ann.post(self, AnnNode::Pat(pat))
}
fn print_pats(&mut self, pats: &[P<ast::Pat>]) -> io::Result<()> {
let mut first = true;
for p in pats {
if first {
first = false;
} else {
self.s.space()?;
self.word_space("|")?;
}
self.print_pat(p)?;
}
Ok(())
}
fn print_arm(&mut self, arm: &ast::Arm) -> io::Result<()> {
// I have no idea why this check is necessary, but here it
// is :(
if arm.attrs.is_empty() {
self.s.space()?;
}
self.cbox(INDENT_UNIT)?;
self.ibox(0)?;
self.maybe_print_comment(arm.pats[0].span.lo())?;
self.print_outer_attributes(&arm.attrs)?;
self.print_pats(&arm.pats)?;
self.s.space()?;
if let Some(ref g) = arm.guard {
match g {
ast::Guard::If(ref e) => {
self.word_space("if")?;
self.print_expr(e)?;
self.s.space()?;
}
}
}
self.word_space("=>")?;
match arm.body.node {
ast::ExprKind::Block(ref blk, opt_label) => {
if let Some(label) = opt_label {
self.print_ident(label.ident)?;
self.word_space(":")?;
}
// the block will close the pattern's ibox
self.print_block_unclosed_indent(blk, INDENT_UNIT)?;
// If it is a user-provided unsafe block, print a comma after it
if let BlockCheckMode::Unsafe(ast::UserProvided) = blk.rules {
self.s.word(",")?;
}
}
_ => {
self.end()?; // close the ibox for the pattern
self.print_expr(&arm.body)?;
self.s.word(",")?;
}
}
self.end() // close enclosing cbox
}
fn print_explicit_self(&mut self, explicit_self: &ast::ExplicitSelf) -> io::Result<()> {
match explicit_self.node {
SelfKind::Value(m) => {
self.print_mutability(m)?;
self.s.word("self")
}
SelfKind::Region(ref lt, m) => {
self.s.word("&")?;
self.print_opt_lifetime(lt)?;
self.print_mutability(m)?;
self.s.word("self")
}
SelfKind::Explicit(ref typ, m) => {
self.print_mutability(m)?;
self.s.word("self")?;
self.word_space(":")?;
self.print_type(typ)
}
}
}
pub fn print_fn(&mut self,
decl: &ast::FnDecl,
header: ast::FnHeader,
name: Option<ast::Ident>,
generics: &ast::Generics,
vis: &ast::Visibility) -> io::Result<()> {
self.print_fn_header_info(header, vis)?;
if let Some(name) = name {
self.nbsp()?;
self.print_ident(name)?;
}
self.print_generic_params(&generics.params)?;
self.print_fn_args_and_ret(decl)?;
self.print_where_clause(&generics.where_clause)
}
pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl)
-> io::Result<()> {
self.popen()?;
self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, false))?;
self.pclose()?;
self.print_fn_output(decl)
}
pub fn print_fn_block_args(
&mut self,
decl: &ast::FnDecl)
-> io::Result<()> {
self.s.word("|")?;
self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, true))?;
self.s.word("|")?;
if let ast::FunctionRetTy::Default(..) = decl.output {
return Ok(());
}
self.space_if_not_bol()?;
self.word_space("->")?;
match decl.output {
ast::FunctionRetTy::Ty(ref ty) => {
self.print_type(ty)?;
self.maybe_print_comment(ty.span.lo())
}
ast::FunctionRetTy::Default(..) => unreachable!(),
}
}
pub fn print_movability(&mut self, movability: ast::Movability)
-> io::Result<()> {
match movability {
ast::Movability::Static => self.word_space("static"),
ast::Movability::Movable => Ok(()),
}
}
pub fn print_asyncness(&mut self, asyncness: ast::IsAsync)
-> io::Result<()> {
if asyncness.is_async() {
self.word_nbsp("async")?;
}
Ok(())
}
pub fn print_capture_clause(&mut self, capture_clause: ast::CaptureBy)
-> io::Result<()> {
match capture_clause {
ast::CaptureBy::Value => self.word_space("move"),
ast::CaptureBy::Ref => Ok(()),
}
}
pub fn print_type_bounds(&mut self, prefix: &'static str, bounds: &[ast::GenericBound])
-> io::Result<()> {
if !bounds.is_empty() {
self.s.word(prefix)?;
let mut first = true;
for bound in bounds {
if !(first && prefix.is_empty()) {
self.nbsp()?;
}
if first {
first = false;
} else {
self.word_space("+")?;
}
match bound {
GenericBound::Trait(tref, modifier) => {
if modifier == &TraitBoundModifier::Maybe {
self.s.word("?")?;
}
self.print_poly_trait_ref(tref)?;
}
GenericBound::Outlives(lt) => self.print_lifetime(*lt)?,
}
}
}
Ok(())
}
pub fn print_lifetime(&mut self, lifetime: ast::Lifetime) -> io::Result<()> {
self.print_name(lifetime.ident.name)
}
pub fn print_lifetime_bounds(&mut self, lifetime: ast::Lifetime, bounds: &ast::GenericBounds)
-> io::Result<()>
{
self.print_lifetime(lifetime)?;
if !bounds.is_empty() {
self.s.word(": ")?;
for (i, bound) in bounds.iter().enumerate() {
if i != 0 {
self.s.word(" + ")?;
}
match bound {
ast::GenericBound::Outlives(lt) => self.print_lifetime(*lt)?,
_ => panic!(),
}
}
}
Ok(())
}
pub fn print_generic_params(
&mut self,
generic_params: &[ast::GenericParam]
) -> io::Result<()> {
if generic_params.is_empty() {
return Ok(());
}
self.s.word("<")?;
self.commasep(Inconsistent, &generic_params, |s, param| {
match param.kind {
ast::GenericParamKind::Lifetime => {
s.print_outer_attributes_inline(¶m.attrs)?;
let lt = ast::Lifetime { id: param.id, ident: param.ident };
s.print_lifetime_bounds(lt, ¶m.bounds)
}
ast::GenericParamKind::Type { ref default } => {
s.print_outer_attributes_inline(¶m.attrs)?;
s.print_ident(param.ident)?;
s.print_type_bounds(":", ¶m.bounds)?;
match default {
Some(ref default) => {
s.s.space()?;
s.word_space("=")?;
s.print_type(default)
}
_ => Ok(())
}
}
ast::GenericParamKind::Const { ref ty } => {
s.print_outer_attributes_inline(¶m.attrs)?;
s.word_space("const")?;
s.print_ident(param.ident)?;
s.s.space()?;
s.word_space(":")?;
s.print_type(ty)?;
s.print_type_bounds(":", ¶m.bounds)
}
}
})?;
self.s.word(">")?;
Ok(())
}
pub fn print_where_clause(&mut self, where_clause: &ast::WhereClause)
-> io::Result<()> {
if where_clause.predicates.is_empty() {
return Ok(())
}
self.s.space()?;
self.word_space("where")?;
for (i, predicate) in where_clause.predicates.iter().enumerate() {
if i != 0 {
self.word_space(",")?;
}
match *predicate {
ast::WherePredicate::BoundPredicate(ast::WhereBoundPredicate {
ref bound_generic_params,
ref bounded_ty,
ref bounds,
..
}) => {
self.print_formal_generic_params(bound_generic_params)?;
self.print_type(bounded_ty)?;
self.print_type_bounds(":", bounds)?;
}
ast::WherePredicate::RegionPredicate(ast::WhereRegionPredicate{ref lifetime,
ref bounds,
..}) => {
self.print_lifetime_bounds(*lifetime, bounds)?;
}
ast::WherePredicate::EqPredicate(ast::WhereEqPredicate{ref lhs_ty,
ref rhs_ty,
..}) => {
self.print_type(lhs_ty)?;
self.s.space()?;
self.word_space("=")?;
self.print_type(rhs_ty)?;
}
}
}
Ok(())
}
pub fn print_use_tree(&mut self, tree: &ast::UseTree) -> io::Result<()> {
match tree.kind {
ast::UseTreeKind::Simple(rename, ..) => {
self.print_path(&tree.prefix, false, 0)?;
if let Some(rename) = rename {
self.s.space()?;
self.word_space("as")?;
self.print_ident(rename)?;
}
}
ast::UseTreeKind::Glob => {
if !tree.prefix.segments.is_empty() {
self.print_path(&tree.prefix, false, 0)?;
self.s.word("::")?;
}
self.s.word("*")?;
}
ast::UseTreeKind::Nested(ref items) => {
if tree.prefix.segments.is_empty() {
self.s.word("{")?;
} else {
self.print_path(&tree.prefix, false, 0)?;
self.s.word("::{")?;
}
self.commasep(Inconsistent, &items[..], |this, &(ref tree, _)| {
this.print_use_tree(tree)
})?;
self.s.word("}")?;
}
}
Ok(())
}
pub fn print_mutability(&mut self,
mutbl: ast::Mutability) -> io::Result<()> {
match mutbl {
ast::Mutability::Mutable => self.word_nbsp("mut"),
ast::Mutability::Immutable => Ok(()),
}
}
pub fn print_mt(&mut self, mt: &ast::MutTy) -> io::Result<()> {
self.print_mutability(mt.mutbl)?;
self.print_type(&mt.ty)
}
pub fn print_arg(&mut self, input: &ast::Arg, is_closure: bool) -> io::Result<()> {
self.ibox(INDENT_UNIT)?;
match input.ty.node {
ast::TyKind::Infer if is_closure => self.print_pat(&input.pat)?,
_ => {
if let Some(eself) = input.to_self() {
self.print_explicit_self(&eself)?;
} else {
let invalid = if let PatKind::Ident(_, ident, _) = input.pat.node {
ident.name == keywords::Invalid.name()
} else {
false
};
if !invalid {
self.print_pat(&input.pat)?;
self.s.word(":")?;
self.s.space()?;
}
self.print_type(&input.ty)?;
}
}
}
self.end()
}
pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> io::Result<()> {
if let ast::FunctionRetTy::Default(..) = decl.output {
return Ok(());
}
self.space_if_not_bol()?;
self.ibox(INDENT_UNIT)?;
self.word_space("->")?;
match decl.output {
ast::FunctionRetTy::Default(..) => unreachable!(),
ast::FunctionRetTy::Ty(ref ty) =>
self.print_type(ty)?
}
self.end()?;
match decl.output {
ast::FunctionRetTy::Ty(ref output) => self.maybe_print_comment(output.span.lo()),
_ => Ok(())
}
}
pub fn print_ty_fn(&mut self,
abi: abi::Abi,
unsafety: ast::Unsafety,
decl: &ast::FnDecl,
name: Option<ast::Ident>,
generic_params: &[ast::GenericParam])
-> io::Result<()> {
self.ibox(INDENT_UNIT)?;
if !generic_params.is_empty() {
self.s.word("for")?;
self.print_generic_params(generic_params)?;
}
let generics = ast::Generics {
params: Vec::new(),
where_clause: ast::WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: syntax_pos::DUMMY_SP,
},
span: syntax_pos::DUMMY_SP,
};
self.print_fn(decl,
ast::FnHeader { unsafety, abi, ..ast::FnHeader::default() },
name,
&generics,
&source_map::dummy_spanned(ast::VisibilityKind::Inherited))?;
self.end()
}
pub fn maybe_print_trailing_comment(&mut self, span: syntax_pos::Span,
next_pos: Option<BytePos>)
-> io::Result<()> {
let cm = match self.cm {
Some(cm) => cm,
_ => return Ok(())
};
if let Some(ref cmnt) = self.next_comment() {
if cmnt.style != comments::Trailing { return Ok(()) }
let span_line = cm.lookup_char_pos(span.hi());
let comment_line = cm.lookup_char_pos(cmnt.pos);
let next = next_pos.unwrap_or_else(|| cmnt.pos + BytePos(1));
if span.hi() < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line {
self.print_comment(cmnt)?;
}
}
Ok(())
}
pub fn print_remaining_comments(&mut self) -> io::Result<()> {
// If there aren't any remaining comments, then we need to manually
// make sure there is a line break at the end.
if self.next_comment().is_none() {
self.s.hardbreak()?;
}
while let Some(ref cmnt) = self.next_comment() {
self.print_comment(cmnt)?;
}
Ok(())
}
pub fn print_opt_abi_and_extern_if_nondefault(&mut self,
opt_abi: Option<Abi>)
-> io::Result<()> {
match opt_abi {
Some(Abi::Rust) => Ok(()),
Some(abi) => {
self.word_nbsp("extern")?;
self.word_nbsp(abi.to_string())
}
None => Ok(())
}
}
pub fn print_extern_opt_abi(&mut self,
opt_abi: Option<Abi>) -> io::Result<()> {
match opt_abi {
Some(abi) => {
self.word_nbsp("extern")?;
self.word_nbsp(abi.to_string())
}
None => Ok(())
}
}
pub fn print_fn_header_info(&mut self,
header: ast::FnHeader,
vis: &ast::Visibility) -> io::Result<()> {
self.s.word(visibility_qualified(vis, ""))?;
match header.constness.node {
ast::Constness::NotConst => {}
ast::Constness::Const => self.word_nbsp("const")?
}
self.print_asyncness(header.asyncness.node)?;
self.print_unsafety(header.unsafety)?;
if header.abi != Abi::Rust {
self.word_nbsp("extern")?;
self.word_nbsp(header.abi.to_string())?;
}
self.s.word("fn")
}
pub fn print_unsafety(&mut self, s: ast::Unsafety) -> io::Result<()> {
match s {
ast::Unsafety::Normal => Ok(()),
ast::Unsafety::Unsafe => self.word_nbsp("unsafe"),
}
}
pub fn print_is_auto(&mut self, s: ast::IsAuto) -> io::Result<()> {
match s {
ast::IsAuto::Yes => self.word_nbsp("auto"),
ast::IsAuto::No => Ok(()),
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::ast;
use crate::source_map;
use crate::with_globals;
use syntax_pos;
#[test]
fn test_fun_to_string() {
with_globals(|| {
let abba_ident = ast::Ident::from_str("abba");
let decl = ast::FnDecl {
inputs: Vec::new(),
output: ast::FunctionRetTy::Default(syntax_pos::DUMMY_SP),
c_variadic: false
};
let generics = ast::Generics::default();
assert_eq!(
fun_to_string(
&decl,
ast::FnHeader {
unsafety: ast::Unsafety::Normal,
constness: source_map::dummy_spanned(ast::Constness::NotConst),
asyncness: source_map::dummy_spanned(ast::IsAsync::NotAsync),
abi: Abi::Rust,
},
abba_ident,
&generics
),
"fn abba()"
);
})
}
#[test]
fn test_variant_to_string() {
with_globals(|| {
let ident = ast::Ident::from_str("principal_skinner");
let var = source_map::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
ident,
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
// making this up as I go.... ?
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID),
disr_expr: None,
});
let varstr = variant_to_string(&var);
assert_eq!(varstr, "principal_skinner");
})
}
}
| 36.525129 | 99 | 0.45169 |
6479814885a2e7acd3630d80619e69c1b431462d | 4,408 | #[doc = "Reader of register FS_GCCFG"]
pub type R = crate::R<u32, super::FS_GCCFG>;
#[doc = "Writer for register FS_GCCFG"]
pub type W = crate::W<u32, super::FS_GCCFG>;
#[doc = "Register FS_GCCFG `reset()`'s with value 0"]
impl crate::ResetValue for super::FS_GCCFG {
type Type = u32;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `PWRDWN`"]
pub type PWRDWN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PWRDWN`"]
pub struct PWRDWN_W<'a> {
w: &'a mut W,
}
impl<'a> PWRDWN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16);
self.w
}
}
#[doc = "Reader of field `VBUSASEN`"]
pub type VBUSASEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VBUSASEN`"]
pub struct VBUSASEN_W<'a> {
w: &'a mut W,
}
impl<'a> VBUSASEN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `VBUSBSEN`"]
pub type VBUSBSEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `VBUSBSEN`"]
pub struct VBUSBSEN_W<'a> {
w: &'a mut W,
}
impl<'a> VBUSBSEN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `SOFOUTEN`"]
pub type SOFOUTEN_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SOFOUTEN`"]
pub struct SOFOUTEN_W<'a> {
w: &'a mut W,
}
impl<'a> SOFOUTEN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
impl R {
#[doc = "Bit 16 - Power down"]
#[inline(always)]
pub fn pwrdwn(&self) -> PWRDWN_R {
PWRDWN_R::new(((self.bits >> 16) & 0x01) != 0)
}
#[doc = "Bit 18 - Enable the VBUS sensing device"]
#[inline(always)]
pub fn vbusasen(&self) -> VBUSASEN_R {
VBUSASEN_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - Enable the VBUS sensing device"]
#[inline(always)]
pub fn vbusbsen(&self) -> VBUSBSEN_R {
VBUSBSEN_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 20 - SOF output enable"]
#[inline(always)]
pub fn sofouten(&self) -> SOFOUTEN_R {
SOFOUTEN_R::new(((self.bits >> 20) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 16 - Power down"]
#[inline(always)]
pub fn pwrdwn(&mut self) -> PWRDWN_W {
PWRDWN_W { w: self }
}
#[doc = "Bit 18 - Enable the VBUS sensing device"]
#[inline(always)]
pub fn vbusasen(&mut self) -> VBUSASEN_W {
VBUSASEN_W { w: self }
}
#[doc = "Bit 19 - Enable the VBUS sensing device"]
#[inline(always)]
pub fn vbusbsen(&mut self) -> VBUSBSEN_W {
VBUSBSEN_W { w: self }
}
#[doc = "Bit 20 - SOF output enable"]
#[inline(always)]
pub fn sofouten(&mut self) -> SOFOUTEN_W {
SOFOUTEN_W { w: self }
}
}
| 28.810458 | 86 | 0.544011 |
390522ef419b1dac1c9508885618a126ece35c72 | 6,268 | use parser::Span;
use parser::Span::{Literal, Text};
mod br;
mod code;
mod emphasis;
mod image;
mod link;
mod strong;
use self::br::parse_break;
use self::code::parse_code;
use self::emphasis::parse_emphasis;
use self::image::parse_image;
use self::link::parse_link;
use self::strong::parse_strong;
pub fn parse_spans(text: &str) -> Vec<Span> {
let mut tokens = vec![];
let mut t = String::new();
let mut i = 0;
while i < text.len() {
match parse_span(&text[i..text.len()]) {
Some((span, consumed_chars)) => {
if !t.is_empty() {
// if this text is on the very left
// trim the left whitespace
if tokens.is_empty() {
t = t.trim_start().to_owned()
}
tokens.push(Text(t));
}
tokens.push(span);
t = String::new();
i += consumed_chars;
}
None => {
let mut e = i + 1;
while !text.is_char_boundary(e) {
e += 1;
}
t.push_str(&text[i..e]);
i += e - i;
}
}
}
if !t.is_empty() {
// if this text is on the very left
// trim the left whitespace
if tokens.is_empty() {
t = t.trim_start().to_owned();
}
// we're at the very end of this line,
// trim trailing whitespace
t = t.trim_end().to_owned();
tokens.push(Text(t));
}
tokens
}
fn parse_escape(text: &str) -> Option<(Span, usize)> {
let mut chars = text.chars();
if let Some('\\') = chars.next() {
return match chars.next() {
Some(x @ '\\') | Some(x @ '`') | Some(x @ '*') | Some(x @ '_') | Some(x @ '{')
| Some(x @ '}') | Some(x @ '[') | Some(x @ ']') | Some(x @ '(') | Some(x @ ')')
| Some(x @ '#') | Some(x @ '+') | Some(x @ '-') | Some(x @ '.') | Some(x @ '!') => {
Some((Literal(x), 2))
}
_ => None,
};
}
None
}
fn parse_span(text: &str) -> Option<(Span, usize)> {
pipe_opt!(
text
=> parse_escape
=> parse_code
=> parse_strong
=> parse_emphasis
=> parse_break
=> parse_image
=> parse_link
)
}
#[cfg(test)]
mod test {
use parser::span::parse_spans;
use parser::Span::{Break, Code, Emphasis, Image, Link, Literal, Strong, Text};
use std::str;
#[test]
fn converts_into_text() {
assert_eq!(
parse_spans("this is a test"),
vec![Text("this is a test".to_owned())]
);
}
#[test]
fn finds_escapes() {
assert_eq!(parse_spans(r"\*"), vec![Literal('*')]);
}
#[test]
fn finds_breaks() {
assert_eq!(
parse_spans("this is a test "),
vec![Text("this is a test".to_owned()), Break]
);
}
#[test]
fn finds_code() {
assert_eq!(
parse_spans("this `is a` test"),
vec![
Text("this ".to_owned()),
Code("is a".to_owned()),
Text(" test".to_owned())
]
);
assert_eq!(
parse_spans("this ``is a`` test"),
vec![
Text("this ".to_owned()),
Code("is a".to_owned()),
Text(" test".to_owned())
]
);
}
#[test]
fn finds_emphasis() {
assert_eq!(
parse_spans("this _is a_ test"),
vec![
Text("this ".to_owned()),
Emphasis(vec![Text("is a".to_owned())]),
Text(" test".to_owned())
]
);
assert_eq!(
parse_spans("this *is a* test"),
vec![
Text("this ".to_owned()),
Emphasis(vec![Text("is a".to_owned())]),
Text(" test".to_owned())
]
);
}
#[test]
fn finds_strong() {
assert_eq!(
parse_spans("this __is a__ test"),
vec![
Text("this ".to_owned()),
Strong(vec![Text("is a".to_owned())]),
Text(" test".to_owned())
]
);
assert_eq!(
parse_spans("this **is a** test"),
vec![
Text("this ".to_owned()),
Strong(vec![Text("is a".to_owned())]),
Text(" test".to_owned())
]
);
}
#[test]
fn finds_link() {
assert_eq!(
parse_spans("this is [an example](example.com) test"),
vec![
Text("this is ".to_owned()),
Link(
vec![Text("an example".to_owned())],
"example.com".to_owned(),
None
),
Text(" test".to_owned())
]
);
}
#[test]
fn finds_image() {
assert_eq!(
parse_spans("this is  test"),
vec![
Text("this is ".to_owned()),
Image("an example".to_owned(), "example.com".to_owned(), None),
Text(" test".to_owned())
]
);
}
#[test]
fn finds_everything() {
assert_eq!(
parse_spans("some text  _emphasis_ __strong__ `teh codez` [a link](example.com) "),
vec![
Text("some text ".to_owned()),
Image("an image".to_owned(), "image.com".to_owned(), None),
Text(" ".to_owned()),
Emphasis(vec![Text("emphasis".to_owned())]),
Text(" ".to_owned()),
Strong(vec![Text("strong".to_owned())]),
Text(" ".to_owned()),
Code("teh codez".to_owned()),
Text(" ".to_owned()),
Link(vec![Text("a link".to_owned())], "example.com".to_owned(), None),
Break
]
);
}
#[test]
fn properly_consumes_multibyte_utf8() {
let test_phrase = str::from_utf8(b"This shouldn\xE2\x80\x99t panic").unwrap();
let _ = parse_spans(&test_phrase);
}
}
| 27.252174 | 118 | 0.435227 |
9c5f9d7e42f2985fe894223d18a036090995a785 | 10,416 | // DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
//! This documentation was generated from *Binary Authorization* crate version *2.0.0+20210318*, where *20210318* is the exact revision of the *binaryauthorization:v1* schema built by the [mako](http://www.makotemplates.org/) code generator *v2.0.0*.
//!
//! Everything else about the *Binary Authorization* *v1* API can be found at the
//! [official documentation site](https://cloud.google.com/binary-authorization/).
//! The original source code is [on github](https://github.com/Byron/google-apis-rs/tree/master/gen/binaryauthorization1).
//! # Features
//!
//! Handle the following *Resources* with ease from the central [hub](BinaryAuthorization) ...
//!
//! * projects
//! * [*attestors create*](api::ProjectAttestorCreateCall), [*attestors delete*](api::ProjectAttestorDeleteCall), [*attestors get*](api::ProjectAttestorGetCall), [*attestors get iam policy*](api::ProjectAttestorGetIamPolicyCall), [*attestors list*](api::ProjectAttestorListCall), [*attestors set iam policy*](api::ProjectAttestorSetIamPolicyCall), [*attestors test iam permissions*](api::ProjectAttestorTestIamPermissionCall), [*attestors update*](api::ProjectAttestorUpdateCall), [*attestors validate attestation occurrence*](api::ProjectAttestorValidateAttestationOccurrenceCall), [*get policy*](api::ProjectGetPolicyCall), [*policy get iam policy*](api::ProjectPolicyGetIamPolicyCall), [*policy set iam policy*](api::ProjectPolicySetIamPolicyCall), [*policy test iam permissions*](api::ProjectPolicyTestIamPermissionCall) and [*update policy*](api::ProjectUpdatePolicyCall)
//! * systempolicy
//! * [*get policy*](api::SystempolicyGetPolicyCall)
//!
//!
//!
//!
//! Not what you are looking for? Find all other Google APIs in their Rust [documentation index](http://byron.github.io/google-apis-rs).
//!
//! # Structure of this Library
//!
//! The API is structured into the following primary items:
//!
//! * **[Hub](BinaryAuthorization)**
//! * a central object to maintain state and allow accessing all *Activities*
//! * creates [*Method Builders*](client::MethodsBuilder) which in turn
//! allow access to individual [*Call Builders*](client::CallBuilder)
//! * **[Resources](client::Resource)**
//! * primary types that you can apply *Activities* to
//! * a collection of properties and *Parts*
//! * **[Parts](client::Part)**
//! * a collection of properties
//! * never directly used in *Activities*
//! * **[Activities](client::CallBuilder)**
//! * operations to apply to *Resources*
//!
//! All *structures* are marked with applicable traits to further categorize them and ease browsing.
//!
//! Generally speaking, you can invoke *Activities* like this:
//!
//! ```Rust,ignore
//! let r = hub.resource().activity(...).doit().await
//! ```
//!
//! Or specifically ...
//!
//! ```ignore
//! let r = hub.projects().attestors_get_iam_policy(...).doit().await
//! let r = hub.projects().attestors_set_iam_policy(...).doit().await
//! let r = hub.projects().policy_get_iam_policy(...).doit().await
//! let r = hub.projects().policy_set_iam_policy(...).doit().await
//! ```
//!
//! The `resource()` and `activity(...)` calls create [builders][builder-pattern]. The second one dealing with `Activities`
//! supports various methods to configure the impending operation (not shown here). It is made such that all required arguments have to be
//! specified right away (i.e. `(...)`), whereas all optional ones can be [built up][builder-pattern] as desired.
//! The `doit()` method performs the actual communication with the server and returns the respective result.
//!
//! # Usage
//!
//! ## Setting up your Project
//!
//! To use this library, you would put the following lines into your `Cargo.toml` file:
//!
//! ```toml
//! [dependencies]
//! google-binaryauthorization1 = "*"
//! # This project intentionally uses an old version of Hyper. See
//! # https://github.com/Byron/google-apis-rs/issues/173 for more
//! # information.
//! hyper = "^0.14"
//! hyper-rustls = "^0.22"
//! serde = "^1.0"
//! serde_json = "^1.0"
//! yup-oauth2 = "^5.0"
//! ```
//!
//! ## A complete example
//!
//! ```test_harness,no_run
//! extern crate hyper;
//! extern crate hyper_rustls;
//! extern crate yup_oauth2 as oauth2;
//! extern crate google_binaryauthorization1 as binaryauthorization1;
//! use binaryauthorization1::{Result, Error};
//! # async fn dox() {
//! use std::default::Default;
//! use oauth2;
//! use binaryauthorization1::BinaryAuthorization;
//!
//! // Get an ApplicationSecret instance by some means. It contains the `client_id` and
//! // `client_secret`, among other things.
//! let secret: oauth2::ApplicationSecret = Default::default();
//! // Instantiate the authenticator. It will choose a suitable authentication flow for you,
//! // unless you replace `None` with the desired Flow.
//! // Provide your own `AuthenticatorDelegate` to adjust the way it operates and get feedback about
//! // what's going on. You probably want to bring in your own `TokenStorage` to persist tokens and
//! // retrieve them from storage.
//! let auth = yup_oauth2::InstalledFlowAuthenticator::builder(
//! secret,
//! yup_oauth2::InstalledFlowReturnMethod::HTTPRedirect,
//! ).build().await.unwrap();
//! let mut hub = BinaryAuthorization::new(hyper::Client::builder().build(hyper_rustls::HttpsConnector::with_native_roots()), auth);
//! // You can configure optional parameters by calling the respective setters at will, and
//! // execute the final call using `doit()`.
//! // Values shown here are possibly random and not representative !
//! let result = hub.projects().attestors_get_iam_policy("resource")
//! .options_requested_policy_version(-55)
//! .doit().await;
//!
//! match result {
//! Err(e) => match e {
//! // The Error enum provides details about what exactly happened.
//! // You can also just use its `Debug`, `Display` or `Error` traits
//! Error::HttpError(_)
//! |Error::Io(_)
//! |Error::MissingAPIKey
//! |Error::MissingToken(_)
//! |Error::Cancelled
//! |Error::UploadSizeLimitExceeded(_, _)
//! |Error::Failure(_)
//! |Error::BadRequest(_)
//! |Error::FieldClash(_)
//! |Error::JsonDecodeError(_, _) => println!("{}", e),
//! },
//! Ok(res) => println!("Success: {:?}", res),
//! }
//! # }
//! ```
//! ## Handling Errors
//!
//! All errors produced by the system are provided either as the [Result](client::Result) enumeration returned by
//! the doit() methods, or handed as possibly intermediate results to either the
//! [Hub Delegate](client::Delegate), or the [Authenticator Delegate](https://docs.rs/yup-oauth2/*/yup_oauth2/trait.AuthenticatorDelegate.html).
//!
//! When delegates handle errors or intermediate values, they may have a chance to instruct the system to retry. This
//! makes the system potentially resilient to all kinds of errors.
//!
//! ## Uploads and Downloads
//! If a method supports downloads, you should read the response body, which is part of the
//! [Result](client::Result), to obtain the media.
//! If such a method also supports a [Response Result](client::ResponseResult), it will return that by default.
//! You can see it as meta-data for the actual media. To trigger a media download, you will have to set up the builder by making
//! this call: `.param("alt", "media")`.
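//!
//! For instance, using the same placeholder call style as above (a sketch only — `resource()` and
//! `activity(...)` stand in for a concrete method builder of this API):
//!
//! ```ignore
//! // Request the raw media body instead of the JSON metadata.
//! let resp = hub.resource().activity(...)
//!     .param("alt", "media")
//!     .doit().await;
//! ```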
//!
//! Methods supporting uploads can do so using up to 2 different protocols:
//! *simple* and *resumable*. The distinctiveness of each is represented by customized
//! `doit(...)` methods, which are then named `upload(...)` and `upload_resumable(...)` respectively.
//!
//! ## Customization and Callbacks
//!
//! You may alter the way an `doit()` method is called by providing a [delegate](client::Delegate) to the
//! [Method Builder](client::CallBuilder) before making the final `doit()` call.
//! Respective methods will be called to provide progress information, as well as determine whether the system should
//! retry on failure.
//!
//! The [delegate trait](client::Delegate) is default-implemented, allowing you to customize it with minimal effort.
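//!
//! A minimal sketch of wiring one in (hedged: it assumes a no-op `DefaultDelegate`-style implementation of the
//! [delegate trait](client::Delegate) and a `delegate(...)` setter on the call builder; both names are assumptions
//! here, only the `attestors_get_iam_policy("resource")` call is taken from the example above):
//!
//! ```ignore
//! let mut dlg = client::DefaultDelegate;   // or your own type implementing Delegate
//! let result = hub.projects().attestors_get_iam_policy("resource")
//!     .delegate(&mut dlg)                  // progress reports and retry decisions flow through `dlg`
//!     .doit().await;
//! ```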
//!
//! ## Optional Parts in Server-Requests
//!
//! All structures provided by this library are made to be [encodable](client::RequestValue) and
//! [decodable](client::ResponseResult) via *json*. Optionals are used to indicate that partial requests are responses
//! are valid.
//! Most optionals are are considered [Parts](client::Part) which are identifiable by name, which will be sent to
//! the server to indicate either the set parts of the request or the desired parts in the response.
//!
//! ## Builder Arguments
//!
//! Using [method builders](client::CallBuilder), you are able to prepare an action call by repeatedly calling it's methods.
//! These will always take a single argument, for which the following statements are true.
//!
//! * [PODs][wiki-pod] are passed by copy
//! * strings are passed as `&str`
//! * [request values](client::RequestValue) are moved
//!
//! Arguments will always be copied or cloned into the builder, to make them independent of their original lifetimes.
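//!
//! As a sketch (placeholder names again — `resource()`, `activity(...)` and `SomeRequestValue` stand in for a
//! concrete method builder and whatever [request value](client::RequestValue) type it takes):
//!
//! ```ignore
//! let req = SomeRequestValue::default();               // request values are moved into the builder
//! let r = hub.resource().activity(req, "string-arg")   // strings are handed in as &str
//!     .doit().await;
//! ```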
//!
//! [wiki-pod]: http://en.wikipedia.org/wiki/Plain_old_data_structure
//! [builder-pattern]: http://en.wikipedia.org/wiki/Builder_pattern
//! [google-go-api]: https://github.com/google/google-api-go-client
//!
//!
// Unused attributes happen thanks to defined, but unused structures
// We don't warn about this, as depending on the API, some data structures or facilities are never used.
// Instead of pre-determining this, we just disable the lint. It's manually tuned to not have any
// unused imports in fully featured APIs. Same with unused_mut ... .
#![allow(unused_imports, unused_mut, dead_code)]
// DO NOT EDIT !
// This file was generated automatically from 'src/mako/api/lib.rs.mako'
// DO NOT EDIT !
#[macro_use]
extern crate serde_derive;
extern crate hyper;
extern crate serde;
extern crate serde_json;
extern crate yup_oauth2 as oauth2;
extern crate mime;
extern crate url;
pub mod api;
pub mod client;
// Re-export the hub type and some basic client structs
pub use api::BinaryAuthorization;
pub use client::{Result, Error, Delegate};
| 48.672897 | 877 | 0.6947 |
4b964a12e732dfbefe406f462bab03f731836264 | 20,684 | //! This module contains the implementation of a virtual component (`VComp`).
use super::{Transformer, VDiff, VNode};
use crate::html::{AnyScope, Component, ComponentUpdate, NodeRef, Scope, Scoped};
use crate::utils::document;
use cfg_if::cfg_if;
use std::any::TypeId;
use std::borrow::Borrow;
use std::fmt;
use std::ops::Deref;
cfg_if! {
if #[cfg(feature = "std_web")] {
use stdweb::web::{Element, Node};
} else if #[cfg(feature = "web_sys")] {
use web_sys::{Element, Node};
}
}
/// A virtual component.
pub struct VComp {
type_id: TypeId,
scope: Option<Box<dyn Scoped>>,
props: Option<Box<dyn Mountable>>,
pub(crate) node_ref: NodeRef,
pub(crate) key: Option<String>,
}
impl Clone for VComp {
fn clone(&self) -> Self {
if self.scope.is_some() {
panic!("Mounted components are not allowed to be cloned!");
}
Self {
type_id: self.type_id,
scope: None,
props: self.props.as_ref().map(|m| m.copy()),
node_ref: self.node_ref.clone(),
key: self.key.clone(),
}
}
}
/// A virtual child component.
pub struct VChild<COMP: Component> {
/// The component properties
pub props: COMP::Properties,
/// Reference to the mounted node
node_ref: NodeRef,
key: Option<String>,
}
impl<COMP: Component> Clone for VChild<COMP> {
fn clone(&self) -> Self {
VChild {
props: self.props.clone(),
node_ref: self.node_ref.clone(),
key: self.key.clone(),
}
}
}
impl<COMP: Component> PartialEq for VChild<COMP>
where
COMP::Properties: PartialEq,
{
fn eq(&self, other: &VChild<COMP>) -> bool {
self.props == other.props
}
}
impl<COMP> VChild<COMP>
where
COMP: Component,
{
/// Creates a child component that can be accessed and modified by its parent.
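    ///
    /// A minimal sketch (`MyComp` and `my_props` are placeholders for a user-defined component
    /// and its properties; the call shape mirrors the tests at the bottom of this module):
    ///
    /// ```ignore
    /// let child = VChild::<MyComp>::new(my_props, NodeRef::default(), None);
    /// ```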
pub fn new(props: COMP::Properties, node_ref: NodeRef, key: Option<String>) -> Self {
Self {
props,
node_ref,
key,
}
}
}
impl<COMP> From<VChild<COMP>> for VComp
where
COMP: Component,
{
fn from(vchild: VChild<COMP>) -> Self {
VComp::new::<COMP>(vchild.props, vchild.node_ref, vchild.key)
}
}
impl VComp {
/// Creates a new `VComp` instance.
pub fn new<COMP>(props: COMP::Properties, node_ref: NodeRef, key: Option<String>) -> Self
where
COMP: Component,
{
VComp {
type_id: TypeId::of::<COMP>(),
node_ref,
props: Some(Box::new(PropsWrapper::<COMP>::new(props))),
scope: None,
key,
}
}
#[allow(unused)]
pub(crate) fn root_vnode(&self) -> Option<impl Deref<Target = VNode> + '_> {
self.scope.as_ref().and_then(|scope| scope.root_vnode())
}
}
trait Mountable {
fn copy(&self) -> Box<dyn Mountable>;
fn mount(
self: Box<Self>,
node_ref: NodeRef,
parent_scope: &AnyScope,
parent: Element,
next_sibling: NodeRef,
) -> Box<dyn Scoped>;
fn reuse(self: Box<Self>, scope: &dyn Scoped, next_sibling: NodeRef);
}
struct PropsWrapper<COMP: Component> {
props: COMP::Properties,
}
impl<COMP: Component> PropsWrapper<COMP> {
pub fn new(props: COMP::Properties) -> Self {
Self { props }
}
}
impl<COMP: Component> Mountable for PropsWrapper<COMP> {
fn copy(&self) -> Box<dyn Mountable> {
let wrapper: PropsWrapper<COMP> = PropsWrapper {
props: self.props.clone(),
};
Box::new(wrapper)
}
fn mount(
self: Box<Self>,
node_ref: NodeRef,
parent_scope: &AnyScope,
parent: Element,
next_sibling: NodeRef,
) -> Box<dyn Scoped> {
let scope: Scope<COMP> = Scope::new(Some(parent_scope.clone()));
let scope = scope.mount_in_place(
parent,
next_sibling,
Some(VNode::VRef(node_ref.get().unwrap())),
node_ref,
self.props,
);
Box::new(scope)
}
fn reuse(self: Box<Self>, scope: &dyn Scoped, next_sibling: NodeRef) {
let scope: Scope<COMP> = scope.to_any().downcast();
scope.update(ComponentUpdate::Properties(self.props, next_sibling), false);
}
}
impl VDiff for VComp {
fn detach(&mut self, _parent: &Element) {
self.scope.take().expect("VComp is not mounted").destroy();
}
fn apply(
&mut self,
parent_scope: &AnyScope,
parent: &Element,
next_sibling: NodeRef,
ancestor: Option<VNode>,
) -> NodeRef {
let mountable = self.props.take().expect("VComp has already been mounted");
if let Some(mut ancestor) = ancestor {
if let VNode::VComp(ref mut vcomp) = &mut ancestor {
// If the ancestor is the same type, reuse it and update its properties
if self.type_id == vcomp.type_id {
self.node_ref.link(vcomp.node_ref.clone());
let scope = vcomp.scope.take().expect("VComp is not mounted");
mountable.reuse(scope.borrow(), next_sibling);
self.scope = Some(scope);
return vcomp.node_ref.clone();
}
}
ancestor.detach(parent);
}
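        // No reusable ancestor: reserve the component's position in the DOM with an empty
        // text node until the freshly mounted scope replaces it.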
let placeholder: Node = document().create_text_node("").into();
super::insert_node(&placeholder, parent, next_sibling.get());
self.node_ref.set(Some(placeholder));
let scope = mountable.mount(
self.node_ref.clone(),
parent_scope,
parent.to_owned(),
next_sibling,
);
self.scope = Some(scope);
self.node_ref.clone()
}
}
impl<T> Transformer<T, T> for VComp {
fn transform(from: T) -> T {
from
}
}
impl<'a, T> Transformer<&'a T, T> for VComp
where
T: Clone,
{
fn transform(from: &'a T) -> T {
from.clone()
}
}
impl<'a> Transformer<&'a str, String> for VComp {
fn transform(from: &'a str) -> String {
from.to_owned()
}
}
impl<T> Transformer<T, Option<T>> for VComp {
fn transform(from: T) -> Option<T> {
Some(from)
}
}
impl<'a, T> Transformer<&'a T, Option<T>> for VComp
where
T: Clone,
{
fn transform(from: &T) -> Option<T> {
Some(from.clone())
}
}
impl<'a> Transformer<&'a str, Option<String>> for VComp {
fn transform(from: &'a str) -> Option<String> {
Some(from.to_owned())
}
}
impl<'a> Transformer<Option<&'a str>, Option<String>> for VComp {
fn transform(from: Option<&'a str>) -> Option<String> {
from.map(|s| s.to_owned())
}
}
impl PartialEq for VComp {
fn eq(&self, other: &VComp) -> bool {
self.type_id == other.type_id
}
}
impl fmt::Debug for VComp {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("VComp")
}
}
impl<COMP: Component> fmt::Debug for VChild<COMP> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("VChild<_>")
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::macros::Properties;
use crate::{html, Children, Component, ComponentLink, Html, NodeRef, ShouldRender};
#[cfg(feature = "wasm_test")]
use wasm_bindgen_test::{wasm_bindgen_test as test, wasm_bindgen_test_configure};
#[cfg(feature = "wasm_test")]
wasm_bindgen_test_configure!(run_in_browser);
struct Comp;
#[derive(Clone, PartialEq, Properties)]
struct Props {
#[prop_or_default]
field_1: u32,
#[prop_or_default]
field_2: u32,
}
impl Component for Comp {
type Message = ();
type Properties = Props;
fn create(_: Self::Properties, _: ComponentLink<Self>) -> Self {
Comp
}
fn update(&mut self, _: Self::Message) -> ShouldRender {
unimplemented!();
}
fn change(&mut self, _: Self::Properties) -> ShouldRender {
unimplemented!();
}
fn view(&self) -> Html {
unimplemented!();
}
}
#[test]
fn set_properties_to_component() {
html! {
<Comp />
};
html! {
<Comp field_1=1 />
};
html! {
<Comp field_2=2 />
};
html! {
<Comp field_1=1 field_2=2 />
};
let props = Props {
field_1: 1,
field_2: 1,
};
html! {
<Comp with props />
};
}
#[test]
fn set_component_key() {
let test_key = "test".to_string();
let check_key = |vnode: VNode| {
assert_eq!(vnode.key().as_ref(), Some(&test_key));
};
let props = Props {
field_1: 1,
field_2: 1,
};
let props_2 = props.clone();
check_key(html! { <Comp key=test_key.clone() /> });
check_key(html! { <Comp key=test_key.clone() field_1=1 /> });
check_key(html! { <Comp field_1=1 key=test_key.clone() /> });
check_key(html! { <Comp with props key=test_key.clone() /> });
check_key(html! { <Comp key=test_key.clone() with props_2 /> });
}
#[test]
fn set_component_node_ref() {
let test_node: Node = document().create_text_node("test").into();
let test_node_ref = NodeRef::new(test_node);
let check_node_ref = |vnode: VNode| {
assert_eq!(vnode.first_node(), test_node_ref.get().unwrap());
};
let props = Props {
field_1: 1,
field_2: 1,
};
let props_2 = props.clone();
check_node_ref(html! { <Comp ref=test_node_ref.clone() /> });
check_node_ref(html! { <Comp ref=test_node_ref.clone() field_1=1 /> });
check_node_ref(html! { <Comp field_1=1 ref=test_node_ref.clone() /> });
check_node_ref(html! { <Comp with props ref=test_node_ref.clone() /> });
check_node_ref(html! { <Comp ref=test_node_ref.clone() with props_2 /> });
}
#[test]
fn vchild_partialeq() {
let vchild1: VChild<Comp> = VChild::new(
Props {
field_1: 1,
field_2: 1,
},
NodeRef::default(),
None,
);
let vchild2: VChild<Comp> = VChild::new(
Props {
field_1: 1,
field_2: 1,
},
NodeRef::default(),
None,
);
let vchild3: VChild<Comp> = VChild::new(
Props {
field_1: 2,
field_2: 2,
},
NodeRef::default(),
None,
);
assert_eq!(vchild1, vchild2);
assert_ne!(vchild1, vchild3);
assert_ne!(vchild2, vchild3);
}
#[derive(Clone, Properties)]
pub struct ListProps {
pub children: Children,
}
pub struct List(ListProps);
impl Component for List {
type Message = ();
type Properties = ListProps;
fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self {
Self(props)
}
fn update(&mut self, _: Self::Message) -> ShouldRender {
unimplemented!();
}
fn change(&mut self, _: Self::Properties) -> ShouldRender {
unimplemented!();
}
fn view(&self) -> Html {
let item_iter = self.0.children.iter().map(|item| html! {<li>{ item }</li>});
html! {
<ul>{ for item_iter }</ul>
}
}
}
#[cfg(feature = "web_sys")]
use super::{AnyScope, Element};
#[cfg(feature = "web_sys")]
fn setup_parent() -> (AnyScope, Element) {
let scope = AnyScope {
type_id: std::any::TypeId::of::<()>(),
parent: None,
state: std::rc::Rc::new(()),
};
let parent = document().create_element("div").unwrap();
document().body().unwrap().append_child(&parent).unwrap();
(scope, parent)
}
#[cfg(feature = "web_sys")]
fn get_html(mut node: Html, scope: &AnyScope, parent: &Element) -> String {
// clear parent
parent.set_inner_html("");
node.apply(&scope, &parent, NodeRef::default(), None);
parent.inner_html()
}
#[test]
#[cfg(feature = "web_sys")]
fn all_ways_of_passing_children_work() {
let (scope, parent) = setup_parent();
let children: Vec<_> = vec!["a", "b", "c"]
.drain(..)
.map(|text| html! {<span>{ text }</span>})
.collect();
let children_renderer = Children::new(children.clone());
let expected_html = "\
<ul>\
<li><span>a</span></li>\
<li><span>b</span></li>\
<li><span>c</span></li>\
</ul>";
let prop_method = html! {
<List children=children_renderer.clone()/>
};
assert_eq!(get_html(prop_method, &scope, &parent), expected_html);
let children_renderer_method = html! {
<List>
{ children_renderer.clone() }
</List>
};
assert_eq!(
get_html(children_renderer_method, &scope, &parent),
expected_html
);
let direct_method = html! {
<List>
{ children.clone() }
</List>
};
assert_eq!(get_html(direct_method, &scope, &parent), expected_html);
let for_method = html! {
<List>
{ for children }
</List>
};
assert_eq!(get_html(for_method, &scope, &parent), expected_html);
}
}
#[cfg(all(test, feature = "web_sys"))]
mod layout_tests {
use crate::virtual_dom::layout_tests::{diff_layouts, TestLayout};
use crate::{Children, Component, ComponentLink, Html, Properties, ShouldRender};
use std::marker::PhantomData;
#[cfg(feature = "wasm_test")]
use wasm_bindgen_test::{wasm_bindgen_test as test, wasm_bindgen_test_configure};
#[cfg(feature = "wasm_test")]
wasm_bindgen_test_configure!(run_in_browser);
struct Comp<T> {
_marker: PhantomData<T>,
props: CompProps,
}
#[derive(Properties, Clone)]
struct CompProps {
#[prop_or_default]
children: Children,
}
impl<T: 'static> Component for Comp<T> {
type Message = ();
type Properties = CompProps;
fn create(props: Self::Properties, _: ComponentLink<Self>) -> Self {
Comp {
_marker: PhantomData::default(),
props,
}
}
fn update(&mut self, _: Self::Message) -> ShouldRender {
unimplemented!();
}
fn change(&mut self, props: Self::Properties) -> ShouldRender {
self.props = props;
true
}
fn view(&self) -> Html {
html! {
<>{ self.props.children.clone() }</>
}
}
}
struct A;
struct B;
#[test]
fn diff() {
let layout1 = TestLayout {
node: html! {
<Comp<A>>
<Comp<B>></Comp<B>>
{"C"}
</Comp<A>>
},
expected: "C",
};
let layout2 = TestLayout {
node: html! {
<Comp<A>>
{"A"}
</Comp<A>>
},
expected: "A",
};
let layout3 = TestLayout {
node: html! {
<Comp<B>>
<Comp<A>></Comp<A>>
{"B"}
</Comp<B>>
},
expected: "B",
};
let layout4 = TestLayout {
node: html! {
<Comp<B>>
<Comp<A>>{"A"}</Comp<A>>
{"B"}
</Comp<B>>
},
expected: "AB",
};
let layout5 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>>
{"A"}
</Comp<A>>
</>
{"B"}
</Comp<B>>
},
expected: "AB",
};
let layout6 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>>
{"A"}
</Comp<A>>
{"B"}
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout7 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>>
{"A"}
</Comp<A>>
<Comp<A>>
{"B"}
</Comp<A>>
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout8 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>>
{"A"}
</Comp<A>>
<Comp<A>>
<Comp<A>>
{"B"}
</Comp<A>>
</Comp<A>>
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout9 = TestLayout {
node: html! {
<Comp<B>>
<>
<>
{"A"}
</>
<Comp<A>>
<Comp<A>>
{"B"}
</Comp<A>>
</Comp<A>>
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout10 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>>
<Comp<A>>
{"A"}
</Comp<A>>
</Comp<A>>
<>
{"B"}
</>
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout11 = TestLayout {
node: html! {
<Comp<B>>
<>
<>
<Comp<A>>
<Comp<A>>
{"A"}
</Comp<A>>
{"B"}
</Comp<A>>
</>
</>
{"C"}
</Comp<B>>
},
expected: "ABC",
};
let layout12 = TestLayout {
node: html! {
<Comp<B>>
<>
<Comp<A>></Comp<A>>
<>
<Comp<A>>
<>
<Comp<A>>
{"A"}
</Comp<A>>
<></>
<Comp<A>>
<Comp<A>></Comp<A>>
<></>
{"B"}
<></>
<Comp<A>></Comp<A>>
</Comp<A>>
</>
</Comp<A>>
<></>
</>
<Comp<A>></Comp<A>>
</>
{"C"}
<Comp<A>></Comp<A>>
<></>
</Comp<B>>
},
expected: "ABC",
};
diff_layouts(vec![
layout1, layout2, layout3, layout4, layout5, layout6, layout7, layout8, layout9,
layout10, layout11, layout12,
]);
}
}
| 26.517949 | 93 | 0.43744 |
64ef0c9579fddc6bb1b039af41dfe401232aa77f | 454 | use std::collections::HashMap;
// Used to expose non-numeric metrics (currently just the git revision baked in at build time)
pub fn get_json_metrics() -> HashMap<String, String> {
    let json_metrics: HashMap<String, String> = HashMap::new();
    add_revision_hash(json_metrics)
}
fn add_revision_hash(mut json_metrics: HashMap<String, String>) -> HashMap<String, String> {
json_metrics.insert("revision".to_string(), env!("GIT_REV").to_string());
json_metrics
}
| 32.428571 | 92 | 0.726872 |
e8eba46960c45690941acb26b89d0190d2379847 | 27,201 | use super::{Format, FormatEvent, FormatFields, FormatTime};
use crate::{
field::{RecordFields, VisitOutput},
fmt::fmt_subscriber::{FmtContext, FormattedFields},
registry::LookupSpan,
};
use serde::ser::{SerializeMap, Serializer as _};
use serde_json::Serializer;
use std::{
collections::BTreeMap,
fmt::{self, Write},
io,
};
use tracing_core::{
field::{self, Field},
span::Record,
Collect, Event,
};
use tracing_serde::AsSerde;
#[cfg(feature = "tracing-log")]
use tracing_log::NormalizeEvent;
/// Marker for `Format` that indicates that the verbose json log format should be used.
///
/// The full format includes fields from all entered spans.
///
/// # Example Output
///
/// ```json
/// {
/// "timestamp":"Feb 20 11:28:15.096",
/// "level":"INFO",
///     "fields":{"message":"some message","key":"value"},
///     "target":"mycrate",
///     "span":{"name":"leaf"},
///     "spans":[{"name":"root"},{"name":"leaf"}]
/// }
/// ```
///
/// # Options
///
/// - [`Json::flatten_event`] can be used to enable flattening event fields into
/// the root
/// - [`Json::with_current_span`] can be used to control logging of the current
/// span
/// - [`Json::with_span_list`] can be used to control logging of the span list
/// object.
///
/// By default, event fields are not flattened, and both current span and span
/// list are logged.
///
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub struct Json {
pub(crate) flatten_event: bool,
pub(crate) display_current_span: bool,
pub(crate) display_span_list: bool,
}
impl Json {
/// If set to `true` event metadata will be flattened into the root object.
pub fn flatten_event(&mut self, flatten_event: bool) {
self.flatten_event = flatten_event;
}
/// If set to `false`, formatted events won't contain a field for the current span.
pub fn with_current_span(&mut self, display_current_span: bool) {
self.display_current_span = display_current_span;
}
/// If set to `false`, formatted events won't contain a list of all currently
/// entered spans. Spans are logged in a list from root to leaf.
pub fn with_span_list(&mut self, display_span_list: bool) {
self.display_span_list = display_span_list;
}
}
struct SerializableContext<'a, 'b, Span, N>(
&'b crate::subscribe::Context<'a, Span>,
std::marker::PhantomData<N>,
)
where
Span: Collect + for<'lookup> crate::registry::LookupSpan<'lookup>,
N: for<'writer> FormatFields<'writer> + 'static;
impl<'a, 'b, Span, N> serde::ser::Serialize for SerializableContext<'a, 'b, Span, N>
where
Span: Collect + for<'lookup> crate::registry::LookupSpan<'lookup>,
N: for<'writer> FormatFields<'writer> + 'static,
{
fn serialize<Ser>(&self, serializer_o: Ser) -> Result<Ser::Ok, Ser::Error>
where
Ser: serde::ser::Serializer,
{
use serde::ser::SerializeSeq;
let mut serializer = serializer_o.serialize_seq(None)?;
if let Some(leaf_span) = self.0.lookup_current() {
for span in leaf_span.scope().from_root() {
serializer.serialize_element(&SerializableSpan(&span, self.1))?;
}
}
serializer.end()
}
}
struct SerializableSpan<'a, 'b, Span, N>(
&'b crate::registry::SpanRef<'a, Span>,
std::marker::PhantomData<N>,
)
where
Span: for<'lookup> crate::registry::LookupSpan<'lookup>,
N: for<'writer> FormatFields<'writer> + 'static;
impl<'a, 'b, Span, N> serde::ser::Serialize for SerializableSpan<'a, 'b, Span, N>
where
Span: for<'lookup> crate::registry::LookupSpan<'lookup>,
N: for<'writer> FormatFields<'writer> + 'static,
{
fn serialize<Ser>(&self, serializer: Ser) -> Result<Ser::Ok, Ser::Error>
where
Ser: serde::ser::Serializer,
{
let mut serializer = serializer.serialize_map(None)?;
let ext = self.0.extensions();
let data = ext
.get::<FormattedFields<N>>()
.expect("Unable to find FormattedFields in extensions; this is a bug");
// TODO: let's _not_ do this, but this resolves
// https://github.com/tokio-rs/tracing/issues/391.
// We should probably rework this to use a `serde_json::Value` or something
// similar in a JSON-specific layer, but I'd (david)
        // rather have an uglier fix now than ship broken JSON.
match serde_json::from_str::<serde_json::Value>(&data) {
Ok(serde_json::Value::Object(fields)) => {
for field in fields {
serializer.serialize_entry(&field.0, &field.1)?;
}
}
// We have fields for this span which are valid JSON but not an object.
// This is probably a bug, so panic if we're in debug mode
Ok(_) if cfg!(debug_assertions) => panic!(
"span '{}' had malformed fields! this is a bug.\n error: invalid JSON object\n fields: {:?}",
self.0.metadata().name(),
data
),
// If we *aren't* in debug mode, it's probably best not to
            // crash the program; log the field we found, along with a
            // message saying its type is invalid.
Ok(value) => {
serializer.serialize_entry("field", &value)?;
                serializer.serialize_entry("field_error", "field was not a valid object")?
}
            // The fields we previously recorded for this span
// should be valid JSON. However, they appear to *not*
// be valid JSON. This is almost certainly a bug, so
// panic if we're in debug mode
Err(e) if cfg!(debug_assertions) => panic!(
"span '{}' had malformed fields! this is a bug.\n error: {}\n fields: {:?}",
self.0.metadata().name(),
e,
data
),
            // If we *aren't* in debug mode, it's probably best not to
            // crash the program, but let's at least make sure it's clear
// that the fields are not supposed to be missing.
Err(e) => serializer.serialize_entry("field_error", &format!("{}", e))?,
};
serializer.serialize_entry("name", self.0.metadata().name())?;
serializer.end()
}
}
impl<C, N, T> FormatEvent<C, N> for Format<Json, T>
where
C: Collect + for<'lookup> LookupSpan<'lookup>,
N: for<'writer> FormatFields<'writer> + 'static,
T: FormatTime,
{
fn format_event(
&self,
ctx: &FmtContext<'_, C, N>,
writer: &mut dyn fmt::Write,
event: &Event<'_>,
) -> fmt::Result
where
C: Collect + for<'a> LookupSpan<'a>,
{
let mut timestamp = String::new();
self.timer.format_time(&mut timestamp)?;
#[cfg(feature = "tracing-log")]
let normalized_meta = event.normalized_metadata();
#[cfg(feature = "tracing-log")]
let meta = normalized_meta.as_ref().unwrap_or_else(|| event.metadata());
#[cfg(not(feature = "tracing-log"))]
let meta = event.metadata();
let mut visit = || {
let mut serializer = Serializer::new(WriteAdaptor::new(writer));
let mut serializer = serializer.serialize_map(None)?;
if self.display_timestamp {
serializer.serialize_entry("timestamp", ×tamp)?;
}
if self.display_level {
serializer.serialize_entry("level", &meta.level().as_serde())?;
}
let format_field_marker: std::marker::PhantomData<N> = std::marker::PhantomData;
let current_span = if self.format.display_current_span || self.format.display_span_list
{
event
.parent()
.and_then(|id| ctx.span(id))
.or_else(|| ctx.lookup_current())
} else {
None
};
if self.format.flatten_event {
let mut visitor = tracing_serde::SerdeMapVisitor::new(serializer);
event.record(&mut visitor);
serializer = visitor.take_serializer()?;
} else {
use tracing_serde::fields::AsMap;
serializer.serialize_entry("fields", &event.field_map())?;
};
if self.display_target {
serializer.serialize_entry("target", meta.target())?;
}
if self.format.display_current_span {
if let Some(ref span) = current_span {
serializer
.serialize_entry("span", &SerializableSpan(span, format_field_marker))
.unwrap_or(());
}
}
if self.format.display_span_list && current_span.is_some() {
serializer.serialize_entry(
"spans",
&SerializableContext(&ctx.ctx, format_field_marker),
)?;
}
if self.display_thread_name {
let current_thread = std::thread::current();
match current_thread.name() {
Some(name) => {
serializer.serialize_entry("threadName", name)?;
}
// fall-back to thread id when name is absent and ids are not enabled
None if !self.display_thread_id => {
serializer
.serialize_entry("threadName", &format!("{:?}", current_thread.id()))?;
}
_ => {}
}
}
if self.display_thread_id {
serializer
.serialize_entry("threadId", &format!("{:?}", std::thread::current().id()))?;
}
serializer.end()
};
visit().map_err(|_| fmt::Error)?;
writeln!(writer)
}
}
impl Default for Json {
fn default() -> Json {
Json {
flatten_event: false,
display_current_span: true,
display_span_list: true,
}
}
}
/// The JSON [`FormatFields`] implementation.
///
#[derive(Debug)]
pub struct JsonFields {
// reserve the ability to add fields to this without causing a breaking
// change in the future.
_private: (),
}
impl JsonFields {
/// Returns a new JSON [`FormatFields`] implementation.
///
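    /// This is the field formatter installed by the JSON collector builder (note the
    /// `CollectorBuilder<JsonFields, Format<Json>>` type used by this module's tests);
    /// constructing it directly is only needed when assembling a formatter by hand.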
pub fn new() -> Self {
Self { _private: () }
}
}
impl Default for JsonFields {
fn default() -> Self {
Self::new()
}
}
impl<'a> FormatFields<'a> for JsonFields {
/// Format the provided `fields` to the provided `writer`, returning a result.
fn format_fields<R: RecordFields>(
&self,
writer: &'a mut dyn fmt::Write,
fields: R,
) -> fmt::Result {
let mut v = JsonVisitor::new(writer);
fields.record(&mut v);
v.finish()
}
/// Record additional field(s) on an existing span.
///
    /// Unlike the default implementation, this parses any previously recorded
    /// fields as a JSON object, merges the new fields into it, and re-serializes
    /// the result so that the stored string always remains valid JSON.
fn add_fields(&self, current: &'a mut String, fields: &Record<'_>) -> fmt::Result {
if !current.is_empty() {
// If fields were previously recorded on this span, we need to parse
// the current set of fields as JSON, add the new fields, and
// re-serialize them. Otherwise, if we just appended the new fields
// to a previously serialized JSON object, we would end up with
// malformed JSON.
//
// XXX(eliza): this is far from efficient, but unfortunately, it is
// necessary as long as the JSON formatter is implemented on top of
// an interface that stores all formatted fields as strings.
//
// We should consider reimplementing the JSON formatter as a
// separate layer, rather than a formatter for the `fmt` layer —
// then, we could store fields as JSON values, and add to them
// without having to parse and re-serialize.
let mut new = String::new();
let map: BTreeMap<&'_ str, serde_json::Value> =
serde_json::from_str(current).map_err(|_| fmt::Error)?;
let mut v = JsonVisitor::new(&mut new);
v.values = map;
fields.record(&mut v);
v.finish()?;
*current = new;
} else {
// If there are no previously recorded fields, we can just reuse the
// existing string.
let mut v = JsonVisitor::new(current);
fields.record(&mut v);
v.finish()?;
}
Ok(())
}
}
/// The [visitor] produced by [`JsonFields`]'s [`MakeVisitor`] implementation.
///
/// [visitor]: crate::field::Visit
/// [`MakeVisitor`]: crate::field::MakeVisitor
pub struct JsonVisitor<'a> {
values: BTreeMap<&'a str, serde_json::Value>,
writer: &'a mut dyn Write,
}
impl<'a> fmt::Debug for JsonVisitor<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_fmt(format_args!("JsonVisitor {{ values: {:?} }}", self.values))
}
}
impl<'a> JsonVisitor<'a> {
/// Returns a new default visitor that formats to the provided `writer`.
///
    /// # Arguments
    /// - `writer`: the writer to format to.
pub fn new(writer: &'a mut dyn Write) -> Self {
Self {
values: BTreeMap::new(),
writer,
}
}
}
impl<'a> crate::field::VisitFmt for JsonVisitor<'a> {
fn writer(&mut self) -> &mut dyn fmt::Write {
self.writer
}
}
impl<'a> crate::field::VisitOutput<fmt::Result> for JsonVisitor<'a> {
fn finish(self) -> fmt::Result {
let inner = || {
let mut serializer = Serializer::new(WriteAdaptor::new(self.writer));
let mut ser_map = serializer.serialize_map(None)?;
for (k, v) in self.values {
ser_map.serialize_entry(k, &v)?;
}
ser_map.end()
};
if inner().is_err() {
Err(fmt::Error)
} else {
Ok(())
}
}
}
impl<'a> field::Visit for JsonVisitor<'a> {
/// Visit a signed 64-bit integer value.
fn record_i64(&mut self, field: &Field, value: i64) {
self.values
.insert(&field.name(), serde_json::Value::from(value));
}
/// Visit an unsigned 64-bit integer value.
fn record_u64(&mut self, field: &Field, value: u64) {
self.values
.insert(&field.name(), serde_json::Value::from(value));
}
/// Visit a boolean value.
fn record_bool(&mut self, field: &Field, value: bool) {
self.values
.insert(&field.name(), serde_json::Value::from(value));
}
/// Visit a string value.
fn record_str(&mut self, field: &Field, value: &str) {
self.values
.insert(&field.name(), serde_json::Value::from(value));
}
fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
match field.name() {
// Skip fields that are actually log metadata that have already been handled
#[cfg(feature = "tracing-log")]
name if name.starts_with("log.") => (),
name if name.starts_with("r#") => {
self.values
.insert(&name[2..], serde_json::Value::from(format!("{:?}", value)));
}
name => {
self.values
.insert(name, serde_json::Value::from(format!("{:?}", value)));
}
};
}
}
/// A bridge between `fmt::Write` and `io::Write`.
///
/// This is needed because tracing-subscriber's FormatEvent expects a fmt::Write
/// while serde_json's Serializer expects an io::Write.
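///
/// Typical use, as seen in `format_event` and `JsonVisitor::finish` in this module:
/// `Serializer::new(WriteAdaptor::new(writer))`.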
struct WriteAdaptor<'a> {
fmt_write: &'a mut dyn fmt::Write,
}
impl<'a> WriteAdaptor<'a> {
fn new(fmt_write: &'a mut dyn fmt::Write) -> Self {
Self { fmt_write }
}
}
impl<'a> io::Write for WriteAdaptor<'a> {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let s =
std::str::from_utf8(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
self.fmt_write
.write_str(&s)
.map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
Ok(s.as_bytes().len())
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
impl<'a> fmt::Debug for WriteAdaptor<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.pad("WriteAdaptor { .. }")
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::fmt::{format::FmtSpan, test::MockMakeWriter, time::FormatTime, CollectorBuilder};
use tracing::{self, collect::with_default};
use std::fmt;
struct MockTime;
impl FormatTime for MockTime {
fn format_time(&self, w: &mut dyn fmt::Write) -> fmt::Result {
write!(w, "fake time")
}
}
fn collector() -> CollectorBuilder<JsonFields, Format<Json>> {
crate::fmt::CollectorBuilder::default().json()
}
#[test]
fn json() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"fields\":{\"message\":\"some json test\"}}\n";
let collector = collector()
.flatten_event(false)
.with_current_span(true)
.with_span_list(true);
test_json(expected, collector, || {
let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
let _guard = span.enter();
tracing::info!("some json test");
});
}
#[test]
fn json_flattened_event() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"message\":\"some json test\"}\n";
let collector = collector()
.flatten_event(true)
.with_current_span(true)
.with_span_list(true);
test_json(expected, collector, || {
let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
let _guard = span.enter();
tracing::info!("some json test");
});
}
#[test]
fn json_disabled_current_span_event() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"fields\":{\"message\":\"some json test\"}}\n";
let collector = collector()
.flatten_event(false)
.with_current_span(false)
.with_span_list(true);
test_json(expected, collector, || {
let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
let _guard = span.enter();
tracing::info!("some json test");
});
}
#[test]
fn json_disabled_span_list_event() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":42,\"name\":\"json_span\",\"number\":3},\"target\":\"tracing_subscriber::fmt::format::json::test\",\"fields\":{\"message\":\"some json test\"}}\n";
let collector = collector()
.flatten_event(false)
.with_current_span(true)
.with_span_list(false);
test_json(expected, collector, || {
let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
let _guard = span.enter();
tracing::info!("some json test");
});
}
#[test]
fn json_nested_span() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"span\":{\"answer\":43,\"name\":\"nested_json_span\",\"number\":4},\"spans\":[{\"answer\":42,\"name\":\"json_span\",\"number\":3},{\"answer\":43,\"name\":\"nested_json_span\",\"number\":4}],\"target\":\"tracing_subscriber::fmt::format::json::test\",\"fields\":{\"message\":\"some json test\"}}\n";
let collector = collector()
.flatten_event(false)
.with_current_span(true)
.with_span_list(true);
test_json(expected, collector, || {
let span = tracing::span!(tracing::Level::INFO, "json_span", answer = 42, number = 3);
let _guard = span.enter();
let span = tracing::span!(
tracing::Level::INFO,
"nested_json_span",
answer = 43,
number = 4
);
let _guard = span.enter();
tracing::info!("some json test");
});
}
#[test]
fn json_no_span() {
let expected =
"{\"timestamp\":\"fake time\",\"level\":\"INFO\",\"target\":\"tracing_subscriber::fmt::format::json::test\",\"fields\":{\"message\":\"some json test\"}}\n";
let collector = collector()
.flatten_event(false)
.with_current_span(true)
.with_span_list(true);
test_json(expected, collector, || {
tracing::info!("some json test");
});
}
#[test]
fn record_works() {
// This test reproduces issue #707, where using `Span::record` causes
// any events inside the span to be ignored.
let buffer = MockMakeWriter::default();
let subscriber = crate::fmt().json().with_writer(buffer.clone()).finish();
with_default(subscriber, || {
tracing::info!("an event outside the root span");
assert_eq!(
parse_as_json(&buffer)["fields"]["message"],
"an event outside the root span"
);
let span = tracing::info_span!("the span", na = tracing::field::Empty);
span.record("na", &"value");
let _enter = span.enter();
tracing::info!("an event inside the root span");
assert_eq!(
parse_as_json(&buffer)["fields"]["message"],
"an event inside the root span"
);
});
}
#[test]
fn json_span_event_show_correct_context() {
let buffer = MockMakeWriter::default();
let subscriber = collector()
.with_writer(buffer.clone())
.flatten_event(false)
.with_current_span(true)
.with_span_list(false)
.with_span_events(FmtSpan::FULL)
.finish();
with_default(subscriber, || {
let context = "parent";
let parent_span = tracing::info_span!("parent_span", context);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "new");
assert_eq!(event["span"]["context"], "parent");
let _parent_enter = parent_span.enter();
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "enter");
assert_eq!(event["span"]["context"], "parent");
let context = "child";
let child_span = tracing::info_span!("child_span", context);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "new");
assert_eq!(event["span"]["context"], "child");
let _child_enter = child_span.enter();
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "enter");
assert_eq!(event["span"]["context"], "child");
drop(_child_enter);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "exit");
assert_eq!(event["span"]["context"], "child");
drop(child_span);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "close");
assert_eq!(event["span"]["context"], "child");
drop(_parent_enter);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "exit");
assert_eq!(event["span"]["context"], "parent");
drop(parent_span);
let event = parse_as_json(&buffer);
assert_eq!(event["fields"]["message"], "close");
assert_eq!(event["span"]["context"], "parent");
});
}
#[test]
fn json_span_event_with_no_fields() {
// Check span events serialize correctly.
// Discussion: https://github.com/tokio-rs/tracing/issues/829#issuecomment-661984255
//
let buffer = MockMakeWriter::default();
let subscriber = collector()
.with_writer(buffer.clone())
.flatten_event(false)
.with_current_span(false)
.with_span_list(false)
.with_span_events(FmtSpan::FULL)
.finish();
with_default(subscriber, || {
let span = tracing::info_span!("valid_json");
assert_eq!(parse_as_json(&buffer)["fields"]["message"], "new");
let _enter = span.enter();
assert_eq!(parse_as_json(&buffer)["fields"]["message"], "enter");
drop(_enter);
assert_eq!(parse_as_json(&buffer)["fields"]["message"], "exit");
drop(span);
assert_eq!(parse_as_json(&buffer)["fields"]["message"], "close");
});
}
fn parse_as_json(buffer: &MockMakeWriter) -> serde_json::Value {
let buf = String::from_utf8(buffer.buf().to_vec()).unwrap();
let json = buf
.lines()
.last()
.expect("expected at least one line to be written!");
match serde_json::from_str(&json) {
Ok(v) => v,
Err(e) => panic!(
"assertion failed: JSON shouldn't be malformed\n error: {}\n json: {}",
e, json
),
}
}
fn test_json<T>(
expected: &str,
builder: crate::fmt::CollectorBuilder<JsonFields, Format<Json>>,
producer: impl FnOnce() -> T,
) {
let make_writer = MockMakeWriter::default();
let collector = builder
.with_writer(make_writer.clone())
.with_timer(MockTime)
.finish();
with_default(collector, producer);
let buf = make_writer.buf();
let actual = std::str::from_utf8(&buf[..]).unwrap();
assert_eq!(
serde_json::from_str::<std::collections::HashMap<&str, serde_json::Value>>(expected)
.unwrap(),
serde_json::from_str(actual).unwrap()
);
}
}
| 35.234456 | 355 | 0.550494 |
5d8e8ae28c3640a207925c7f432e8c9b958e4085 | 8,313 | #[macro_use]
extern crate criterion;
use core::time::Duration;
use criterion::{black_box, BatchSize, Criterion, ParameterizedBenchmark, Throughput};
use serde_derive::{Deserialize, Serialize};
use toctoc::{Deserialize as MiniDeserialize, Serialize as MiniSerialize};
const LEN: usize = 100_000;
const WARM_UP_TIME: Duration = Duration::from_secs(5);
const MEASUREMENT_TIME: Duration = Duration::from_secs(55);
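// These benchmarks exercise the bundled benches/twitter.json corpus: the JSON ser/de groups
// compare toctoc with serde_json (plus simd-json for deserialization), while the BSON groups
// measure toctoc on its own.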
fn input_json() -> String {
std::fs::read_to_string("benches/twitter.json").unwrap()
}
fn input_struct() -> Twitter {
let j = input_json();
serde_json::from_str(&j).unwrap()
}
fn cmp(c: &mut Criterion) {
let core_ids = core_affinity::get_core_ids().unwrap();
core_affinity::set_for_current(core_ids[0]);
c.bench(
"ser/json",
ParameterizedBenchmark::new(
"toctoc",
|b, _| {
b.iter_batched(
|| input_struct(),
|value| black_box(toctoc::json::to_string(&value, &mut ())),
BatchSize::NumIterations(LEN as u64),
)
},
vec![()],
)
.with_function("serde_json", |b, _| {
b.iter_batched(
|| input_struct(),
|value| black_box(serde_json::to_string(&value).unwrap()),
BatchSize::NumIterations(LEN as u64),
)
})
//.throughput(|d| Throughput::Bytes(d.0.len() as u64))
.warm_up_time(WARM_UP_TIME)
.measurement_time(MEASUREMENT_TIME),
);
c.bench(
"de/json",
ParameterizedBenchmark::new(
"toctoc",
|b, data| {
b.iter_batched(
|| data.clone(),
|mut value| {
black_box(toctoc::json::from_str::<Twitter>(&mut value, &mut ()).unwrap())
},
BatchSize::NumIterations(LEN as u64),
)
},
vec![input_json()],
)
.with_function("serde_json", |b, data| {
b.iter_batched(
|| data.clone(),
|value| black_box(serde_json::from_str::<Twitter>(&value).unwrap()),
BatchSize::NumIterations(LEN as u64),
)
})
.with_function("simd-json", |b, data| {
b.iter_batched(
|| data.clone(),
|mut value| black_box(simd_json::serde::from_str::<Twitter>(&mut value).unwrap()),
BatchSize::NumIterations(LEN as u64),
)
})
.throughput(|d| Throughput::Bytes(d.as_bytes().len() as u64))
.warm_up_time(WARM_UP_TIME)
.measurement_time(MEASUREMENT_TIME),
);
c.bench(
"ser/bson",
ParameterizedBenchmark::new(
"toctoc",
|b, _| {
b.iter_batched(
|| input_struct(),
|value| black_box(toctoc::bson::to_bin(&value, &mut ())),
BatchSize::NumIterations(LEN as u64),
)
},
vec![()],
)
//.throughput(|d| Throughput::Bytes(d.0.len() as u64))
.warm_up_time(WARM_UP_TIME)
.measurement_time(MEASUREMENT_TIME),
);
c.bench(
"de/bson",
ParameterizedBenchmark::new(
"toctoc",
|b, data| {
b.iter_batched(
|| data.clone(),
|value| black_box(toctoc::bson::from_bin::<Twitter>(&value, &mut ()).unwrap()),
BatchSize::NumIterations(LEN as u64),
)
},
vec![toctoc::bson::to_bin(&input_struct(), &mut ())],
)
.throughput(|d| Throughput::Bytes(d.len() as u64))
.warm_up_time(WARM_UP_TIME)
.measurement_time(MEASUREMENT_TIME),
);
}
criterion_group!(benches, cmp);
criterion_main!(benches);
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Twitter {
statuses: Vec<Status>,
search_metadata: SearchMetadata,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Status {
metadata: Metadata,
created_at: String,
id: u64,
id_str: String,
text: String,
source: String,
truncated: bool,
in_reply_to_status_id: Option<u64>,
in_reply_to_status_id_str: Option<String>,
in_reply_to_user_id: Option<u32>,
in_reply_to_user_id_str: Option<String>,
in_reply_to_screen_name: Option<String>,
user: User,
geo: (),
coordinates: (),
place: (),
contributors: (),
retweeted_status: Option<Box<Status>>,
retweet_count: u32,
favorite_count: u32,
entities: StatusEntities,
favorited: bool,
retweeted: bool,
possibly_sensitive: Option<bool>,
lang: String,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Metadata {
result_type: String,
iso_language_code: String,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct User {
id: u32,
id_str: String,
name: String,
screen_name: String,
location: String,
description: String,
url: Option<String>,
entities: UserEntities,
protected: bool,
followers_count: u32,
friends_count: u32,
listed_count: u32,
created_at: String,
favourites_count: u32,
utc_offset: Option<i32>,
time_zone: Option<String>,
geo_enabled: bool,
verified: bool,
statuses_count: u32,
lang: String,
contributors_enabled: bool,
is_translator: bool,
is_translation_enabled: bool,
profile_background_color: String,
profile_background_image_url: String,
profile_background_image_url_https: String,
profile_background_tile: bool,
profile_image_url: String,
profile_image_url_https: String,
profile_banner_url: Option<String>,
profile_link_color: String,
profile_sidebar_border_color: String,
profile_sidebar_fill_color: String,
profile_text_color: String,
profile_use_background_image: bool,
default_profile: bool,
default_profile_image: bool,
following: bool,
follow_request_sent: bool,
notifications: bool,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct UserEntities {
url: Option<UserUrl>,
description: UserEntitiesDescription,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct UserUrl {
urls: Vec<Url>,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Url {
url: String,
expanded_url: String,
display_url: String,
indices: Indices,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct UserEntitiesDescription {
urls: Vec<Url>,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct StatusEntities {
hashtags: Vec<Hashtag>,
symbols: Vec<()>,
urls: Vec<Url>,
user_mentions: Vec<UserMention>,
media: Option<Vec<Media>>,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Hashtag {
text: String,
indices: Indices,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct UserMention {
screen_name: String,
name: String,
id: u32,
id_str: String,
indices: Indices,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Media {
id: u64,
id_str: String,
indices: Indices,
media_url: String,
media_url_https: String,
url: String,
display_url: String,
expanded_url: String,
#[serde(rename = "type")]
media_type: String,
sizes: Sizes,
source_status_id: Option<u64>,
source_status_id_str: Option<String>,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Sizes {
medium: Size,
small: Size,
thumb: Size,
large: Size,
}
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct Size {
w: u16,
h: u16,
resize: String,
}
type Indices = (u8, u8);
#[derive(Serialize, MiniSerialize, Deserialize, MiniDeserialize)]
struct SearchMetadata {
completed_in: f32,
max_id: u64,
max_id_str: String,
next_results: String,
query: String,
refresh_url: String,
count: u8,
since_id: u64,
since_id_str: String,
}
| 27.255738 | 99 | 0.61073 |
39cb3e7e7762a52fd9a2747d2aed98b9c306b94a | 3,803 | use std::collections::HashSet;
use std::collections::VecDeque;
use std::hash::Hash;
use std::rc::Rc;
#[derive(Debug)]
pub enum BFSResult<T> {
FoundResults(Vec<Rc<T>>),
VisitedNodes(HashSet<Rc<T>>)
}
impl<T: Eq + Hash> PartialEq for BFSResult<T> {
fn eq(&self, other: &Self) -> bool {
match &self {
Self::FoundResults(results) => match other {
Self::FoundResults(other_results) => results == other_results,
Self::VisitedNodes(_) => false
},
Self::VisitedNodes(nodes) => match other {
Self::FoundResults(_) => false,
Self::VisitedNodes(other_nodes) => {
nodes.len() == other_nodes.len() && nodes.iter().all(|e| other_nodes.contains(e))
}
}
}
}
}
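/// Breadth-first search starting from `start`.
///
/// `get_nexts` returns the neighbours of a node and `is_wanted_node` decides whether a node
/// counts as a result. The search stops early once `max_results` matching nodes have been
/// found and returns them as `BFSResult::FoundResults`; if the reachable graph is exhausted
/// first, any matches found so far are returned, or, when there were none, the set of visited
/// nodes is returned as `BFSResult::VisitedNodes`.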
pub fn bfs<T, U, V>(start: Rc<T>, get_nexts: U, is_wanted_node: V, max_results: usize) -> BFSResult<T>
where
T: Eq + Hash,
U: Fn(&T) -> Vec<T>,
V: Fn(&T) -> bool
{
let mut results: Vec<Rc<T>> = Vec::new();
let mut queue: VecDeque<Rc<T>> = VecDeque::new();
let mut visited: HashSet<Rc<T>> = HashSet::new();
queue.push_back(Rc::clone(&start));
visited.insert(Rc::clone(&start));
while let Some(f) = queue.pop_front() {
if is_wanted_node(&f) {
results.push(Rc::clone(&f));
if results.len() == max_results {
return BFSResult::FoundResults(results);
}
}
        // Enqueue every neighbour of the current node that has not been visited yet.
for item in get_nexts(&f) {
if !visited.contains(&item) {
let item_p = Rc::new(item);
queue.push_back(Rc::clone(&item_p));
visited.insert(item_p);
}
}
}
    if !results.is_empty() {
BFSResult::FoundResults(results)
} else {
BFSResult::VisitedNodes(visited)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_find_start_node() {
let start = 0;
let get_nexts = |i: &i32| -> Vec<i32> { vec![*i] };
let is_wanted_node = |i: &i32| -> bool { *i == 0 };
let max_results = 1;
assert_eq!(bfs(Rc::new(start), get_nexts, is_wanted_node, max_results),
BFSResult::FoundResults(vec![Rc::new(0)]));
}
#[test]
fn should_find_nothing() {
let start = 0;
let get_nexts = |i: &i32| -> Vec<i32> { vec![*i] };
let is_wanted_node = |i: &i32| -> bool { *i == 1 };
let max_results = 1;
assert_eq!(bfs(Rc::new(start), get_nexts, is_wanted_node, max_results),
BFSResult::VisitedNodes(HashSet::from([Rc::new(0)])));
}
#[test]
fn should_find_one_end_node() {
let start = 0;
let get_nexts = |i: &i32| -> Vec<i32> { vec![*i + 1] };
let is_wanted_node = |i: &i32| -> bool { *i == 1 };
let max_results = 1;
assert_eq!(bfs(Rc::new(start), get_nexts, is_wanted_node, max_results),
BFSResult::FoundResults(vec![Rc::new(1)]));
}
#[test]
fn should_find_multiple_end_nodes() {
let start = 0;
let get_nexts = |i: &i32| -> Vec<i32> { vec![*i + 1] };
let is_wanted_node = |i: &i32| -> bool { *i % 3 == 2 };
let max_results = 3;
assert_eq!(bfs(Rc::new(start), get_nexts, is_wanted_node, max_results),
BFSResult::FoundResults(vec![Rc::new(2), Rc::new(5), Rc::new(8)]));
}
#[test]
fn should_check_visited() {
let start = 0;
let get_nexts = |i: &i32| -> Vec<i32> { vec![*i - 1, *i + 1] };
let is_wanted_node = |i: &i32| -> bool { *i == 5 };
let max_results = 1;
assert_eq!(bfs(Rc::new(start), get_nexts, is_wanted_node, max_results),
BFSResult::FoundResults(vec![Rc::new(5)]));
}
}
| 31.957983 | 102 | 0.526952 |
e68d00d4a5f3f52a4d0da1c18fcfddef85505eac | 28,566 | // Generated from definition io.k8s.api.discovery.v1beta1.EndpointSlice
/// EndpointSlice represents a subset of the endpoints that implement a service. For a given service there may be multiple EndpointSlice objects, selected by labels, which must be joined to produce the full set of endpoints.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct EndpointSlice {
/// addressType specifies the type of address carried by this EndpointSlice. All addresses in this slice must be the same type. This field is immutable after creation. The following address types are currently supported: * IPv4: Represents an IPv4 Address. * IPv6: Represents an IPv6 Address. * FQDN: Represents a Fully Qualified Domain Name.
pub address_type: String,
/// endpoints is a list of unique endpoints in this slice. Each slice may include a maximum of 1000 endpoints.
pub endpoints: Vec<crate::api::discovery::v1beta1::Endpoint>,
/// Standard object's metadata.
pub metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta>,
/// ports specifies the list of network ports exposed by each endpoint in this slice. Each port must have a unique name. When ports is empty, it indicates that there are no defined ports. When a port is defined with a nil port value, it indicates "all ports". Each slice may include a maximum of 100 ports.
pub ports: Option<Vec<crate::api::discovery::v1beta1::EndpointPort>>,
}
// Begin discovery.k8s.io/v1beta1/EndpointSlice
// Generated from operation createDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// create an EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::CreateResponse`]`<Self>>` constructor, or [`crate::CreateResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn create_namespaced_endpoint_slice(
namespace: &str,
body: &crate::api::discovery::v1beta1::EndpointSlice,
optional: crate::CreateOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::CreateResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::post(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteDiscoveryV1beta1CollectionNamespacedEndpointSlice
impl EndpointSlice {
/// delete collection of EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>>` constructor, or [`crate::DeleteResponse`]`<`[`crate::List`]`<Self>>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `delete_optional`
///
/// Delete options. Use `Default::default()` to not pass any.
///
/// * `list_optional`
///
/// List options. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_collection_namespaced_endpoint_slice(
namespace: &str,
delete_optional: crate::DeleteOptional<'_>,
list_optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<crate::List<Self>>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
list_optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&delete_optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation deleteDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// delete an EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::DeleteResponse`]`<Self>>` constructor, or [`crate::DeleteResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the EndpointSlice
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn delete_namespaced_endpoint_slice(
name: &str,
namespace: &str,
optional: crate::DeleteOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::DeleteResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices/{name}",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let __request = http::Request::delete(__url);
let __body = serde_json::to_vec(&optional).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listDiscoveryV1beta1EndpointSliceForAllNamespaces
impl EndpointSlice {
/// list or watch objects of kind EndpointSlice
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn list_endpoint_slice_for_all_namespaces(
optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = "/apis/discovery.k8s.io/v1beta1/endpointslices?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation listDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// list or watch objects of kind EndpointSlice
///
/// This operation only supports listing all items of this type.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ListResponse`]`<Self>>` constructor, or [`crate::ListResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
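    ///
    /// A rough usage sketch (k8s-openapi only builds the request; sending it and feeding the
    /// raw response bytes back through `response_body` is left to whatever HTTP client this
    /// crate is paired with):
    ///
    /// ```ignore
    /// let (request, response_body) =
    ///     EndpointSlice::list_namespaced_endpoint_slice("default", Default::default())?;
    /// // execute `request` with your client, then parse the body via `response_body(status_code)`
    /// ```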
#[cfg(feature = "api")]
pub fn list_namespaced_endpoint_slice(
namespace: &str,
optional: crate::ListOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ListResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation patchDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// partially update the specified EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::PatchResponse`]`<Self>>` constructor, or [`crate::PatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the EndpointSlice
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn patch_namespaced_endpoint_slice(
name: &str,
namespace: &str,
body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
optional: crate::PatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::PatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::patch(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static(match body {
crate::apimachinery::pkg::apis::meta::v1::Patch::Json(_) => "application/json-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::Merge(_) => "application/merge-patch+json",
crate::apimachinery::pkg::apis::meta::v1::Patch::StrategicMerge(_) => "application/strategic-merge-patch+json",
}));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
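// Editor's sketch, not part of the generated API: building a patch request. The object name and
// namespace are hypothetical, and the `Patch` value is taken as a parameter because its
// construction lives elsewhere in the crate. Note that the Content-Type header above is picked
// from the patch variant (JSON patch, merge patch, or strategic merge patch).
#[cfg(feature = "api")]
#[allow(dead_code)]
fn example_build_patch_request(
    body: &crate::apimachinery::pkg::apis::meta::v1::Patch,
) -> Result<http::Request<Vec<u8>>, crate::RequestError> {
    let (request, _response_body_constructor) = EndpointSlice::patch_namespaced_endpoint_slice(
        "my-endpointslice", // hypothetical object name
        "default",          // hypothetical namespace
        body,
        Default::default(),
    )?;
    Ok(request)
}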
// Generated from operation readDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// read the specified EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`ReadNamespacedEndpointSliceResponse`]`>` constructor, or [`ReadNamespacedEndpointSliceResponse`] directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the EndpointSlice
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn read_namespaced_endpoint_slice(
name: &str,
namespace: &str,
optional: ReadNamespacedEndpointSliceOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<ReadNamespacedEndpointSliceResponse>), crate::RequestError> {
let ReadNamespacedEndpointSliceOptional {
exact,
export,
pretty,
} = optional;
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
if let Some(exact) = exact {
__query_pairs.append_pair("exact", &exact.to_string());
}
if let Some(export) = export {
__query_pairs.append_pair("export", &export.to_string());
}
if let Some(pretty) = pretty {
__query_pairs.append_pair("pretty", pretty);
}
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
/// Optional parameters of [`EndpointSlice::read_namespaced_endpoint_slice`]
#[cfg(feature = "api")]
#[derive(Clone, Copy, Debug, Default)]
pub struct ReadNamespacedEndpointSliceOptional<'a> {
/// Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. Deprecated. Planned for removal in 1.18.
pub exact: Option<bool>,
/// Should this value be exported. Export strips fields that a user can not specify. Deprecated. Planned for removal in 1.18.
pub export: Option<bool>,
/// If 'true', then the output is pretty printed.
pub pretty: Option<&'a str>,
}
/// Use `<ReadNamespacedEndpointSliceResponse as Response>::try_from_parts` to parse the HTTP response body of [`EndpointSlice::read_namespaced_endpoint_slice`]
#[cfg(feature = "api")]
#[derive(Debug)]
pub enum ReadNamespacedEndpointSliceResponse {
Ok(crate::api::discovery::v1beta1::EndpointSlice),
Other(Result<Option<serde_json::Value>, serde_json::Error>),
}
#[cfg(feature = "api")]
impl crate::Response for ReadNamespacedEndpointSliceResponse {
fn try_from_parts(status_code: http::StatusCode, buf: &[u8]) -> Result<(Self, usize), crate::ResponseError> {
match status_code {
http::StatusCode::OK => {
let result = match serde_json::from_slice(buf) {
Ok(value) => value,
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => return Err(crate::ResponseError::Json(err)),
};
Ok((ReadNamespacedEndpointSliceResponse::Ok(result), buf.len()))
},
_ => {
let (result, read) =
if buf.is_empty() {
(Ok(None), 0)
}
else {
match serde_json::from_slice(buf) {
Ok(value) => (Ok(Some(value)), buf.len()),
Err(ref err) if err.is_eof() => return Err(crate::ResponseError::NeedMoreData),
Err(err) => (Err(err), 0),
}
};
Ok((ReadNamespacedEndpointSliceResponse::Other(result), read))
},
}
}
}
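// Editor's sketch, not part of the generated API: decoding a read response from a raw status code
// and body using the `try_from_parts` implementation above. An `Err(ResponseError::NeedMoreData)`
// means the buffer ends mid-document, so a real caller would read more bytes and retry; here the
// error is simply propagated.
#[cfg(feature = "api")]
#[allow(dead_code)]
fn example_parse_read_response(
    status_code: http::StatusCode,
    buf: &[u8],
) -> Result<Option<crate::api::discovery::v1beta1::EndpointSlice>, crate::ResponseError> {
    use crate::Response;
    match ReadNamespacedEndpointSliceResponse::try_from_parts(status_code, buf)? {
        (ReadNamespacedEndpointSliceResponse::Ok(endpoint_slice), _bytes_read) => Ok(Some(endpoint_slice)),
        // Non-OK responses surface as `Other`; a real caller would inspect the payload instead.
        (ReadNamespacedEndpointSliceResponse::Other(_), _bytes_read) => Ok(None),
    }
}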
// Generated from operation replaceDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// replace the specified EndpointSlice
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::ReplaceResponse`]`<Self>>` constructor, or [`crate::ReplaceResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `name`
///
/// name of the EndpointSlice
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `body`
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn replace_namespaced_endpoint_slice(
name: &str,
namespace: &str,
body: &crate::api::discovery::v1beta1::EndpointSlice,
optional: crate::ReplaceOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::ReplaceResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices/{name}?",
name = crate::percent_encoding::percent_encode(name.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::put(__url);
let __body = serde_json::to_vec(body).map_err(crate::RequestError::Json)?;
let __request = __request.header(http::header::CONTENT_TYPE, http::header::HeaderValue::from_static("application/json"));
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchDiscoveryV1beta1EndpointSliceForAllNamespaces
impl EndpointSlice {
/// list or watch objects of kind EndpointSlice
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_endpoint_slice_for_all_namespaces(
optional: crate::WatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = "/apis/discovery.k8s.io/v1beta1/endpointslices?".to_owned();
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
// Generated from operation watchDiscoveryV1beta1NamespacedEndpointSlice
impl EndpointSlice {
/// list or watch objects of kind EndpointSlice
///
/// This operation only supports watching one item, or a list of items, of this type for changes.
///
/// Use the returned [`crate::ResponseBody`]`<`[`crate::WatchResponse`]`<Self>>` constructor, or [`crate::WatchResponse`]`<Self>` directly, to parse the HTTP response.
///
/// # Arguments
///
/// * `namespace`
///
/// object name and auth scope, such as for teams and projects
///
/// * `optional`
///
/// Optional parameters. Use `Default::default()` to not pass any.
#[cfg(feature = "api")]
pub fn watch_namespaced_endpoint_slice(
namespace: &str,
optional: crate::WatchOptional<'_>,
) -> Result<(http::Request<Vec<u8>>, fn(http::StatusCode) -> crate::ResponseBody<crate::WatchResponse<Self>>), crate::RequestError> {
let __url = format!("/apis/discovery.k8s.io/v1beta1/namespaces/{namespace}/endpointslices?",
namespace = crate::percent_encoding::percent_encode(namespace.as_bytes(), crate::percent_encoding2::PATH_SEGMENT_ENCODE_SET),
);
let mut __query_pairs = crate::url::form_urlencoded::Serializer::new(__url);
optional.__serialize(&mut __query_pairs);
let __url = __query_pairs.finish();
let __request = http::Request::get(__url);
let __body = vec![];
match __request.body(__body) {
Ok(request) => Ok((request, crate::ResponseBody::new)),
Err(err) => Err(crate::RequestError::Http(err)),
}
}
}
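// Editor's note: the watch request is built exactly like the list request, but the server answers
// with a stream of JSON watch events rather than a single list, so the response bytes are expected
// to be fed to the `crate::WatchResponse` parser incrementally as they arrive.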
// End discovery.k8s.io/v1beta1/EndpointSlice
impl crate::Resource for EndpointSlice {
const API_VERSION: &'static str = "discovery.k8s.io/v1beta1";
const GROUP: &'static str = "discovery.k8s.io";
const KIND: &'static str = "EndpointSlice";
const VERSION: &'static str = "v1beta1";
}
impl crate::ListableResource for EndpointSlice {
const LIST_KIND: &'static str = concat!("EndpointSlice", "List");
}
impl crate::Metadata for EndpointSlice {
type Ty = crate::apimachinery::pkg::apis::meta::v1::ObjectMeta;
fn metadata(&self) -> Option<&<Self as crate::Metadata>::Ty> {
self.metadata.as_ref()
}
}
impl<'de> serde::Deserialize<'de> for EndpointSlice {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
#[allow(non_camel_case_types)]
enum Field {
Key_api_version,
Key_kind,
Key_address_type,
Key_endpoints,
Key_metadata,
Key_ports,
Other,
}
impl<'de> serde::Deserialize<'de> for Field {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: serde::Deserializer<'de> {
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = Field;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("field identifier")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E> where E: serde::de::Error {
Ok(match v {
"apiVersion" => Field::Key_api_version,
"kind" => Field::Key_kind,
"addressType" => Field::Key_address_type,
"endpoints" => Field::Key_endpoints,
"metadata" => Field::Key_metadata,
"ports" => Field::Key_ports,
_ => Field::Other,
})
}
}
deserializer.deserialize_identifier(Visitor)
}
}
struct Visitor;
impl<'de> serde::de::Visitor<'de> for Visitor {
type Value = EndpointSlice;
fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(<Self::Value as crate::Resource>::KIND)
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error> where A: serde::de::MapAccess<'de> {
let mut value_address_type: Option<String> = None;
let mut value_endpoints: Option<Vec<crate::api::discovery::v1beta1::Endpoint>> = None;
let mut value_metadata: Option<crate::apimachinery::pkg::apis::meta::v1::ObjectMeta> = None;
let mut value_ports: Option<Vec<crate::api::discovery::v1beta1::EndpointPort>> = None;
while let Some(key) = serde::de::MapAccess::next_key::<Field>(&mut map)? {
match key {
Field::Key_api_version => {
let value_api_version: String = serde::de::MapAccess::next_value(&mut map)?;
if value_api_version != <Self::Value as crate::Resource>::API_VERSION {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_api_version), &<Self::Value as crate::Resource>::API_VERSION));
}
},
Field::Key_kind => {
let value_kind: String = serde::de::MapAccess::next_value(&mut map)?;
if value_kind != <Self::Value as crate::Resource>::KIND {
return Err(serde::de::Error::invalid_value(serde::de::Unexpected::Str(&value_kind), &<Self::Value as crate::Resource>::KIND));
}
},
Field::Key_address_type => value_address_type = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_endpoints => value_endpoints = Some(serde::de::MapAccess::next_value(&mut map)?),
Field::Key_metadata => value_metadata = serde::de::MapAccess::next_value(&mut map)?,
Field::Key_ports => value_ports = serde::de::MapAccess::next_value(&mut map)?,
Field::Other => { let _: serde::de::IgnoredAny = serde::de::MapAccess::next_value(&mut map)?; },
}
}
Ok(EndpointSlice {
address_type: value_address_type.ok_or_else(|| serde::de::Error::missing_field("addressType"))?,
endpoints: value_endpoints.ok_or_else(|| serde::de::Error::missing_field("endpoints"))?,
metadata: value_metadata,
ports: value_ports,
})
}
}
deserializer.deserialize_struct(
<Self as crate::Resource>::KIND,
&[
"apiVersion",
"kind",
"addressType",
"endpoints",
"metadata",
"ports",
],
Visitor,
)
}
}
impl serde::Serialize for EndpointSlice {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer {
let mut state = serializer.serialize_struct(
<Self as crate::Resource>::KIND,
4 +
self.metadata.as_ref().map_or(0, |_| 1) +
self.ports.as_ref().map_or(0, |_| 1),
)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "apiVersion", <Self as crate::Resource>::API_VERSION)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "kind", <Self as crate::Resource>::KIND)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "addressType", &self.address_type)?;
serde::ser::SerializeStruct::serialize_field(&mut state, "endpoints", &self.endpoints)?;
if let Some(value) = &self.metadata {
serde::ser::SerializeStruct::serialize_field(&mut state, "metadata", value)?;
}
if let Some(value) = &self.ports {
serde::ser::SerializeStruct::serialize_field(&mut state, "ports", value)?;
}
serde::ser::SerializeStruct::end(state)
}
}
| 44.915094 | 346 | 0.610061 |
fc0c3c8ade41c974c4d0d106a936920e7491a4aa | 97,613 | use crate::types::{DataKeyword, Expression, ExpressionFactory, ExpressionType, FunctionHeader, Literal, ProgramError, SourceCodeLocation, Statement, StatementType, Token, TokenType, Type, StatementFactory};
use std::cell::RefCell;
use std::iter::Peekable;
const INTERNAL_MATCH_VALUE_NAME: &str = "@match_value";
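// Shared helper for `match` branches: after the branch pattern has been consumed, it expects `=>`,
// parses the branch body as a block, requires a trailing `,`, and pushes the
// (pattern value, block body) pair onto the given vector.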
macro_rules! parse_branch {
($self: ident, $vec: ident, $location: ident, $value: expr, $next_location: ident) => {
$self.consume(
            TokenType::Arrow,
            "Expecting `=>` after branch pattern on match!",
            &$location,
)?;
$vec.push(($value, $self.parse_block_statement($next_location)?));
$self.consume(
TokenType::Comma,
"Expecting `,` at the end of branch on match!",
&$location,
)?;
}
}
struct MethodSet<T> {
getters: Vec<T>,
methods: Vec<T>,
setters: Vec<T>,
static_methods: Vec<T>,
}
pub struct Parser<'a, I: Iterator<Item = Token<'a>>> {
block_stack: RefCell<u8>,
content: RefCell<Peekable<I>>,
expression_factory: RefCell<ExpressionFactory>,
statement_factory: RefCell<StatementFactory>,
}
impl<'a, I: Iterator<Item = Token<'a>>> Parser<'a, I> {
pub fn new(content: Peekable<I>) -> Parser<'a, I> {
Parser {
block_stack: RefCell::new(0),
expression_factory: RefCell::new(ExpressionFactory::new()),
content: RefCell::new(content),
statement_factory: RefCell::new(StatementFactory::new()),
}
}
pub fn parse(&self) -> Result<(Vec<Statement<'a>>, StatementFactory, ExpressionFactory), Vec<ProgramError<'a>>> {
let mut output_vec = vec![];
let mut error_vec = vec![];
while self.content.borrow_mut().peek().is_some() {
match self.parse_statement() {
Ok(s) => output_vec.push(s),
Err(e) => error_vec.push(e),
}
}
if error_vec.is_empty() {
Ok((output_vec, self.statement_factory.borrow().clone(), self.expression_factory.borrow().clone()))
} else {
Err(error_vec)
}
}
pub(crate) fn parse_statement(&self) -> Result<Statement<'a>, ProgramError<'a>> {
match self.dry_next() {
Some(Token {
location,
token_type: TokenType::Trait,
}) => self.parse_trait_statement(&location),
Some(Token {
location,
token_type: TokenType::Import,
}) => {
self.next();
let module = self.parse_identifier()?;
self.consume(TokenType::Semicolon, "Expected `;` at the end of statement.", &location)?;
Ok(self.statement_factory.borrow_mut().new_statement(location, StatementType::Import {
name: module,
}))
}
Some(Token {
location,
token_type: TokenType::Class,
}) => self.parse_class_statement(&location),
Some(Token {
location,
token_type: TokenType::If,
}) => self.parse_if_statement(&location),
Some(Token {
location,
token_type: TokenType::Match,
}) => self.parse_match_statement(&location),
Some(Token {
location,
token_type: TokenType::Return,
}) => self.parse_return_statement(location),
Some(Token {
location,
token_type: TokenType::Var,
}) => self.parse_var_statement(&location),
Some(Token {
location,
token_type: TokenType::LeftBrace,
}) => self.parse_block_statement(location),
Some(Token {
location,
token_type: TokenType::Fun,
}) => self.parse_fun_statement(&location),
Some(Token {
location,
token_type: TokenType::While,
}) => self.parse_while_statement(&location),
Some(Token {
location,
token_type: TokenType::For,
}) => self.parse_for_statement(&location),
Some(Token {
location,
token_type: TokenType::EOF,
}) => Ok(self.statement_factory.borrow_mut().new_statement(location, StatementType::EOF)),
Some(Token {
location,
token_type: TokenType::Mod
}) => {
self.next();
let name = self.parse_identifier()?;
let block = self.parse_block_statement(location.clone())?;
if let StatementType::Block { body } = block.statement_type {
Ok(self.statement_factory.borrow_mut().new_statement(location, StatementType::Module {
name,
statements: body,
}))
} else {
panic!("Cannot happen")
}
}
Some(Token {
location,
token_type: TokenType::Print,
}) => {
self.next();
let expression = self.parse_expression()?;
self.consume(TokenType::Semicolon, "Expected semicolon", &location)?;
Ok(self.statement_factory.borrow_mut().new_statement(location, StatementType::PrintStatement {
expression
}))
}
Some(Token {
location,
token_type: TokenType::Break,
}) => {
self.next();
self.consume(
TokenType::Semicolon,
"Expected semicolon after break statement",
&location,
)?;
if *self.block_stack.borrow() > 0 {
Ok(self.statement_factory.borrow_mut().new_statement(location, StatementType::Break))
} else {
Err(ProgramError {
message: "Break statement can't go here".to_owned(),
location,
})
}
}
None => Err(ProgramError {
message: "Unexpected end of file".to_owned(),
location: SourceCodeLocation {
line: 0,
file: "",
},
}),
_ => {
let expression = self.parse_expression()?;
self.consume(
TokenType::Semicolon,
"Expected semicolon",
&expression.location,
)?;
Ok(self.statement_factory.borrow_mut().new_statement(
expression.location.clone(),
StatementType::Expression { expression }
))
}
}
}
#[inline]
fn dry_next(&self) -> Option<Token<'a>> {
self.content.borrow_mut().peek().cloned()
}
#[inline]
fn next(&self) -> Option<Token<'a>> {
self.content.borrow_mut().next()
}
#[cfg(test)]
#[inline]
fn is_empty(&self) -> bool {
self.content.borrow_mut().peek().is_none()
}
fn parse_method_set<T, C: Fn(&mut Vec<T>, &SourceCodeLocation<'a>) -> Result<(), ProgramError<'a>>>(
&self,
location: &SourceCodeLocation<'a>,
action: C,
) -> Result<MethodSet<T>, ProgramError<'a>> {
let mut methods = vec![];
let mut static_methods = vec![];
let mut setters = vec![];
let mut getters = vec![];
while !self.peek(TokenType::RightBrace) {
let vector = match self.dry_next().map(|t| t.token_type) {
Some(TokenType::Class) => {
self.next();
&mut static_methods
}
Some(TokenType::Getter) => {
self.next();
&mut getters
}
Some(TokenType::Setter) => {
self.next();
&mut setters
}
_ => &mut methods,
};
action(vector, location)?;
}
Ok(MethodSet {
getters,
methods,
setters,
static_methods,
})
}
fn parse_trait_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.consume(TokenType::Trait, "Expected trait keyword", location)?;
if let Some(Token {
token_type: TokenType::Identifier { name },
location,
}) = self.next()
{
if self.peek(TokenType::LeftBrace) {
self.parse_trait_declaration(name, &location)
} else {
self.parse_trait_implementation(name, &location)
}
} else {
Err(ProgramError {
message: "Expected trait name".to_owned(),
location: location.clone(),
})
}
}
fn parse_trait_implementation(
&self,
trait_name: &'a str,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
let trait_name = self.parse_variable_or_module_access(trait_name, &location)?;
self.consume(TokenType::For, "Expected 'for' after trait name", location)?;
let class_name = if let Some(Token {
token_type: TokenType::Identifier { name },
location,
}) = self.next() {
self.parse_variable_or_module_access(name, &location)
} else {
Err(ProgramError {
location: location.clone(),
message: "Expected object name for trait implementation".to_owned(),
})
}?;
self.consume(
TokenType::LeftBrace,
"Expected '{' before trait body",
location,
)?;
let method_set = self.parse_class_methods(&location)?;
self.consume(
TokenType::RightBrace,
"Expected '}' after trait body",
location,
)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::TraitImplementation {
getters: method_set.getters,
methods: method_set.methods,
setters: method_set.setters,
static_methods: method_set.static_methods,
trait_name,
class_name,
},
))
}
fn parse_trait_declaration(
&self,
name: &'a str,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.consume(
TokenType::LeftBrace,
"Expected '{' before trait body",
location,
)?;
let method_set = self.parse_method_set(location, |vector, location| {
let mut arguments = vec!["this"];
let (name, extra_arguments) = self.parse_function_header(location)?;
arguments.extend(&extra_arguments);
self.consume(
TokenType::Semicolon,
"Expected ';' after function header",
location,
)?;
vector.push(FunctionHeader {
arity: arguments.len(),
name,
});
Ok(())
})?;
self.consume(
TokenType::RightBrace,
"Expected '}' after trait body",
location,
)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::TraitDeclaration {
getters: method_set.getters,
methods: method_set.methods,
name,
setters: method_set.setters,
static_methods: method_set.static_methods,
},
))
}
fn parse_class_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
if let Some(Token {
token_type: TokenType::Identifier { name },
location,
}) = self.next()
{
let superclass = if self.peek(TokenType::Less) {
self.next();
if let Some(TokenType::Identifier { name }) = self.next().map(|t| t.token_type) {
Some(self.parse_variable_or_module_access(name, &location)?)
} else {
return Err(ProgramError {
message: "Expect superclass name.".to_owned(),
location: location.clone(),
});
}
} else {
None
};
self.consume(
TokenType::LeftBrace,
"Expected '{' before class body",
&location,
)?;
let properties = self.parse_class_properties()?;
let method_set = self.parse_class_methods(&location)?;
self.consume(
TokenType::RightBrace,
"Expected '}' after class body",
&location,
)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location,
StatementType::ClassDeclaration {
getters: method_set.getters,
methods: method_set.methods,
setters: method_set.setters,
static_methods: method_set.static_methods,
name,
properties,
superclass,
},
))
} else {
Err(ProgramError {
message: "Expected name in class definition".to_owned(),
location: location.clone(),
})
}
}
fn parse_class_properties(&self) -> Result<Vec<Box<Statement<'a>>>, ProgramError<'a>> {
let mut result = vec![];
while let Some(Token { token_type: TokenType::Var, location }) = self.dry_next() {
let s = self.parse_var_statement(&location)?;
result.push(Box::new(s));
}
Ok(result)
}
fn parse_class_methods(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<MethodSet<Box<Statement<'a>>>, ProgramError<'a>> {
let method_set = self.parse_method_set(&location, |vector, location| {
let f = Box::new(self.parse_function(location, true)?);
vector.push(f);
Ok(())
})?;
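        // The implicit `this` parameter is prepended to every method (`parse_function` is called
        // with `add_this = true`), so a getter with "no arguments" has arity 1 and a setter with
        // one explicit argument has arity 2.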
for getter in method_set.getters.iter() {
if let StatementType::FunctionDeclaration { arguments, .. } = &getter.statement_type {
if arguments.len() != 1 {
return Err(ProgramError {
message: "Getter function should take no arguments".to_owned(),
location: getter.location.clone(),
});
}
}
}
for setter in method_set.setters.iter() {
if let StatementType::FunctionDeclaration { arguments, .. } = &setter.statement_type {
if arguments.len() != 2 {
return Err(ProgramError {
message: "Setter function should take one argument".to_owned(),
location: setter.location.clone(),
});
}
}
}
Ok(method_set)
}
fn parse_match_statement(&self, location: &SourceCodeLocation<'a>) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
let value = self.parse_expression()?;
self.consume(
TokenType::LeftBrace,
"Expected '{' before match branches",
location,
)?;
let (literal_branches, types_branches) = self.parse_match_branches(location)?;
if !literal_branches.is_empty() && !types_branches.is_empty() {
return Err(ProgramError {
message: "`match` statement mixes values and types in branches".to_owned(),
location: location.clone(),
})
}
self.consume(
TokenType::Star,
"No `*` branch on match!",
&location,
)?;
self.consume(
TokenType::Arrow,
"Expecting `=>` after `*` on match!",
&location,
)?;
let match_all = self.parse_statement()?;
self.consume(
TokenType::Comma,
"Expecting `,` at the end of branch on match!",
&location,
)?;
self.consume(
TokenType::RightBrace,
"Expected '}' after match branches",
location,
)?;
let if_elses = if types_branches.is_empty() {
self.build_if_literal_chain(literal_branches, match_all)
} else {
self.build_if_type_chain(types_branches, match_all)
};
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::Block {
body: vec![
Box::new(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::VariableDeclaration {
name: INTERNAL_MATCH_VALUE_NAME,
expression: Some(value),
}
)),
Box::new(if_elses),
]
}
))
}
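    // Desugaring note: `parse_match_statement` lowers a `match` into a block that binds the
    // scrutinee to the internal `@match_value` variable and then delegates to one of the two
    // helpers below. For example,
    //
    //     match x { 1 => { ... }, * => { ... }, }
    //
    // becomes roughly `{ var @match_value = x; if (1 == @match_value) { ... } else <* branch> }`;
    // type branches use an `istype` check instead of `==`.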
fn build_if_type_chain(&self, branches: Vec<(Type<'a>, Statement<'a>)>, match_all: Statement<'a>) -> Statement<'a> {
branches.into_iter()
.fold(match_all, |acc, (checked_type, s)| {
let value = Box::new(self.expression_factory.borrow_mut().new_expression(
ExpressionType::VariableLiteral {
identifier: INTERNAL_MATCH_VALUE_NAME
},
s.location.clone(),
));
self.statement_factory.borrow_mut().new_statement(
s.location.clone(),
StatementType::If {
condition: self.expression_factory.borrow_mut().new_expression(
ExpressionType::IsType {
value,
checked_type,
},
s.location.clone(),
),
then: Box::new(s),
otherwise: Some(Box::new(acc)),
}
)
})
}
fn build_if_literal_chain(&self, branches: Vec<(Literal<'a>, Statement<'a>)>, match_all: Statement<'a>) -> Statement<'a> {
branches.into_iter()
.fold(match_all, |acc, (value, s)| {
let left = Box::new(self.expression_factory.borrow_mut().new_expression(
ExpressionType::ExpressionLiteral { value },
s.location.clone(),
));
let right = Box::new(self.expression_factory.borrow_mut().new_expression(
ExpressionType::VariableLiteral {
identifier: INTERNAL_MATCH_VALUE_NAME
},
s.location.clone(),
));
self.statement_factory.borrow_mut().new_statement(
s.location.clone(),
StatementType::If {
condition: self.expression_factory.borrow_mut().new_expression(
ExpressionType::Binary {
operator: TokenType::EqualEqual,
left,
right,
},
s.location.clone(),
),
then: Box::new(s),
otherwise: Some(Box::new(acc)),
}
)
})
}
fn parse_match_branches(
&self, location: &SourceCodeLocation<'a>
) -> Result<(Vec<(Literal<'a>, Statement<'a>)>, Vec<(Type<'a>, Statement<'a>)>), ProgramError<'a>> {
let mut branches = vec![];
let mut types = vec![];
while !self.peek(TokenType::Star) && !self.peek(TokenType::RightBrace) {
let next = self.next();
let next_location = next.clone().map_or(location.clone(), |n| n.location);
match next.map(|t| t.token_type) {
Some(TokenType::TokenLiteral { value }) => {
parse_branch!(self, branches, location, value, next_location);
}
Some(TokenType::UppercaseNil) => {
parse_branch!(self, types, location, Type::Nil, next_location);
}
Some(TokenType::Boolean) => {
parse_branch!(self, types, location, Type::Boolean, next_location);
}
Some(TokenType::Integer) => {
parse_branch!(self, types, location, Type::Integer, next_location);
}
Some(TokenType::Float) => {
parse_branch!(self, types, location, Type::Float, next_location);
}
Some(TokenType::String) => {
parse_branch!(self, types, location, Type::String, next_location);
}
Some(TokenType::Array) => {
parse_branch!(self, types, location, Type::Array, next_location);
}
Some(TokenType::Function) => {
parse_branch!(self, types, location, Type::Function, next_location);
}
Some(TokenType::Module) => {
parse_branch!(self, types, location, Type::Module, next_location);
}
Some(TokenType::UppercaseClass) => {
parse_branch!(self, types, location, Type::Class, next_location);
}
Some(TokenType::UppercaseTrait) => {
parse_branch!(self, types, location, Type::Trait, next_location);
}
Some(TokenType::Identifier { name }) => {
let _obj = Box::new(self.parse_variable_or_module_access(name, location)?);
parse_branch!(self, types, location, Type::UserDefined(_obj), next_location);
}
_ => return Err(ProgramError {
message: "All branches should match using literals".to_owned(),
location: next_location,
}),
}
}
Ok((branches, types))
}
fn parse_if_statement(&self, location: &SourceCodeLocation<'a>) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
self.consume(
TokenType::LeftParen,
"Expected '(' after if token",
location,
)?;
let condition = self.parse_expression()?;
self.consume(
TokenType::RightParen,
"Expected ')' after if token",
location,
)?;
let then = Box::new(self.parse_statement()?);
let otherwise = if self.peek(TokenType::Else) {
self.next();
Some(Box::new(self.parse_statement()?))
} else {
None
};
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::If {
condition,
then,
otherwise,
},
))
}
fn parse_return_statement(
&self,
location: SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.content.borrow_mut().next();
if self.peek(TokenType::Semicolon) {
self.consume(TokenType::Semicolon, "Expected semicolon", &location)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location,
StatementType::Return { value: None },
))
} else {
let value = self.parse_expression()?;
self.consume(TokenType::Semicolon, "Expected semicolon", &location)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location,
StatementType::Return { value: Some(value) },
))
}
}
fn parse_var_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
if let Some(TokenType::Identifier { name }) = self.next().map(|t| t.token_type) {
match self.next() {
Some(Token {
token_type: TokenType::Semicolon,
..
}) => Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::VariableDeclaration {
name,
expression: None,
},
)),
Some(Token {
token_type: TokenType::Equal,
..
}) => {
let expression = Some(self.parse_expression()?);
self.consume(TokenType::Semicolon, "Expected semicolon", location)?;
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::VariableDeclaration { name, expression },
))
}
t => Err(ProgramError {
location: location.clone(),
message: format!("Invalid variable declaration! Unexpected {:?}", t),
}),
}
} else {
Err(ProgramError {
location: location.clone(),
message: "Invalid variable declaration!".to_owned(),
})
}
}
fn parse_block_statement(
&self,
mut location: SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.consume(TokenType::LeftBrace, "Expected left brace", &location)?;
let mut statements = vec![];
*self.block_stack.borrow_mut() += 1;
while !self.peek(TokenType::RightBrace) {
let statement = self.parse_statement()?;
location = statement.location.clone();
statements.push(Box::new(statement));
}
self.consume(TokenType::RightBrace, "Expected '}' after block", &location)?;
*self.block_stack.borrow_mut() -= 1;
Ok(self.statement_factory.borrow_mut().new_statement(
location,
StatementType::Block { body: statements },
))
}
fn parse_anonymous_function(
&self,
location: SourceCodeLocation<'a>,
) -> Result<Expression<'a>, ProgramError<'a>> {
self.next();
let arguments = self.parse_parameters(&location, Parser::parse_identifier)?;
self.consume(
TokenType::RightParen,
"Expected a parenthesis after parameters!",
&location,
)?;
let body = if let StatementType::Block { body } =
self.parse_block_statement(location.clone())?.statement_type
{
body.into_iter().map(|b| *b).collect::<Vec<Statement>>()
} else {
panic!("Can't happen")
};
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::AnonymousFunction { arguments, body },
location,
))
}
fn parse_fun_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.content.borrow_mut().next();
self.parse_function(location, false)
}
fn parse_function_header(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<(&'a str, Vec<&'a str>), ProgramError<'a>> {
if let Some(Token {
token_type: TokenType::Identifier { name },
location,
}) = self.next()
{
self.consume(
TokenType::LeftParen,
"Expected a parenthesis after name!",
&location,
)?;
let arguments = self.parse_parameters(&location, Parser::parse_identifier)?;
self.consume(
TokenType::RightParen,
"Expected a parenthesis after parameters!",
&location,
)?;
Ok((name, arguments))
} else {
Err(ProgramError {
location: location.clone(),
message: "Expected a function name!".to_owned(),
})
}
}
fn parse_function(&self, location: &SourceCodeLocation<'a>, add_this: bool) -> Result<Statement<'a>, ProgramError<'a>> {
let mut arguments = vec![];
if add_this {
arguments.push("this");
}
let (name, extra_arguments) = self.parse_function_header(location)?;
arguments.extend(&extra_arguments);
let body = if let StatementType::Block { body } =
self.parse_block_statement(location.clone())?.statement_type
{
body
} else {
panic!("Can't happen")
};
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::FunctionDeclaration {
context_variables: vec![],
name,
arguments,
body,
},
))
}
fn parse_while_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
self.consume(
TokenType::LeftParen,
"Expected '(' after while token",
location,
)?;
let condition = self.parse_expression()?;
self.consume(
TokenType::RightParen,
"Expected ')' after while condition",
&condition.location,
)?;
*self.block_stack.borrow_mut() += 1;
let body = self.parse_statement()?;
*self.block_stack.borrow_mut() -= 1;
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::While {
condition,
action: Box::new(body),
},
))
}
fn parse_for_statement(
&self,
location: &SourceCodeLocation<'a>,
) -> Result<Statement<'a>, ProgramError<'a>> {
self.next();
self.consume(
            TokenType::LeftParen,
            "Expected '(' after for token",
            location,
)?;
let temp_init = match self.dry_next() {
Some(Token {
location,
token_type: TokenType::Semicolon,
}) => {
self.next();
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::Expression {
expression: self.expression_factory.borrow_mut().new_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Keyword(DataKeyword::Nil),
},
location,
),
},
))
}
Some(_) => self.parse_statement(),
_ => Err(ProgramError {
message: "Unexpected end of file".to_owned(),
location: SourceCodeLocation {
line: 0,
file: "",
},
}),
}?;
let init = match &temp_init.statement_type {
StatementType::VariableDeclaration { .. } | StatementType::Expression { .. } => {
Ok(temp_init)
}
_ => Err(ProgramError {
location: temp_init.location,
message: "Invalid statement for initialization!".to_owned(),
}),
}?;
let condition = {
if !self.peek(TokenType::Semicolon) {
let expression = self.parse_expression()?;
self.consume(
TokenType::Semicolon,
"Expecting ';' after expression.",
&expression.location,
)?;
expression
} else {
self.expression_factory.borrow_mut().new_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Keyword(DataKeyword::Nil),
},
location.clone(),
)
}
};
let incr = {
let expression = if !self.peek(TokenType::RightParen) {
let expression = self.parse_expression()?;
self.consume(
TokenType::RightParen,
"Expecting ')' after expression.",
&expression.location,
)?;
expression
} else {
self.expression_factory.borrow_mut().new_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Keyword(DataKeyword::Nil),
},
location.clone(),
)
};
self.statement_factory.borrow_mut().new_statement(
expression.location.clone(),
StatementType::Expression { expression },
)
};
let body = self.parse_statement()?;
let action = Box::new(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::Block {
body: vec![Box::new(body), Box::new(incr)],
},
));
let body_block = Box::new(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::While {
condition,
action,
},
));
Ok(self.statement_factory.borrow_mut().new_statement(
location.clone(),
StatementType::Block {
body: vec![ Box::new(init), body_block ],
},
))
}
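    // Desugaring note: `for (init; cond; incr) body;` is lowered into
    // `{ init; while (cond) { body; incr; } }`. A missing condition or increment is replaced with
    // a `nil` literal, and a missing initializer with a bare `nil` expression statement.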
pub(crate) fn parse_expression(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_assignment()
}
fn parse_assignment(&self) -> Result<Expression<'a>, ProgramError<'a>> {
let variable = self.parse_ternary()?;
match self.dry_next() {
Some(Token {
token_type: TokenType::Equal,
location,
}) => {
self.next();
if self.dry_next().is_some() {
let expression = self.parse_assignment()?;
match variable {
Expression {
expression_type: ExpressionType::VariableLiteral { identifier },
location,
..
} => Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::VariableAssignment {
identifier,
expression: Box::new(expression),
},
location,
)),
Expression {
expression_type: ExpressionType::Get { callee, property },
location,
..
} => Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Set {
callee,
property,
value: Box::new(expression),
},
location,
)),
Expression {
expression_type: ExpressionType::ArrayElement { array, index },
location,
..
} => Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::ArrayElementSet {
array,
index,
value: Box::new(expression),
},
location,
)),
_ => Err(ProgramError {
location,
message: "Invalid assignment target".to_owned(),
}),
}
} else {
Err(ProgramError {
location,
message: "No right side in assignment".to_owned(),
})
}
}
_ => Ok(variable),
}
}
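    // Assignment note: only three target shapes are accepted: a plain variable, a property access
    // (`obj.field = v`, rewritten to `Set`), and an array element (`a[i] = v`, rewritten to
    // `ArrayElementSet`); anything else is reported as an invalid assignment target.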
fn parse_ternary(&self) -> Result<Expression<'a>, ProgramError<'a>> {
let condition = self.parse_sequence()?;
if self.peek(TokenType::Question) {
self.next();
let then_branch = self.parse_expression()?;
self.consume(
TokenType::Colon,
"Expected ':' after then branch of conditional expression",
&then_branch.location,
)?;
let else_branch = self.parse_ternary()?;
let location = condition.location.clone();
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Conditional {
condition: Box::new(condition),
then_branch: Box::new(then_branch),
else_branch: Box::new(else_branch),
},
location,
))
} else {
Ok(condition)
}
}
fn parse_sequence(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(Parser::parse_or, Parser::parse_sequence, &[TokenType::Bar])
}
fn parse_or(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(Parser::parse_and, Parser::parse_or, &[TokenType::Or])
}
fn parse_and(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(Parser::parse_equality, Parser::parse_and, &[TokenType::And])
}
fn parse_equality(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(
Parser::parse_comparison,
Parser::parse_equality,
&[TokenType::EqualEqual, TokenType::BangEqual],
)
}
fn parse_comparison(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(
Parser::parse_addition,
Parser::parse_comparison,
&[
TokenType::Greater,
TokenType::GreaterEqual,
TokenType::Less,
TokenType::LessEqual,
],
)
}
fn parse_addition(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(
Parser::parse_multiplication,
Parser::parse_addition,
&[TokenType::Minus, TokenType::Plus],
)
}
fn parse_multiplication(&self) -> Result<Expression<'a>, ProgramError<'a>> {
self.parse_binary(
Parser::parse_unary,
Parser::parse_multiplication,
&[TokenType::Star, TokenType::Slash],
)
}
fn parse_unary(&self) -> Result<Expression<'a>, ProgramError<'a>> {
if self
.content
.borrow_mut()
.peek()
.map(|t| [TokenType::Minus, TokenType::Plus, TokenType::Bang].contains(&t.token_type))
.unwrap_or(false)
{
let t = self.next().unwrap();
let value = self.parse_unary()?;
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Unary {
operator: t.token_type,
operand: Box::new(value),
},
t.location,
))
} else {
self.parse_istype()
}
}
fn parse_istype(&self) -> Result<Expression<'a>, ProgramError<'a>> {
let element = self.parse_call()?;
if self.peek(TokenType::IsType) {
let location = self.next().unwrap().location;
let checked_type = self.parse_checked_type(&location)?;
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::IsType {
value: Box::new(element),
checked_type,
},
location
))
} else {
Ok(element)
}
}
fn parse_checked_type(&self, location: &SourceCodeLocation<'a>) -> Result<Type<'a>, ProgramError<'a>> {
match self.next() {
Some(Token { token_type: TokenType::UppercaseNil, .. }) => Ok(Type::Nil),
Some(Token { token_type: TokenType::Boolean, .. }) => Ok(Type::Boolean),
Some(Token { token_type: TokenType::Integer, .. }) => Ok(Type::Integer),
Some(Token { token_type: TokenType::Float, .. }) => Ok(Type::Float),
Some(Token { token_type: TokenType::String, .. }) => Ok(Type::String),
Some(Token { token_type: TokenType::Array, .. }) => Ok(Type::Array),
Some(Token { token_type: TokenType::Function, .. }) => Ok(Type::Function),
Some(Token { token_type: TokenType::UppercaseClass, .. }) => Ok(Type::Class),
Some(Token { token_type: TokenType::UppercaseTrait, .. }) => Ok(Type::Trait),
Some(Token { token_type: TokenType::Module, .. }) => Ok(Type::Module),
Some(Token {
token_type: TokenType::Identifier { name }, location, ..
}) => {
Ok(Type::UserDefined(Box::new(
self.parse_variable_or_module_access(name, &location)?
)))
}
_ => Err(ProgramError {
message: "Expected type literal in istype right hand operand".to_owned(),
location: location.clone(),
})
}
}
fn parse_call(&self) -> Result<Expression<'a>, ProgramError<'a>> {
let mut callee = self.parse_array_element()?;
loop {
match self.dry_next().map(|t| t.token_type) {
Some(TokenType::LeftParen) => {
callee = self.parse_call_function(callee)?;
}
Some(TokenType::Dot) => {
callee = self.parse_call_property(callee)?;
}
_ => return Ok(callee),
}
}
}
fn parse_call_property(&self, callee: Expression<'a>) -> Result<Expression<'a>, ProgramError<'a>> {
self.consume(
TokenType::Dot,
"Expected '.' on property call expression",
&callee.location,
)?;
if let Some(TokenType::Identifier { name }) = self.next().map(|t| t.token_type) {
let location = callee.location.clone();
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Get {
callee: Box::new(callee),
property: name,
},
location,
))
} else {
Err(callee.create_program_error("Expected property name after '.'"))
}
}
fn parse_call_function(&self, callee: Expression<'a>) -> Result<Expression<'a>, ProgramError<'a>> {
self.consume(
TokenType::LeftParen,
"Expected '(' on function call expression",
&callee.location,
)?;
let args = self.parse_parameters(&callee.location, Parser::parse_ternary)?;
self.consume(
TokenType::RightParen,
"Expected ')' on function call expression",
&callee.location,
)?;
let location = callee.location.clone();
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Call {
callee: Box::new(callee),
arguments: args.into_iter().map(Box::new).collect(),
},
location,
))
}
fn parse_parameters<R, F: Fn(&Parser<'a, I>) -> Result<R, ProgramError<'a>>>(
&self,
location: &SourceCodeLocation<'a>,
parser: F,
) -> Result<Vec<R>, ProgramError<'a>> {
let mut args = vec![];
loop {
match self.dry_next() {
None
| Some(Token {
token_type: TokenType::RightParen,
..
}) => break,
_ => {
let arg = parser(self)?;
args.push(arg);
if !self.peek(TokenType::RightParen) {
self.consume(
TokenType::Comma,
"Expected ',' after call argument",
location,
)?;
}
}
}
}
Ok(args)
}
fn parse_identifier(&self) -> Result<&'a str, ProgramError<'a>> {
match self.next() {
Some(Token {
token_type: TokenType::Identifier { name },
..
}) => Ok(name),
Some(Token { location, token_type, .. }) => Err(ProgramError {
location,
message: format!("Expected identifier! Got {:?}", token_type),
}),
None => panic!("Can't happen"),
}
}
fn parse_array_element(&self) -> Result<Expression<'a>, ProgramError<'a>> {
let array = self.parse_primary()?;
if self.peek(TokenType::LeftSquareBrace) {
self.next();
let index = Box::new(self.parse_expression()?);
self.consume(
TokenType::RightSquareBrace,
"Expected `]` at the end of array indexing",
&index.location,
)?;
let location = array.location.clone();
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::ArrayElement {
array: Box::new(array),
index,
},
location,
))
} else {
Ok(array)
}
}
fn parse_primary(&self) -> Result<Expression<'a>, ProgramError<'a>> {
match self.next() {
Some(Token {
token_type: TokenType::LeftSquareBrace,
location,
}) => self.parse_array(location),
Some(Token {
token_type: TokenType::Fun,
location,
}) => self.parse_anonymous_function(location),
Some(Token {
token_type: TokenType::TokenLiteral { value },
location,
}) => Ok(self
.expression_factory
.borrow_mut()
.new_expression(ExpressionType::ExpressionLiteral { value }, location)),
Some(Token {
token_type: TokenType::Identifier { name },
location,
}) => self.parse_variable_or_module_access(name, &location),
Some(Token {
token_type: TokenType::LeftParen,
location,
}) => self.parse_group(location),
None => Err(ProgramError {
message: "Unexpected end of file! Expecting primary".to_owned(),
location: SourceCodeLocation {
file: "",
line: 0,
},
}),
Some(Token {
token_type,
location,
}) => self.parse_left_side_missing(&token_type, &location),
}
}
fn parse_variable_or_module_access(&self, name: &'a str, location: &SourceCodeLocation<'a>) -> Result<Expression<'a>, ProgramError<'a>> {
if self.peek(TokenType::DoubleColon) {
self.consume(TokenType::DoubleColon, "Expected `::` on module access", location)?;
let field = Box::new(self.parse_call()?);
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::ModuleLiteral {
module: name,
field,
},
location.clone(),
))
} else {
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::VariableLiteral { identifier: name },
location.clone(),
))
}
}
fn parse_array(&self, location: SourceCodeLocation<'a>) -> Result<Expression<'a>, ProgramError<'a>> {
if self.peek(TokenType::RightSquareBrace) {
self.next();
return Ok(self
.expression_factory
.borrow_mut()
.new_expression(ExpressionType::Array { elements: vec![] }, location));
}
let element = Box::new(self.parse_expression()?);
match self.next() {
Some(Token {
token_type: TokenType::Semicolon,
..
}) => {
let length = Box::new(self.parse_expression()?);
self.consume(
TokenType::RightSquareBrace,
"Expected `]` at the end of array literal",
&length.location,
)?;
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::RepeatedElementArray { element, length },
location,
))
}
Some(Token {
token_type: TokenType::Comma,
..
}) => {
let mut last_location = element.location.clone();
let mut elements = vec![element, Box::new(self.parse_expression()?)];
while !self.peek(TokenType::RightSquareBrace) {
self.consume(
TokenType::Comma,
"Expected `,` between array elements",
&last_location,
)?;
let last_element = Box::new(self.parse_expression()?);
last_location = last_element.location.clone();
elements.push(last_element);
}
self.consume(
TokenType::RightSquareBrace,
"Expected `]` at the end of array literal",
&last_location,
)?;
Ok(self
.expression_factory
.borrow_mut()
.new_expression(ExpressionType::Array { elements }, location))
}
Some(Token {
token_type: TokenType::RightSquareBrace,
..
}) => Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Array {
elements: vec![element],
},
location,
)),
Some(Token {
location, token_type,
}) => Err(ProgramError {
message: format!("Unexpected token `{:?}`", token_type),
location,
}),
None => Err(ProgramError {
message: "Unexpected end of file".to_owned(),
location,
}),
}
}
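    // Accepted array literal forms: `[]` (empty), `[a]`, `[a, b, c]` (comma separated), and
    // `[elem; length]`, which builds a repeated-element array.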
fn parse_left_side_missing(
&self,
token_type: &TokenType,
location: &SourceCodeLocation<'a>,
) -> Result<Expression<'a>, ProgramError<'a>> {
match token_type {
TokenType::EqualEqual | TokenType::BangEqual => {
self.parse_equality()?;
Err(ProgramError {
location: location.clone(),
message: "Equality without left side".to_owned(),
})
}
TokenType::Greater
| TokenType::GreaterEqual
| TokenType::Less
| TokenType::LessEqual => {
self.parse_comparison()?;
Err(ProgramError {
location: location.clone(),
                    message: "Comparison without left side".to_owned(),
})
}
TokenType::Plus => {
self.parse_addition()?;
Err(ProgramError {
location: location.clone(),
message: "Addition without left side".to_owned(),
})
}
TokenType::Slash | TokenType::Star => {
self.parse_multiplication()?;
Err(ProgramError {
location: location.clone(),
message: "Multiplication without left side".to_owned(),
})
}
_ => Err(ProgramError {
location: location.clone(),
message: format!("Expecting a literal, but got {:?}!", token_type),
}),
}
}
fn parse_group(&self, location: SourceCodeLocation<'a>) -> Result<Expression<'a>, ProgramError<'a>> {
let expression = Box::new(self.parse_expression()?);
self.consume(TokenType::RightParen, "Missing `)`", &location)?;
Ok(self
.expression_factory
.borrow_mut()
.new_expression(ExpressionType::Grouping { expression }, location))
}
fn parse_binary<
L: Fn(&Parser<'a, I>) -> Result<Expression<'a>, ProgramError<'a>>,
R: Fn(&Parser<'a, I>) -> Result<Expression<'a>, ProgramError<'a>>,
>(
&self,
parse_left: L,
parse_right: R,
operators: &[TokenType],
) -> Result<Expression<'a>, ProgramError<'a>> {
let left = parse_left(self)?;
if self
.dry_next()
.map(|t| operators.contains(&t.token_type))
.unwrap_or(false)
{
let t = self.next().unwrap();
let right = parse_right(self)?;
let location = left.location.clone();
Ok(self.expression_factory.borrow_mut().new_expression(
ExpressionType::Binary {
left: Box::new(left),
operator: t.token_type,
right: Box::new(right),
},
location,
))
} else {
Ok(left)
}
}
fn peek(&self, token: TokenType) -> bool {
if let Some(t) = self.content.borrow_mut().peek() {
token == t.token_type
} else {
false
}
}
fn consume(
&self,
token: TokenType,
message: &str,
location: &SourceCodeLocation<'a>,
) -> Result<(), ProgramError<'a>> {
match self.next() {
Some(t) if t.token_type == token => Ok(()),
Some(t) => Err(ProgramError {
location: t.location,
message: message.to_owned(),
}),
_ => Err(ProgramError {
location: location.clone(),
message: message.to_owned(),
}),
}
}
}
#[cfg(test)]
mod test {
use super::Parser;
use crate::types::ExpressionType::ExpressionLiteral;
use crate::types::StatementType::VariableDeclaration;
use crate::types::{Expression, ExpressionFactory, ExpressionType, Literal, SourceCodeLocation, Statement, StatementType, Token, TokenType, StatementFactory};
fn create_expression<'a>(
expression_type: ExpressionType<'a>,
location: SourceCodeLocation<'a>,
) -> Expression<'a> {
let mut factory = ExpressionFactory::new();
factory.new_expression(expression_type, location)
}
fn create_expression_with_id<'a>(
expression_type: ExpressionType<'a>,
location: SourceCodeLocation<'a>,
counter: usize,
) -> Expression<'a> {
let mut factory = ExpressionFactory::new_starting(counter);
factory.new_expression(expression_type, location)
}
fn create_statement_with_id<'a>(
statement_type: StatementType<'a>,
location: SourceCodeLocation<'a>,
counter: usize,
) -> Statement<'a> {
let mut factory = StatementFactory::new_starting(counter);
factory.new_statement(location, statement_type)
}
fn create_statement_expression<'a>(
expression_type: ExpressionType<'a>,
location: SourceCodeLocation<'a>,
counter: usize,
) -> Statement<'a> {
let mut statement_factory = StatementFactory::new();
statement_factory.new_statement(
location.clone(),
StatementType::Expression {
expression: create_expression_with_id(expression_type, location, counter),
},
)
}
fn create_statement_expression_with_id<'a>(
expression_type: ExpressionType<'a>,
location: SourceCodeLocation<'a>,
counter: usize,
statement_counter: usize,
) -> Statement<'a> {
let mut statement_factory = StatementFactory::new_starting(statement_counter);
statement_factory.new_statement(
location.clone(),
StatementType::Expression {
expression: create_expression_with_id(expression_type, location, counter),
},
)
}
#[test]
fn parse_literal() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
}];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_identifier() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
}];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_identifier_group() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Grouping {
expression: Box::new(create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
)),
},
location,
1,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_identifier_group_without_right_paren() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression();
assert!(result.is_err());
assert!(parser.is_empty());
}
#[test]
fn parse_identifier_group_without_right_paren_and_more_content() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression();
assert!(result.is_err());
assert!(parser.is_empty());
}
#[test]
fn parse_call_with_no_arguments() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Call {
callee: Box::new(create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
)),
arguments: vec![],
},
location.clone(),
1,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_call_with_one_arguments() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Call {
callee: Box::new(create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
)),
arguments: vec![Box::new(create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
1,
))],
},
location.clone(),
2,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_call_with_multiple_arguments() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Comma,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Call {
callee: Box::new(create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
)),
arguments: vec![
Box::new(create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
1,
)),
Box::new(create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
2,
)),
],
},
location.clone(),
3,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_unary_with_minus() {
test_unary(TokenType::Minus);
}
#[test]
fn parse_unary_with_plus() {
test_unary(TokenType::Plus)
}
#[test]
fn parse_multiplication_with_star() {
test_binary(TokenType::Star);
}
#[test]
fn parse_multiplication_with_slash() {
        test_binary(TokenType::Slash);
}
#[test]
fn parse_addition_with_minus() {
test_binary(TokenType::Minus);
}
#[test]
fn parse_comparison_with_less_equal() {
test_binary(TokenType::LessEqual);
}
#[test]
fn parse_comparison_with_less() {
test_binary(TokenType::Less);
}
#[test]
fn parse_comparison_with_greater_equal() {
test_binary(TokenType::GreaterEqual);
}
#[test]
fn parse_comparison_with_greater() {
test_binary(TokenType::Greater);
}
#[test]
fn parse_addition_with_plus() {
test_binary(TokenType::Plus);
}
#[test]
fn parse_equality_with_equal_equal() {
test_binary(TokenType::EqualEqual);
}
#[test]
fn parse_equality_with_bang_equal() {
test_binary(TokenType::BangEqual);
}
#[test]
fn parse_and() {
test_binary(TokenType::And);
}
#[test]
fn parse_or() {
test_binary(TokenType::Or);
}
#[test]
fn parse_ternary() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Question,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier1",
},
},
Token {
location: location.clone(),
token_type: TokenType::Colon,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier2",
},
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Conditional {
condition: Box::new(create_expression(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
)),
then_branch: Box::new(create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier1",
},
location.clone(),
1,
)),
else_branch: Box::new(create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier2",
},
location.clone(),
2
)),
},
location.clone(),
3,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_assignment() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Equal,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::VariableAssignment {
identifier: "identifier",
expression: Box::new(create_expression_with_id(
                    ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
1,
))
},
location.clone(),
2,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_if() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::If,
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
let mut statement_factory = StatementFactory::new();
let then = Box::new(statement_factory.new_statement(
location.clone(),
StatementType::Expression {
expression: create_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
1,
),
},
));
assert_eq!(
result,
statement_factory.new_statement(
location.clone(),
StatementType::If {
condition: create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
),
otherwise: None,
then,
},
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_if_else() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::If,
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::Else,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(
result,
create_statement_with_id(
StatementType::If {
condition: create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
),
then: Box::new(create_statement_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0)
},
location.clone(),
1
)),
otherwise: Some(Box::new(create_statement_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0)
},
location.clone(),
2,
1,
))),
},
location.clone(),
2
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_var() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Var,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(result.location, location);
assert_eq!(result.statement_type, StatementType::VariableDeclaration {
name: "identifier",
expression: None,
});
assert!(parser.is_empty());
}
#[test]
fn parse_var_with_expression() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Var,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Equal,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(result.location, location);
assert_eq!(result.statement_type, StatementType::VariableDeclaration {
name: "identifier",
expression: Some(create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
)),
});
assert!(parser.is_empty());
}
#[test]
fn parse_block() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::LeftBrace,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::RightBrace,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(result.location, location);
assert_eq!(result.statement_type, StatementType::Block {
body: vec![
Box::new(create_statement_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
0,
0,
)),
Box::new(create_statement_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
1,
1,
)),
]
});
assert!(parser.is_empty());
}
#[test]
fn parse_fun() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::Fun,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "argument",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
Token {
location: location.clone(),
token_type: TokenType::LeftBrace,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::RightBrace,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(
result,
create_statement_with_id(
StatementType::FunctionDeclaration {
name: "identifier",
arguments: vec!["argument"],
body: vec![
Box::new(create_statement_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
0,
0,
)),
Box::new(create_statement_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
1,
1,
)),
],
context_variables: vec![],
},
location.clone(),
3,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_while() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::While,
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "argument",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
Token {
location: location.clone(),
token_type: TokenType::LeftBrace,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::RightBrace,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
assert_eq!(
result,
create_statement_with_id(
StatementType::While {
condition: create_expression(
ExpressionType::VariableLiteral {
identifier: "argument",
},
location.clone(),
),
action: Box::new(create_statement_with_id(
StatementType::Block {
body: vec![
Box::new(create_statement_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
1,
0,
)),
Box::new(create_statement_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
2,
1,
)),
],
},
location.clone(),
2,
))
},
location.clone(),
3,
)
);
assert!(parser.is_empty());
}
#[test]
fn parse_full_for() {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::For,
},
Token {
location: location.clone(),
token_type: TokenType::LeftParen,
},
Token {
location: location.clone(),
token_type: TokenType::Var,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "argument",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "argument",
},
},
Token {
location: location.clone(),
token_type: TokenType::RightParen,
},
Token {
location: location.clone(),
token_type: TokenType::LeftBrace,
},
Token {
location: location.clone(),
token_type: TokenType::Identifier {
name: "identifier",
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: TokenType::Semicolon,
},
Token {
location: location.clone(),
token_type: TokenType::RightBrace,
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_statement().unwrap();
let body_statement = create_statement_with_id(
StatementType::Block {
body: vec![
Box::new(create_statement_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "identifier",
},
location.clone(),
2,
2,
)),
Box::new(create_statement_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
3,
3,
)),
],
},
location.clone(),
4,
);
let while_statement = create_statement_with_id(
StatementType::While {
condition: create_expression(
ExpressionType::VariableLiteral {
identifier: "argument",
},
location.clone(),
),
action: Box::new(create_statement_with_id(
StatementType::Block {
body: vec![
Box::new(body_statement),
Box::new(create_statement_with_id(
StatementType::Expression {
expression: create_expression_with_id(
ExpressionType::VariableLiteral {
identifier: "argument",
},
location.clone(),
1,
),
},
location.clone(),
1
)),
],
},
location.clone(),
5,
)),
},
location.clone(),
6,
);
let for_block = create_statement_with_id(
StatementType::Block {
body: vec![
Box::new(create_statement_with_id(
                    StatementType::VariableDeclaration {
expression: None,
name: "identifier",
},
location.clone(),
0
)),
Box::new(while_statement),
],
},
location.clone(),
7,
);
assert_eq!(result, for_block);
assert!(parser.is_empty());
}
fn test_binary(token_type: TokenType) {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
Token {
location: location.clone(),
token_type: token_type.clone(),
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Binary {
operator: token_type,
left: Box::new(create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
)),
right: Box::new(create_expression_with_id(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
1,
)),
},
location.clone(),
2,
)
);
assert!(parser.is_empty());
}
fn test_unary(token_type: TokenType) {
let location = SourceCodeLocation {
line: 1,
file: "",
};
let input = vec![
Token {
location: location.clone(),
token_type: token_type.clone(),
},
Token {
location: location.clone(),
token_type: TokenType::TokenLiteral {
value: Literal::Float(1.0),
},
},
];
let parser = Parser::new(input.into_iter().peekable());
let result = parser.parse_expression().unwrap();
assert_eq!(
result,
create_expression_with_id(
ExpressionType::Unary {
operator: token_type,
operand: Box::new(create_expression(
ExpressionType::ExpressionLiteral {
value: Literal::Float(1.0),
},
location.clone(),
)),
},
location.clone(),
1,
),
);
assert!(parser.is_empty());
}
}
| 34.602269 | 206 | 0.451487 |
910ae3ce01215836538026571bdfaef933abf72b | 1,285 | // Copyright (c) 2018 The rust-gpio-cdev Project Developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate gpio_cdev;
#[macro_use]
extern crate quicli;
use gpio_cdev::*;
use quicli::prelude::*;
#[derive(Debug, StructOpt)]
struct Cli {
/// The gpiochip device (e.g. /dev/gpiochip0)
chip: String,
/// The offset of the GPIO line for the provided chip
line: u32,
/// The value to write
value: u8,
}
fn do_main(args: Cli) -> errors::Result<()> {
let mut chip = Chip::new(args.chip)?;
// NOTE: we set the default value to the desired state so
// setting it separately is not required
let _handle =
chip.get_line(args.line)?
.request(LineRequestFlags::OUTPUT, args.value, "driveoutput")?;
println!("Output being driven... Enter to exit");
let mut buf = String::new();
::std::io::stdin().read_line(&mut buf)?;
Ok(())
}
main!(|args: Cli| match do_main(args) {
Ok(()) => {}
Err(e) => {
println!("Error: {:?}", e);
}
});
| 26.770833 | 75 | 0.634241 |
90a4107f773e181fe55b0ee7fd471cceff3c67dc | 62,360 | use std::cmp::Reverse;
use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use log::debug;
use rustc::hir::def::{self, DefKind, CtorKind, NonMacroAttrKind};
use rustc::hir::def::Namespace::{self, *};
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::hir::PrimTy;
use rustc::session::{Session, config::nightly_options};
use rustc::ty::{self, DefIdTree};
use rustc::util::nodemap::FxHashSet;
use syntax::ast::{self, Expr, ExprKind, Ident, NodeId, Path, Ty, TyKind};
use syntax::ext::base::MacroKind;
use syntax::feature_gate::BUILTIN_ATTRIBUTES;
use syntax::symbol::{Symbol, kw};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{BytePos, Span};
use crate::resolve_imports::{ImportDirective, ImportDirectiveSubclass, ImportResolver};
use crate::{is_self_type, is_self_value, path_names_to_string, KNOWN_TOOLS};
use crate::{CrateLint, LegacyScope, Module, ModuleKind, ModuleOrUniformRoot};
use crate::{PathResult, PathSource, ParentScope, Resolver, RibKind, Scope, ScopeSet, Segment};
type Res = def::Res<ast::NodeId>;
/// A vector of spans and replacements, a message and applicability.
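/// The spans-and-replacements vector holds one entry per rewritten span, the `String` is the
/// message shown alongside the suggestion, and the `Applicability` states how confident the
/// suggestion is (`check_for_module_export_macro` below builds one such value).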
crate type Suggestion = (Vec<(Span, String)>, String, Applicability);
/// A field or associated item from the `Self` type suggested in case of resolution failure.
enum AssocSuggestion {
Field,
MethodWithSelf,
AssocItem,
}
struct TypoSuggestion {
candidate: Symbol,
/// The kind of the binding ("crate", "module", etc.)
kind: &'static str,
/// An appropriate article to refer to the binding ("a", "an", etc.)
article: &'static str,
}
impl TypoSuggestion {
fn from_res(candidate: Symbol, res: Res) -> TypoSuggestion {
TypoSuggestion { candidate, kind: res.descr(), article: res.article() }
}
}
/// A free importable item suggested in case of resolution failure.
crate struct ImportSuggestion {
did: Option<DefId>,
pub path: Path,
}
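/// Appends a "similar name exists" suggestion to `err` when a typo candidate is available,
/// returning whether a suggestion was added.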
fn add_typo_suggestion(
err: &mut DiagnosticBuilder<'_>, suggestion: Option<TypoSuggestion>, span: Span
) -> bool {
if let Some(suggestion) = suggestion {
let msg = format!("{} {} with a similar name exists", suggestion.article, suggestion.kind);
err.span_suggestion(
span, &msg, suggestion.candidate.to_string(), Applicability::MaybeIncorrect
);
return true;
}
false
}
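/// Collects, into `names`, typo suggestions for all bindings of `module` whose resolution
/// satisfies `filter_fn`.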
fn add_module_candidates(
module: Module<'_>, names: &mut Vec<TypoSuggestion>, filter_fn: &impl Fn(Res) -> bool
) {
for (&(ident, _), resolution) in module.resolutions.borrow().iter() {
if let Some(binding) = resolution.borrow().binding {
let res = binding.res();
if filter_fn(res) {
names.push(TypoSuggestion::from_res(ident.name, res));
}
}
}
}
impl<'a> Resolver<'a> {
/// Handles error reporting for `smart_resolve_path_fragment` function.
/// Creates base error and amends it with one short label and possibly some longer helps/notes.
pub(crate) fn smart_resolve_report_errors(
&mut self,
path: &[Segment],
span: Span,
source: PathSource<'_>,
res: Option<Res>,
) -> (DiagnosticBuilder<'a>, Vec<ImportSuggestion>) {
let ident_span = path.last().map_or(span, |ident| ident.ident.span);
let ns = source.namespace();
let is_expected = &|res| source.is_expected(res);
let is_enum_variant = &|res| {
if let Res::Def(DefKind::Variant, _) = res { true } else { false }
};
// Make the base error.
let expected = source.descr_expected();
let path_str = Segment::names_to_string(path);
let item_str = path.last().unwrap().ident;
let code = source.error_code(res.is_some());
let (base_msg, fallback_label, base_span) = if let Some(res) = res {
(format!("expected {}, found {} `{}`", expected, res.descr(), path_str),
format!("not a {}", expected),
span)
} else {
let item_span = path.last().unwrap().ident.span;
let (mod_prefix, mod_str) = if path.len() == 1 {
(String::new(), "this scope".to_string())
} else if path.len() == 2 && path[0].ident.name == kw::PathRoot {
(String::new(), "the crate root".to_string())
} else {
let mod_path = &path[..path.len() - 1];
let mod_prefix = match self.resolve_path_without_parent_scope(
mod_path, Some(TypeNS), false, span, CrateLint::No
) {
PathResult::Module(ModuleOrUniformRoot::Module(module)) =>
module.def_kind(),
_ => None,
}.map_or(String::new(), |kind| format!("{} ", kind.descr()));
(mod_prefix, format!("`{}`", Segment::names_to_string(mod_path)))
};
(format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
format!("not found in {}", mod_str),
item_span)
};
let code = DiagnosticId::Error(code.into());
let mut err = self.session.struct_span_err_with_code(base_span, &base_msg, code);
        // Emit help message for fake-self from other languages (e.g., `this` in JavaScript).
if ["this", "my"].contains(&&*item_str.as_str())
&& self.self_value_is_available(path[0].ident.span, span) {
err.span_suggestion(
span,
"did you mean",
"self".to_string(),
Applicability::MaybeIncorrect,
);
}
// Emit special messages for unresolved `Self` and `self`.
if is_self_type(path, ns) {
__diagnostic_used!(E0411);
err.code(DiagnosticId::Error("E0411".into()));
err.span_label(span, format!("`Self` is only available in impls, traits, \
and type definitions"));
return (err, Vec::new());
}
if is_self_value(path, ns) {
debug!("smart_resolve_path_fragment: E0424, source={:?}", source);
__diagnostic_used!(E0424);
err.code(DiagnosticId::Error("E0424".into()));
err.span_label(span, match source {
PathSource::Pat => {
format!("`self` value is a keyword \
and may not be bound to \
variables or shadowed")
}
_ => {
format!("`self` value is a keyword \
only available in methods \
with `self` parameter")
}
});
return (err, Vec::new());
}
// Try to lookup name in more relaxed fashion for better error reporting.
let ident = path.last().unwrap().ident;
let candidates = self.lookup_import_candidates(ident, ns, is_expected)
.drain(..)
.filter(|ImportSuggestion { did, .. }| {
match (did, res.and_then(|res| res.opt_def_id())) {
(Some(suggestion_did), Some(actual_did)) => *suggestion_did != actual_did,
_ => true,
}
})
.collect::<Vec<_>>();
let crate_def_id = DefId::local(CRATE_DEF_INDEX);
if candidates.is_empty() && is_expected(Res::Def(DefKind::Enum, crate_def_id)) {
let enum_candidates =
self.lookup_import_candidates(ident, ns, is_enum_variant);
let mut enum_candidates = enum_candidates.iter()
.map(|suggestion| {
import_candidate_to_enum_paths(&suggestion)
}).collect::<Vec<_>>();
enum_candidates.sort();
if !enum_candidates.is_empty() {
// Contextualize for E0412 "cannot find type", but don't belabor the point
// (that it's a variant) for E0573 "expected type, found variant".
let preamble = if res.is_none() {
let others = match enum_candidates.len() {
1 => String::new(),
2 => " and 1 other".to_owned(),
n => format!(" and {} others", n)
};
format!("there is an enum variant `{}`{}; ",
enum_candidates[0].0, others)
} else {
String::new()
};
let msg = format!("{}try using the variant's enum", preamble);
err.span_suggestions(
span,
&msg,
enum_candidates.into_iter()
.map(|(_variant_path, enum_ty_path)| enum_ty_path)
// Variants re-exported in prelude doesn't mean `prelude::v1` is the
// type name!
// FIXME: is there a more principled way to do this that
// would work for other re-exports?
.filter(|enum_ty_path| enum_ty_path != "std::prelude::v1")
// Also write `Option` rather than `std::prelude::v1::Option`.
.map(|enum_ty_path| {
// FIXME #56861: DRY-er prelude filtering.
enum_ty_path.trim_start_matches("std::prelude::v1::").to_owned()
}),
Applicability::MachineApplicable,
);
}
}
if path.len() == 1 && self.self_type_is_available(span) {
if let Some(candidate) = self.lookup_assoc_candidate(ident, ns, is_expected) {
let self_is_available = self.self_value_is_available(path[0].ident.span, span);
match candidate {
AssocSuggestion::Field => {
if self_is_available {
err.span_suggestion(
span,
"you might have meant to use the available field",
format!("self.{}", path_str),
Applicability::MachineApplicable,
);
} else {
err.span_label(
span,
"a field by this name exists in `Self`",
);
}
}
AssocSuggestion::MethodWithSelf if self_is_available => {
err.span_suggestion(
span,
"try",
format!("self.{}", path_str),
Applicability::MachineApplicable,
);
}
AssocSuggestion::MethodWithSelf | AssocSuggestion::AssocItem => {
err.span_suggestion(
span,
"try",
format!("Self::{}", path_str),
Applicability::MachineApplicable,
);
}
}
return (err, candidates);
}
}
// Try Levenshtein algorithm.
let levenshtein_worked = add_typo_suggestion(
&mut err, self.lookup_typo_candidate(path, ns, is_expected, span), ident_span
);
// Try context-dependent help if relaxed lookup didn't work.
if let Some(res) = res {
if self.smart_resolve_context_dependent_help(&mut err,
span,
source,
res,
&path_str,
&fallback_label) {
return (err, candidates);
}
}
// Fallback label.
if !levenshtein_worked {
err.span_label(base_span, fallback_label);
self.type_ascription_suggestion(&mut err, base_span);
}
(err, candidates)
}
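    /// Returns whether `span` is immediately followed by an opening brace and, if so, the span
    /// from `span` up to the next closing brace (bounded to roughly 100 characters), so that a
    /// struct literal used in expression position can be wrapped in parentheses.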
fn followed_by_brace(&self, span: Span) -> (bool, Option<(Span, String)>) {
// HACK(estebank): find a better way to figure out that this was a
// parser issue where a struct literal is being used on an expression
// where a brace being opened means a block is being started. Look
// ahead for the next text to see if `span` is followed by a `{`.
let sm = self.session.source_map();
let mut sp = span;
loop {
sp = sm.next_point(sp);
match sm.span_to_snippet(sp) {
Ok(ref snippet) => {
if snippet.chars().any(|c| { !c.is_whitespace() }) {
break;
}
}
_ => break,
}
}
let followed_by_brace = match sm.span_to_snippet(sp) {
Ok(ref snippet) if snippet == "{" => true,
_ => false,
};
// In case this could be a struct literal that needs to be surrounded
// by parenthesis, find the appropriate span.
let mut i = 0;
let mut closing_brace = None;
loop {
sp = sm.next_point(sp);
match sm.span_to_snippet(sp) {
Ok(ref snippet) => {
if snippet == "}" {
let sp = span.to(sp);
if let Ok(snippet) = sm.span_to_snippet(sp) {
closing_brace = Some((sp, snippet));
}
break;
}
}
_ => break,
}
i += 1;
// The bigger the span, the more likely we're incorrect --
// bound it to 100 chars long.
if i > 100 {
break;
}
}
return (followed_by_brace, closing_brace)
}
/// Provides context-dependent help for errors reported by the `smart_resolve_path_fragment`
/// function.
/// Returns `true` if able to provide context-dependent help.
fn smart_resolve_context_dependent_help(
&mut self,
err: &mut DiagnosticBuilder<'a>,
span: Span,
source: PathSource<'_>,
res: Res,
path_str: &str,
fallback_label: &str,
) -> bool {
let ns = source.namespace();
let is_expected = &|res| source.is_expected(res);
let path_sep = |err: &mut DiagnosticBuilder<'_>, expr: &Expr| match expr.node {
ExprKind::Field(_, ident) => {
err.span_suggestion(
expr.span,
"use the path separator to refer to an item",
format!("{}::{}", path_str, ident),
Applicability::MaybeIncorrect,
);
true
}
ExprKind::MethodCall(ref segment, ..) => {
let span = expr.span.with_hi(segment.ident.span.hi());
err.span_suggestion(
span,
"use the path separator to refer to an item",
format!("{}::{}", path_str, segment.ident),
Applicability::MaybeIncorrect,
);
true
}
_ => false,
};
let mut bad_struct_syntax_suggestion = || {
let (followed_by_brace, closing_brace) = self.followed_by_brace(span);
let mut suggested = false;
match source {
PathSource::Expr(Some(parent)) => {
suggested = path_sep(err, &parent);
}
PathSource::Expr(None) if followed_by_brace == true => {
if let Some((sp, snippet)) = closing_brace {
err.span_suggestion(
sp,
"surround the struct literal with parenthesis",
format!("({})", snippet),
Applicability::MaybeIncorrect,
);
} else {
err.span_label(
span, // Note the parenthesis surrounding the suggestion below
format!("did you mean `({} {{ /* fields */ }})`?", path_str),
);
}
suggested = true;
},
_ => {}
}
if !suggested {
err.span_label(
span,
format!("did you mean `{} {{ /* fields */ }}`?", path_str),
);
}
};
match (res, source) {
(Res::Def(DefKind::Macro(MacroKind::Bang), _), _) => {
err.span_suggestion(
span,
"use `!` to invoke the macro",
format!("{}!", path_str),
Applicability::MaybeIncorrect,
);
if path_str == "try" && span.rust_2015() {
err.note("if you want the `try` keyword, you need to be in the 2018 edition");
}
}
(Res::Def(DefKind::TyAlias, _), PathSource::Trait(_)) => {
err.span_label(span, "type aliases cannot be used as traits");
if nightly_options::is_nightly_build() {
err.note("did you mean to use a trait alias?");
}
}
(Res::Def(DefKind::Mod, _), PathSource::Expr(Some(parent))) => {
if !path_sep(err, &parent) {
return false;
}
}
(Res::Def(DefKind::Enum, def_id), PathSource::TupleStruct)
| (Res::Def(DefKind::Enum, def_id), PathSource::Expr(..)) => {
if let Some(variants) = self.collect_enum_variants(def_id) {
if !variants.is_empty() {
let msg = if variants.len() == 1 {
"try using the enum's variant"
} else {
"try using one of the enum's variants"
};
err.span_suggestions(
span,
msg,
variants.iter().map(path_names_to_string),
Applicability::MaybeIncorrect,
);
}
} else {
err.note("did you mean to use one of the enum's variants?");
}
},
(Res::Def(DefKind::Struct, def_id), _) if ns == ValueNS => {
if let Some((ctor_def, ctor_vis))
= self.struct_constructors.get(&def_id).cloned() {
let accessible_ctor = self.is_accessible(ctor_vis);
if is_expected(ctor_def) && !accessible_ctor {
err.span_label(
span,
format!("constructor is not visible here due to private fields"),
);
}
} else {
bad_struct_syntax_suggestion();
}
}
(Res::Def(DefKind::Union, _), _) |
(Res::Def(DefKind::Variant, _), _) |
(Res::Def(DefKind::Ctor(_, CtorKind::Fictive), _), _) if ns == ValueNS => {
bad_struct_syntax_suggestion();
}
(Res::SelfTy(..), _) if ns == ValueNS => {
err.span_label(span, fallback_label);
err.note("can't use `Self` as a constructor, you must use the implemented struct");
}
(Res::Def(DefKind::TyAlias, _), _)
| (Res::Def(DefKind::AssocTy, _), _) if ns == ValueNS => {
err.note("can't use a type alias as a constructor");
}
_ => return false,
}
true
}
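    /// Looks for a field or associated item with the given name on the current `Self` type or
    /// the current trait, so that `self.<name>` or `Self::<name>` can be suggested on
    /// resolution failure.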
fn lookup_assoc_candidate<FilterFn>(&mut self,
ident: Ident,
ns: Namespace,
filter_fn: FilterFn)
-> Option<AssocSuggestion>
where FilterFn: Fn(Res) -> bool
{
fn extract_node_id(t: &Ty) -> Option<NodeId> {
match t.node {
TyKind::Path(None, _) => Some(t.id),
TyKind::Rptr(_, ref mut_ty) => extract_node_id(&mut_ty.ty),
                // This doesn't handle the remaining `Ty` variants as they are not
                // commonly used as the self type; it might be interesting to provide
                // support for those in the future.
_ => None,
}
}
// Fields are generally expected in the same contexts as locals.
if filter_fn(Res::Local(ast::DUMMY_NODE_ID)) {
if let Some(node_id) = self.current_self_type.as_ref().and_then(extract_node_id) {
// Look for a field with the same name in the current self_type.
if let Some(resolution) = self.partial_res_map.get(&node_id) {
match resolution.base_res() {
Res::Def(DefKind::Struct, did) | Res::Def(DefKind::Union, did)
if resolution.unresolved_segments() == 0 => {
if let Some(field_names) = self.field_names.get(&did) {
if field_names.iter().any(|&field_name| ident.name == field_name) {
return Some(AssocSuggestion::Field);
}
}
}
_ => {}
}
}
}
}
for assoc_type_ident in &self.current_trait_assoc_types {
if *assoc_type_ident == ident {
return Some(AssocSuggestion::AssocItem);
}
}
// Look for associated items in the current trait.
if let Some((module, _)) = self.current_trait_ref {
if let Ok(binding) = self.resolve_ident_in_module(
ModuleOrUniformRoot::Module(module),
ident,
ns,
None,
false,
module.span,
) {
let res = binding.res();
if filter_fn(res) {
return Some(if self.has_self.contains(&res.def_id()) {
AssocSuggestion::MethodWithSelf
} else {
AssocSuggestion::AssocItem
});
}
}
}
None
}
/// Lookup typo candidate in scope for a macro or import.
fn early_lookup_typo_candidate(
&mut self,
scope_set: ScopeSet,
parent_scope: &ParentScope<'a>,
ident: Ident,
filter_fn: &impl Fn(Res) -> bool,
) -> Option<TypoSuggestion> {
let mut suggestions = Vec::new();
self.visit_scopes(scope_set, parent_scope, ident, |this, scope, _| {
match scope {
Scope::DeriveHelpers => {
let res = Res::NonMacroAttr(NonMacroAttrKind::DeriveHelper);
if filter_fn(res) {
for derive in &parent_scope.derives {
let parent_scope = ParentScope { derives: Vec::new(), ..*parent_scope };
if let Ok((Some(ext), _)) = this.resolve_macro_path(
derive, Some(MacroKind::Derive), &parent_scope, false, false
) {
suggestions.extend(ext.helper_attrs.iter().map(|name| {
TypoSuggestion::from_res(*name, res)
}));
}
}
}
}
Scope::MacroRules(legacy_scope) => {
if let LegacyScope::Binding(legacy_binding) = legacy_scope {
let res = legacy_binding.binding.res();
if filter_fn(res) {
suggestions.push(
TypoSuggestion::from_res(legacy_binding.ident.name, res)
)
}
}
}
Scope::CrateRoot => {
let root_ident = Ident::new(kw::PathRoot, ident.span);
let root_module = this.resolve_crate_root(root_ident);
add_module_candidates(root_module, &mut suggestions, filter_fn);
}
Scope::Module(module) => {
add_module_candidates(module, &mut suggestions, filter_fn);
}
Scope::MacroUsePrelude => {
suggestions.extend(this.macro_use_prelude.iter().filter_map(|(name, binding)| {
let res = binding.res();
if filter_fn(res) {
Some(TypoSuggestion::from_res(*name, res))
} else {
None
}
}));
}
Scope::BuiltinMacros => {
suggestions.extend(this.builtin_macros.iter().filter_map(|(name, binding)| {
let res = binding.res();
if filter_fn(res) {
Some(TypoSuggestion::from_res(*name, res))
} else {
None
}
}));
}
Scope::BuiltinAttrs => {
let res = Res::NonMacroAttr(NonMacroAttrKind::Builtin);
if filter_fn(res) {
suggestions.extend(BUILTIN_ATTRIBUTES.iter().map(|(name, ..)| {
TypoSuggestion::from_res(*name, res)
}));
}
}
Scope::LegacyPluginHelpers => {
let res = Res::NonMacroAttr(NonMacroAttrKind::LegacyPluginHelper);
if filter_fn(res) {
let plugin_attributes = this.session.plugin_attributes.borrow();
suggestions.extend(plugin_attributes.iter().map(|(name, _)| {
TypoSuggestion::from_res(*name, res)
}));
}
}
Scope::ExternPrelude => {
suggestions.extend(this.extern_prelude.iter().filter_map(|(ident, _)| {
let res = Res::Def(DefKind::Mod, DefId::local(CRATE_DEF_INDEX));
if filter_fn(res) {
Some(TypoSuggestion::from_res(ident.name, res))
} else {
None
}
}));
}
Scope::ToolPrelude => {
let res = Res::NonMacroAttr(NonMacroAttrKind::Tool);
suggestions.extend(KNOWN_TOOLS.iter().map(|name| {
TypoSuggestion::from_res(*name, res)
}));
}
Scope::StdLibPrelude => {
if let Some(prelude) = this.prelude {
add_module_candidates(prelude, &mut suggestions, filter_fn);
}
}
Scope::BuiltinTypes => {
let primitive_types = &this.primitive_type_table.primitive_types;
suggestions.extend(
primitive_types.iter().flat_map(|(name, prim_ty)| {
let res = Res::PrimTy(*prim_ty);
if filter_fn(res) {
Some(TypoSuggestion::from_res(*name, res))
} else {
None
}
})
)
}
}
None::<()>
});
// Make sure error reporting is deterministic.
suggestions.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
match find_best_match_for_name(
suggestions.iter().map(|suggestion| &suggestion.candidate),
&ident.as_str(),
None,
) {
Some(found) if found != ident.name => suggestions
.into_iter()
.find(|suggestion| suggestion.candidate == found),
_ => None,
}
}
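    /// Looks up a typo candidate for the last segment of `path`: single-segment paths are
    /// searched in the lexical scope, preludes and primitive types, longer paths in the module
    /// named by the preceding segments.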
fn lookup_typo_candidate(
&mut self,
path: &[Segment],
ns: Namespace,
filter_fn: &impl Fn(Res) -> bool,
span: Span,
) -> Option<TypoSuggestion> {
let mut names = Vec::new();
if path.len() == 1 {
// Search in lexical scope.
// Walk backwards up the ribs in scope and collect candidates.
for rib in self.ribs[ns].iter().rev() {
// Locals and type parameters
for (ident, &res) in &rib.bindings {
if filter_fn(res) {
names.push(TypoSuggestion::from_res(ident.name, res));
}
}
// Items in scope
if let RibKind::ModuleRibKind(module) = rib.kind {
// Items from this module
add_module_candidates(module, &mut names, &filter_fn);
if let ModuleKind::Block(..) = module.kind {
// We can see through blocks
} else {
// Items from the prelude
if !module.no_implicit_prelude {
names.extend(self.extern_prelude.clone().iter().flat_map(|(ident, _)| {
self.crate_loader
.maybe_process_path_extern(ident.name, ident.span)
.and_then(|crate_id| {
let crate_mod = Res::Def(
DefKind::Mod,
DefId {
krate: crate_id,
index: CRATE_DEF_INDEX,
},
);
if filter_fn(crate_mod) {
Some(TypoSuggestion {
candidate: ident.name,
article: "a",
kind: "crate",
})
} else {
None
}
})
}));
if let Some(prelude) = self.prelude {
add_module_candidates(prelude, &mut names, &filter_fn);
}
}
break;
}
}
}
// Add primitive types to the mix
if filter_fn(Res::PrimTy(PrimTy::Bool)) {
names.extend(
self.primitive_type_table.primitive_types.iter().map(|(name, prim_ty)| {
TypoSuggestion::from_res(*name, Res::PrimTy(*prim_ty))
})
)
}
} else {
// Search in module.
let mod_path = &path[..path.len() - 1];
if let PathResult::Module(module) = self.resolve_path_without_parent_scope(
mod_path, Some(TypeNS), false, span, CrateLint::No
) {
if let ModuleOrUniformRoot::Module(module) = module {
add_module_candidates(module, &mut names, &filter_fn);
}
}
}
let name = path[path.len() - 1].ident.name;
// Make sure error reporting is deterministic.
names.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
match find_best_match_for_name(
names.iter().map(|suggestion| &suggestion.candidate),
&name.as_str(),
None,
) {
Some(found) if found != name => names
.into_iter()
.find(|suggestion| suggestion.candidate == found),
_ => None,
}
}
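    /// Walks the module tree rooted at `start_module`, collecting importable items named
    /// `lookup_ident` in `namespace` that satisfy `filter_fn`, together with paths to them.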
fn lookup_import_candidates_from_module<FilterFn>(&mut self,
lookup_ident: Ident,
namespace: Namespace,
start_module: Module<'a>,
crate_name: Ident,
filter_fn: FilterFn)
-> Vec<ImportSuggestion>
where FilterFn: Fn(Res) -> bool
{
let mut candidates = Vec::new();
let mut seen_modules = FxHashSet::default();
let not_local_module = crate_name.name != kw::Crate;
let mut worklist = vec![(start_module, Vec::<ast::PathSegment>::new(), not_local_module)];
while let Some((in_module,
path_segments,
in_module_is_extern)) = worklist.pop() {
self.populate_module_if_necessary(in_module);
// We have to visit module children in deterministic order to avoid
// instabilities in reported imports (#43552).
in_module.for_each_child_stable(|ident, ns, name_binding| {
// avoid imports entirely
if name_binding.is_import() && !name_binding.is_extern_crate() { return; }
// avoid non-importable candidates as well
if !name_binding.is_importable() { return; }
// collect results based on the filter function
if ident.name == lookup_ident.name && ns == namespace {
let res = name_binding.res();
if filter_fn(res) {
// create the path
let mut segms = path_segments.clone();
if lookup_ident.span.rust_2018() {
// crate-local absolute paths start with `crate::` in edition 2018
// FIXME: may also be stabilized for Rust 2015 (Issues #45477, #44660)
segms.insert(
0, ast::PathSegment::from_ident(crate_name)
);
}
segms.push(ast::PathSegment::from_ident(ident));
let path = Path {
span: name_binding.span,
segments: segms,
};
// the entity is accessible in the following cases:
// 1. if it's defined in the same crate, it's always
// accessible (since private entities can be made public)
// 2. if it's defined in another crate, it's accessible
// only if both the module is public and the entity is
// declared as public (due to pruning, we don't explore
// outside crate private modules => no need to check this)
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
let did = match res {
Res::Def(DefKind::Ctor(..), did) => self.parent(did),
_ => res.opt_def_id(),
};
candidates.push(ImportSuggestion { did, path });
}
}
}
// collect submodules to explore
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
path_segments.push(ast::PathSegment::from_ident(ident));
let is_extern_crate_that_also_appears_in_prelude =
name_binding.is_extern_crate() &&
lookup_ident.span.rust_2018();
let is_visible_to_user =
!in_module_is_extern || name_binding.vis == ty::Visibility::Public;
if !is_extern_crate_that_also_appears_in_prelude && is_visible_to_user {
// add the module to the lookup
let is_extern = in_module_is_extern || name_binding.is_extern_crate();
if seen_modules.insert(module.def_id().unwrap()) {
worklist.push((module, path_segments, is_extern));
}
}
}
})
}
candidates
}
/// When name resolution fails, this method can be used to look up candidate
/// entities with the expected name. It allows filtering them using the
/// supplied predicate (which should be used to only accept the types of
/// definitions expected, e.g., traits). The lookup spans across all crates.
///
/// N.B., the method does not look into imports, but this is not a problem,
/// since we report the definitions (thus, the de-aliased imports).
crate fn lookup_import_candidates<FilterFn>(
&mut self, lookup_ident: Ident, namespace: Namespace, filter_fn: FilterFn
) -> Vec<ImportSuggestion>
where FilterFn: Fn(Res) -> bool
{
let mut suggestions = self.lookup_import_candidates_from_module(
lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn
);
if lookup_ident.span.rust_2018() {
let extern_prelude_names = self.extern_prelude.clone();
for (ident, _) in extern_prelude_names.into_iter() {
if let Some(crate_id) = self.crate_loader.maybe_process_path_extern(ident.name,
ident.span) {
let crate_root = self.get_module(DefId {
krate: crate_id,
index: CRATE_DEF_INDEX,
});
self.populate_module_if_necessary(&crate_root);
suggestions.extend(self.lookup_import_candidates_from_module(
lookup_ident, namespace, crate_root, ident, &filter_fn));
}
}
}
suggestions
}
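    /// Finds the module with the given `DefId` by walking the module tree from the crate root,
    /// returning it together with an `ImportSuggestion` holding a path to it.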
fn find_module(&mut self, def_id: DefId) -> Option<(Module<'a>, ImportSuggestion)> {
let mut result = None;
let mut seen_modules = FxHashSet::default();
let mut worklist = vec![(self.graph_root, Vec::new())];
while let Some((in_module, path_segments)) = worklist.pop() {
// abort if the module is already found
if result.is_some() { break; }
self.populate_module_if_necessary(in_module);
in_module.for_each_child_stable(|ident, _, name_binding| {
// abort if the module is already found or if name_binding is private external
if result.is_some() || !name_binding.vis.is_visible_locally() {
return
}
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
path_segments.push(ast::PathSegment::from_ident(ident));
let module_def_id = module.def_id().unwrap();
if module_def_id == def_id {
let path = Path {
span: name_binding.span,
segments: path_segments,
};
result = Some((module, ImportSuggestion { did: Some(def_id), path }));
} else {
// add the module to the lookup
if seen_modules.insert(module_def_id) {
worklist.push((module, path_segments));
}
}
}
});
}
result
}
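    /// Collects importable paths to all variants of the enum with the given `DefId`, used for
    /// "try using the enum's variant" suggestions.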
fn collect_enum_variants(&mut self, def_id: DefId) -> Option<Vec<Path>> {
self.find_module(def_id).map(|(enum_module, enum_import_suggestion)| {
self.populate_module_if_necessary(enum_module);
let mut variants = Vec::new();
enum_module.for_each_child_stable(|ident, _, name_binding| {
if let Res::Def(DefKind::Variant, _) = name_binding.res() {
let mut segms = enum_import_suggestion.path.segments.clone();
segms.push(ast::PathSegment::from_ident(ident));
variants.push(Path {
span: name_binding.span,
segments: segms,
});
}
});
variants
})
}
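    /// Adds typo and other context-specific help to the error for an unresolved macro of the
    /// given `macro_kind`.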
crate fn unresolved_macro_suggestions(
&mut self,
err: &mut DiagnosticBuilder<'a>,
macro_kind: MacroKind,
parent_scope: &ParentScope<'a>,
ident: Ident,
) {
let is_expected = &|res: Res| res.macro_kind() == Some(macro_kind);
let suggestion = self.early_lookup_typo_candidate(
ScopeSet::Macro(macro_kind), &parent_scope, ident, is_expected
);
add_typo_suggestion(err, suggestion, ident.span);
if macro_kind == MacroKind::Derive &&
(ident.as_str() == "Send" || ident.as_str() == "Sync") {
let msg = format!("unsafe traits like `{}` should be implemented explicitly", ident);
err.span_note(ident.span, &msg);
}
if self.macro_names.contains(&ident.modern()) {
err.help("have you added the `#[macro_use]` on the module/import?");
}
}
}
impl<'a, 'b> ImportResolver<'a, 'b> {
/// Adds suggestions for a path that cannot be resolved.
pub(crate) fn make_path_suggestion(
&mut self,
span: Span,
mut path: Vec<Segment>,
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
debug!("make_path_suggestion: span={:?} path={:?}", span, path);
match (path.get(0), path.get(1)) {
// `{{root}}::ident::...` on both editions.
// On 2015 `{{root}}` is usually added implicitly.
(Some(fst), Some(snd)) if fst.ident.name == kw::PathRoot &&
!snd.ident.is_path_segment_keyword() => {}
// `ident::...` on 2018.
(Some(fst), _) if fst.ident.span.rust_2018() &&
!fst.ident.is_path_segment_keyword() => {
// Insert a placeholder that's later replaced by `self`/`super`/etc.
path.insert(0, Segment::from_ident(Ident::invalid()));
}
_ => return None,
}
self.make_missing_self_suggestion(span, path.clone(), parent_scope)
.or_else(|| self.make_missing_crate_suggestion(span, path.clone(), parent_scope))
.or_else(|| self.make_missing_super_suggestion(span, path.clone(), parent_scope))
.or_else(|| self.make_external_crate_suggestion(span, path, parent_scope))
}
    /// Suggests a missing `self::` if that resolves to a correct module.
///
/// ```
/// |
/// LL | use foo::Bar;
/// | ^^^ did you mean `self::foo`?
/// ```
fn make_missing_self_suggestion(
&mut self,
span: Span,
mut path: Vec<Segment>,
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `self` and check if that is valid.
path[0].ident.name = kw::SelfLower;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_self_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
Some((path, Vec::new()))
} else {
None
}
}
    /// Suggests a missing `crate::` if that resolves to a correct module.
///
/// ```
/// |
/// LL | use foo::Bar;
/// | ^^^ did you mean `crate::foo`?
/// ```
fn make_missing_crate_suggestion(
&mut self,
span: Span,
mut path: Vec<Segment>,
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `crate` and check if that is valid.
path[0].ident.name = kw::Crate;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_crate_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
Some((
path,
vec![
"`use` statements changed in Rust 2018; read more at \
<https://doc.rust-lang.org/edition-guide/rust-2018/module-system/path-\
clarity.html>".to_string()
],
))
} else {
None
}
}
    /// Suggests a missing `super::` if that resolves to a correct module.
///
/// ```
/// |
/// LL | use foo::Bar;
/// | ^^^ did you mean `super::foo`?
/// ```
fn make_missing_super_suggestion(
&mut self,
span: Span,
mut path: Vec<Segment>,
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
        // Replace first ident with `super` and check if that is valid.
path[0].ident.name = kw::Super;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_super_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
Some((path, Vec::new()))
} else {
None
}
}
    /// Suggests a missing external crate name if that resolves to a correct module.
///
/// ```
/// |
/// LL | use foobar::Baz;
/// | ^^^^^^ did you mean `baz::foobar`?
/// ```
///
/// Used when importing a submodule of an external crate but missing that crate's
/// name as the first part of path.
fn make_external_crate_suggestion(
&mut self,
span: Span,
mut path: Vec<Segment>,
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
if path[1].ident.span.rust_2015() {
return None;
}
// Sort extern crate names in reverse order to get
        // 1) some consistent ordering for emitted diagnostics, and
// 2) `std` suggestions before `core` suggestions.
let mut extern_crate_names =
self.resolver.extern_prelude.iter().map(|(ident, _)| ident.name).collect::<Vec<_>>();
extern_crate_names.sort_by_key(|name| Reverse(name.as_str()));
for name in extern_crate_names.into_iter() {
// Replace first ident with a crate name and check if that is valid.
path[0].ident.name = name;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_external_crate_suggestion: name={:?} path={:?} result={:?}",
name, path, result);
if let PathResult::Module(..) = result {
return Some((path, Vec::new()));
}
}
None
}
/// Suggests importing a macro from the root of the crate rather than a module within
/// the crate.
///
/// ```
/// help: a macro with this name exists at the root of the crate
/// |
/// LL | use issue_59764::makro;
/// | ^^^^^^^^^^^^^^^^^^
/// |
/// = note: this could be because a macro annotated with `#[macro_export]` will be exported
/// at the root of the crate instead of the module where it is defined
/// ```
pub(crate) fn check_for_module_export_macro(
&self,
directive: &'b ImportDirective<'b>,
module: ModuleOrUniformRoot<'b>,
ident: Ident,
) -> Option<(Option<Suggestion>, Vec<String>)> {
let mut crate_module = if let ModuleOrUniformRoot::Module(module) = module {
module
} else {
return None;
};
while let Some(parent) = crate_module.parent {
crate_module = parent;
}
if ModuleOrUniformRoot::same_def(ModuleOrUniformRoot::Module(crate_module), module) {
// Don't make a suggestion if the import was already from the root of the
// crate.
return None;
}
let resolutions = crate_module.resolutions.borrow();
let resolution = resolutions.get(&(ident, MacroNS))?;
let binding = resolution.borrow().binding()?;
if let Res::Def(DefKind::Macro(MacroKind::Bang), _) = binding.res() {
let module_name = crate_module.kind.name().unwrap();
let import = match directive.subclass {
ImportDirectiveSubclass::SingleImport { source, target, .. } if source != target =>
format!("{} as {}", source, target),
_ => format!("{}", ident),
};
let mut corrections: Vec<(Span, String)> = Vec::new();
if !directive.is_nested() {
// Assume this is the easy case of `use issue_59764::foo::makro;` and just remove
// intermediate segments.
corrections.push((directive.span, format!("{}::{}", module_name, import)));
} else {
// Find the binding span (and any trailing commas and spaces).
// ie. `use a::b::{c, d, e};`
// ^^^
let (found_closing_brace, binding_span) = find_span_of_binding_until_next_binding(
self.resolver.session, directive.span, directive.use_span,
);
debug!("check_for_module_export_macro: found_closing_brace={:?} binding_span={:?}",
found_closing_brace, binding_span);
let mut removal_span = binding_span;
if found_closing_brace {
// If the binding span ended with a closing brace, as in the below example:
// ie. `use a::b::{c, d};`
// ^
// Then expand the span of characters to remove to include the previous
// binding's trailing comma.
// ie. `use a::b::{c, d};`
// ^^^
if let Some(previous_span) = extend_span_to_previous_binding(
self.resolver.session, binding_span,
) {
debug!("check_for_module_export_macro: previous_span={:?}", previous_span);
removal_span = removal_span.with_lo(previous_span.lo());
}
}
debug!("check_for_module_export_macro: removal_span={:?}", removal_span);
// Remove the `removal_span`.
corrections.push((removal_span, "".to_string()));
                // Find the span after the crate name and whether it already has nested imports
                // immediately after the crate name.
// ie. `use a::b::{c, d};`
// ^^^^^^^^^
// or `use a::{b, c, d}};`
// ^^^^^^^^^^^
let (has_nested, after_crate_name) = find_span_immediately_after_crate_name(
self.resolver.session, module_name, directive.use_span,
);
debug!("check_for_module_export_macro: has_nested={:?} after_crate_name={:?}",
has_nested, after_crate_name);
let source_map = self.resolver.session.source_map();
// Add the import to the start, with a `{` if required.
let start_point = source_map.start_point(after_crate_name);
if let Ok(start_snippet) = source_map.span_to_snippet(start_point) {
corrections.push((
start_point,
if has_nested {
// In this case, `start_snippet` must equal '{'.
format!("{}{}, ", start_snippet, import)
} else {
// In this case, add a `{`, then the moved import, then whatever
// was there before.
format!("{{{}, {}", import, start_snippet)
}
));
}
// Add a `};` to the end if nested, matching the `{` added at the start.
if !has_nested {
corrections.push((source_map.end_point(after_crate_name),
"};".to_string()));
}
}
let suggestion = Some((
corrections,
String::from("a macro with this name exists at the root of the crate"),
Applicability::MaybeIncorrect,
));
let note = vec![
"this could be because a macro annotated with `#[macro_export]` will be exported \
at the root of the crate instead of the module where it is defined".to_string(),
];
Some((suggestion, note))
} else {
None
}
}
}
/// Given a `binding_span` of a binding within a use statement:
///
/// ```
/// use foo::{a, b, c};
/// ^
/// ```
///
/// then return the span until the next binding or the end of the statement:
///
/// ```
/// use foo::{a, b, c};
/// ^^^
/// ```
pub(crate) fn find_span_of_binding_until_next_binding(
sess: &Session,
binding_span: Span,
use_span: Span,
) -> (bool, Span) {
let source_map = sess.source_map();
// Find the span of everything after the binding.
// ie. `a, e};` or `a};`
let binding_until_end = binding_span.with_hi(use_span.hi());
// Find everything after the binding but not including the binding.
// ie. `, e};` or `};`
let after_binding_until_end = binding_until_end.with_lo(binding_span.hi());
// Keep characters in the span until we encounter something that isn't a comma or
// whitespace.
// ie. `, ` or ``.
//
// Also note whether a closing brace character was encountered. If there
// was, then later go backwards to remove any trailing commas that are left.
let mut found_closing_brace = false;
let after_binding_until_next_binding = source_map.span_take_while(
after_binding_until_end,
|&ch| {
if ch == '}' { found_closing_brace = true; }
ch == ' ' || ch == ','
}
);
// Combine the two spans.
// ie. `a, ` or `a`.
//
// Removing these would leave `issue_52891::{d, e};` or `issue_52891::{d, e, };`
let span = binding_span.with_hi(after_binding_until_next_binding.hi());
(found_closing_brace, span)
}
/// Given a `binding_span`, return the span through to the comma or opening brace of the previous
/// binding.
///
/// ```
/// use foo::a::{a, b, c};
/// ^^--- binding span
/// |
/// returned span
///
/// use foo::{a, b, c};
/// --- binding span
/// ```
pub(crate) fn extend_span_to_previous_binding(
sess: &Session,
binding_span: Span,
) -> Option<Span> {
let source_map = sess.source_map();
// `prev_source` will contain all of the source that came before the span.
    // Then split based on a comma and take the first (ie. closest to our span)
// snippet. In the example, this is a space.
let prev_source = source_map.span_to_prev_source(binding_span).ok()?;
let prev_comma = prev_source.rsplit(',').collect::<Vec<_>>();
let prev_starting_brace = prev_source.rsplit('{').collect::<Vec<_>>();
if prev_comma.len() <= 1 || prev_starting_brace.len() <= 1 {
return None;
}
let prev_comma = prev_comma.first().unwrap();
let prev_starting_brace = prev_starting_brace.first().unwrap();
// If the amount of source code before the comma is greater than
// the amount of source code before the starting brace then we've only
// got one item in the nested item (eg. `issue_52891::{self}`).
if prev_comma.len() > prev_starting_brace.len() {
return None;
}
Some(binding_span.with_lo(BytePos(
// Take away the number of bytes for the characters we've found and an
// extra for the comma.
binding_span.lo().0 - (prev_comma.as_bytes().len() as u32) - 1
)))
}
/// Given a `use_span` of a binding within a use statement, returns the highlighted span and if
/// it is a nested use tree.
///
/// ```
/// use foo::a::{b, c};
/// ^^^^^^^^^^ // false
///
/// use foo::{a, b, c};
/// ^^^^^^^^^^ // true
///
/// use foo::{a, b::{c, d}};
/// ^^^^^^^^^^^^^^^ // true
/// ```
fn find_span_immediately_after_crate_name(
sess: &Session,
module_name: Symbol,
use_span: Span,
) -> (bool, Span) {
debug!("find_span_immediately_after_crate_name: module_name={:?} use_span={:?}",
module_name, use_span);
let source_map = sess.source_map();
// Using `use issue_59764::foo::{baz, makro};` as an example throughout..
let mut num_colons = 0;
// Find second colon.. `use issue_59764:`
let until_second_colon = source_map.span_take_while(use_span, |c| {
if *c == ':' { num_colons += 1; }
match c {
':' if num_colons == 2 => false,
_ => true,
}
});
// Find everything after the second colon.. `foo::{baz, makro};`
let from_second_colon = use_span.with_lo(until_second_colon.hi() + BytePos(1));
let mut found_a_non_whitespace_character = false;
// Find the first non-whitespace character in `from_second_colon`.. `f`
let after_second_colon = source_map.span_take_while(from_second_colon, |c| {
if found_a_non_whitespace_character { return false; }
if !c.is_whitespace() { found_a_non_whitespace_character = true; }
true
});
// Find the first `{` in from_second_colon.. `foo::{`
let next_left_bracket = source_map.span_through_char(from_second_colon, '{');
(next_left_bracket == after_second_colon, from_second_colon)
}
/// Gets the stringified path for an enum from an `ImportSuggestion` for an enum variant.
fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, String) {
let variant_path = &suggestion.path;
let variant_path_string = path_names_to_string(variant_path);
let path_len = suggestion.path.segments.len();
let enum_path = ast::Path {
span: suggestion.path.span,
segments: suggestion.path.segments[0..path_len - 1].to_vec(),
};
let enum_path_string = path_names_to_string(&enum_path);
(variant_path_string, enum_path_string)
}
/// When an entity with a given name is not available in scope, we search for
/// entities with that name in all crates. This method allows outputting the
/// results of this search in a programmer-friendly way
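///
/// The help text is assembled from the strings below; for a single candidate it
/// renders roughly like this (illustrative output only):
///
/// ```text
/// help: possible candidate is found in another module, you can import it into scope:
///     use std::collections::HashMap;
/// ```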
crate fn show_candidates(
err: &mut DiagnosticBuilder<'_>,
// This is `None` if all placement locations are inside expansions
span: Option<Span>,
candidates: &[ImportSuggestion],
better: bool,
found_use: bool,
) {
// we want consistent results across executions, but candidates are produced
// by iterating through a hash map, so make sure they are ordered:
let mut path_strings: Vec<_> =
candidates.into_iter().map(|c| path_names_to_string(&c.path)).collect();
path_strings.sort();
let better = if better { "better " } else { "" };
let msg_diff = match path_strings.len() {
1 => " is found in another module, you can import it",
_ => "s are found in other modules, you can import them",
};
let msg = format!("possible {}candidate{} into scope", better, msg_diff);
if let Some(span) = span {
for candidate in &mut path_strings {
// produce an additional newline to separate the new use statement
// from the directly following item.
let additional_newline = if found_use {
""
} else {
"\n"
};
*candidate = format!("use {};\n{}", candidate, additional_newline);
}
err.span_suggestions(
span,
&msg,
path_strings.into_iter(),
Applicability::Unspecified,
);
} else {
let mut msg = msg;
msg.push(':');
for candidate in path_strings {
msg.push('\n');
msg.push_str(&candidate);
        }
        err.note(&msg);
    }
}
| 41.656647 | 100 | 0.491758 |
f78698cea3b65fe6caf98b467954e463f2eb02a0 | 754 | use std::path::PathBuf;
use heck::KebabCase;
use super::util::run_model_base_suite;
macro_rules! binary_tests {
($($test_name: ident,)*) => {
$(
#[test]
fn $test_name() {
let _ = env_logger::try_init();
let mut test_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
assert!(test_path.pop());
test_path.push("test-files");
test_path.push("models");
test_path.push(stringify!($test_name).to_kebab_case());
test_path.push("binary.rbxm");
run_model_base_suite(test_path);
}
)*
};
}
binary_tests! {
default_inserted_folder,
default_inserted_modulescript,
}
| 24.322581 | 78 | 0.537135 |
0a2230a01ca4ed2586009601dad1dddfe41d69be | 2,090 | extern crate elements_miniscript as miniscript;
extern crate regex;
use miniscript::{Descriptor, DummyKey};
use regex::Regex;
use std::str::FromStr;
fn do_test(data: &[u8]) {
let s = String::from_utf8_lossy(data);
if let Ok(desc) = Descriptor::<DummyKey>::from_str(&s) {
let output = desc.to_string();
let multi_wrap_pk_re = Regex::new("([a-z]+)c:pk_k\\(").unwrap();
let multi_wrap_pkh_re = Regex::new("([a-z]+)c:pk_h\\(").unwrap();
// Before doing anything check the special case
// To make sure that el are not treated as wrappers
let normalize_aliases = s
.replace("elc:pk_h(", "elpkh(")
.replace("elc:pk_k(", "elpk(");
let normalize_aliases = multi_wrap_pk_re.replace_all(&normalize_aliases, "$1:pk(");
let normalize_aliases = multi_wrap_pkh_re.replace_all(&normalize_aliases, "$1:pkh(");
let normalize_aliases = normalize_aliases
.replace("c:pk_k(", "pk(")
.replace("c:pk_h(", "pkh(");
let mut checksum_split = output.split('#');
let pre_checksum = checksum_split.next().unwrap();
assert!(checksum_split.next().is_some());
assert!(checksum_split.next().is_none());
if normalize_aliases.len() == output.len() {
let len = pre_checksum.len();
assert_eq!(
normalize_aliases[..len].to_lowercase(),
pre_checksum.to_lowercase()
);
} else {
assert_eq!(
normalize_aliases.to_lowercase(),
pre_checksum.to_lowercase()
);
}
}
}
#[cfg(feature = "afl")]
extern crate afl;
#[cfg(feature = "afl")]
fn main() {
afl::read_stdio_bytes(|data| {
do_test(&data);
});
}
#[cfg(feature = "honggfuzz")]
#[macro_use]
extern crate honggfuzz;
#[cfg(feature = "honggfuzz")]
fn main() {
loop {
fuzz!(|data| {
do_test(data);
});
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test() {
do_test(b"elc:pk_h()");
}
}
| 27.5 | 93 | 0.564115 |
c13a08799f2bcb96644f08983f46e5d0e24c6079 | 2,244 | pub mod request_decoder;
pub mod search_repository;
use tonic::metadata::MetadataMap;
use crate::auth::remote::x_outside_feature::auth::feature::AuthOutsideFeature;
use crate::auth::{
ticket::remote::validate::init::ApiValidateAuthTokenStruct,
user::account::remote::search::init::search_repository::MysqlSearchAuthUserAccountRepository,
};
use super::infra::SearchAuthUserAccountInfra;
pub struct SearchAuthUserAccountStruct<'a> {
validate_infra: ApiValidateAuthTokenStruct<'a>,
search_repository: MysqlSearchAuthUserAccountRepository<'a>,
}
impl<'a> SearchAuthUserAccountStruct<'a> {
pub fn new(feature: &'a AuthOutsideFeature, metadata: &'a MetadataMap) -> Self {
Self {
validate_infra: ApiValidateAuthTokenStruct::new(feature, metadata),
search_repository: MysqlSearchAuthUserAccountRepository::new(&feature.store.mysql),
}
}
}
impl<'a> SearchAuthUserAccountInfra for SearchAuthUserAccountStruct<'a> {
type ValidateInfra = ApiValidateAuthTokenStruct<'a>;
type SearchRepository = MysqlSearchAuthUserAccountRepository<'a>;
fn validate_infra(&self) -> &Self::ValidateInfra {
&self.validate_infra
}
fn search_repository(&self) -> &Self::SearchRepository {
&self.search_repository
}
}
#[cfg(test)]
pub mod test {
use crate::auth::{
ticket::remote::validate::init::test::StaticValidateAuthTokenStruct,
user::account::remote::search::init::search_repository::test::MemorySearchAuthUserAccountRepository,
};
use super::super::infra::SearchAuthUserAccountInfra;
pub struct StaticSearchAuthUserAccountStruct<'a> {
pub validate_infra: StaticValidateAuthTokenStruct<'a>,
pub search_repository: MemorySearchAuthUserAccountRepository<'a>,
}
impl<'a> SearchAuthUserAccountInfra for StaticSearchAuthUserAccountStruct<'a> {
type ValidateInfra = StaticValidateAuthTokenStruct<'a>;
type SearchRepository = MemorySearchAuthUserAccountRepository<'a>;
fn validate_infra(&self) -> &Self::ValidateInfra {
&self.validate_infra
}
fn search_repository(&self) -> &Self::SearchRepository {
&self.search_repository
}
}
}
| 33.492537 | 108 | 0.717914 |
4b3d81aff3ad9fae7525329dab6fa7b76291cc7c | 917 | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::AMOUNT {
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
}
#[doc = r" Value of the field"]
pub struct AMOUNTR {
bits: u8,
}
impl AMOUNTR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:7 - Number of bytes received in the last granted transaction"]
#[inline]
pub fn amount(&self) -> AMOUNTR {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
AMOUNTR { bits }
}
}
| 21.833333 | 82 | 0.508179 |
6219893acc88681f7b9afd9aff4d72ab3baa420f | 371 | pub mod grid5000_deploy_env_response;
pub mod grid5000_deployment_request;
pub mod grid5000_link_job;
pub mod grid5000_reservation_request;
pub mod gridd500_job_submit_response;
pub use grid5000_deploy_env_response::*;
pub use grid5000_deployment_request::*;
pub use grid5000_link_job::*;
pub use grid5000_reservation_request::*;
pub use gridd500_job_submit_response::*; | 33.727273 | 40 | 0.851752 |
cc2529ec33b122dcdf6b474711c38a547a510be9 | 1,568 | #![cfg(windows)]
use std::{
fs,
io,
os::windows::fs as winfs,
path::PathBuf,
process,
};
use wink::app_ln;
enum LinkType {
Symbolic,
Hard,
Junction,
Infer,
}
struct Cmd {
target: PathBuf,
path: PathBuf,
link_type: LinkType,
}
impl Cmd {
fn from_args() -> Self {
use LinkType::*;
let m = app_ln().get_matches_from(wild::args());
let path = m.value_of("path").map(PathBuf::from).unwrap();
let target = m.value_of("target").map(PathBuf::from).unwrap();
let link_type = if m.is_present("symbolic") {
Symbolic
} else if m.is_present("hard") {
Hard
} else if m.is_present("junction") {
Junction
} else {
Infer
};
Self {
path,
target,
link_type,
}
}
fn run(mut self) -> io::Result<()> {
if self.path.is_dir() {
if let Some(name) = self.target.file_name() {
self.path.push(name);
}
}
match self.link_type {
LinkType::Symbolic => {
let md = fs::symlink_metadata(&self.target)?;
if md.is_dir() {
winfs::symlink_dir(&self.target, &self.path)
} else {
winfs::symlink_file(&self.target, &self.path)
}
}
LinkType::Hard => fs::hard_link(&self.target, &self.path),
LinkType::Junction => junction::create(&self.target, &self.path),
LinkType::Infer => {
let md = fs::symlink_metadata(&self.target)?;
if md.is_dir() {
winfs::symlink_dir(&self.target, &self.path)
} else {
fs::hard_link(&self.target, &self.path)
}
}
}
}
}
fn main() {
if let Err(e) = Cmd::from_args().run() {
eprintln!("error: {}", &e);
process::exit(2);
}
}
| 18.447059 | 68 | 0.59949 |
62b24b3963cb887c64b2bf94e347e90475d28632 | 723 | use std::str::FromStr;
#[derive(Debug)]
pub enum Method {
CONNECT,
DELETE,
GET,
HEAD,
OPTIONS,
PATCH,
POST,
PUT,
TRACE,
}
impl FromStr for Method {
type Err = MethodError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"CONNECT" => Ok(Self::CONNECT),
"DELETE" => Ok(Self::DELETE),
"GET" => Ok(Self::GET),
"HEAD" => Ok(Self::HEAD),
"OPTIONS" => Ok(Self::OPTIONS),
"PATCH" => Ok(Self::PATCH),
"POST" => Ok(Self::POST),
"PUT" => Ok(Self::PUT),
"TRACE" => Ok(Self::TRACE),
_ => Err(MethodError),
}
}
}
pub struct MethodError;
| 20.083333 | 53 | 0.46473 |
b9244452d4219adfeb8f8a6912f1c63c19768833 | 2,120 | use super::error::{Error, ErrorKind};
use super::pipeline::Pipeline;
use chrono::{DateTime, TimeZone, Utc};
use failure::ResultExt;
use log::{trace, warn};
use serde::{Deserialize, Serialize};
use std::fs;
use std::path::PathBuf;
#[derive(Debug, Deserialize, Serialize)]
pub struct State {
#[serde(default)]
pub id: String,
#[serde(skip_serializing)]
#[serde(default)]
pub path: String,
#[serde(default)]
pub active: bool,
    #[serde(default = "Utc::now")]
pub timestamp: DateTime<Utc>,
}
impl State {
pub fn read_from_pipeline(pipeline: &Pipeline) -> State {
let mut state_path = PathBuf::from(&pipeline.path);
state_path.pop();
state_path.push("state.json");
let state_path = state_path.to_string_lossy().to_string();
let state = State::read_file(&state_path);
match state {
Ok(state) => {
trace!("State loaded: {}", pipeline.id);
state
}
Err(err) => {
warn!("{}", err);
warn!("State created: {}", pipeline.id);
State {
id: pipeline.id.to_string(),
path: state_path.to_string(),
active: false,
timestamp: Utc.timestamp(0, 0),
}
}
}
}
pub fn read_file(state_path: &str) -> Result<State, Error> {
let state_data = fs::read_to_string(state_path)
.context(ErrorKind::InvalidStateFile(state_path.to_string()))?;
let mut state: State = serde_json::from_str(&state_data)
.context(ErrorKind::InvalidStateFile(state_path.to_string()))?;
state.path = state_path.to_string();
Ok(state)
}
pub fn write_file(&self) -> Result<(), Error> {
let state_data = serde_json::to_string_pretty(&self)
.context(ErrorKind::InvalidStateFile(self.path.to_string()))?;
fs::write(self.path.to_string(), state_data)
.context(ErrorKind::InvalidStateFile(self.path.to_string()))?;
Ok(())
}
}
| 27.179487 | 75 | 0.562736 |
7645cb4094e42b2881a3788d7129af869995f760 | 24,320 | use crate::bytes::ByteExt;
use crate::{Entry, Yaml, YamlParseError};
use core::iter::{Iterator, Peekable};
use std::str::Bytes;
use crate::Result;
// Implementation lifted from std, as it's currently only on Nightly. It's such a simple macro that it's low risk to duplicate it here (and better than writing one myself)
macro_rules! matches {
($expression:expr, $( $pattern:pat )|+ $( if $guard: expr )?) => {
match $expression {
$( $pattern )|+ $( if $guard )? => true,
_ => false
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) enum ParseContext {
FlowIn,
FlowOut,
FlowKey,
BlockIn,
BlockOut,
BlockKey,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ParseContextKind {
FlowMapping,
Flow,
BlockMapping,
Block,
}
pub(crate) struct Parser<'a> {
current: u8,
stream: Peekable<Bytes<'a>>,
bytes: &'a [u8],
source: &'a str,
idx: usize,
indent: usize,
expected: Vec<u8>,
contexts: Vec<ParseContext>,
}
impl<'a> Parser<'a> {
pub(crate) fn new(source: &'a str) -> Result<Self> {
let mut stream = source.bytes().peekable();
let first = stream.next().ok_or_else(|| YamlParseError {
line: 0,
col: 0,
msg: Some("expected input".into()),
source: None,
})?;
Ok(Self {
current: first,
bytes: source.as_bytes(),
stream,
source,
idx: 0,
indent: 0,
expected: Vec::new(),
contexts: Vec::new(),
})
}
fn start_context(&mut self, context_kind: ParseContextKind) -> Result<()> {
let context = match self.context() {
Some(ctx) => match context_kind {
ParseContextKind::Flow => ParseContext::FlowIn,
ParseContextKind::FlowMapping => ParseContext::FlowKey,
ParseContextKind::Block => match ctx {
ParseContext::FlowIn | ParseContext::FlowOut | ParseContext::FlowKey => {
return self.parse_error_with_msg(
"block collections cannot be values in flow collections",
)
}
ParseContext::BlockIn | ParseContext::BlockOut | ParseContext::BlockKey => {
ParseContext::BlockIn
}
},
ParseContextKind::BlockMapping => ParseContext::BlockKey,
},
None => match context_kind {
ParseContextKind::Flow => ParseContext::FlowIn,
ParseContextKind::FlowMapping => ParseContext::FlowKey,
ParseContextKind::Block => ParseContext::BlockOut,
ParseContextKind::BlockMapping => ParseContext::BlockKey,
},
};
self.contexts.push(context);
Ok(())
}
fn end_context(&mut self, expect: ParseContextKind) -> Result<()> {
if let Some(actual) = self.contexts.pop() {
let ctx_matches = match expect {
ParseContextKind::Flow => {
matches!(actual, ParseContext::FlowIn | ParseContext::FlowOut)
}
ParseContextKind::FlowMapping => matches!(actual, ParseContext::FlowKey),
ParseContextKind::Block => {
matches!(actual, ParseContext::BlockIn | ParseContext::BlockOut)
}
ParseContextKind::BlockMapping => matches!(actual, ParseContext::BlockKey),
};
if ctx_matches {
Ok(())
} else {
self.parse_error_with_msg(format!(
"expected but failed to end context {:?}, instead found {:?}",
expect, actual
))
}
} else {
self.parse_error_with_msg(format!(
"expected context {:?} but no contexts remained",
expect
))
}
}
fn context(&self) -> Option<ParseContext> {
self.contexts.last().copied()
}
fn bump(&mut self) -> bool {
match self.stream.next() {
Some(byte) => {
self.idx += 1;
self.current = byte;
true
}
None => false,
}
}
fn bump_newline(&mut self) -> bool {
match self.stream.next() {
Some(b'\n') | Some(b'\r') => self.bump(),
Some(byte) => {
self.idx += 1;
self.current = byte;
true
}
None => false,
}
}
fn advance(&mut self) -> Result<()> {
if self.bump() {
Ok(())
} else {
self.parse_error_with_msg("unexpected end of input")
}
}
fn peek(&mut self) -> Option<u8> {
self.stream.peek().copied()
}
fn at_end(&self) -> bool {
self.idx == self.bytes.len() - 1
}
fn parse_mapping_maybe(&mut self, node: Yaml<'a>) -> Result<Yaml<'a>> {
self.chomp_whitespace();
self.chomp_comment();
match self.current {
b':' if !matches!(self.expected.last(), Some(b'}') | Some(b':')) => {
self.parse_mapping_block(node)
}
_ => Ok(node),
}
}
pub(crate) fn parse(&mut self) -> Result<Yaml<'a>> {
let context = self.context();
let peeked = self.peek();
let res = match self.current {
b'#' => {
self.chomp_comment();
self.parse()?
}
b'-' if self.check_ahead_1(|val| val == b'-')
&& self.check_ahead_n(2, |val| val == b'-') =>
{
self.bump();
self.bump();
self.bump();
self.parse()?
}
b'\n' | b'\r' => {
self.chomp_newlines()?;
self.indent = 0;
self.parse()?
}
byt if byt.is_scalar_start(peeked, context) => self.parse_maybe_scalar()?,
b'{' => {
self.expected.push(b'}');
let res = self.parse_mapping_flow()?;
if let Some(b'}') = self.expected.last() {
self.pop_if_match(b'}')?;
}
self.parse_mapping_maybe(res)?
}
b'[' => {
let node = self.parse_sequence_flow()?;
self.parse_mapping_maybe(node)?
}
b'-' => match self.peek() {
Some(byt) if byt.is_linebreak() || byt.is_ws() => self.parse_sequence_block()?,
byt => unreachable!(format!("unexpected {:?}", byt.map(char::from))),
},
b'}' | b']' => {
return self.parse_error_with_msg(format!(
r#"unexpected symbol '{}'"#,
char::from(self.current)
))
}
b if b.is_ws() => {
self.chomp_indent();
if self.at_end() {
return self.parse_error_with_msg("unexpected end of input");
}
self.parse()?
}
// TODO: Provide error message
_ => return self.parse_error_with_msg("failed to parse at top level"),
};
Ok(res)
}
pub(crate) fn parse_maybe_scalar(&mut self) -> Result<Yaml<'a>> {
match self.context() {
None => {
self.start_context(ParseContextKind::BlockMapping)?;
let node = self.parse_scalar()?;
self.end_context(ParseContextKind::BlockMapping)?;
self.parse_mapping_maybe(node)
}
Some(ctx) => match ctx {
ParseContext::FlowIn | ParseContext::FlowOut | ParseContext::FlowKey => {
self.parse_scalar()
}
_ => {
self.start_context(ParseContextKind::BlockMapping)?;
let node = self.parse_scalar()?;
self.end_context(ParseContextKind::BlockMapping)?;
self.parse_mapping_maybe(node)
}
},
}
}
pub(crate) fn parse_scalar(&mut self) -> Result<Yaml<'a>> {
let context = self.context();
match self.current {
            // TODO: currently double-quote/single-quote scalars are handled identically;
            // maybe handle them as defined by the YAML spec?
b'\"' => {
let scal_start = self.idx;
self.advance()?;
let _ = self
.take_while(|tok, _| !matches!(tok, b'\"'))
.map_err(|_| {
self.make_parse_error_with_msg("unexpected end of input; expected '\"'")
})?;
let scal_end = if self.bump() {
self.idx
} else {
self.bytes.len()
};
let entire_literal = self.slice_range((scal_start, scal_end));
Ok(Yaml::Scalar(entire_literal))
}
b'\'' => {
let scal_start = self.idx;
self.advance()?;
self.take_while(|tok, _| !matches!(tok, b'\''))
.map_err(|_| {
self.make_parse_error_with_msg("unexpected end of input; expected '\''")
})?;
let scal_end = if self.bump() {
self.idx
} else {
self.bytes.len()
};
let entire_literal = self.slice_range((scal_start, scal_end));
Ok(Yaml::Scalar(entire_literal))
}
_ => {
let accept = |tok: u8, nxt: Option<u8>| tok.is_ns_plain(nxt, context);
let (start, mut end) = self.take_while(&accept).unwrap_or_else(|val| val);
loop {
self.chomp_whitespace();
self.chomp_comment();
let (s, e) = self.take_while(&accept).unwrap_or_else(|val| val);
if s == e {
break;
} else {
end = e;
}
if self.at_end() {
break;
}
}
let entire_literal = self.slice_range((start, end));
Ok(Yaml::Scalar(entire_literal))
}
}
}
fn lookup_line_col(&self) -> (usize, usize) {
let err_off: usize = self.idx + 1;
let mut off = 0;
let mut line_len = 0;
let mut chars = self.source.chars().map(|c| (c, c.len_utf8()));
let mut line_lens = Vec::new();
while let Some((chr, len)) = chars.next() {
match chr {
'\r' => {
if let Some(('\n', nxtlen)) = chars.next() {
line_lens.push(line_len + nxtlen + len);
line_len = 0;
continue;
}
}
'\n' => {
line_lens.push(line_len + len);
line_len = 0;
continue;
}
_ => line_len += len,
}
}
let mut line_num = 0;
for ((line_no, _), len) in self.source.lines().enumerate().zip(line_lens) {
if err_off >= off && err_off < off + len {
return (line_no + 1, err_off - off + 1);
}
line_num = line_no;
off += len;
}
if err_off >= off {
return (line_num + 1, err_off - off + 1);
}
eprintln!("Couldn't find error location, please report this bug");
(0, 0)
}
#[allow(unused)]
fn parse_error<T>(&self) -> Result<T> {
let (line, col) = self.lookup_line_col();
Err(YamlParseError {
line,
col,
msg: Some(format!(
r#"unexpectedly found "{}" while parsing"#,
self.current
)),
source: None,
})
}
fn make_parse_error_with_msg<S: Into<String>>(&self, msg: S) -> YamlParseError {
let (line, col) = self.lookup_line_col();
YamlParseError {
line,
col,
msg: Some(msg.into()),
source: None,
}
}
fn parse_error_with_msg<T, S: Into<String>>(&self, msg: S) -> Result<T> {
Err(self.make_parse_error_with_msg(msg))
}
pub(crate) fn parse_mapping_flow(&mut self) -> Result<Yaml<'a>> {
match self.current {
b'{' => (),
_ => return self.parse_error_with_msg("expected left brace"),
}
self.advance()?;
let mut entries: Vec<Entry<'a>> = Vec::new();
loop {
match &self.current {
b'}' => {
self.bump();
return Ok(Yaml::Mapping(entries));
}
b',' => {
self.advance()?;
}
_ => {
self.expected.push(b':');
self.start_context(ParseContextKind::FlowMapping)?;
let key = self.parse()?;
self.end_context(ParseContextKind::FlowMapping)?;
self.chomp_whitespace();
self.chomp_comment();
match self.current {
b':' => {
self.pop_if_match(b':')?;
self.advance()?;
self.chomp_whitespace();
self.start_context(ParseContextKind::Flow)?;
let value = self.parse()?;
self.end_context(ParseContextKind::Flow)?;
self.chomp_whitespace();
self.chomp_comment();
entries.push(Entry { key, value })
}
// TODO: Provide error message
_ => return self.parse_error_with_msg("failed to parse flow mapping"),
}
}
}
}
}
pub(crate) fn parse_mapping_block(&mut self, start_key: Yaml<'a>) -> Result<Yaml<'a>> {
match self.context() {
Some(ParseContext::FlowIn)
| Some(ParseContext::FlowKey)
| Some(ParseContext::FlowOut) => {
return self
.parse_error_with_msg("block mappings may not appear in flow collections")
}
_ => {}
}
let indent = self.indent;
match self.current {
b':' => {
self.advance()?;
let mut entries = Vec::new();
self.chomp_whitespace();
self.chomp_comment();
let value = self.parse()?;
entries.push(Entry::new(start_key, value));
loop {
match self.current {
_ if self.at_end() => break,
byt if byt.is_linebreak() => {
self.indent = 0;
if self.bump_newline() {
continue;
} else {
break;
}
}
byt if byt.is_ws() => {
self.chomp_indent();
}
b'#' => self.chomp_comment(),
_ if self.indent < indent => break,
_ => {
self.expected.push(b':');
let key = self.parse()?;
self.chomp_whitespace();
self.chomp_comment();
if let b':' = self.current {
self.pop_if_match(b':')?;
self.advance()?;
self.chomp_whitespace();
let value = self.parse()?;
entries.push(Entry::new(key, value));
} else {
// TODO: Provide error message
return self.parse_error_with_msg("failed to parse block mapping");
}
}
}
}
Ok(Yaml::Mapping(entries))
}
// TODO: Provide error message
_ => self.parse_error_with_msg("failed to parse block mapping, expected ':'"),
}
}
fn slice_range(&self, (start, end): (usize, usize)) -> &'a str {
let end = usize::min(end, self.bytes.len());
&self.source[start..end]
}
fn chomp_comment(&mut self) {
if self.current == b'#' {
self.bump();
while !self.current.is_linebreak() {
if !self.bump() {
break;
}
}
}
}
fn chomp_whitespace(&mut self) {
while let b' ' | b'\t' = self.current {
if !self.bump() {
break;
}
}
}
fn chomp_indent(&mut self) {
let mut idt = 0;
while let b' ' | b'\t' = self.current {
if !self.bump() {
break;
}
idt += 1;
}
self.indent = idt;
}
fn chomp_newlines(&mut self) -> Result<()> {
while let b'\r' | b'\n' = self.current {
self.advance()?;
}
Ok(())
}
pub(crate) fn parse_sequence_flow(&mut self) -> Result<Yaml<'a>> {
self.start_context(ParseContextKind::Flow)?;
match self.current {
b'[' => {
self.advance()?;
let mut elements = Vec::new();
loop {
match self.current {
b']' => {
self.bump();
self.end_context(ParseContextKind::Flow)?;
return Ok(Yaml::Sequence(elements));
}
b' ' | b'\t' => self.chomp_whitespace(),
b'#' => self.chomp_comment(),
_ => {
let elem = self.parse()?;
elements.push(elem);
self.chomp_whitespace();
match self.current {
b',' => {
self.advance()?;
}
b'#' => self.chomp_comment(),
b']' => {
self.bump();
self.end_context(ParseContextKind::Flow)?;
return Ok(Yaml::Sequence(elements));
}
// TODO: Provide error message
_ => {
return self
.parse_error_with_msg("failed to parse flow sequence")
}
}
}
}
}
}
// TODO: Provide error message
_ => self.parse_error_with_msg("failed to parse flow sequence"),
}
}
fn check_ahead_1(&self, stop: impl Fn(u8) -> bool) -> bool {
match self.bytes.get(self.idx + 1) {
Some(&b) => stop(b),
None => false,
}
}
pub(crate) fn parse_sequence_block(&mut self) -> Result<Yaml<'a>> {
match self.context() {
Some(ParseContext::FlowIn)
| Some(ParseContext::FlowKey)
| Some(ParseContext::FlowOut) => {
return self
.parse_error_with_msg("block sequences may not appear in flow collections")
}
_ => {}
}
self.start_context(ParseContextKind::Block)?;
let indent = self.indent;
match self.current {
b'-' => {
let mut seq = Vec::new();
loop {
match self.current {
_ if self.at_end() => break,
b'#' => self.chomp_comment(),
byt if byt.is_linebreak() => {
self.indent = 0;
if self.bump_newline() {
continue;
} else {
break;
}
}
byt if byt.is_ws() => {
self.chomp_indent();
}
_ if self.indent < indent => break,
b'-' => {
if self.check_ahead_1(ByteExt::is_linebreak) {
self.advance()?;
self.advance()?;
self.indent = 0;
if self.current.is_ws() {
self.chomp_indent();
if self.indent < indent {
break;
} else {
let node = self.parse()?;
seq.push(node);
}
} else if 0 < indent {
break;
} else {
let node = self.parse()?;
seq.push(node);
}
} else if self.check_ahead_1(ByteExt::is_ws) {
self.advance()?;
self.advance()?;
let node = self.parse()?;
seq.push(node);
} else {
return self.parse_error_with_msg("unexpected '-'");
}
}
_ if self.indent == indent => break,
_ => return self.parse_error_with_msg("expected sequence item"),
}
}
self.end_context(ParseContextKind::Block)?;
Ok(Yaml::Sequence(seq))
}
// TODO: Provide error message
_ => self.parse_error_with_msg("failed to parse block sequence"),
}
}
fn check_ahead_n(&self, n: usize, stop: impl Fn(u8) -> bool) -> bool {
match self.bytes.get(self.idx + n) {
Some(&b) => stop(b),
None => false,
}
}
fn take_while(
&mut self,
accept: impl Fn(u8, Option<u8>) -> bool,
) -> std::result::Result<(usize, usize), (usize, usize)> {
let start = self.idx;
let mut end = start;
loop {
let peeked = self.peek();
if !accept(self.current, peeked) {
break;
} else if !self.bump() {
end += 1;
return Err((start, end));
}
end += 1;
}
Ok((start, end))
}
fn pop_if_match(&mut self, expect: u8) -> Result<()> {
match self.expected.last() {
Some(&val) if val == expect => {
self.expected.pop();
Ok(())
}
// TODO: Provide error message
_ => self.parse_error_with_msg("token was not expected"),
}
}
}
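#[cfg(test)]
mod usage_sketch {
    // Illustrative test only (not part of the original parser). It assumes the
    // `ByteExt` helpers (`is_scalar_start`, `is_ns_plain`) accept plain
    // alphanumeric characters, which the parser above relies on.
    use super::*;
    #[test]
    fn parses_a_block_mapping_with_a_flow_sequence_value() {
        // "key: [1, 2]" should parse into a one-entry mapping whose value is a
        // two-element flow sequence.
        let parsed = Parser::new("key: [1, 2]").and_then(|mut parser| parser.parse());
        match parsed {
            Ok(Yaml::Mapping(entries)) => assert_eq!(entries.len(), 1),
            _ => panic!("expected `key: [1, 2]` to parse into a single-entry mapping"),
        }
    }
}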
| 35.400291 | 171 | 0.408183 |
08b896f387bbed5d13e5f2bd2ef0f01c7261b34c | 591 | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
return
{ return () }
//~^ ERROR the type of this value must be known in this context
()
;
}
| 32.833333 | 68 | 0.698816 |
143ae611a3f0befa206b38df00bca85d95a54790 | 932 | use actix_web::{ get, web::{Data, Path}, HttpResponse, Result};
use crate::utils::response::{ZarynError};
use crate::models::state::{ AppState };
use crate::controllers::transaction::*;
#[get("")]
pub async fn get_transactions(state: Data<AppState>) -> Result<HttpResponse, ZarynError> {
let db = state.as_ref().db.clone();
return get_all_transactions(db).await;
}
#[get("/{wallet_address}")]
pub async fn get_wallet_transactions(Path(wallet_address): Path<String>, state: Data<AppState>) -> Result<HttpResponse, ZarynError> {
let db = state.as_ref().db.clone();
return get_this_wallet_transaction(wallet_address,db).await;
}
#[get("/info/{transaction_address}")]
pub async fn get_transaction_info(Path(transaction_address): Path<String>, state: Data<AppState>) -> Result<HttpResponse, ZarynError> {
let db = state.as_ref().db.clone();
return get_this_transaction_info(transaction_address,db).await;
}
| 37.28 | 135 | 0.725322 |
0364b8bb7d2197c3d10a84097df879814439defd | 3,427 | use crate::cli::mqtt::{publish::MqttPublishCommand, subscribe::MqttSubscribeCommand, MqttError};
use crate::command::{BuildCommand, BuildContext, Command};
use mqtt_client::{QoS, Topic};
use std::time::Duration;
use structopt::StructOpt;
const DEFAULT_HOST: &str = "localhost";
const DEFAULT_PORT: u16 = 1883;
const PUB_CLIENT_PREFIX: &str = "tedge-pub";
const SUB_CLIENT_PREFIX: &str = "tedge-sub";
const DISCONNECT_TIMEOUT: Duration = Duration::from_secs(2);
#[derive(StructOpt, Debug)]
pub enum TEdgeMqttCli {
/// Publish a MQTT message on a topic.
Pub {
/// Topic to publish
topic: String,
/// Message to publish
message: String,
/// QoS level (0, 1, 2)
#[structopt(short, long, parse(try_from_str = parse_qos), default_value = "0")]
qos: QoS,
},
/// Subscribe a MQTT topic.
Sub {
/// Topic to publish
topic: String,
/// QoS level (0, 1, 2)
#[structopt(short, long, parse(try_from_str = parse_qos), default_value = "0")]
qos: QoS,
/// Avoid printing the message topics on the console
#[structopt(long = "no-topic")]
hide_topic: bool,
},
}
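// Illustrative invocations (assuming this enum is exposed as the `mqtt` subcommand
// of the `tedge` binary; the topic and payload values are placeholders):
//
//     tedge mqtt pub --qos 1 'my/topic' 'hello'
//     tedge mqtt sub --no-topic 'my/topic'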
impl BuildCommand for TEdgeMqttCli {
fn build_command(self, _context: BuildContext) -> Result<Box<dyn Command>, crate::ConfigError> {
let cmd = {
match self {
TEdgeMqttCli::Pub {
topic,
message,
qos,
} => MqttPublishCommand {
topic: Topic::new(topic.as_str())?,
message,
qos,
mqtt_config: mqtt_client::Config::new(DEFAULT_HOST, DEFAULT_PORT),
client_id: format!("{}-{}", PUB_CLIENT_PREFIX, std::process::id()),
disconnect_timeout: DISCONNECT_TIMEOUT,
}
.into_boxed(),
TEdgeMqttCli::Sub {
topic,
qos,
hide_topic,
} => MqttSubscribeCommand {
topic,
qos,
hide_topic,
mqtt_config: mqtt_client::Config::new(DEFAULT_HOST, DEFAULT_PORT),
client_id: format!("{}-{}", SUB_CLIENT_PREFIX, std::process::id()),
}
.into_boxed(),
}
};
Ok(cmd)
}
}
fn parse_qos(src: &str) -> Result<QoS, MqttError> {
let int_val: u8 = src.parse().map_err(|_| MqttError::InvalidQoSError)?;
match int_val {
0 => Ok(QoS::AtMostOnce),
1 => Ok(QoS::AtLeastOnce),
2 => Ok(QoS::ExactlyOnce),
_ => Err(MqttError::InvalidQoSError),
}
}
#[cfg(test)]
mod tests {
use super::parse_qos;
use mqtt_client::QoS;
#[test]
fn test_parse_qos_at_most_once() {
let input_qos = "0";
let expected_qos = QoS::AtMostOnce;
assert_eq!(parse_qos(input_qos).unwrap(), expected_qos);
}
#[test]
fn test_parse_qos_at_least_once() {
let input_qos = "1";
let expected_qos = QoS::AtLeastOnce;
assert_eq!(parse_qos(input_qos).unwrap(), expected_qos);
}
#[test]
fn test_parse_qos_exactly_once() {
let input_qos = "2";
let expected_qos = QoS::ExactlyOnce;
assert_eq!(parse_qos(input_qos).unwrap(), expected_qos);
}
}
| 30.873874 | 100 | 0.5445 |
d9244404a019a963468b861bbbb423ec860f1298 | 6,335 | //! Utilities for building with rustdoc.
use crate::core::compiler::context::Context;
use crate::core::compiler::unit::Unit;
use crate::core::compiler::CompileKind;
use crate::sources::CRATES_IO_REGISTRY;
use crate::util::errors::{internal, CargoResult};
use cargo_util::ProcessBuilder;
use std::collections::HashMap;
use std::fmt;
use std::hash;
use url::Url;
const DOCS_RS_URL: &'static str = "https://docs.rs/";
/// Mode used for `std`.
#[derive(Debug, Hash)]
pub enum RustdocExternMode {
/// Use a local `file://` URL.
Local,
/// Use a remote URL to <https://doc.rust-lang.org/> (default).
Remote,
/// An arbitrary URL.
Url(String),
}
impl From<String> for RustdocExternMode {
fn from(s: String) -> RustdocExternMode {
match s.as_ref() {
"local" => RustdocExternMode::Local,
"remote" => RustdocExternMode::Remote,
_ => RustdocExternMode::Url(s),
}
}
}
impl fmt::Display for RustdocExternMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
RustdocExternMode::Local => "local".fmt(f),
RustdocExternMode::Remote => "remote".fmt(f),
RustdocExternMode::Url(s) => s.fmt(f),
}
}
}
impl<'de> serde::de::Deserialize<'de> for RustdocExternMode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
Ok(s.into())
}
}
#[derive(serde::Deserialize, Debug)]
#[serde(default)]
pub struct RustdocExternMap {
#[serde(deserialize_with = "default_crates_io_to_docs_rs")]
pub(crate) registries: HashMap<String, String>,
std: Option<RustdocExternMode>,
}
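// For orientation (a sketch, not normative): this struct is deserialized from the
// `doc.extern-map` configuration table mentioned below, which in a Cargo config
// file would look roughly like:
//
//     [doc.extern-map.registries]
//     crates-io = "https://docs.rs/"
//
//     [doc.extern-map]
//     std = "local"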
impl Default for RustdocExternMap {
fn default() -> Self {
let mut registries = HashMap::new();
registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into());
Self {
registries,
std: None,
}
}
}
fn default_crates_io_to_docs_rs<'de, D: serde::Deserializer<'de>>(
de: D,
) -> Result<HashMap<String, String>, D::Error> {
use serde::Deserialize;
let mut registries = HashMap::deserialize(de)?;
if !registries.contains_key(CRATES_IO_REGISTRY) {
registries.insert(CRATES_IO_REGISTRY.into(), DOCS_RS_URL.into());
}
Ok(registries)
}
impl hash::Hash for RustdocExternMap {
fn hash<H: hash::Hasher>(&self, into: &mut H) {
self.std.hash(into);
for (key, value) in &self.registries {
key.hash(into);
value.hash(into);
}
}
}
pub fn add_root_urls(
cx: &Context<'_, '_>,
unit: &Unit,
rustdoc: &mut ProcessBuilder,
) -> CargoResult<()> {
let config = cx.bcx.config;
if !config.cli_unstable().rustdoc_map {
log::debug!("`doc.extern-map` ignored, requires -Zrustdoc-map flag");
return Ok(());
}
let map = config.doc_extern_map()?;
let mut unstable_opts = false;
// Collect mapping of registry name -> index url.
let name2url: HashMap<&String, Url> = map
.registries
.keys()
.filter_map(|name| {
if let Ok(index_url) = config.get_registry_index(name) {
Some((name, index_url))
} else {
log::warn!(
"`doc.extern-map.{}` specifies a registry that is not defined",
name
);
None
}
})
.collect();
for dep in cx.unit_deps(unit) {
if dep.unit.target.is_linkable() && !dep.unit.mode.is_doc() {
for (registry, location) in &map.registries {
let sid = dep.unit.pkg.package_id().source_id();
let matches_registry = || -> bool {
if !sid.is_registry() {
return false;
}
if sid.is_default_registry() {
return registry == CRATES_IO_REGISTRY;
}
if let Some(index_url) = name2url.get(registry) {
return index_url == sid.url();
}
false
};
if matches_registry() {
let mut url = location.clone();
if !url.contains("{pkg_name}") && !url.contains("{version}") {
if !url.ends_with('/') {
url.push('/');
}
url.push_str("{pkg_name}/{version}/");
}
let url = url
.replace("{pkg_name}", &dep.unit.pkg.name())
.replace("{version}", &dep.unit.pkg.version().to_string());
rustdoc.arg("--extern-html-root-url");
rustdoc.arg(format!("{}={}", dep.unit.target.crate_name(), url));
unstable_opts = true;
}
}
}
}
let std_url = match &map.std {
None | Some(RustdocExternMode::Remote) => None,
Some(RustdocExternMode::Local) => {
let sysroot = &cx.bcx.target_data.info(CompileKind::Host).sysroot;
let html_root = sysroot.join("share").join("doc").join("rust").join("html");
if html_root.exists() {
let url = Url::from_file_path(&html_root).map_err(|()| {
internal(format!(
"`{}` failed to convert to URL",
html_root.display()
))
})?;
Some(url.to_string())
} else {
log::warn!(
"`doc.extern-map.std` is \"local\", but local docs don't appear to exist at {}",
html_root.display()
);
None
}
}
Some(RustdocExternMode::Url(s)) => Some(s.to_string()),
};
if let Some(url) = std_url {
for name in &["std", "core", "alloc", "proc_macro"] {
rustdoc.arg("--extern-html-root-url");
rustdoc.arg(format!("{}={}", name, url));
unstable_opts = true;
}
}
if unstable_opts {
rustdoc.arg("-Zunstable-options");
}
Ok(())
}
| 32.487179 | 100 | 0.515549 |
1ec0260fda0cad5a4145bdf330abf69b13ee6dd9 | 5,696 | #[cfg(any(test, feature = "testing"))]
use crate::account_address::AccountAddress;
#[cfg(any(test, feature = "testing"))]
use canonical_serialization::SimpleSerializer;
use canonical_serialization::{
CanonicalDeserialize, CanonicalDeserializer, CanonicalSerialize, CanonicalSerializer,
};
#[cfg(any(test, feature = "testing"))]
use crypto::HashValue;
use failure::prelude::*;
use hex;
#[cfg(any(test, feature = "testing"))]
use proptest_derive::Arbitrary;
use proto_conv::{FromProto, IntoProto};
use serde::{Deserialize, Serialize};
use std::{convert::TryFrom, fmt};
/// Size of an event key.
pub const EVENT_KEY_LENGTH: usize = 32;
/// A struct that represents a globally unique id for an Event stream that a user can listen to.
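///
/// For example (illustrative): a key can be built from a fixed-size byte array with
/// `EventKey::new([0u8; EVENT_KEY_LENGTH])`, or fallibly converted from a byte slice
/// with `EventKey::try_from(&bytes[..])`, which errors unless the slice is exactly
/// `EVENT_KEY_LENGTH` (32) bytes long.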
#[derive(
Ord, PartialOrd, Eq, PartialEq, Hash, Debug, Default, Clone, Serialize, Deserialize, Copy,
)]
#[cfg_attr(any(test, feature = "testing"), derive(Arbitrary))]
pub struct EventKey([u8; EVENT_KEY_LENGTH]);
impl EventKey {
/// Construct a new EventKey from a byte array slice.
pub fn new(key: [u8; EVENT_KEY_LENGTH]) -> Self {
EventKey(key)
}
/// Get the byte representation of the event key.
pub fn as_bytes(&self) -> &[u8] {
&self.0
}
/// Convert event key into a byte array.
pub fn to_vec(&self) -> Vec<u8> {
self.0.to_vec()
}
#[cfg(any(test, feature = "testing"))]
/// Create a random event key for testing
pub fn random() -> Self {
EventKey::try_from(HashValue::random().to_vec().as_slice()).unwrap()
}
#[cfg(any(test, feature = "testing"))]
/// Create a unique handle by using an AccountAddress and a counter.
pub fn new_from_address(addr: &AccountAddress, salt: u64) -> Self {
let mut serializer: SimpleSerializer<Vec<u8>> = SimpleSerializer::new();
serializer.encode_u64(salt).expect("Can't serialize salt");
serializer
.encode_struct(addr)
.expect("Can't serialize address");
EventKey(*HashValue::from_sha3_256(&serializer.get_output()).as_ref())
}
}
impl TryFrom<&[u8]> for EventKey {
type Error = failure::Error;
/// Tries to convert the provided byte array into Event Key.
fn try_from(bytes: &[u8]) -> Result<EventKey> {
ensure!(
bytes.len() == EVENT_KEY_LENGTH,
"The Address {:?} is of invalid length",
bytes
);
let mut addr = [0u8; EVENT_KEY_LENGTH];
addr.copy_from_slice(bytes);
Ok(EventKey(addr))
}
}
/// A Rust representation of an Event Handle Resource.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct EventHandle {
/// The associated globally unique key that is used as the key to the EventStore.
key: EventKey,
/// Number of events in the event stream.
count: u64,
}
impl EventHandle {
/// Constructs a new Event Handle
pub fn new(key: EventKey, count: u64) -> Self {
EventHandle { key, count }
}
/// Return the key to where this event is stored in EventStore.
pub fn key(&self) -> &EventKey {
&self.key
}
/// Return the counter for the handle
pub fn count(&self) -> u64 {
self.count
}
#[cfg(any(test, feature = "testing"))]
pub fn count_mut(&mut self) -> &mut u64 {
&mut self.count
}
#[cfg(any(test, feature = "testing"))]
/// Create a random event handle for testing
pub fn random_handle(count: u64) -> Self {
Self {
key: EventKey::random(),
count,
}
}
#[cfg(any(test, feature = "testing"))]
/// Derive a unique handle by using an AccountAddress and a counter.
pub fn new_from_address(addr: &AccountAddress, salt: u64) -> Self {
Self {
key: EventKey::new_from_address(addr, salt),
count: 0,
}
}
}
impl fmt::LowerHex for EventKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", hex::encode(&self.0))
}
}
impl fmt::Display for EventKey {
fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
// Forward to the LowerHex impl with a "0x" prepended (the # flag).
write!(f, "{:#x}", self)
}
}
impl FromProto for EventKey {
type ProtoType = Vec<u8>;
fn from_proto(key: Self::ProtoType) -> Result<Self> {
EventKey::try_from(&key[..])
}
}
impl IntoProto for EventKey {
type ProtoType = Vec<u8>;
fn into_proto(self) -> Self::ProtoType {
self.0.to_vec()
}
}
impl CanonicalSerialize for EventKey {
fn serialize(&self, serializer: &mut impl CanonicalSerializer) -> Result<()> {
        // We cannot use encode_raw_bytes here because this structure determines how a Move
        // value of type EventKey is serialized. Since Move doesn't have fixed-length
        // bytearrays, values can't be encoded in the fixed-length fashion.
serializer.encode_bytes(&self.0)?;
Ok(())
}
}
impl CanonicalDeserialize for EventKey {
fn deserialize(deserializer: &mut impl CanonicalDeserializer) -> Result<Self> {
let bytes = deserializer.decode_bytes()?;
Self::try_from(bytes.as_slice())
}
}
impl CanonicalSerialize for EventHandle {
fn serialize(&self, serializer: &mut impl CanonicalSerializer) -> Result<()> {
serializer
.encode_u64(self.count)?
.encode_struct(&self.key)?;
Ok(())
}
}
impl CanonicalDeserialize for EventHandle {
fn deserialize(deserializer: &mut impl CanonicalDeserializer) -> Result<Self> {
let count = deserializer.decode_u64()?;
let key = deserializer.decode_struct()?;
Ok(EventHandle { count, key })
}
}
| 30.297872 | 97 | 0.626229 |
0e9cf39929f1c6fb10d580a2d0045a319aad5d27 | 1,229 | // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[link(name = "issue_2526",
vers = "0.2",
uuid = "54cc1bc9-02b8-447c-a227-75ebc923bc29")];
#[crate_type = "lib"];
extern mod std;
struct arc_destruct<T> {
_data: int,
}
#[unsafe_destructor]
impl<T:Const> Drop for arc_destruct<T> {
fn finalize(&self) {}
}
fn arc_destruct<T:Const>(data: int) -> arc_destruct<T> {
arc_destruct {
_data: data
}
}
fn arc<T:Const>(_data: T) -> arc_destruct<T> {
arc_destruct(0)
}
fn init() -> arc_destruct<context_res> {
unsafe {
arc(context_res())
}
}
struct context_res {
ctx : int,
}
impl Drop for context_res {
fn finalize(&self) {}
}
fn context_res() -> context_res {
context_res {
ctx: 0
}
}
pub type context = arc_destruct<context_res>;
| 21.189655 | 68 | 0.658259 |
db44fb3a73e1eb4d93482d4e0a396b81c0da7b53 | 1,508 | use crate::handler::default::*;
use crate::handler::query::*;
use actix_files as fs;
use actix_web::http::{header, Method, StatusCode};
use actix_web::{error, web, HttpRequest, HttpResponse};
use std::io;
pub fn config_app(cfg: &mut web::ServiceConfig) {
cfg.service(favicon)
// register simple route, handle all methods
.service(welcome)
// with path parameters
.service(web::resource("/models/{name}").route(web::get().to(with_param)))
// async response body
.service(query)
.service(query_str)
.service(
web::resource("/test").to(|req: HttpRequest| match *req.method() {
Method::GET => HttpResponse::Ok(),
Method::POST => HttpResponse::MethodNotAllowed(),
_ => HttpResponse::NotFound(),
}),
)
.service(web::resource("/error").to(|| async {
error::InternalError::new(
io::Error::new(io::ErrorKind::Other, "test"),
StatusCode::INTERNAL_SERVER_ERROR,
)
}));
}
pub fn config_static(cfg: &mut web::ServiceConfig) {
// static files
cfg.service(fs::Files::new("/static", "../../static").show_files_listing())
// redirect
.service(web::resource("/").route(web::get().to(|req: HttpRequest| {
println!("{:?}", req);
HttpResponse::Found()
.header(header::LOCATION, "static/welcome.html")
.finish()
})));
}
| 35.069767 | 82 | 0.554377 |
6a79c5ad6f996a6e971657eb297e129da589b64f | 7,944 | //! This module provides default futures that can be used for
//! sending requests to other microservices by the proxy engine.
use std::str::from_utf8;
use std::sync::Arc;
use futures::future::{Future};
use futures::Stream;
use json::parse as json_parse;
use lapin_futures_rustls::lapin::channel::{
BasicConsumeOptions, BasicProperties, BasicPublishOptions, QueueBindOptions,
QueueDeclareOptions, QueueDeleteOptions, QueueUnbindOptions,
};
use lapin_futures_rustls::lapin::types::{AMQPValue, FieldTable};
use log::{error, info, warn};
use crate::error::PathfinderError;
use crate::rabbitmq::{RabbitMQContext};
use crate::engine::MessageSender;
use crate::engine::options::RpcOptions;
use crate::engine::serializer::Serializer;
/// Simple future that sends a RPC request to the certain microservice,
/// consumes from a response from a separate queue and then returns a
/// response to the caller via transmitter.
pub fn rpc_request_future(
transmitter: MessageSender,
rabbitmq_context: Arc<RabbitMQContext>,
options: Arc<RpcOptions>,
headers: HashMap<String, String>
) -> Box<Future<Item=(), Error=PathfinderError> + Send + Sync + 'static> {
let rabbitmq_context_local = rabbitmq_context.clone();
let publish_channel = rabbitmq_context_local.get_publish_channel();
let consume_channel = rabbitmq_context_local.get_consume_channel();
let queue_name = options.get_queue_name().unwrap().clone();
let queue_declare_options = QueueDeclareOptions {
passive: false,
durable: true,
exclusive: true,
auto_delete: false,
..Default::default()
};
Box::new(
// 1. Declare a response queue
consume_channel
.queue_declare(&queue_name, queue_declare_options, FieldTable::new())
.map(move |queue| (publish_channel, consume_channel, queue, options))
// 2. Link the response queue the exchange
.and_then(move |(publish_channel, consume_channel, queue, options)| {
let queue_name = options.get_queue_name().unwrap().clone();
let endpoint = options.get_endpoint().unwrap().clone();
let routing_key = options.get_queue_name().unwrap().clone();
consume_channel
.queue_bind(
&queue_name,
&endpoint.get_response_exchange(),
&routing_key,
QueueBindOptions::default(),
FieldTable::new()
)
.map(move |_| (publish_channel, consume_channel, queue, options))
})
// 3. Publish message into the microservice queue and make ensure that it's delivered
.and_then(move |(publish_channel, consume_channel, queue, options)| {
let publish_message_options = BasicPublishOptions {
mandatory: true,
immediate: false,
..Default::default()
};
let mut message_headers = FieldTable::new();
for (key, value) in headers.clone().iter() {
let header_name = key.clone();
let header_value = AMQPValue::LongString(value.clone());
message_headers.insert(header_name, header_value);
}
let endpoint = options.get_endpoint().unwrap().clone();
let message = options.get_message().unwrap().clone();
let queue_name_response = options.get_queue_name().unwrap().clone();
let event_name = message["event-name"].as_str().unwrap_or("null");
let basic_properties = BasicProperties::default()
.with_content_type("application/json".to_string()) // Content type
.with_headers(message_headers) // Headers for the message
.with_delivery_mode(2) // Message must be persistent
.with_reply_to(queue_name_response.to_string()) // Response queue
.with_correlation_id(event_name.clone().to_string()); // Event name
publish_channel
.basic_publish(
&endpoint.get_request_exchange(),
&endpoint.get_routing_key(),
message["content"].dump().as_bytes().to_vec(),
publish_message_options,
basic_properties
)
.map(move |confirmation| {
match confirmation {
Some(_) => info!("Publish message got confirmation."),
None => warn!("Request wasn't delivered."),
};
(publish_channel, consume_channel, queue, options)
})
})
// 4. Consume a response message from the queue, that was declared on the 1st step
.and_then(move |(publish_channel, consume_channel, queue, options)| {
consume_channel
.basic_consume(
&queue,
"response_consumer",
BasicConsumeOptions::default(),
FieldTable::new()
)
.and_then(move |stream| {
stream
.take(1)
.into_future()
.map_err(|(err, _)| err)
.map(move |(message, _)| (publish_channel, consume_channel, queue, message.unwrap(), options))
})
})
// 5. Prepare a response for a client, serialize and sent via WebSocket transmitter
.and_then(move |(publish_channel, consume_channel, queue, message, options)| {
let raw_data = from_utf8(&message.data).unwrap();
let json = Arc::new(Box::new(json_parse(raw_data).unwrap()));
let serializer = Serializer::new();
let response = serializer.serialize(json.dump()).unwrap();
let transmitter_local = transmitter.clone();
transmitter_local.unbounded_send(response).unwrap_or(());
consume_channel
.basic_ack(message.delivery_tag, false)
.map(move |_confirmation| (publish_channel, consume_channel, queue, options))
})
// 6. Unbind the response queue from the exchange point
.and_then(move |(publish_channel, consume_channel, _queue, options)| {
let queue_name = options.get_queue_name().unwrap().clone();
let routing_key = options.get_queue_name().unwrap().clone();
let endpoint = options.get_endpoint().unwrap().clone();
consume_channel
.queue_unbind(
&queue_name,
&endpoint.get_response_exchange(),
&routing_key,
QueueUnbindOptions::default(),
FieldTable::new(),
)
.map(move |_| (publish_channel, consume_channel, options))
})
// 7. Delete the response queue
.and_then(move |(_publish_channel, consume_channel, options)| {
let queue_delete_options = QueueDeleteOptions {
if_unused: false,
if_empty: false,
..Default::default()
};
let queue_name = options.get_queue_name().unwrap().clone();
consume_channel
.queue_delete(&queue_name, queue_delete_options)
.map(move |_| ())
})
// 8. Returns the result to the caller as future
.then(move |result| match result {
Ok(_) => Ok(()),
Err(err) => {
error!("Error in RabbitMQ client. Reason: {}", err);
let message = String::from("The request wasn't processed. Please, try once again.");
Err(PathfinderError::MessageBrokerError(message))
}
})
)
}
| 43.889503 | 118 | 0.575906 |
0130586b43042a36925b7400d73ca75d2c0d21ed | 92,979 | //! Compiler intrinsics.
//!
//! The corresponding definitions are in `compiler/rustc_codegen_llvm/src/intrinsic.rs`.
//! The corresponding const implementations are in `compiler/rustc_mir/src/interpret/intrinsics.rs`
//!
//! # Const intrinsics
//!
//! Note: any changes to the constness of intrinsics should be discussed with the language team.
//! This includes changes in the stability of the constness.
//!
//! In order to make an intrinsic usable at compile-time, one needs to copy the implementation
//! from <https://github.com/rust-lang/miri/blob/master/src/shims/intrinsics.rs> to
//! `compiler/rustc_mir/src/interpret/intrinsics.rs` and add a
//! `#[rustc_const_unstable(feature = "foo", issue = "01234")]` to the intrinsic.
//!
//! If an intrinsic is supposed to be used from a `const fn` with a `rustc_const_stable` attribute,
//! the intrinsic's attribute must be `rustc_const_stable`, too. Such a change should not be done
//! without T-lang consultation, because it bakes a feature into the language that cannot be
//! replicated in user code without compiler support.
//!
//! # Volatiles
//!
//! The volatile intrinsics provide operations intended to act on I/O
//! memory, which are guaranteed to not be reordered by the compiler
//! across other volatile intrinsics. See the LLVM documentation on
//! [[volatile]].
//!
//! [volatile]: http://llvm.org/docs/LangRef.html#volatile-memory-accesses
//!
//! # Atomics
//!
//! The atomic intrinsics provide common atomic operations on machine
//! words, with multiple possible memory orderings. They obey the same
//! semantics as C++11. See the LLVM documentation on [[atomics]].
//!
//! [atomics]: http://llvm.org/docs/Atomics.html
//!
//! A quick refresher on memory ordering:
//!
//! * Acquire - a barrier for acquiring a lock. Subsequent reads and writes
//! take place after the barrier.
//! * Release - a barrier for releasing a lock. Preceding reads and writes
//! take place before the barrier.
//! * Sequentially consistent - sequentially consistent operations are
//! guaranteed to happen in order. This is the standard mode for working
//! with atomic types and is equivalent to Java's `volatile`.
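//!
//! As a short illustration of these orderings using the stabilized atomic API
//! (the intrinsics themselves cannot be called from stable code), a simple
//! lock flag might be handled as follows:
//!
//! ```
//! use std::sync::atomic::{AtomicBool, Ordering};
//!
//! let lock = AtomicBool::new(false);
//! // Acquire: succeeds only if the flag is currently `false`; later reads and
//! // writes cannot be reordered before this point.
//! if lock.compare_exchange(false, true, Ordering::Acquire, Ordering::Relaxed).is_ok() {
//!     // ... critical section ...
//!     // Release: earlier reads and writes cannot be reordered after this store.
//!     lock.store(false, Ordering::Release);
//! }
//! ```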
#![unstable(
feature = "core_intrinsics",
reason = "intrinsics are unlikely to ever be stabilized, instead \
they should be used through stabilized interfaces \
in the rest of the standard library",
issue = "none"
)]
#![allow(missing_docs)]
use crate::marker::DiscriminantKind;
use crate::mem;
// These imports are used for simplifying intra-doc links
#[allow(unused_imports)]
#[cfg(all(target_has_atomic = "8", target_has_atomic = "32", target_has_atomic = "ptr"))]
use crate::sync::atomic::{self, AtomicBool, AtomicI32, AtomicIsize, AtomicU32, Ordering};
#[stable(feature = "drop_in_place", since = "1.8.0")]
#[rustc_deprecated(
reason = "no longer an intrinsic - use `ptr::drop_in_place` directly",
since = "1.18.0"
)]
pub use crate::ptr::drop_in_place;
extern "rust-intrinsic" {
// N.B., these intrinsics take raw pointers because they mutate aliased
// memory, which is not valid for either `&` or `&mut`.
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::SeqCst`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::Acquire`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_acq<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::Release`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_rel<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::AcqRel`] as the `success` and [`Ordering::Acquire`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_acqrel<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::Relaxed`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_relaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::SeqCst`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::SeqCst`] as the `success` and [`Ordering::Acquire`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_failacq<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::Acquire`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_acq_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange` method by passing
/// [`Ordering::AcqRel`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange`].
pub fn atomic_cxchg_acqrel_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::SeqCst`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::Acquire`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_acq<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::Release`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_rel<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::AcqRel`] as the `success` and [`Ordering::Acquire`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_acqrel<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::Relaxed`] as both the `success` and `failure` parameters.
/// For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_relaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::SeqCst`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::SeqCst`] as the `success` and [`Ordering::Acquire`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_failacq<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::Acquire`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_acq_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Stores a value if the current value is the same as the `old` value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `compare_exchange_weak` method by passing
/// [`Ordering::AcqRel`] as the `success` and [`Ordering::Relaxed`] as the
/// `failure` parameters. For example, [`AtomicBool::compare_exchange_weak`].
pub fn atomic_cxchgweak_acqrel_failrelaxed<T: Copy>(dst: *mut T, old: T, src: T) -> (T, bool);
/// Loads the current value of the pointer.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `load` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::load`].
pub fn atomic_load<T: Copy>(src: *const T) -> T;
/// Loads the current value of the pointer.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `load` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::load`].
pub fn atomic_load_acq<T: Copy>(src: *const T) -> T;
/// Loads the current value of the pointer.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `load` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::load`].
pub fn atomic_load_relaxed<T: Copy>(src: *const T) -> T;
pub fn atomic_load_unordered<T: Copy>(src: *const T) -> T;
/// Stores the value at the specified memory location.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `store` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::store`].
pub fn atomic_store<T: Copy>(dst: *mut T, val: T);
/// Stores the value at the specified memory location.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `store` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::store`].
pub fn atomic_store_rel<T: Copy>(dst: *mut T, val: T);
/// Stores the value at the specified memory location.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `store` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::store`].
pub fn atomic_store_relaxed<T: Copy>(dst: *mut T, val: T);
pub fn atomic_store_unordered<T: Copy>(dst: *mut T, val: T);
/// Stores the value at the specified memory location, returning the old value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `swap` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::swap`].
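    ///
    /// A quick sketch using the stabilized API:
    ///
    /// ```
    /// use std::sync::atomic::{AtomicBool, Ordering};
    ///
    /// let flag = AtomicBool::new(true);
    /// // `swap` stores the new value and returns the old one.
    /// assert_eq!(flag.swap(false, Ordering::SeqCst), true);
    /// assert_eq!(flag.load(Ordering::SeqCst), false);
    /// ```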
pub fn atomic_xchg<T: Copy>(dst: *mut T, src: T) -> T;
/// Stores the value at the specified memory location, returning the old value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `swap` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::swap`].
pub fn atomic_xchg_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Stores the value at the specified memory location, returning the old value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `swap` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::swap`].
pub fn atomic_xchg_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Stores the value at the specified memory location, returning the old value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `swap` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicBool::swap`].
pub fn atomic_xchg_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Stores the value at the specified memory location, returning the old value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `swap` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::swap`].
pub fn atomic_xchg_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Adds to the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_add` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicIsize::fetch_add`].
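    ///
    /// For illustration, the stabilized method behaves like this:
    ///
    /// ```
    /// use std::sync::atomic::{AtomicIsize, Ordering};
    ///
    /// let counter = AtomicIsize::new(5);
    /// // `fetch_add` returns the previous value.
    /// assert_eq!(counter.fetch_add(10, Ordering::SeqCst), 5);
    /// assert_eq!(counter.load(Ordering::SeqCst), 15);
    /// ```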
pub fn atomic_xadd<T: Copy>(dst: *mut T, src: T) -> T;
/// Adds to the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_add` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicIsize::fetch_add`].
pub fn atomic_xadd_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Adds to the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_add` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicIsize::fetch_add`].
pub fn atomic_xadd_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Adds to the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_add` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicIsize::fetch_add`].
pub fn atomic_xadd_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Adds to the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_add` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicIsize::fetch_add`].
pub fn atomic_xadd_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Subtract from the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_sub` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicIsize::fetch_sub`].
pub fn atomic_xsub<T: Copy>(dst: *mut T, src: T) -> T;
/// Subtract from the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_sub` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicIsize::fetch_sub`].
pub fn atomic_xsub_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Subtract from the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_sub` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicIsize::fetch_sub`].
pub fn atomic_xsub_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Subtract from the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_sub` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicIsize::fetch_sub`].
pub fn atomic_xsub_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Subtract from the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_sub` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicIsize::fetch_sub`].
pub fn atomic_xsub_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise and with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_and` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::fetch_and`].
pub fn atomic_and<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise and with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_and` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::fetch_and`].
pub fn atomic_and_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise and with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_and` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::fetch_and`].
pub fn atomic_and_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise and with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_and` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicBool::fetch_and`].
pub fn atomic_and_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise and with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_and` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::fetch_and`].
pub fn atomic_and_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise nand with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`AtomicBool`] type via the `fetch_nand` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::fetch_nand`].
pub fn atomic_nand<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise nand with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`AtomicBool`] type via the `fetch_nand` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::fetch_nand`].
pub fn atomic_nand_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise nand with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`AtomicBool`] type via the `fetch_nand` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::fetch_nand`].
pub fn atomic_nand_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise nand with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`AtomicBool`] type via the `fetch_nand` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicBool::fetch_nand`].
pub fn atomic_nand_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise nand with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`AtomicBool`] type via the `fetch_nand` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::fetch_nand`].
pub fn atomic_nand_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise or with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_or` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::fetch_or`].
pub fn atomic_or<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise or with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_or` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::fetch_or`].
pub fn atomic_or_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise or with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_or` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::fetch_or`].
pub fn atomic_or_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise or with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_or` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicBool::fetch_or`].
pub fn atomic_or_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise or with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_or` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::fetch_or`].
pub fn atomic_or_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise xor with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_xor` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicBool::fetch_xor`].
pub fn atomic_xor<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise xor with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_xor` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicBool::fetch_xor`].
pub fn atomic_xor_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise xor with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_xor` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicBool::fetch_xor`].
pub fn atomic_xor_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise xor with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_xor` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicBool::fetch_xor`].
pub fn atomic_xor_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Bitwise xor with the current value, returning the previous value.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] types via the `fetch_xor` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicBool::fetch_xor`].
pub fn atomic_xor_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_max` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicI32::fetch_max`].
pub fn atomic_max<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_max` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicI32::fetch_max`].
pub fn atomic_max_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_max` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicI32::fetch_max`].
pub fn atomic_max_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_max` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicI32::fetch_max`].
pub fn atomic_max_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
    /// Maximum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_max` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicI32::fetch_max`].
pub fn atomic_max_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_min` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicI32::fetch_min`].
pub fn atomic_min<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_min` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicI32::fetch_min`].
pub fn atomic_min_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_min` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicI32::fetch_min`].
pub fn atomic_min_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_min` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicI32::fetch_min`].
pub fn atomic_min_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using a signed comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] signed integer types via the `fetch_min` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicI32::fetch_min`].
pub fn atomic_min_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_min` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicU32::fetch_min`].
pub fn atomic_umin<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_min` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicU32::fetch_min`].
pub fn atomic_umin_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_min` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicU32::fetch_min`].
pub fn atomic_umin_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_min` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicU32::fetch_min`].
pub fn atomic_umin_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Minimum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_min` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicU32::fetch_min`].
pub fn atomic_umin_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_max` method by passing
/// [`Ordering::SeqCst`] as the `order`. For example, [`AtomicU32::fetch_max`].
pub fn atomic_umax<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_max` method by passing
/// [`Ordering::Acquire`] as the `order`. For example, [`AtomicU32::fetch_max`].
pub fn atomic_umax_acq<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_max` method by passing
/// [`Ordering::Release`] as the `order`. For example, [`AtomicU32::fetch_max`].
pub fn atomic_umax_rel<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_max` method by passing
/// [`Ordering::AcqRel`] as the `order`. For example, [`AtomicU32::fetch_max`].
pub fn atomic_umax_acqrel<T: Copy>(dst: *mut T, src: T) -> T;
/// Maximum with the current value using an unsigned comparison.
///
/// The stabilized version of this intrinsic is available on the
/// [`atomic`] unsigned integer types via the `fetch_max` method by passing
/// [`Ordering::Relaxed`] as the `order`. For example, [`AtomicU32::fetch_max`].
pub fn atomic_umax_relaxed<T: Copy>(dst: *mut T, src: T) -> T;
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
/// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// The `locality` argument must be a constant integer and is a temporal locality specifier
    /// ranging from (0) - no locality, to (3) - extremely local (keep in cache).
///
/// This intrinsic does not have a stable counterpart.
pub fn prefetch_read_data<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
/// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// The `locality` argument must be a constant integer and is a temporal locality specifier
    /// ranging from (0) - no locality, to (3) - extremely local (keep in cache).
///
/// This intrinsic does not have a stable counterpart.
pub fn prefetch_write_data<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
/// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// The `locality` argument must be a constant integer and is a temporal locality specifier
    /// ranging from (0) - no locality, to (3) - extremely local (keep in cache).
///
/// This intrinsic does not have a stable counterpart.
pub fn prefetch_read_instruction<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
/// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// The `locality` argument must be a constant integer and is a temporal locality specifier
    /// ranging from (0) - no locality, to (3) - extremely local (keep in cache).
///
/// This intrinsic does not have a stable counterpart.
pub fn prefetch_write_instruction<T>(data: *const T, locality: i32);
}
extern "rust-intrinsic" {
/// An atomic fence.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::fence`] by passing [`Ordering::SeqCst`]
/// as the `order`.
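    ///
    /// A minimal sketch of the stabilized counterpart:
    ///
    /// ```
    /// use std::sync::atomic::{fence, Ordering};
    ///
    /// // A full sequentially consistent fence; it only constrains how surrounding
    /// // atomic operations may be reordered, so there is nothing to observe here.
    /// fence(Ordering::SeqCst);
    /// ```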
pub fn atomic_fence();
/// An atomic fence.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::fence`] by passing [`Ordering::Acquire`]
/// as the `order`.
pub fn atomic_fence_acq();
/// An atomic fence.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::fence`] by passing [`Ordering::Release`]
/// as the `order`.
pub fn atomic_fence_rel();
/// An atomic fence.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::fence`] by passing [`Ordering::AcqRel`]
/// as the `order`.
pub fn atomic_fence_acqrel();
/// A compiler-only memory barrier.
///
/// Memory accesses will never be reordered across this barrier by the
/// compiler, but no instructions will be emitted for it. This is
/// appropriate for operations on the same thread that may be preempted,
/// such as when interacting with signal handlers.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::compiler_fence`] by passing [`Ordering::SeqCst`]
/// as the `order`.
pub fn atomic_singlethreadfence();
/// A compiler-only memory barrier.
///
/// Memory accesses will never be reordered across this barrier by the
/// compiler, but no instructions will be emitted for it. This is
/// appropriate for operations on the same thread that may be preempted,
/// such as when interacting with signal handlers.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::compiler_fence`] by passing [`Ordering::Acquire`]
/// as the `order`.
pub fn atomic_singlethreadfence_acq();
/// A compiler-only memory barrier.
///
/// Memory accesses will never be reordered across this barrier by the
/// compiler, but no instructions will be emitted for it. This is
/// appropriate for operations on the same thread that may be preempted,
/// such as when interacting with signal handlers.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::compiler_fence`] by passing [`Ordering::Release`]
/// as the `order`.
pub fn atomic_singlethreadfence_rel();
/// A compiler-only memory barrier.
///
/// Memory accesses will never be reordered across this barrier by the
/// compiler, but no instructions will be emitted for it. This is
/// appropriate for operations on the same thread that may be preempted,
/// such as when interacting with signal handlers.
///
/// The stabilized version of this intrinsic is available in
/// [`atomic::compiler_fence`] by passing [`Ordering::AcqRel`]
/// as the `order`.
pub fn atomic_singlethreadfence_acqrel();
/// Magic intrinsic that derives its meaning from attributes
/// attached to the function.
///
/// For example, dataflow uses this to inject static assertions so
/// that `rustc_peek(potentially_uninitialized)` would actually
/// double-check that dataflow did indeed compute that it is
/// uninitialized at that point in the control flow.
///
/// This intrinsic should not be used outside of the compiler.
pub fn rustc_peek<T>(_: T) -> T;
/// Aborts the execution of the process.
///
/// A more user-friendly and stable version of this operation is
/// [`std::process::abort`](../../std/process/fn.abort.html).
pub fn abort() -> !;
/// Informs the optimizer that this point in the code is not reachable,
/// enabling further optimizations.
///
/// N.B., this is very different from the `unreachable!()` macro: Unlike the
/// macro, which panics when it is executed, it is *undefined behavior* to
/// reach code marked with this function.
///
/// The stabilized version of this intrinsic is [`core::hint::unreachable_unchecked`](crate::hint::unreachable_unchecked).
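    ///
    /// A minimal sketch using the stabilized counterpart:
    ///
    /// ```
    /// let v = Some(5u32);
    /// let value = match v {
    ///     Some(x) => x,
    ///     // SAFETY: `v` was just constructed as `Some`, so this arm cannot be reached.
    ///     None => unsafe { std::hint::unreachable_unchecked() },
    /// };
    /// assert_eq!(value, 5);
    /// ```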
#[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
pub fn unreachable() -> !;
/// Informs the optimizer that a condition is always true.
/// If the condition is false, the behavior is undefined.
///
/// No code is generated for this intrinsic, but the optimizer will try
/// to preserve it (and its condition) between passes, which may interfere
/// with optimization of surrounding code and reduce performance. It should
/// not be used if the invariant can be discovered by the optimizer on its
/// own, or if it does not enable any significant optimizations.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_assume", issue = "76972")]
pub fn assume(b: bool);
    /// Hints to the compiler that the branch condition is likely to be true.
/// Returns the value passed to it.
///
/// Any use other than with `if` statements will probably not have an effect.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_likely", issue = "none")]
pub fn likely(b: bool) -> bool;
    /// Hints to the compiler that the branch condition is likely to be false.
/// Returns the value passed to it.
///
/// Any use other than with `if` statements will probably not have an effect.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_likely", issue = "none")]
pub fn unlikely(b: bool) -> bool;
/// Executes a breakpoint trap, for inspection by a debugger.
///
/// This intrinsic does not have a stable counterpart.
pub fn breakpoint();
/// The size of a type in bytes.
///
/// More specifically, this is the offset in bytes between successive
/// items of the same type, including alignment padding.
///
/// The stabilized version of this intrinsic is [`core::mem::size_of`](crate::mem::size_of).
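    ///
    /// A quick sketch using the stabilized function, with an illustrative `repr(C)` struct:
    ///
    /// ```
    /// assert_eq!(std::mem::size_of::<u32>(), 4);
    ///
    /// #[repr(C)]
    /// struct Pair {
    ///     a: u8,
    ///     b: u32,
    /// }
    /// // With `repr(C)`, `b` is aligned to 4 bytes, so 3 bytes of padding follow `a`.
    /// assert_eq!(std::mem::size_of::<Pair>(), 8);
    /// ```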
#[rustc_const_stable(feature = "const_size_of", since = "1.40.0")]
pub fn size_of<T>() -> usize;
/// Moves a value to an uninitialized memory location.
///
/// Drop glue is not run on the destination.
///
/// The stabilized version of this intrinsic is [`core::ptr::write`](crate::ptr::write).
pub fn move_val_init<T>(dst: *mut T, src: T);
/// The minimum alignment of a type.
///
/// The stabilized version of this intrinsic is [`core::mem::align_of`](crate::mem::align_of).
#[rustc_const_stable(feature = "const_min_align_of", since = "1.40.0")]
pub fn min_align_of<T>() -> usize;
/// The preferred alignment of a type.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_pref_align_of", issue = "none")]
pub fn pref_align_of<T>() -> usize;
/// The size of the referenced value in bytes.
///
/// The stabilized version of this intrinsic is [`mem::size_of_val`].
#[rustc_const_unstable(feature = "const_size_of_val", issue = "46571")]
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
/// The required alignment of the referenced value.
///
/// The stabilized version of this intrinsic is [`core::mem::align_of_val`](crate::mem::align_of_val).
#[rustc_const_unstable(feature = "const_align_of_val", issue = "46571")]
pub fn min_align_of_val<T: ?Sized>(_: *const T) -> usize;
/// Gets a static string slice containing the name of a type.
///
/// The stabilized version of this intrinsic is [`core::any::type_name`](crate::any::type_name).
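    ///
    /// A minimal sketch of the stabilized counterpart (the exact string is unspecified):
    ///
    /// ```
    /// // The precise formatting is not guaranteed, so only check for a substring.
    /// assert!(std::any::type_name::<Option<String>>().contains("Option"));
    /// ```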
#[rustc_const_unstable(feature = "const_type_name", issue = "63084")]
pub fn type_name<T: ?Sized>() -> &'static str;
/// Gets an identifier which is globally unique to the specified type. This
/// function will return the same value for a type regardless of whichever
/// crate it is invoked in.
///
/// The stabilized version of this intrinsic is [`core::any::TypeId::of`](crate::any::TypeId::of).
#[rustc_const_unstable(feature = "const_type_id", issue = "77125")]
pub fn type_id<T: ?Sized + 'static>() -> u64;
/// A guard for unsafe functions that cannot ever be executed if `T` is uninhabited:
    /// This will statically either panic or do nothing.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_assert_type", issue = "none")]
pub fn assert_inhabited<T>();
/// A guard for unsafe functions that cannot ever be executed if `T` does not permit
    /// zero-initialization: This will statically either panic or do nothing.
///
/// This intrinsic does not have a stable counterpart.
pub fn assert_zero_valid<T>();
/// A guard for unsafe functions that cannot ever be executed if `T` has invalid
    /// bit patterns: This will statically either panic or do nothing.
///
/// This intrinsic does not have a stable counterpart.
pub fn assert_uninit_valid<T>();
/// Gets a reference to a static `Location` indicating where it was called.
///
/// Consider using [`core::panic::Location::caller`](crate::panic::Location::caller) instead.
#[rustc_const_unstable(feature = "const_caller_location", issue = "76156")]
pub fn caller_location() -> &'static crate::panic::Location<'static>;
/// Moves a value out of scope without running drop glue.
///
/// This exists solely for [`mem::forget_unsized`]; normal `forget` uses
/// `ManuallyDrop` instead.
pub fn forget<T: ?Sized>(_: T);
/// Reinterprets the bits of a value of one type as another type.
///
    /// Both types must have the same size. Neither the original nor the result
    /// may be an [invalid value](../../nomicon/what-unsafe-does.html).
///
/// `transmute` is semantically equivalent to a bitwise move of one type
/// into another. It copies the bits from the source value into the
/// destination value, then forgets the original. It's equivalent to C's
/// `memcpy` under the hood, just like `transmute_copy`.
///
/// `transmute` is **incredibly** unsafe. There are a vast number of ways to
/// cause [undefined behavior][ub] with this function. `transmute` should be
/// the absolute last resort.
///
/// The [nomicon](../../nomicon/transmutes.html) has additional
/// documentation.
///
/// [ub]: ../../reference/behavior-considered-undefined.html
///
/// # Examples
///
/// There are a few things that `transmute` is really useful for.
///
/// Turning a pointer into a function pointer. This is *not* portable to
/// machines where function pointers and data pointers have different sizes.
///
/// ```
/// fn foo() -> i32 {
/// 0
/// }
/// let pointer = foo as *const ();
/// let function = unsafe {
/// std::mem::transmute::<*const (), fn() -> i32>(pointer)
/// };
/// assert_eq!(function(), 0);
/// ```
///
/// Extending a lifetime, or shortening an invariant lifetime. This is
/// advanced, very unsafe Rust!
///
/// ```
/// struct R<'a>(&'a i32);
/// unsafe fn extend_lifetime<'b>(r: R<'b>) -> R<'static> {
/// std::mem::transmute::<R<'b>, R<'static>>(r)
/// }
///
/// unsafe fn shorten_invariant_lifetime<'b, 'c>(r: &'b mut R<'static>)
/// -> &'b mut R<'c> {
/// std::mem::transmute::<&'b mut R<'static>, &'b mut R<'c>>(r)
/// }
/// ```
///
/// # Alternatives
///
/// Don't despair: many uses of `transmute` can be achieved through other means.
/// Below are common applications of `transmute` which can be replaced with safer
/// constructs.
///
    /// Turning raw bytes (`&[u8]`) into a `u32`, `f64`, etc.:
///
/// ```
/// let raw_bytes = [0x78, 0x56, 0x34, 0x12];
///
/// let num = unsafe {
/// std::mem::transmute::<[u8; 4], u32>(raw_bytes)
/// };
///
/// // use `u32::from_ne_bytes` instead
/// let num = u32::from_ne_bytes(raw_bytes);
/// // or use `u32::from_le_bytes` or `u32::from_be_bytes` to specify the endianness
/// let num = u32::from_le_bytes(raw_bytes);
/// assert_eq!(num, 0x12345678);
/// let num = u32::from_be_bytes(raw_bytes);
/// assert_eq!(num, 0x78563412);
/// ```
///
/// Turning a pointer into a `usize`:
///
/// ```
/// let ptr = &0;
/// let ptr_num_transmute = unsafe {
/// std::mem::transmute::<&i32, usize>(ptr)
/// };
///
/// // Use an `as` cast instead
/// let ptr_num_cast = ptr as *const i32 as usize;
/// ```
///
/// Turning a `*mut T` into an `&mut T`:
///
/// ```
/// let ptr: *mut i32 = &mut 0;
/// let ref_transmuted = unsafe {
/// std::mem::transmute::<*mut i32, &mut i32>(ptr)
/// };
///
/// // Use a reborrow instead
/// let ref_casted = unsafe { &mut *ptr };
/// ```
///
/// Turning an `&mut T` into an `&mut U`:
///
/// ```
/// let ptr = &mut 0;
/// let val_transmuted = unsafe {
/// std::mem::transmute::<&mut i32, &mut u32>(ptr)
/// };
///
    /// // Now, put together `as` and reborrowing - note the chaining of `as`
    /// // (`as` is not transitive)
/// let val_casts = unsafe { &mut *(ptr as *mut i32 as *mut u32) };
/// ```
///
/// Turning an `&str` into an `&[u8]`:
///
/// ```
/// // this is not a good way to do this.
/// let slice = unsafe { std::mem::transmute::<&str, &[u8]>("Rust") };
/// assert_eq!(slice, &[82, 117, 115, 116]);
///
/// // You could use `str::as_bytes`
/// let slice = "Rust".as_bytes();
/// assert_eq!(slice, &[82, 117, 115, 116]);
///
/// // Or, just use a byte string, if you have control over the string
/// // literal
/// assert_eq!(b"Rust", &[82, 117, 115, 116]);
/// ```
///
/// Turning a `Vec<&T>` into a `Vec<Option<&T>>`:
///
/// ```
/// let store = [0, 1, 2, 3];
/// let v_orig = store.iter().collect::<Vec<&i32>>();
///
/// // clone the vector as we will reuse them later
/// let v_clone = v_orig.clone();
///
/// // Using transmute: this relies on the unspecified data layout of `Vec`, which is a
/// // bad idea and could cause Undefined Behavior.
/// // However, it is no-copy.
/// let v_transmuted = unsafe {
/// std::mem::transmute::<Vec<&i32>, Vec<Option<&i32>>>(v_clone)
/// };
///
/// let v_clone = v_orig.clone();
///
/// // This is the suggested, safe way.
/// // It does copy the entire vector, though, into a new array.
/// let v_collected = v_clone.into_iter()
/// .map(Some)
/// .collect::<Vec<Option<&i32>>>();
///
/// let v_clone = v_orig.clone();
///
/// // The no-copy, unsafe way, still using transmute, but not relying on the data layout.
/// // Like the first approach, this reuses the `Vec` internals.
/// // Therefore, the new inner type must have the
/// // exact same size, *and the same alignment*, as the old type.
/// // The same caveats exist for this method as transmute, for
/// // the original inner type (`&i32`) to the converted inner type
/// // (`Option<&i32>`), so read the nomicon pages linked above and also
/// // consult the [`from_raw_parts`] documentation.
/// let v_from_raw = unsafe {
// FIXME Update this when vec_into_raw_parts is stabilized
/// // Ensure the original vector is not dropped.
/// let mut v_clone = std::mem::ManuallyDrop::new(v_clone);
/// Vec::from_raw_parts(v_clone.as_mut_ptr() as *mut Option<&i32>,
/// v_clone.len(),
/// v_clone.capacity())
/// };
/// ```
///
/// [`from_raw_parts`]: ../../std/vec/struct.Vec.html#method.from_raw_parts
///
/// Implementing `split_at_mut`:
///
/// ```
/// use std::{slice, mem};
///
/// // There are multiple ways to do this, and there are multiple problems
/// // with the following (transmute) way.
/// fn split_at_mut_transmute<T>(slice: &mut [T], mid: usize)
/// -> (&mut [T], &mut [T]) {
/// let len = slice.len();
/// assert!(mid <= len);
/// unsafe {
/// let slice2 = mem::transmute::<&mut [T], &mut [T]>(slice);
    ///         // First, transmute is not type safe; all it checks is that T and
/// // U are of the same size. Second, right here, you have two
/// // mutable references pointing to the same memory.
/// (&mut slice[0..mid], &mut slice2[mid..len])
/// }
/// }
///
/// // This gets rid of the type safety problems; `&mut *` will *only* give
/// // you an `&mut T` from an `&mut T` or `*mut T`.
/// fn split_at_mut_casts<T>(slice: &mut [T], mid: usize)
/// -> (&mut [T], &mut [T]) {
/// let len = slice.len();
/// assert!(mid <= len);
/// unsafe {
/// let slice2 = &mut *(slice as *mut [T]);
/// // however, you still have two mutable references pointing to
/// // the same memory.
/// (&mut slice[0..mid], &mut slice2[mid..len])
/// }
/// }
///
/// // This is how the standard library does it. This is the best method, if
/// // you need to do something like this
/// fn split_at_stdlib<T>(slice: &mut [T], mid: usize)
/// -> (&mut [T], &mut [T]) {
/// let len = slice.len();
/// assert!(mid <= len);
/// unsafe {
/// let ptr = slice.as_mut_ptr();
/// // This now has three mutable references pointing at the same
/// // memory. `slice`, the rvalue ret.0, and the rvalue ret.1.
/// // `slice` is never used after `let ptr = ...`, and so one can
/// // treat it as "dead", and therefore, you only have two real
/// // mutable slices.
/// (slice::from_raw_parts_mut(ptr, mid),
/// slice::from_raw_parts_mut(ptr.add(mid), len - mid))
/// }
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
// NOTE: While this makes the intrinsic const stable, we have some custom code in const fn
// checks that prevent its use within `const fn`.
#[rustc_const_stable(feature = "const_transmute", since = "1.46.0")]
#[rustc_diagnostic_item = "transmute"]
pub fn transmute<T, U>(e: T) -> U;
/// Returns `true` if the actual type given as `T` requires drop
/// glue; returns `false` if the actual type provided for `T`
/// implements `Copy`.
///
/// If the actual type neither requires drop glue nor implements
/// `Copy`, then the return value of this function is unspecified.
///
/// The stabilized version of this intrinsic is [`mem::needs_drop`](crate::mem::needs_drop).
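    ///
    /// A quick sketch using the stabilized function:
    ///
    /// ```
    /// // `String` owns heap memory and has drop glue; `u32` is `Copy` and does not.
    /// assert!(std::mem::needs_drop::<String>());
    /// assert!(!std::mem::needs_drop::<u32>());
    /// ```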
#[rustc_const_stable(feature = "const_needs_drop", since = "1.40.0")]
pub fn needs_drop<T>() -> bool;
/// Calculates the offset from a pointer.
///
/// This is implemented as an intrinsic to avoid converting to and from an
/// integer, since the conversion would throw away aliasing information.
///
/// # Safety
///
/// Both the starting and resulting pointer must be either in bounds or one
/// byte past the end of an allocated object. If either pointer is out of
/// bounds or arithmetic overflow occurs then any further use of the
/// returned value will result in undefined behavior.
///
/// The stabilized version of this intrinsic is
/// [`std::pointer::offset`](../../std/primitive.pointer.html#method.offset).
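    ///
    /// A minimal sketch of the stabilized counterpart:
    ///
    /// ```
    /// let a = [1u8, 2, 3];
    /// let p = a.as_ptr();
    /// // SAFETY: the offset stays within the bounds of `a`.
    /// unsafe {
    ///     assert_eq!(*p.offset(2), 3);
    /// }
    /// ```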
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub fn offset<T>(dst: *const T, offset: isize) -> *const T;
/// Calculates the offset from a pointer, potentially wrapping.
///
/// This is implemented as an intrinsic to avoid converting to and from an
/// integer, since the conversion inhibits certain optimizations.
///
/// # Safety
///
/// Unlike the `offset` intrinsic, this intrinsic does not restrict the
/// resulting pointer to point into or one byte past the end of an allocated
/// object, and it wraps with two's complement arithmetic. The resulting
/// value is not necessarily valid to be used to actually access memory.
///
/// The stabilized version of this intrinsic is
/// [`std::pointer::wrapping_offset`](../../std/primitive.pointer.html#method.wrapping_offset).
#[must_use = "returns a new pointer rather than modifying its argument"]
#[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
    /// Equivalent to the appropriate `llvm.memcpy.p0i8.p0i8.*` intrinsic, with
    /// a size of `count * size_of::<T>()` and an alignment of
    /// `min_align_of::<T>()`.
///
/// The volatile parameter is set to `true`, so it will not be optimized out
/// unless size is equal to zero.
///
/// This intrinsic does not have a stable counterpart.
pub fn volatile_copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: usize);
    /// Equivalent to the appropriate `llvm.memmove.p0i8.p0i8.*` intrinsic, with
    /// a size of `count * size_of::<T>()` and an alignment of
    /// `min_align_of::<T>()`.
///
/// The volatile parameter is set to `true`, so it will not be optimized out
/// unless size is equal to zero.
///
/// This intrinsic does not have a stable counterpart.
pub fn volatile_copy_memory<T>(dst: *mut T, src: *const T, count: usize);
/// Equivalent to the appropriate `llvm.memset.p0i8.*` intrinsic, with a
/// size of `count * size_of::<T>()` and an alignment of
/// `min_align_of::<T>()`.
///
/// The volatile parameter is set to `true`, so it will not be optimized out
/// unless size is equal to zero.
///
/// This intrinsic does not have a stable counterpart.
pub fn volatile_set_memory<T>(dst: *mut T, val: u8, count: usize);
/// Performs a volatile load from the `src` pointer.
///
/// The stabilized version of this intrinsic is [`core::ptr::read_volatile`](crate::ptr::read_volatile).
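    ///
    /// A minimal sketch of the stabilized counterpart:
    ///
    /// ```
    /// let value = 42u32;
    /// // SAFETY: `&value` is a valid, properly aligned pointer to an initialized `u32`.
    /// let read = unsafe { std::ptr::read_volatile(&value) };
    /// assert_eq!(read, 42);
    /// ```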
pub fn volatile_load<T>(src: *const T) -> T;
/// Performs a volatile store to the `dst` pointer.
///
/// The stabilized version of this intrinsic is [`core::ptr::write_volatile`](crate::ptr::write_volatile).
pub fn volatile_store<T>(dst: *mut T, val: T);
    /// Performs a volatile load from the `src` pointer.
/// The pointer is not required to be aligned.
///
/// This intrinsic does not have a stable counterpart.
pub fn unaligned_volatile_load<T>(src: *const T) -> T;
/// Performs a volatile store to the `dst` pointer.
/// The pointer is not required to be aligned.
///
/// This intrinsic does not have a stable counterpart.
pub fn unaligned_volatile_store<T>(dst: *mut T, val: T);
    /// Returns the square root of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::sqrt`](../../std/primitive.f32.html#method.sqrt)
pub fn sqrtf32(x: f32) -> f32;
    /// Returns the square root of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::sqrt`](../../std/primitive.f64.html#method.sqrt)
pub fn sqrtf64(x: f64) -> f64;
/// Raises an `f32` to an integer power.
///
/// The stabilized version of this intrinsic is
/// [`f32::powi`](../../std/primitive.f32.html#method.powi)
pub fn powif32(a: f32, x: i32) -> f32;
/// Raises an `f64` to an integer power.
///
/// The stabilized version of this intrinsic is
/// [`f64::powi`](../../std/primitive.f64.html#method.powi)
pub fn powif64(a: f64, x: i32) -> f64;
/// Returns the sine of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::sin`](../../std/primitive.f32.html#method.sin)
pub fn sinf32(x: f32) -> f32;
/// Returns the sine of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::sin`](../../std/primitive.f64.html#method.sin)
pub fn sinf64(x: f64) -> f64;
/// Returns the cosine of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::cos`](../../std/primitive.f32.html#method.cos)
pub fn cosf32(x: f32) -> f32;
/// Returns the cosine of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::cos`](../../std/primitive.f64.html#method.cos)
pub fn cosf64(x: f64) -> f64;
/// Raises an `f32` to an `f32` power.
///
/// The stabilized version of this intrinsic is
/// [`f32::powf`](../../std/primitive.f32.html#method.powf)
pub fn powf32(a: f32, x: f32) -> f32;
/// Raises an `f64` to an `f64` power.
///
/// The stabilized version of this intrinsic is
/// [`f64::powf`](../../std/primitive.f64.html#method.powf)
pub fn powf64(a: f64, x: f64) -> f64;
/// Returns the exponential of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::exp`](../../std/primitive.f32.html#method.exp)
pub fn expf32(x: f32) -> f32;
/// Returns the exponential of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::exp`](../../std/primitive.f64.html#method.exp)
pub fn expf64(x: f64) -> f64;
/// Returns 2 raised to the power of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::exp2`](../../std/primitive.f32.html#method.exp2)
pub fn exp2f32(x: f32) -> f32;
/// Returns 2 raised to the power of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::exp2`](../../std/primitive.f64.html#method.exp2)
pub fn exp2f64(x: f64) -> f64;
/// Returns the natural logarithm of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::ln`](../../std/primitive.f32.html#method.ln)
pub fn logf32(x: f32) -> f32;
/// Returns the natural logarithm of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::ln`](../../std/primitive.f64.html#method.ln)
pub fn logf64(x: f64) -> f64;
/// Returns the base 10 logarithm of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::log10`](../../std/primitive.f32.html#method.log10)
pub fn log10f32(x: f32) -> f32;
/// Returns the base 10 logarithm of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::log10`](../../std/primitive.f64.html#method.log10)
pub fn log10f64(x: f64) -> f64;
/// Returns the base 2 logarithm of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::log2`](../../std/primitive.f32.html#method.log2)
pub fn log2f32(x: f32) -> f32;
/// Returns the base 2 logarithm of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::log2`](../../std/primitive.f64.html#method.log2)
pub fn log2f64(x: f64) -> f64;
/// Returns `a * b + c` for `f32` values.
///
/// The stabilized version of this intrinsic is
/// [`f32::mul_add`](../../std/primitive.f32.html#method.mul_add)
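    ///
    /// A quick sketch using the stabilized method:
    ///
    /// ```
    /// let m = 10.0_f32;
    /// // Computes `m * 4.0 + 60.0` with a single rounding step.
    /// assert_eq!(m.mul_add(4.0, 60.0), 100.0);
    /// ```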
pub fn fmaf32(a: f32, b: f32, c: f32) -> f32;
/// Returns `a * b + c` for `f64` values.
///
/// The stabilized version of this intrinsic is
/// [`f64::mul_add`](../../std/primitive.f64.html#method.mul_add)
pub fn fmaf64(a: f64, b: f64, c: f64) -> f64;
/// Returns the absolute value of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::abs`](../../std/primitive.f32.html#method.abs)
pub fn fabsf32(x: f32) -> f32;
/// Returns the absolute value of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::abs`](../../std/primitive.f64.html#method.abs)
pub fn fabsf64(x: f64) -> f64;
/// Returns the minimum of two `f32` values.
///
/// The stabilized version of this intrinsic is
/// [`f32::min`]
pub fn minnumf32(x: f32, y: f32) -> f32;
/// Returns the minimum of two `f64` values.
///
/// The stabilized version of this intrinsic is
/// [`f64::min`]
pub fn minnumf64(x: f64, y: f64) -> f64;
/// Returns the maximum of two `f32` values.
///
/// The stabilized version of this intrinsic is
/// [`f32::max`]
pub fn maxnumf32(x: f32, y: f32) -> f32;
/// Returns the maximum of two `f64` values.
///
/// The stabilized version of this intrinsic is
/// [`f64::max`]
pub fn maxnumf64(x: f64, y: f64) -> f64;
/// Copies the sign from `y` to `x` for `f32` values.
///
/// The stabilized version of this intrinsic is
/// [`f32::copysign`](../../std/primitive.f32.html#method.copysign)
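    ///
    /// A minimal sketch of the stabilized counterpart:
    ///
    /// ```
    /// // The magnitude comes from `self`, the sign from the argument.
    /// assert_eq!(3.5_f32.copysign(-1.0), -3.5);
    /// assert_eq!((-3.5_f32).copysign(1.0), 3.5);
    /// ```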
pub fn copysignf32(x: f32, y: f32) -> f32;
/// Copies the sign from `y` to `x` for `f64` values.
///
/// The stabilized version of this intrinsic is
/// [`f64::copysign`](../../std/primitive.f64.html#method.copysign)
pub fn copysignf64(x: f64, y: f64) -> f64;
/// Returns the largest integer less than or equal to an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::floor`](../../std/primitive.f32.html#method.floor)
pub fn floorf32(x: f32) -> f32;
/// Returns the largest integer less than or equal to an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::floor`](../../std/primitive.f64.html#method.floor)
pub fn floorf64(x: f64) -> f64;
/// Returns the smallest integer greater than or equal to an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::ceil`](../../std/primitive.f32.html#method.ceil)
pub fn ceilf32(x: f32) -> f32;
/// Returns the smallest integer greater than or equal to an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::ceil`](../../std/primitive.f64.html#method.ceil)
pub fn ceilf64(x: f64) -> f64;
/// Returns the integer part of an `f32`.
///
/// The stabilized version of this intrinsic is
/// [`f32::trunc`](../../std/primitive.f32.html#method.trunc)
pub fn truncf32(x: f32) -> f32;
/// Returns the integer part of an `f64`.
///
/// The stabilized version of this intrinsic is
/// [`f64::trunc`](../../std/primitive.f64.html#method.trunc)
pub fn truncf64(x: f64) -> f64;
/// Returns the nearest integer to an `f32`. May raise an inexact floating-point exception
/// if the argument is not an integer.
pub fn rintf32(x: f32) -> f32;
/// Returns the nearest integer to an `f64`. May raise an inexact floating-point exception
/// if the argument is not an integer.
pub fn rintf64(x: f64) -> f64;
/// Returns the nearest integer to an `f32`.
///
/// This intrinsic does not have a stable counterpart.
pub fn nearbyintf32(x: f32) -> f32;
/// Returns the nearest integer to an `f64`.
///
/// This intrinsic does not have a stable counterpart.
pub fn nearbyintf64(x: f64) -> f64;
/// Returns the nearest integer to an `f32`. Rounds half-way cases away from zero.
///
/// The stabilized version of this intrinsic is
/// [`f32::round`](../../std/primitive.f32.html#method.round)
pub fn roundf32(x: f32) -> f32;
/// Returns the nearest integer to an `f64`. Rounds half-way cases away from zero.
///
/// The stabilized version of this intrinsic is
/// [`f64::round`](../../std/primitive.f64.html#method.round)
pub fn roundf64(x: f64) -> f64;
/// Float addition that allows optimizations based on algebraic rules.
/// May assume inputs are finite.
///
/// This intrinsic does not have a stable counterpart.
pub fn fadd_fast<T: Copy>(a: T, b: T) -> T;
/// Float subtraction that allows optimizations based on algebraic rules.
/// May assume inputs are finite.
///
/// This intrinsic does not have a stable counterpart.
pub fn fsub_fast<T: Copy>(a: T, b: T) -> T;
/// Float multiplication that allows optimizations based on algebraic rules.
/// May assume inputs are finite.
///
/// This intrinsic does not have a stable counterpart.
pub fn fmul_fast<T: Copy>(a: T, b: T) -> T;
/// Float division that allows optimizations based on algebraic rules.
/// May assume inputs are finite.
///
/// This intrinsic does not have a stable counterpart.
pub fn fdiv_fast<T: Copy>(a: T, b: T) -> T;
/// Float remainder that allows optimizations based on algebraic rules.
/// May assume inputs are finite.
///
/// This intrinsic does not have a stable counterpart.
pub fn frem_fast<T: Copy>(a: T, b: T) -> T;
/// Convert with LLVM’s fptoui/fptosi, which may return undef for values out of range
/// (<https://github.com/rust-lang/rust/issues/10184>)
///
/// Stabilized as [`f32::to_int_unchecked`] and [`f64::to_int_unchecked`].
pub fn float_to_int_unchecked<Float: Copy, Int: Copy>(value: Float) -> Int;
    /// Returns the number of bits set in an integer type `T`.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `count_ones` method. For example,
/// [`u32::count_ones`]
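    ///
    /// # Examples
    ///
    /// An illustrative example, in the same style as the `ctlz` example below:
    ///
    /// ```
    /// #![feature(core_intrinsics)]
    ///
    /// use std::intrinsics::ctpop;
    ///
    /// let x = 0b0101_1100_u8;
    /// let num_set_bits = ctpop(x);
    /// assert_eq!(num_set_bits, 4);
    /// ```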
#[rustc_const_stable(feature = "const_ctpop", since = "1.40.0")]
pub fn ctpop<T: Copy>(x: T) -> T;
/// Returns the number of leading unset bits (zeroes) in an integer type `T`.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `leading_zeros` method. For example,
/// [`u32::leading_zeros`]
///
/// # Examples
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::ctlz;
///
/// let x = 0b0001_1100_u8;
/// let num_leading = ctlz(x);
/// assert_eq!(num_leading, 3);
/// ```
///
/// An `x` with value `0` will return the bit width of `T`.
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::ctlz;
///
/// let x = 0u16;
/// let num_leading = ctlz(x);
/// assert_eq!(num_leading, 16);
/// ```
#[rustc_const_stable(feature = "const_ctlz", since = "1.40.0")]
pub fn ctlz<T: Copy>(x: T) -> T;
/// Like `ctlz`, but extra-unsafe as it returns `undef` when
/// given an `x` with value `0`.
///
/// This intrinsic does not have a stable counterpart.
///
/// # Examples
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::ctlz_nonzero;
///
/// let x = 0b0001_1100_u8;
/// let num_leading = unsafe { ctlz_nonzero(x) };
/// assert_eq!(num_leading, 3);
/// ```
#[rustc_const_stable(feature = "constctlz", since = "1.50.0")]
pub fn ctlz_nonzero<T: Copy>(x: T) -> T;
/// Returns the number of trailing unset bits (zeroes) in an integer type `T`.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `trailing_zeros` method. For example,
/// [`u32::trailing_zeros`]
///
/// # Examples
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::cttz;
///
/// let x = 0b0011_1000_u8;
/// let num_trailing = cttz(x);
/// assert_eq!(num_trailing, 3);
/// ```
///
/// An `x` with value `0` will return the bit width of `T`:
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::cttz;
///
/// let x = 0u16;
/// let num_trailing = cttz(x);
/// assert_eq!(num_trailing, 16);
/// ```
#[rustc_const_stable(feature = "const_cttz", since = "1.40.0")]
pub fn cttz<T: Copy>(x: T) -> T;
/// Like `cttz`, but extra-unsafe as it returns `undef` when
/// given an `x` with value `0`.
///
/// This intrinsic does not have a stable counterpart.
///
/// # Examples
///
/// ```
/// #![feature(core_intrinsics)]
///
/// use std::intrinsics::cttz_nonzero;
///
/// let x = 0b0011_1000_u8;
/// let num_trailing = unsafe { cttz_nonzero(x) };
/// assert_eq!(num_trailing, 3);
/// ```
#[rustc_const_unstable(feature = "const_cttz", issue = "none")]
pub fn cttz_nonzero<T: Copy>(x: T) -> T;
/// Reverses the bytes in an integer type `T`.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `swap_bytes` method. For example,
/// [`u32::swap_bytes`]
#[rustc_const_stable(feature = "const_bswap", since = "1.40.0")]
pub fn bswap<T: Copy>(x: T) -> T;
/// Reverses the bits in an integer type `T`.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `reverse_bits` method. For example,
/// [`u32::reverse_bits`]
#[rustc_const_stable(feature = "const_bitreverse", since = "1.40.0")]
pub fn bitreverse<T: Copy>(x: T) -> T;
/// Performs checked integer addition.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `overflowing_add` method. For example,
/// [`u32::overflowing_add`]
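    ///
    /// # Examples
    ///
    /// An illustrative example using the stabilized method referenced above:
    ///
    /// ```
    /// assert_eq!(u8::MAX.overflowing_add(1), (0, true));
    /// assert_eq!(5u8.overflowing_add(1), (6, false));
    /// ```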
#[rustc_const_stable(feature = "const_int_overflow", since = "1.40.0")]
pub fn add_with_overflow<T: Copy>(x: T, y: T) -> (T, bool);
    /// Performs checked integer subtraction.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `overflowing_sub` method. For example,
/// [`u32::overflowing_sub`]
#[rustc_const_stable(feature = "const_int_overflow", since = "1.40.0")]
pub fn sub_with_overflow<T: Copy>(x: T, y: T) -> (T, bool);
    /// Performs checked integer multiplication.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `overflowing_mul` method. For example,
/// [`u32::overflowing_mul`]
#[rustc_const_stable(feature = "const_int_overflow", since = "1.40.0")]
pub fn mul_with_overflow<T: Copy>(x: T, y: T) -> (T, bool);
/// Performs an exact division, resulting in undefined behavior where
/// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`
///
/// This intrinsic does not have a stable counterpart.
pub fn exact_div<T: Copy>(x: T, y: T) -> T;
/// Performs an unchecked division, resulting in undefined behavior
/// where `y == 0` or `x == T::MIN && y == -1`
///
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_div` method. For example,
/// [`u32::checked_div`]
#[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
pub fn unchecked_div<T: Copy>(x: T, y: T) -> T;
/// Returns the remainder of an unchecked division, resulting in
/// undefined behavior when `y == 0` or `x == T::MIN && y == -1`
///
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_rem` method. For example,
/// [`u32::checked_rem`]
#[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
pub fn unchecked_rem<T: Copy>(x: T, y: T) -> T;
/// Performs an unchecked left shift, resulting in undefined behavior when
/// `y < 0` or `y >= N`, where N is the width of T in bits.
///
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_shl` method. For example,
/// [`u32::checked_shl`]
#[rustc_const_stable(feature = "const_int_unchecked", since = "1.40.0")]
pub fn unchecked_shl<T: Copy>(x: T, y: T) -> T;
/// Performs an unchecked right shift, resulting in undefined behavior when
/// `y < 0` or `y >= N`, where N is the width of T in bits.
///
/// Safe wrappers for this intrinsic are available on the integer
/// primitives via the `checked_shr` method. For example,
/// [`u32::checked_shr`]
#[rustc_const_stable(feature = "const_int_unchecked", since = "1.40.0")]
pub fn unchecked_shr<T: Copy>(x: T, y: T) -> T;
/// Returns the result of an unchecked addition, resulting in
/// undefined behavior when `x + y > T::MAX` or `x + y < T::MIN`.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
pub fn unchecked_add<T: Copy>(x: T, y: T) -> T;
/// Returns the result of an unchecked subtraction, resulting in
/// undefined behavior when `x - y > T::MAX` or `x - y < T::MIN`.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
pub fn unchecked_sub<T: Copy>(x: T, y: T) -> T;
/// Returns the result of an unchecked multiplication, resulting in
/// undefined behavior when `x * y > T::MAX` or `x * y < T::MIN`.
///
/// This intrinsic does not have a stable counterpart.
#[rustc_const_unstable(feature = "const_int_unchecked_arith", issue = "none")]
pub fn unchecked_mul<T: Copy>(x: T, y: T) -> T;
/// Performs rotate left.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `rotate_left` method. For example,
/// [`u32::rotate_left`]
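    ///
    /// # Examples
    ///
    /// An illustrative example using the stabilized method referenced above:
    ///
    /// ```
    /// assert_eq!(0b1000_0001u8.rotate_left(1), 0b0000_0011);
    /// ```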
#[rustc_const_stable(feature = "const_int_rotate", since = "1.40.0")]
pub fn rotate_left<T: Copy>(x: T, y: T) -> T;
/// Performs rotate right.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `rotate_right` method. For example,
/// [`u32::rotate_right`]
#[rustc_const_stable(feature = "const_int_rotate", since = "1.40.0")]
pub fn rotate_right<T: Copy>(x: T, y: T) -> T;
/// Returns (a + b) mod 2<sup>N</sup>, where N is the width of T in bits.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `wrapping_add` method. For example,
/// [`u32::wrapping_add`]
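    ///
    /// # Examples
    ///
    /// An illustrative example using the stabilized method referenced above:
    ///
    /// ```
    /// assert_eq!(u8::MAX.wrapping_add(2), 1);
    /// assert_eq!(100u8.wrapping_add(27), 127);
    /// ```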
#[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")]
pub fn wrapping_add<T: Copy>(a: T, b: T) -> T;
/// Returns (a - b) mod 2<sup>N</sup>, where N is the width of T in bits.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `wrapping_sub` method. For example,
/// [`u32::wrapping_sub`]
#[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")]
pub fn wrapping_sub<T: Copy>(a: T, b: T) -> T;
/// Returns (a * b) mod 2<sup>N</sup>, where N is the width of T in bits.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `wrapping_mul` method. For example,
/// [`u32::wrapping_mul`]
#[rustc_const_stable(feature = "const_int_wrapping", since = "1.40.0")]
pub fn wrapping_mul<T: Copy>(a: T, b: T) -> T;
/// Computes `a + b`, saturating at numeric bounds.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `saturating_add` method. For example,
/// [`u32::saturating_add`]
#[rustc_const_stable(feature = "const_int_saturating", since = "1.40.0")]
pub fn saturating_add<T: Copy>(a: T, b: T) -> T;
/// Computes `a - b`, saturating at numeric bounds.
///
/// The stabilized versions of this intrinsic are available on the integer
/// primitives via the `saturating_sub` method. For example,
/// [`u32::saturating_sub`]
#[rustc_const_stable(feature = "const_int_saturating", since = "1.40.0")]
pub fn saturating_sub<T: Copy>(a: T, b: T) -> T;
/// Returns the value of the discriminant for the variant in 'v',
/// cast to a `u64`; if `T` has no discriminant, returns `0`.
///
/// The stabilized version of this intrinsic is [`core::mem::discriminant`](crate::mem::discriminant).
#[rustc_const_unstable(feature = "const_discriminant", issue = "69821")]
pub fn discriminant_value<T>(v: &T) -> <T as DiscriminantKind>::Discriminant;
/// Returns the number of variants of the type `T` cast to a `usize`;
/// if `T` has no variants, returns `0`. Uninhabited variants will be counted.
///
/// The to-be-stabilized version of this intrinsic is [`mem::variant_count`].
#[rustc_const_unstable(feature = "variant_count", issue = "73662")]
pub fn variant_count<T>() -> usize;
/// Rust's "try catch" construct which invokes the function pointer `try_fn`
/// with the data pointer `data`.
///
/// The third argument is a function called if a panic occurs. This function
/// takes the data pointer and a pointer to the target-specific exception
/// object that was caught. For more information see the compiler's
/// source as well as std's catch implementation.
pub fn r#try(try_fn: fn(*mut u8), data: *mut u8, catch_fn: fn(*mut u8, *mut u8)) -> i32;
/// Emits a `!nontemporal` store according to LLVM (see their docs).
/// Probably will never become stable.
pub fn nontemporal_store<T>(ptr: *mut T, val: T);
/// See documentation of `<*const T>::offset_from` for details.
#[rustc_const_unstable(feature = "const_ptr_offset_from", issue = "41079")]
pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
/// See documentation of `<*const T>::guaranteed_eq` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub fn ptr_guaranteed_eq<T>(ptr: *const T, other: *const T) -> bool;
/// See documentation of `<*const T>::guaranteed_ne` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
pub fn ptr_guaranteed_ne<T>(ptr: *const T, other: *const T) -> bool;
/// Allocate at compile time. Should not be called at runtime.
#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
pub fn const_allocate(size: usize, align: usize) -> *mut u8;
}
// Some functions are defined here because they accidentally got made
// available in this module on stable. See <https://github.com/rust-lang/rust/issues/15702>.
// (`transmute` also falls into this category, but it cannot be wrapped due to the
// check that `T` and `U` have the same size.)
/// Checks whether `ptr` is non-null and properly aligned with respect to
/// `align_of::<T>()`.
pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
!ptr.is_null() && ptr as usize % mem::align_of::<T>() == 0
}
/// Checks whether the regions of memory starting at `src` and `dst` of size
/// `count * size_of::<T>()` do *not* overlap.
pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
let src_usize = src as usize;
let dst_usize = dst as usize;
let size = mem::size_of::<T>().checked_mul(count).unwrap();
let diff = if src_usize > dst_usize { src_usize - dst_usize } else { dst_usize - src_usize };
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
diff >= size
}
/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
/// and destination must *not* overlap.
///
/// For regions of memory which might overlap, use [`copy`] instead.
///
/// `copy_nonoverlapping` is semantically equivalent to C's [`memcpy`], but
/// with the argument order swapped.
///
/// [`memcpy`]: https://en.cppreference.com/w/c/string/byte/memcpy
///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `src` must be [valid] for reads of `count * size_of::<T>()` bytes.
///
/// * `dst` must be [valid] for writes of `count * size_of::<T>()` bytes.
///
/// * Both `src` and `dst` must be properly aligned.
///
/// * The region of memory beginning at `src` with a size of `count *
/// size_of::<T>()` bytes must *not* overlap with the region of memory
/// beginning at `dst` with the same size.
///
/// Like [`read`], `copy_nonoverlapping` creates a bitwise copy of `T`, regardless of
/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using *both* the values
/// in the region beginning at `*src` and the region beginning at `*dst` can
/// [violate memory safety][read-ownership].
///
/// Note that even if the effectively copied size (`count * size_of::<T>()`) is
/// `0`, the pointers must be non-NULL and properly aligned.
///
/// [`read`]: crate::ptr::read
/// [read-ownership]: crate::ptr::read#ownership-of-the-returned-value
/// [valid]: crate::ptr#safety
///
/// # Examples
///
/// Manually implement [`Vec::append`]:
///
/// ```
/// use std::ptr;
///
/// /// Moves all the elements of `src` into `dst`, leaving `src` empty.
/// fn append<T>(dst: &mut Vec<T>, src: &mut Vec<T>) {
/// let src_len = src.len();
/// let dst_len = dst.len();
///
/// // Ensure that `dst` has enough capacity to hold all of `src`.
/// dst.reserve(src_len);
///
/// unsafe {
/// // The call to offset is always safe because `Vec` will never
/// // allocate more than `isize::MAX` bytes.
/// let dst_ptr = dst.as_mut_ptr().offset(dst_len as isize);
/// let src_ptr = src.as_ptr();
///
/// // Truncate `src` without dropping its contents. We do this first,
/// // to avoid problems in case something further down panics.
/// src.set_len(0);
///
/// // The two regions cannot overlap because mutable references do
/// // not alias, and two different vectors cannot own the same
/// // memory.
/// ptr::copy_nonoverlapping(src_ptr, dst_ptr, src_len);
///
/// // Notify `dst` that it now holds the contents of `src`.
/// dst.set_len(dst_len + src_len);
/// }
/// }
///
/// let mut a = vec!['r'];
/// let mut b = vec!['u', 's', 't'];
///
/// append(&mut a, &mut b);
///
/// assert_eq!(a, &['r', 'u', 's', 't']);
/// assert!(b.is_empty());
/// ```
///
/// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append
#[doc(alias = "memcpy")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
#[inline]
pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
extern "rust-intrinsic" {
fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}
// FIXME: Perform these checks only at run time
/*if cfg!(debug_assertions)
&& !(is_aligned_and_not_null(src)
&& is_aligned_and_not_null(dst)
&& is_nonoverlapping(src, dst, count))
{
// Not panicking to keep codegen impact smaller.
abort();
}*/
// SAFETY: the safety contract for `copy_nonoverlapping` must be
// upheld by the caller.
unsafe { copy_nonoverlapping(src, dst, count) }
}
/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
/// and destination may overlap.
///
/// If the source and destination will *never* overlap,
/// [`copy_nonoverlapping`] can be used instead.
///
/// `copy` is semantically equivalent to C's [`memmove`], but with the argument
/// order swapped. Copying takes place as if the bytes were copied from `src`
/// to a temporary array and then copied from the array to `dst`.
///
/// [`memmove`]: https://en.cppreference.com/w/c/string/byte/memmove
///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `src` must be [valid] for reads of `count * size_of::<T>()` bytes.
///
/// * `dst` must be [valid] for writes of `count * size_of::<T>()` bytes.
///
/// * Both `src` and `dst` must be properly aligned.
///
/// Like [`read`], `copy` creates a bitwise copy of `T`, regardless of
/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the values
/// in the region beginning at `*src` and the region beginning at `*dst` can
/// [violate memory safety][read-ownership].
///
/// Note that even if the effectively copied size (`count * size_of::<T>()`) is
/// `0`, the pointers must be non-NULL and properly aligned.
///
/// [`read`]: crate::ptr::read
/// [read-ownership]: crate::ptr::read#ownership-of-the-returned-value
/// [valid]: crate::ptr#safety
///
/// # Examples
///
/// Efficiently create a Rust vector from an unsafe buffer:
///
/// ```
/// use std::ptr;
///
/// /// # Safety
/// ///
/// /// * `ptr` must be correctly aligned for its type and non-zero.
/// /// * `ptr` must be valid for reads of `elts` contiguous elements of type `T`.
/// /// * Those elements must not be used after calling this function unless `T: Copy`.
/// # #[allow(dead_code)]
/// unsafe fn from_buf_raw<T>(ptr: *const T, elts: usize) -> Vec<T> {
/// let mut dst = Vec::with_capacity(elts);
///
/// // SAFETY: Our precondition ensures the source is aligned and valid,
/// // and `Vec::with_capacity` ensures that we have usable space to write them.
/// ptr::copy(ptr, dst.as_mut_ptr(), elts);
///
/// // SAFETY: We created it with this much capacity earlier,
/// // and the previous `copy` has initialized these elements.
/// dst.set_len(elts);
/// dst
/// }
/// ```
#[doc(alias = "memmove")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
#[inline]
pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
extern "rust-intrinsic" {
fn copy<T>(src: *const T, dst: *mut T, count: usize);
}
// FIXME: Perform these checks only at run time
/*if cfg!(debug_assertions) && !(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst)) {
// Not panicking to keep codegen impact smaller.
abort();
}*/
// SAFETY: the safety contract for `copy` must be upheld by the caller.
unsafe { copy(src, dst, count) }
}
/// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
/// `val`.
///
/// `write_bytes` is similar to C's [`memset`], but sets `count *
/// size_of::<T>()` bytes to `val`.
///
/// [`memset`]: https://en.cppreference.com/w/c/string/byte/memset
///
/// # Safety
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `dst` must be [valid] for writes of `count * size_of::<T>()` bytes.
///
/// * `dst` must be properly aligned.
///
/// Additionally, the caller must ensure that writing `count *
/// size_of::<T>()` bytes to the given region of memory results in a valid
/// value of `T`. Using a region of memory typed as a `T` that contains an
/// invalid value of `T` is undefined behavior.
///
/// Note that even if the effectively copied size (`count * size_of::<T>()`) is
/// `0`, the pointer must be non-NULL and properly aligned.
///
/// [valid]: crate::ptr#safety
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// use std::ptr;
///
/// let mut vec = vec![0u32; 4];
/// unsafe {
/// let vec_ptr = vec.as_mut_ptr();
/// ptr::write_bytes(vec_ptr, 0xfe, 2);
/// }
/// assert_eq!(vec, [0xfefefefe, 0xfefefefe, 0, 0]);
/// ```
///
/// Creating an invalid value:
///
/// ```
/// use std::ptr;
///
/// let mut v = Box::new(0i32);
///
/// unsafe {
/// // Leaks the previously held value by overwriting the `Box<T>` with
/// // a null pointer.
/// ptr::write_bytes(&mut v as *mut Box<i32>, 0, 1);
/// }
///
/// // At this point, using or dropping `v` results in undefined behavior.
/// // drop(v); // ERROR
///
/// // Even leaking `v` "uses" it, and hence is undefined behavior.
/// // mem::forget(v); // ERROR
///
/// // In fact, `v` is invalid according to basic type layout invariants, so *any*
/// // operation touching it is undefined behavior.
/// // let v2 = v; // ERROR
///
/// unsafe {
/// // Let us instead put in a valid value
/// ptr::write(&mut v as *mut Box<i32>, Box::new(42i32));
/// }
///
/// // Now the box is fine
/// assert_eq!(*v, 42);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
extern "rust-intrinsic" {
fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
}
debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer");
// SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
unsafe { write_bytes(dst, val, count) }
}
| 45.734875 | 126 | 0.628604 |
8772cb3cd86f8d1276e8b50ef52a7e9a9eda7795 | 36,437 | use ic_registry_transport::{
pb::v1::{
registry_mutation::Type, RegistryAtomicMutateRequest, RegistryDelta, RegistryMutation,
RegistryValue,
},
Error,
};
use crate::{
common::LOG_PREFIX,
pb::v1::{
registry_stable_storage::Version as ReprVersion, ChangelogEntry, RegistryStableStorage,
},
};
use ic_certified_map::RbTree;
use prost::Message;
use std::cmp::max;
use std::collections::{BTreeMap, VecDeque};
use std::fmt;
#[cfg(target_arch = "wasm32")]
use dfn_core::println;
/// The type for the registry map.
///
/// The Deque part is mostly future-proofing for when we have garbage collection,
/// so that we're able to call pop_front().
pub type RegistryMap = BTreeMap<Vec<u8>, VecDeque<RegistryValue>>;
pub type Version = u64;
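/// A registry version encoded as 8 big-endian bytes.
///
/// Big-endian encoding makes the lexicographic ordering of the encoded bytes
/// coincide with the numeric ordering of the versions, which keeps the
/// changelog keys ordered by version.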
#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone, Default)]
pub struct EncodedVersion([u8; 8]);
impl EncodedVersion {
pub const fn as_version(&self) -> Version {
Version::from_be_bytes(self.0)
}
}
impl fmt::Debug for EncodedVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.as_version())
}
}
impl From<Version> for EncodedVersion {
fn from(v: Version) -> Self {
Self(v.to_be_bytes())
}
}
impl From<EncodedVersion> for Version {
fn from(v: EncodedVersion) -> Self {
v.as_version()
}
}
impl AsRef<[u8]> for EncodedVersion {
fn as_ref(&self) -> &[u8] {
&self.0
}
}
/// The main struct for the Registry.
///
/// The registry is a versioned key value store.
///
/// TODO(NNS1-487): Garbage collection.
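///
/// Illustrative usage sketch (assuming the mutation passes the registry's
/// global invariant checks; the unit tests at the bottom of this file show
/// invariant-free examples):
///
/// ```ignore
/// // `insert` comes from `ic_registry_transport`, as in the tests below.
/// let mut registry = Registry::new();
/// registry.maybe_apply_mutation_internal(vec![insert(b"my_key", b"my_value")]);
/// assert_eq!(
///     registry.get(b"my_key", registry.latest_version()).unwrap().value,
///     b"my_value"
/// );
/// ```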
#[derive(PartialEq, Default, Clone, Debug)]
pub struct Registry {
/// Global counter that is incremented each time a mutation is applied to
/// the registry. Each set of changes is tagged with this version.
version: Version,
/// Registry contents represented as a versioned key/value store, where
/// value versions are stored in a deque in ascending order (latest version
/// is stored at the back of the deque).
pub(crate) store: RegistryMap,
/// All the mutations applied to the registry.
///
/// We keep them explicitly for certification purposes and as a stable
    /// representation that allows us to change the index structure in the future.
///
/// Each entry contains a blob which is a serialized
/// RegistryAtomicMutateRequest. We keep the serialized version around to
    /// make sure that hash trees stay the same even if the protobuf schema evolves.
changelog: RbTree<EncodedVersion, Vec<u8>>,
}
impl Registry {
pub fn new() -> Self {
Self::default()
}
/// Returns the deltas applied since `version`, exclusive; optionally
/// limited to the subsequent `max_versions` (i.e. changes applied in
/// versions `(version, version + max_versions]`).
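    ///
    /// For example, `get_changes_since(1, Some(2))` returns only the deltas
    /// recorded at versions 2 and 3 (see the unit tests below).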
pub fn get_changes_since(
&self,
version: u64,
max_versions: Option<usize>,
) -> Vec<RegistryDelta> {
let max_version = match max_versions {
Some(max_versions) => version.saturating_add(max_versions as u64),
None => std::u64::MAX,
};
self.store
.iter()
// For every key create a delta with values versioned `(version, max_version]`.
.map(|(key, values)| RegistryDelta {
key: key.clone(),
values: values
.iter()
.rev()
.skip_while(|value| value.version > max_version)
.take_while(|value| value.version > version)
.cloned()
.collect(),
})
// Drop empty deltas.
.filter(|delta| !delta.values.is_empty())
.collect()
}
    /// Returns the value at the highest version that is lower than or equal
    /// to 'version', or None if no such value exists or if the most recent
    /// update whose version is less than or equal to 'version' is a deletion
    /// marker.
pub fn get(&self, key: &[u8], version: Version) -> Option<&RegistryValue> {
let value = self
.store
.get(key)?
.iter()
.rev()
// Get the first one versioned at or below `version`.
.find(|value| value.version <= version)?;
if value.deletion_marker {
return None;
}
Some(value)
}
/// Computes the number of deltas with version greater than `since_version`
/// that fit into the specified byte limit.
///
    /// This function is used to determine the number of deltas to include in
    /// a response without going beyond the maximum response size limit.
pub fn count_fitting_deltas(&self, since_version: Version, max_bytes: usize) -> usize {
self.changelog()
.iter()
.skip(since_version as usize)
.scan(0, |size, (key, value)| {
*size += value.len() + key.as_ref().len();
Some(*size)
})
.take_while(|size| *size < max_bytes)
.count()
}
/// Returns the last RegistryValue, if any, for the given key.
///
    /// As we keep track of deletions in the registry, this value
    /// might be a tombstone, that is, a RegistryValue whose 'deletion_marker'
    /// field is true and whose value is meaningless. Thus, when calling
    /// 'get_last' you must check the 'deletion_marker' field, otherwise you
    /// might end up processing garbage.
fn get_last(&self, key: &[u8]) -> Option<&RegistryValue> {
self.store.get(key).and_then(VecDeque::back)
}
/// Increments the latest version of the registry.
fn increment_version(&mut self) -> Version {
self.version += 1;
self.version
}
pub fn latest_version(&self) -> Version {
self.version
}
fn apply_mutations_as_version(
&mut self,
mut mutations: Vec<RegistryMutation>,
version: Version,
) {
// We sort entries by key to eliminate the difference between changelog
// produced by the new version of the registry canister starting from v1
// and the changelog recovered from the stable representation of the
// original version that didn't support certification.
mutations.sort_by(|l, r| l.key.cmp(&r.key));
for m in mutations.iter_mut() {
// We normalize all the INSERT/UPDATE/UPSERT operations to be just
// UPSERTs. This serves 2 purposes:
//
// 1. This significantly simplifies reconstruction of the changelog
// when we deserialize the registry from the original stable
// representation.
//
// 2. This will play nicely with garbage collection: if an old
// INSERT entry is removed, the newly connected clients won't
// fail because of an UPDATE in the first survived entry with the
// same key.
m.mutation_type = match Type::from_i32(m.mutation_type).unwrap() {
Type::Insert | Type::Update | Type::Upsert => Type::Upsert,
Type::Delete => Type::Delete,
} as i32;
}
let req = RegistryAtomicMutateRequest {
mutations,
preconditions: vec![],
};
let bytes = pb_encode(&req);
self.changelog.insert(version.into(), bytes);
for mutation in req.mutations {
(*self.store.entry(mutation.key).or_default()).push_back(RegistryValue {
version,
value: mutation.value,
deletion_marker: mutation.mutation_type == Type::Delete as i32,
});
}
}
/// Applies the given mutations, without any check corresponding
/// to the mutation_type.
///
/// This should be called only after having made sure that all
/// preconditions are satisfied.
fn apply_mutations(&mut self, mutations: Vec<RegistryMutation>) {
if mutations.is_empty() {
// We should not increment the version if there is no
// mutation, so that we keep the invariant that the
// global version is the max of all versions in the store.
return;
}
self.increment_version();
self.apply_mutations_as_version(mutations, self.version);
}
/// Verifies the implicit precondition corresponding to the mutation_type
/// field.
fn verify_mutation_type(&self, mutations: &[RegistryMutation]) -> Vec<Error> {
mutations
.iter()
.map(|m| {
let key = &m.key;
let latest = self
.get_last(key)
.filter(|registry_value| !registry_value.deletion_marker);
match (Type::from_i32(m.mutation_type), latest) {
(None, _) => Err(Error::MalformedMessage(format!(
"Unknown mutation type {} for key {:?}.",
m.mutation_type, m.key
))),
(Some(Type::Insert), None) => Ok(()),
(Some(Type::Insert), Some(_)) => Err(Error::KeyAlreadyPresent(key.to_vec())),
(Some(Type::Update), None) => Err(Error::KeyNotPresent(key.to_vec())),
(Some(Type::Update), Some(_)) => Ok(()),
(Some(Type::Delete), None) => Err(Error::KeyNotPresent(key.to_vec())),
(Some(Type::Delete), Some(_)) => Ok(()),
(Some(Type::Upsert), None) => Ok(()),
(Some(Type::Upsert), Some(_)) => Ok(()),
}
})
.flat_map(Result::err)
.collect()
}
/// Checks that invariants hold after applying mutations
pub fn maybe_apply_mutation_internal(&mut self, mutations: Vec<RegistryMutation>) {
println!(
"{}Received a mutate call containing a list of {} mutations",
LOG_PREFIX,
mutations.len()
);
let errors = self.verify_mutation_type(mutations.as_slice());
if !errors.is_empty() {
panic!(
"{}Transaction rejected because of the following errors: [{}].",
LOG_PREFIX,
errors
.iter()
.map(|e| format!("{}", e))
.collect::<Vec::<String>>()
.join(", ")
);
}
self.check_global_invariants(mutations.as_slice());
self.apply_mutations(mutations);
}
/// Serializes the registry contents using the specified version of stable
/// representation.
fn serializable_form_at(&self, repr_version: ReprVersion) -> RegistryStableStorage {
match repr_version {
ReprVersion::Version1 => RegistryStableStorage {
version: repr_version as i32,
deltas: vec![],
changelog: self
.changelog
.iter()
.map(|(encoded_version, bytes)| ChangelogEntry {
version: encoded_version.as_version(),
encoded_mutation: bytes.clone(),
})
.collect(),
},
ReprVersion::Unspecified => RegistryStableStorage {
version: repr_version as i32,
deltas: self
.store
.iter()
.map(|(key, values)| RegistryDelta {
key: key.clone(),
values: values.iter().cloned().collect(),
})
.collect(),
changelog: vec![],
},
}
}
pub fn serializable_form(&self) -> RegistryStableStorage {
self.serializable_form_at(ReprVersion::Version1)
}
pub fn changelog(&self) -> &RbTree<EncodedVersion, Vec<u8>> {
&self.changelog
}
/// Sets the content of the registry from its serialized representation.
///
/// Panics if not currently empty: this is only meant to be used in
/// canister_post_upgrade.
///
/// In post_upgrade, one should do as much verification as possible, and
/// panic for anything unexpected. Indeed, panicking here keeps the
/// pre-upgrade state unchanged, and gives the developer an opportunity
/// to try upgrading to a different wasm binary. As a corollary, any
/// lossy way of handling unexpected data must be banned in
/// post_upgrade.
pub fn from_serializable_form(&mut self, stable_repr: RegistryStableStorage) {
assert!(self.store.is_empty());
assert!(self.changelog.is_empty());
assert_eq!(self.version, 0);
let repr_version = ReprVersion::from_i32(stable_repr.version).unwrap_or_else(|| {
panic!(
"Version {} of stable registry representation is not supported by this canister",
stable_repr.version
)
});
match repr_version {
ReprVersion::Version1 => {
for entry in stable_repr.changelog {
let req = RegistryAtomicMutateRequest::decode(&entry.encoded_mutation[..])
.unwrap_or_else(|err| {
panic!("Failed to decode mutation@{}: {}", entry.version, err)
});
self.apply_mutations_as_version(req.mutations, entry.version);
self.version = entry.version;
}
}
ReprVersion::Unspecified => {
let mut mutations_by_version = BTreeMap::<Version, Vec<RegistryMutation>>::new();
for delta in stable_repr.deltas.into_iter() {
self.version = max(
self.version,
delta
.values
.last()
.map(|registry_value| registry_value.version)
.unwrap_or(0),
);
for v in delta.values.iter() {
mutations_by_version
.entry(v.version)
.or_default()
.push(RegistryMutation {
mutation_type: if v.deletion_marker {
Type::Delete
} else {
Type::Upsert
} as i32,
key: delta.key.clone(),
value: v.value.clone(),
})
}
self.store.insert(delta.key, VecDeque::from(delta.values));
}
// We iterated over keys in ascending order, so the mutations
// must also be sorted by key, resulting in canonical encoding.
self.changelog = mutations_by_version
.into_iter()
.map(|(v, mutations)| {
(
EncodedVersion::from(v),
pb_encode(&RegistryAtomicMutateRequest {
mutations,
preconditions: vec![],
}),
)
})
.collect()
}
}
}
}
fn pb_encode(msg: &impl prost::Message) -> Vec<u8> {
let mut buf = vec![];
msg.encode(&mut buf).unwrap();
buf
}
#[cfg(test)]
mod tests {
use super::*;
use ic_registry_transport::{delete, insert, update, upsert};
use rand::Rng;
use rand_core::SeedableRng;
use rand_distr::{Alphanumeric, Distribution, Poisson, Uniform};
/// Simulate a round-trip through stable memory, which is an essential part
/// of the upgrade process.
///
    /// This should bring the registry back to a state indistinguishable
    /// from the one before calling this method.
fn serialize_then_deserialize(registry: Registry) {
let mut serialized_v0 = Vec::new();
registry
.serializable_form_at(ReprVersion::Unspecified)
.encode(&mut serialized_v0)
.expect("Error encoding registry");
let mut serialized_v1 = Vec::new();
registry
.serializable_form_at(ReprVersion::Version1)
.encode(&mut serialized_v1)
.expect("Error encoding registry");
let restore_from_v0 = RegistryStableStorage::decode(serialized_v0.as_slice())
.expect("Error decoding registry");
let mut restored = Registry::new();
restored.from_serializable_form(restore_from_v0);
assert_eq!(restored, registry);
let restore_from_v1 = RegistryStableStorage::decode(serialized_v1.as_slice())
.expect("Error decoding registry");
let mut restored = Registry::new();
restored.from_serializable_form(restore_from_v1);
assert_eq!(restored, registry);
}
fn apply_mutations_skip_invariant_checks(
registry: &mut Registry,
mutations: Vec<RegistryMutation>,
) -> Vec<Error> {
let errors = registry.verify_mutation_type(&mutations);
if errors.is_empty() {
registry.apply_mutations(mutations);
}
errors
}
/// Shorthand for asserting equality with the empty vector.
macro_rules! assert_empty {
($v:expr) => {
assert_eq!($v, vec![])
};
}
#[test]
fn test_get() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
let value = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![update(&key, &value2)]
));
let result2 = registry.get(&key, registry.latest_version());
assert_eq!(value2, result2.unwrap().value);
assert_eq!(registry.latest_version(), result2.unwrap().version);
let result = registry.get(&key, registry.latest_version() - 1);
assert_eq!(value, result.unwrap().value);
assert_eq!(registry.latest_version() - 1, result.unwrap().version);
serialize_then_deserialize(registry);
}
#[test]
fn test_get_after_delete() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
let value = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![update(&key, &value2)]
));
let result2 = registry.get(&key, registry.latest_version());
assert_eq!(value2, result2.unwrap().value);
assert_eq!(registry.latest_version(), result2.unwrap().version);
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key)]
));
        // The definition of get says that we should get None if the last version has
        // a deletion marker set.
let result = registry.get(&key, registry.latest_version());
assert_eq!(None, result);
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
let result = registry.get(&key, registry.latest_version());
assert_eq!(value, result.unwrap().value);
assert_eq!(registry.latest_version(), result.unwrap().version);
serialize_then_deserialize(registry);
}
#[test]
fn test_get_changes_since() {
let mut registry = Registry::new();
let key1 = vec![1, 2, 3, 4];
let key2 = vec![5, 6, 7, 8];
let value1 = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
// On the first mutation we insert key1
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key1, &value1)]
));
        // On the second mutation we insert key2 and update key 1
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key2, &value1), update(&key1, &value2)],
));
// On the third mutation we update key 2 and delete key one.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key1), update(&key2, &value2)],
));
        // On the fourth mutation we insert key one again
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key1, &value1)]
));
// Fetching all the mutations since 0 should get
// a total of 2 keys:
        // key 1 with four values (@1 value1, @2 value2, @3 delete, @4 value1)
        // key 2 with two values (@2 value1, @3 value2)
let deltas = registry.get_changes_since(0, None);
// Assert that we got the right thing, and test a few values
assert_eq!(deltas.len(), 2);
let key1_values = &deltas.get(0).unwrap().values;
let key2_values = &deltas.get(1).unwrap().values;
assert_eq!(key1_values.len(), 4);
assert_eq!(key2_values.len(), 2);
assert_eq!(key1_values[0].value, value1);
assert_eq!(key1_values[0].version, 4);
assert!(key1_values[1].deletion_marker);
assert_eq!(key1_values[1].version, 3);
assert_eq!(deltas, registry.get_changes_since(0, Some(4)));
assert_eq!(deltas, registry.get_changes_since(0, Some(9)));
// Fetch all mutations for 2 versions after version 1 (i.e. versions 2 and 3).
let deltas = registry.get_changes_since(1, Some(2));
// Assert that we got the right thing, and test the values.
assert_eq!(deltas.len(), 2);
let key1_values = &deltas.get(0).unwrap().values;
let key2_values = &deltas.get(1).unwrap().values;
assert_eq!(key1_values.len(), 2);
assert_eq!(key2_values.len(), 2);
assert!(key1_values[0].deletion_marker);
assert_eq!(key1_values[0].version, 3);
assert_eq!(key1_values[1].value, value2);
assert_eq!(key1_values[1].version, 2);
assert_eq!(key2_values[0].value, value2);
assert_eq!(key2_values[0].version, 3);
assert_eq!(key2_values[1].value, value1);
assert_eq!(key2_values[1].version, 2);
// Now try getting a couple of other versions
// Version 4 should be empty (versions to get changes from are exclusive)
let deltas = registry.get_changes_since(4, None);
assert_eq!(deltas.len(), 0);
        // Changes since version 3 should include key 1
let deltas = registry.get_changes_since(3, None);
assert_eq!(deltas.len(), 1);
        // Changes since version 2 should include both keys
let deltas = registry.get_changes_since(2, None);
assert_eq!(deltas.len(), 2);
serialize_then_deserialize(registry);
}
#[test]
fn test_insert() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
let value = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
// Inserting a non-existing key should succeed.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
// Inserting an existing (non-deleted) key should fail.
assert_eq!(
registry.verify_mutation_type(&[insert(&key, &value)]),
vec![Error::KeyAlreadyPresent(key.clone())]
);
// After deleting the key, it should be possible to insert
// it again.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value2)]
));
serialize_then_deserialize(registry);
}
#[test]
fn test_update() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
// Updating without the key existing should fail.
let value = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
assert_eq!(
registry.verify_mutation_type(&[update(&key, &value)]),
vec![Error::KeyNotPresent(key.clone())]
);
// After a regular insert the update should succeed.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![update(&key, &value2)]
));
let result = registry.get(&key, registry.latest_version());
assert_eq!(value2, result.unwrap().value);
// After the key is deleted the update should fail.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key)]
));
assert_eq!(
apply_mutations_skip_invariant_checks(&mut registry, vec![update(&key, &value)]),
vec![Error::KeyNotPresent(key)]
);
serialize_then_deserialize(registry);
}
#[test]
fn test_count_fitting_deltas() {
let mut registry = Registry::new();
let mutation1 = upsert(&[90; 50], &[1; 50]);
let mutation2 = upsert(&[90; 100], &[1; 100]);
let mutation3 = upsert(&[89; 200], &[1; 200]);
for mutation in [&mutation1, &mutation2, &mutation3] {
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![mutation.clone()]
));
}
assert_eq!(registry.count_fitting_deltas(0, 100), 0);
assert_eq!(registry.count_fitting_deltas(0, 150), 1);
assert_eq!(registry.count_fitting_deltas(0, 400), 2);
assert_eq!(registry.count_fitting_deltas(0, 2000000), 3);
assert_eq!(registry.count_fitting_deltas(1, 150), 0);
assert_eq!(registry.count_fitting_deltas(1, 400), 1);
assert_eq!(registry.count_fitting_deltas(1, 2000000), 2);
assert_eq!(registry.count_fitting_deltas(2, 300), 0);
assert_eq!(registry.count_fitting_deltas(2, 1000), 1);
assert_eq!(registry.count_fitting_deltas(3, 2000000), 0);
assert_eq!(registry.count_fitting_deltas(4, 2000000), 0);
}
#[test]
fn test_upsert() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
// Upserting without the key existing should succeed.
let value = vec![5, 6, 7, 8];
let value2 = vec![9, 10, 11, 12];
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![upsert(&key, &value)]
));
let result = registry.get(&key, registry.latest_version());
assert_eq!(value, result.unwrap().value);
// Afterwards, another upsert should update the value
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![upsert(&key, &value2)]
));
let result = registry.get(&key, registry.latest_version());
assert_eq!(value2, result.unwrap().value);
// After the key is deleted the upsert should succeed.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![upsert(&key, &value)]
));
let result = registry.get(&key, registry.latest_version());
assert_eq!(value, result.unwrap().value);
serialize_then_deserialize(registry);
}
#[test]
fn test_delete() {
let mut registry = Registry::new();
let key = vec![1, 2, 3, 4];
let value = vec![5, 6, 7, 8];
// Deleting a non-existing key should fail.
assert_eq!(
apply_mutations_skip_invariant_checks(&mut registry, vec![delete(&key)]),
vec![Error::KeyNotPresent(key.clone())]
);
// After inserting the key, delete should succeed.
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(&key, &value)]
));
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(&key)]
));
// After a key has been deleted delete should fail.
assert_eq!(
apply_mutations_skip_invariant_checks(&mut registry, vec![delete(&key)]),
vec![Error::KeyNotPresent(key)]
);
serialize_then_deserialize(registry);
}
#[test]
fn test_transactional_behavior() {
let mut registry = Registry::new();
// Single mutation, all good
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(b"shakira", b"colombia")]
));
// Two mutations, all good
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(b"rihanna", b"barbados"), insert(b"m.i.a", b"uk")],
));
// two insertions, but the second one is already present -- should do nothing
assert_eq!(
apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(b"beyonce", b"us"), insert(b"m.i.a", b"sri lanka")]
),
vec![Error::KeyAlreadyPresent(b"m.i.a".to_vec())]
);
// We should still be at version 2, since the last transaction returned an error
assert_eq!(registry.latest_version(), 2);
assert_eq!(registry.get(b"shakira", 2).unwrap().value, b"colombia");
assert_eq!(registry.get(b"rihanna", 2).unwrap().value, b"barbados");
assert_eq!(registry.get(b"m.i.a", 2).unwrap().value, b"uk");
assert_eq!(registry.get(b"beyonce", 2), None);
}
#[test]
fn test_transactional_behavior_with_deletes() {
let mut registry = Registry::new();
// Single mutation, all good
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(b"shakira", b"colombia")]
));
// Two mutations, all good
apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(b"rihanna", b"barbados"), insert(b"m.i.a", b"uk")],
);
        // Deleting an existing key should succeed
assert_empty!(apply_mutations_skip_invariant_checks(
&mut registry,
vec![delete(b"rihanna")]
));
// We should be at version 3
assert_eq!(registry.latest_version(), 3);
assert_eq!(registry.get(b"rihanna", 3), None);
}
/// A generator of byte vectors where the length is
/// geometrically-distributed and the content uniformly distributed.
///
/// This is in a sense the most natural distribution over byte vectors:
    /// since the length is unbounded, it should follow the most natural
    /// distribution over all non-negative integers that has a finite mean:
    /// the geometric one is a perfect fit.
    ///
    /// Then, given the length, the uniform distribution is the most natural
    /// choice for the content.
struct RandomByteVectorGenerator {
mean_length: f32,
}
impl rand_distr::Distribution<Vec<u8>> for RandomByteVectorGenerator {
fn sample<R>(&self, rng: &mut R) -> Vec<u8>
where
R: Rng + ?Sized,
{
let mut val = Vec::<u8>::new();
let p = 1.0 / (self.mean_length + 1.0);
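            // Each loop iteration pushes another byte with probability 1 - p, so
            // the number of pushed bytes is geometrically distributed with mean
            // (1 - p) / p = mean_length.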
while rng.gen::<f32>() > p {
val.push(rng.gen());
}
val
}
}
fn average<Iter: Iterator<Item = usize>>(iter: Iter) -> f32 {
let sum_and_count = iter.fold((0, 0), |(sum, n), item| (sum + item, n + 1));
sum_and_count.0 as f32 / sum_and_count.1 as f32
}
#[test]
fn test_serialize_deserialize_with_random_content_10_keys() {
let registry = initialize_random_registry(1, 10, 25.0, 300);
serialize_then_deserialize(registry)
}
#[test]
fn test_serialize_deserialize_with_random_content_100_keys() {
let registry = initialize_random_registry(2, 100, 8.0, 2000);
serialize_then_deserialize(registry)
}
#[test]
fn test_serialize_deserialize_with_random_content_1000_keys() {
let registry = initialize_random_registry(3, 1000, 13.0, 1500);
serialize_then_deserialize(registry)
}
#[allow(unused_must_use)] // Required because insertion errors are ignored.
fn initialize_random_registry(
seed: u64,
num_keys: usize,
mean_value_length: f32,
num_updates: usize,
) -> Registry {
let mut rng = rand::rngs::SmallRng::seed_from_u64(seed);
// First generate a bunch of keys
// How long should they be, roughly?
// We're not trying to prevent collisions here -- we just want enough diversity.
// A Poisson distribution plus an offset of one byte sounds pretty natural.
let mean_key_length_in_bytes = (num_keys as f64).log2() / 8.0;
let key_length_generator = Poisson::new(mean_key_length_in_bytes).unwrap();
// In theory the registry allows keys to be arbitrary collections of
// bytes. In practice the replica expects keys to only ever be String.
let keys: Vec<String> = (1..num_keys)
.map(|_| {
let len = key_length_generator.sample(&mut rng) as usize;
(&mut rng)
.sample_iter(Alphanumeric)
.take(len)
.map(char::from)
.collect()
})
.collect();
// First let's insert them all to avoid having to deal with insert v. update
let mut registry = Registry::new();
let gen = RandomByteVectorGenerator {
mean_length: mean_value_length,
};
for k in &keys {
apply_mutations_skip_invariant_checks(
&mut registry,
vec![insert(k.as_bytes(), &gen.sample(&mut rng))],
);
}
// Now let's do some mutations.
// Each mutation will be on a random key, so that not all keys have the same
// number of RegistryValues.
let key_index_distr = Uniform::new(0, keys.len());
for _ in 0..num_updates {
apply_mutations_skip_invariant_checks(
&mut registry,
vec![update(
&keys[key_index_distr.sample(&mut rng)],
&gen.sample(&mut rng),
)],
);
}
// Let's print out some stats to make sure we have the diversity we want
let changes = registry.get_changes_since(0, None);
let num_registry_values: usize = changes.iter().map(|delta| delta.values.len()).sum();
eprintln!(
"\
Populated a registry with random content.
Number of keys: {} (desired: {})
Average key length: {},
Number of RegistryValues: {},
Average number of RegistryValue per key: {},
Average length of the values: {} (desired: {})",
changes.len(),
num_keys,
average(changes.iter().map(|delta| delta.key.len())),
num_registry_values,
num_registry_values as f32 / changes.len() as f32,
average(
changes
.iter()
.map(|delta| delta.values.iter())
.flatten()
.map(|registry_value| registry_value.value.len())
),
mean_value_length
);
registry
}
}
| 37.994786 | 97 | 0.568186 |
034fc5b6ebc2969d5b8d9145fa1aad7221716b29 | 2,538 | extern crate serde_json;
use self::serde_json::Value;
use std::str::FromStr;
use curl::easy::Easy;
use config::Config;
use inoreader::News;
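/// A thin client for the Telegram Bot API, backed by a curl `Easy` handle.
///
/// Illustrative usage sketch (assumes a loaded `Config` and a fetched `News`
/// item; `conf` and `news` are placeholders for this example):
///
/// ```ignore
/// let mut bot = TelegramBotClient::new(&conf);
/// let chat_ids = bot.get_chart_ids();
/// if let Some(&chat_id) = chat_ids.first() {
///     bot.send_message(news, chat_id);
/// }
/// ```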
pub struct TelegramBotClient {
client: Easy,
endpoint: String
}
impl TelegramBotClient {
pub fn new(conf: &Config) -> TelegramBotClient {
TelegramBotClient {
client: Easy::new(),
endpoint: conf.get_telegram_bot_endpoint()
}
}
pub fn get_chart_ids(&mut self) -> Vec<i64> {
        // getUpdates returns all messages that have been sent to the bot so far
let response = self.get("/getUpdates").unwrap();
        // From these messages, extract the chat ids to send the news to
let messages = response["result"].as_array().unwrap();
let mut chart_ids: Vec<i64> = Vec::new();
for m in messages {
chart_ids.push(
m["message"]["chat"]["id"].as_i64().unwrap()
)
}
chart_ids
}
pub fn send_message(&mut self, news: News, chat_id: i64) {
let msg_url = format!(
"/sendMessage?chat_id={}&text={}&parse_mode=Markdown",
chat_id,
news.to_markdown(),
);
match self.get(&msg_url) {
Ok(_) => println!("News success send"),
Err(e) => println!("Can't send news: {:?}", e),
};
}
fn get(&mut self, endpoint: &str) -> Result<Value, String> {
let mut ep = self.endpoint.to_owned();
let mut response = Vec::new();
ep.push_str(endpoint);
        self.client.url(&ep).unwrap();
        // Scoped in its own block so that the borrow of the curl handle taken by
        // `transfer()` is released once the response has been received.
        {
            let mut transfer = self.client.transfer();
transfer.write_function(|data| {
response.extend_from_slice(data);
Ok(data.len())
}).unwrap();
transfer.perform().unwrap();
}
let response_code = self.client.response_code().unwrap();
match response_code {
200 => Ok(self.parse_response(response)),
_ => Err(self.get_error_msg(response)),
}
}
fn parse_response(&self, response: Vec<u8>) -> Value {
let resp = String::from_utf8(response).unwrap();
serde_json::from_str(&resp).unwrap_or_default()
}
fn get_error_msg(&self, response: Vec<u8>) -> String {
let resp = &self.parse_response(response);
String::from_str(resp["description"].as_str().unwrap_or_default()).unwrap_or_default()
}
}
| 30.578313 | 94 | 0.573286 |
147805543fb8bf23c0deea8f12a670b27b8f68d2 | 7,601 | #![allow(unused_imports, non_camel_case_types)]
use crate::models::r5::Element::Element;
use crate::models::r5::Extension::Extension;
use serde_json::json;
use serde_json::value::Value;
use std::borrow::Cow;
/// A Capability Statement documents a set of capabilities (behaviors) of a FHIR
/// Server for a particular version of FHIR that may be used as a statement of actual
/// server functionality or a statement of required or desired server implementation.
#[derive(Debug)]
pub struct CapabilityStatement_Interaction1<'a> {
pub(crate) value: Cow<'a, Value>,
}
impl CapabilityStatement_Interaction1<'_> {
pub fn new(value: &Value) -> CapabilityStatement_Interaction1 {
CapabilityStatement_Interaction1 {
value: Cow::Borrowed(value),
}
}
pub fn to_json(&self) -> Value {
(*self.value).clone()
}
/// Extensions for code
pub fn _code(&self) -> Option<Element> {
if let Some(val) = self.value.get("_code") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// Extensions for documentation
pub fn _documentation(&self) -> Option<Element> {
if let Some(val) = self.value.get("_documentation") {
return Some(Element {
value: Cow::Borrowed(val),
});
}
return None;
}
/// A coded identifier of the operation, supported by the system.
pub fn code(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("code") {
return Some(string);
}
return None;
}
/// Guidance specific to the implementation of this operation, such as limitations
/// on the kind of transactions allowed, or information about system wide search is
/// implemented.
pub fn documentation(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("documentation") {
return Some(string);
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element. To make the use of extensions safe and manageable,
/// there is a strict set of governance applied to the definition and use of
/// extensions. Though any implementer can define an extension, there is a set of
/// requirements that SHALL be met as part of the definition of the extension.
pub fn extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("extension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
/// Unique id for the element within a resource (for internal references). This may be
/// any string value that does not contain spaces.
pub fn id(&self) -> Option<&str> {
if let Some(Value::String(string)) = self.value.get("id") {
return Some(string);
}
return None;
}
/// May be used to represent additional information that is not part of the basic
/// definition of the element and that modifies the understanding of the element
/// in which it is contained and/or the understanding of the containing element's
/// descendants. Usually modifier elements provide negation or qualification. To make
/// the use of extensions safe and manageable, there is a strict set of governance
/// applied to the definition and use of extensions. Though any implementer can define
/// an extension, there is a set of requirements that SHALL be met as part of the
/// definition of the extension. Applications processing a resource are required to
/// check for modifier extensions. Modifier extensions SHALL NOT change the meaning
/// of any elements on Resource or DomainResource (including cannot change the meaning
/// of modifierExtension itself).
pub fn modifier_extension(&self) -> Option<Vec<Extension>> {
if let Some(Value::Array(val)) = self.value.get("modifierExtension") {
return Some(
val.into_iter()
.map(|e| Extension {
value: Cow::Borrowed(e),
})
.collect::<Vec<_>>(),
);
}
return None;
}
pub fn validate(&self) -> bool {
if let Some(_val) = self._code() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self._documentation() {
if !_val.validate() {
return false;
}
}
if let Some(_val) = self.code() {}
if let Some(_val) = self.documentation() {}
if let Some(_val) = self.extension() {
            if !_val.into_iter().map(|e| e.validate()).all(|x| x) {
return false;
}
}
if let Some(_val) = self.id() {}
if let Some(_val) = self.modifier_extension() {
            if !_val.into_iter().map(|e| e.validate()).all(|x| x) {
return false;
}
}
return true;
}
}
#[derive(Debug)]
pub struct CapabilityStatement_Interaction1Builder {
pub(crate) value: Value,
}
impl CapabilityStatement_Interaction1Builder {
pub fn build(&self) -> CapabilityStatement_Interaction1 {
CapabilityStatement_Interaction1 {
value: Cow::Owned(self.value.clone()),
}
}
pub fn with(
existing: CapabilityStatement_Interaction1,
) -> CapabilityStatement_Interaction1Builder {
CapabilityStatement_Interaction1Builder {
value: (*existing.value).clone(),
}
}
pub fn new() -> CapabilityStatement_Interaction1Builder {
let mut __value: Value = json!({});
return CapabilityStatement_Interaction1Builder { value: __value };
}
pub fn _code<'a>(
&'a mut self,
val: Element,
) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["_code"] = json!(val.value);
return self;
}
pub fn _documentation<'a>(
&'a mut self,
val: Element,
) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["_documentation"] = json!(val.value);
return self;
}
pub fn code<'a>(&'a mut self, val: &str) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["code"] = json!(val);
return self;
}
pub fn documentation<'a>(
&'a mut self,
val: &str,
) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["documentation"] = json!(val);
return self;
}
pub fn extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["extension"] = json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
pub fn id<'a>(&'a mut self, val: &str) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["id"] = json!(val);
return self;
}
pub fn modifier_extension<'a>(
&'a mut self,
val: Vec<Extension>,
) -> &'a mut CapabilityStatement_Interaction1Builder {
self.value["modifierExtension"] =
json!(val.into_iter().map(|e| e.value).collect::<Vec<_>>());
return self;
}
}
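
// Illustrative usage sketch (added here for clarity; not part of the generated
// FHIR model code): the builder assembles the underlying JSON value field by
// field, and the resulting wrapper exposes typed accessors plus `validate()`.
// The documentation string below is a made-up example value.
#[cfg(test)]
mod interaction1_builder_sketch {
    use super::*;

    #[test]
    fn builds_and_validates_a_minimal_interaction() {
        // Populate only the two primitive fields; everything else stays absent.
        let mut builder = CapabilityStatement_Interaction1Builder::new();
        builder
            .code("search-system")
            .documentation("System wide search is capped at 100 results per page.");
        let interaction = builder.build();

        assert_eq!(interaction.code(), Some("search-system"));
        assert_eq!(
            interaction.documentation(),
            Some("System wide search is capped at 100 results per page.")
        );
        assert!(interaction.validate());
    }
}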
| 34.238739 | 97 | 0.589791 |
21e35957796eeedcb6e82719f4bf9375796c833e | 2,192 | /*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
//! Application clipboard utility functions.
use gdk::Display;
use gtk::{Clipboard, ClipboardExt};
use url::Url;
use app::App;
impl App {
/// Get the URL from the clipboard if there is one.
/// If there are no URLs in the clipboard, this will show errors.
pub fn get_url_from_clipboard(&self) -> Option<String> {
let clipboard = Display::get_default()
.and_then(|display| Clipboard::get_default(&display));
if let Some(clipboard) = clipboard {
let mut urls = clipboard.wait_for_uris();
let url = urls.pop()
.or_else(|| {
let text = clipboard.wait_for_text();
text.and_then(|text| Url::parse(&text).ok().map(|_| text))
});
if let Some(url) = url {
return Some(url);
}
else {
self.error("No URLs in the clipboard");
}
}
else {
self.error("Cannot get the system clipboard");
}
None
}
}
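
// Illustrative sketch (an addition, not from the original file): a caller inside
// the application might forward the pasted URL to page navigation. `connect_to`
// is a hypothetical method name standing in for whatever the app actually uses;
// error reporting is already handled inside `get_url_from_clipboard` itself.
//
//     if let Some(url) = app.get_url_from_clipboard() {
//         app.connect_to(&url); // hypothetical navigation call
//     }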
| 39.142857 | 83 | 0.649179 |
335521d1c7f148b8603930eb37736f79f246a5c1 | 5,898 | /*
* Copyright 2018-2021 TON DEV SOLUTIONS LTD.
*
* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use
* this file except in compliance with the License.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific TON DEV software governing permissions and
* limitations under the License.
*/
use crate::client::ClientContext;
use crate::crypto;
use crate::crypto::internal::{
decode_public_key, decode_secret_key, key256, sign_using_keys, ton_crc16,
};
use crate::encoding::{base64_decode, hex_decode};
use crate::error::ClientResult;
use base64::URL_SAFE;
use ed25519_dalek::Keypair;
use std::fmt::{Debug, Formatter};
pub(crate) fn strip_secret(secret: &str) -> String {
const SECRET_SHOW_LEN: usize = 8;
if secret.len() <= SECRET_SHOW_LEN {
return format!(r#""{}""#, secret);
}
format!(
r#""{}..." ({} chars)"#,
&secret[..SECRET_SHOW_LEN],
secret.len(),
)
}
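
// For example (an illustrative note, not part of the original source):
// `strip_secret("0123456789abcdef")` yields `"01234567..." (16 chars)`, so full
// secret keys never reach the `Debug` output of `KeyPair` below.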
//----------------------------------------------------------------------------------------- KeyPair
///
#[derive(Serialize, Deserialize, Clone, ApiType, Default, PartialEq)]
pub struct KeyPair {
/// Public key - 64 symbols hex string
pub public: String,
    /// Private key - 64 symbols hex string
pub secret: String,
}
impl KeyPair {
pub fn new(public: String, secret: String) -> KeyPair {
KeyPair { public, secret }
}
pub fn decode(&self) -> ClientResult<Keypair> {
Ok(Keypair {
public: decode_public_key(&self.public)?,
secret: decode_secret_key(&self.secret)?,
})
}
}
impl Debug for KeyPair {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, r#"KeyPair {{ public: "{}", secret: {} }}"#, self.public, strip_secret(&self.secret))
}
}
//----------------------------------------------------------- convert_public_key_to_ton_safe_format
///
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ParamsOfConvertPublicKeyToTonSafeFormat {
/// Public key - 64 symbols hex string
pub public_key: String,
}
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ResultOfConvertPublicKeyToTonSafeFormat {
/// Public key represented in TON safe format.
pub ton_public_key: String,
}
/// Converts a public key to TON safe format
#[api_function]
pub fn convert_public_key_to_ton_safe_format(
_context: std::sync::Arc<ClientContext>,
params: ParamsOfConvertPublicKeyToTonSafeFormat,
) -> ClientResult<ResultOfConvertPublicKeyToTonSafeFormat> {
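    // Safe format layout: a two-byte tag prefix (0x3e, 0xe6), the raw public key
    // bytes, then a big-endian CRC16 computed over prefix + key, all encoded as
    // base64 with the URL-safe alphabet.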
let public_key = hex_decode(¶ms.public_key)?;
let mut ton_public_key: Vec<u8> = Vec::new();
ton_public_key.push(0x3e);
ton_public_key.push(0xe6);
ton_public_key.extend_from_slice(&public_key);
let hash = ton_crc16(&ton_public_key);
ton_public_key.push((hash >> 8) as u8);
ton_public_key.push((hash & 255) as u8);
Ok(ResultOfConvertPublicKeyToTonSafeFormat {
ton_public_key: base64::encode_config(&ton_public_key, URL_SAFE),
})
}
//----------------------------------------------------------------------- generate_random_sign_keys
/// Generates random ed25519 key pair.
#[api_function]
pub fn generate_random_sign_keys(_context: std::sync::Arc<ClientContext>) -> ClientResult<KeyPair> {
let mut rng = rand::thread_rng();
let keypair = ed25519_dalek::Keypair::generate(&mut rng);
Ok(KeyPair::new(
hex::encode(keypair.public.to_bytes()),
hex::encode(keypair.secret.to_bytes()),
))
}
//-------------------------------------------------------------------------------------------- sign
///
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ParamsOfSign {
/// Data that must be signed encoded in `base64`.
pub unsigned: String,
/// Sign keys.
pub keys: KeyPair,
}
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ResultOfSign {
/// Signed data combined with signature encoded in `base64`.
pub signed: String,
/// Signature encoded in `hex`.
pub signature: String,
}
/// Signs a data using the provided keys.
#[api_function]
pub fn sign(
_context: std::sync::Arc<ClientContext>,
params: ParamsOfSign,
) -> ClientResult<ResultOfSign> {
let (signed, signature) =
sign_using_keys(&base64_decode(¶ms.unsigned)?, ¶ms.keys.decode()?)?;
Ok(ResultOfSign {
signed: base64::encode(&signed),
signature: hex::encode(signature),
})
}
//-------------------------------------------------------------------------------- verify_signature
///
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ParamsOfVerifySignature {
/// Signed data that must be verified encoded in `base64`.
pub signed: String,
/// Signer's public key - 64 symbols hex string
pub public: String,
}
#[derive(Serialize, Deserialize, ApiType, Default)]
pub struct ResultOfVerifySignature {
/// Unsigned data encoded in `base64`.
pub unsigned: String,
}
/// Verifies signed data using the provided public key.
/// Raises an error if verification fails.
#[api_function]
pub fn verify_signature(
_context: std::sync::Arc<ClientContext>,
params: ParamsOfVerifySignature,
) -> ClientResult<ResultOfVerifySignature> {
let mut unsigned: Vec<u8> = Vec::new();
let signed = base64_decode(¶ms.signed)?;
unsigned.resize(signed.len(), 0);
let len = sodalite::sign_attached_open(
&mut unsigned,
&signed,
&key256(&hex_decode(¶ms.public)?)?,
)
.map_err(|_| crypto::Error::nacl_sign_failed("verify signature failed"))?;
unsigned.resize(len, 0);
Ok(ResultOfVerifySignature {
unsigned: base64::encode(&unsigned),
})
}
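
// Illustrative round-trip sketch (an addition, not part of the original module):
// data signed with `sign` can be recovered again with `verify_signature` using
// the same key pair. Context construction is elided here because these
// particular functions never read the context argument.
//
//     let keys = generate_random_sign_keys(context.clone())?;
//     let signed = sign(context.clone(), ParamsOfSign {
//         unsigned: base64::encode(b"hello"),
//         keys: keys.clone(),
//     })?;
//     let restored = verify_signature(context, ParamsOfVerifySignature {
//         signed: signed.signed,
//         public: keys.public,
//     })?;
//     assert_eq!(restored.unsigned, base64::encode(b"hello"));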
| 32.054348 | 103 | 0.639708 |
5d33bcfac08ed3520d28bdc49bb8bf959eaecf2e | 122,614 | // This file is generated by rust-protobuf 2.25.1. Do not edit
// @generated
// https://github.com/rust-lang/rust-clippy/issues/702
#![allow(unknown_lints)]
#![allow(clippy::all)]
#![allow(unused_attributes)]
#![cfg_attr(rustfmt, rustfmt::skip)]
#![allow(box_pointers)]
#![allow(dead_code)]
#![allow(missing_docs)]
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
#![allow(non_upper_case_globals)]
#![allow(trivial_casts)]
#![allow(unused_imports)]
#![allow(unused_results)]
//! Generated file from `google/api/servicemanagement/v1/resources.proto`
/// Generated files are compatible only with the same version
/// of protobuf runtime.
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_1;
#[derive(PartialEq,Clone,Default)]
pub struct ManagedService {
// message fields
pub service_name: ::std::string::String,
pub producer_project_id: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ManagedService {
fn default() -> &'a ManagedService {
<ManagedService as ::protobuf::Message>::default_instance()
}
}
impl ManagedService {
pub fn new() -> ManagedService {
::std::default::Default::default()
}
// string service_name = 2;
pub fn get_service_name(&self) -> &str {
&self.service_name
}
pub fn clear_service_name(&mut self) {
self.service_name.clear();
}
// Param is passed by value, moved
pub fn set_service_name(&mut self, v: ::std::string::String) {
self.service_name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_service_name(&mut self) -> &mut ::std::string::String {
&mut self.service_name
}
// Take field
pub fn take_service_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.service_name, ::std::string::String::new())
}
// string producer_project_id = 3;
pub fn get_producer_project_id(&self) -> &str {
&self.producer_project_id
}
pub fn clear_producer_project_id(&mut self) {
self.producer_project_id.clear();
}
// Param is passed by value, moved
pub fn set_producer_project_id(&mut self, v: ::std::string::String) {
self.producer_project_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_producer_project_id(&mut self) -> &mut ::std::string::String {
&mut self.producer_project_id
}
// Take field
pub fn take_producer_project_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.producer_project_id, ::std::string::String::new())
}
}
impl ::protobuf::Message for ManagedService {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.service_name)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.producer_project_id)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.service_name.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.service_name);
}
if !self.producer_project_id.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.producer_project_id);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.service_name.is_empty() {
os.write_string(2, &self.service_name)?;
}
if !self.producer_project_id.is_empty() {
os.write_string(3, &self.producer_project_id)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ManagedService {
ManagedService::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"service_name",
|m: &ManagedService| { &m.service_name },
|m: &mut ManagedService| { &mut m.service_name },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"producer_project_id",
|m: &ManagedService| { &m.producer_project_id },
|m: &mut ManagedService| { &mut m.producer_project_id },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ManagedService>(
"ManagedService",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ManagedService {
static instance: ::protobuf::rt::LazyV2<ManagedService> = ::protobuf::rt::LazyV2::INIT;
instance.get(ManagedService::new)
}
}
impl ::protobuf::Clear for ManagedService {
fn clear(&mut self) {
self.service_name.clear();
self.producer_project_id.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ManagedService {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ManagedService {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct OperationMetadata {
// message fields
pub resource_names: ::protobuf::RepeatedField<::std::string::String>,
pub steps: ::protobuf::RepeatedField<OperationMetadata_Step>,
pub progress_percentage: i32,
pub start_time: ::protobuf::SingularPtrField<::protobuf::well_known_types::Timestamp>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a OperationMetadata {
fn default() -> &'a OperationMetadata {
<OperationMetadata as ::protobuf::Message>::default_instance()
}
}
impl OperationMetadata {
pub fn new() -> OperationMetadata {
::std::default::Default::default()
}
// repeated string resource_names = 1;
pub fn get_resource_names(&self) -> &[::std::string::String] {
&self.resource_names
}
pub fn clear_resource_names(&mut self) {
self.resource_names.clear();
}
// Param is passed by value, moved
pub fn set_resource_names(&mut self, v: ::protobuf::RepeatedField<::std::string::String>) {
self.resource_names = v;
}
// Mutable pointer to the field.
pub fn mut_resource_names(&mut self) -> &mut ::protobuf::RepeatedField<::std::string::String> {
&mut self.resource_names
}
// Take field
pub fn take_resource_names(&mut self) -> ::protobuf::RepeatedField<::std::string::String> {
::std::mem::replace(&mut self.resource_names, ::protobuf::RepeatedField::new())
}
// repeated .google.api.servicemanagement.v1.OperationMetadata.Step steps = 2;
pub fn get_steps(&self) -> &[OperationMetadata_Step] {
&self.steps
}
pub fn clear_steps(&mut self) {
self.steps.clear();
}
// Param is passed by value, moved
pub fn set_steps(&mut self, v: ::protobuf::RepeatedField<OperationMetadata_Step>) {
self.steps = v;
}
// Mutable pointer to the field.
pub fn mut_steps(&mut self) -> &mut ::protobuf::RepeatedField<OperationMetadata_Step> {
&mut self.steps
}
// Take field
pub fn take_steps(&mut self) -> ::protobuf::RepeatedField<OperationMetadata_Step> {
::std::mem::replace(&mut self.steps, ::protobuf::RepeatedField::new())
}
// int32 progress_percentage = 3;
pub fn get_progress_percentage(&self) -> i32 {
self.progress_percentage
}
pub fn clear_progress_percentage(&mut self) {
self.progress_percentage = 0;
}
// Param is passed by value, moved
pub fn set_progress_percentage(&mut self, v: i32) {
self.progress_percentage = v;
}
// .google.protobuf.Timestamp start_time = 4;
pub fn get_start_time(&self) -> &::protobuf::well_known_types::Timestamp {
self.start_time.as_ref().unwrap_or_else(|| <::protobuf::well_known_types::Timestamp as ::protobuf::Message>::default_instance())
}
pub fn clear_start_time(&mut self) {
self.start_time.clear();
}
pub fn has_start_time(&self) -> bool {
self.start_time.is_some()
}
// Param is passed by value, moved
pub fn set_start_time(&mut self, v: ::protobuf::well_known_types::Timestamp) {
self.start_time = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_start_time(&mut self) -> &mut ::protobuf::well_known_types::Timestamp {
if self.start_time.is_none() {
self.start_time.set_default();
}
self.start_time.as_mut().unwrap()
}
// Take field
pub fn take_start_time(&mut self) -> ::protobuf::well_known_types::Timestamp {
self.start_time.take().unwrap_or_else(|| ::protobuf::well_known_types::Timestamp::new())
}
}
impl ::protobuf::Message for OperationMetadata {
fn is_initialized(&self) -> bool {
for v in &self.steps {
if !v.is_initialized() {
return false;
}
};
for v in &self.start_time {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_string_into(wire_type, is, &mut self.resource_names)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.steps)?;
},
3 => {
if wire_type != ::protobuf::wire_format::WireTypeVarint {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
let tmp = is.read_int32()?;
self.progress_percentage = tmp;
},
4 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.start_time)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.resource_names {
my_size += ::protobuf::rt::string_size(1, &value);
};
for value in &self.steps {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
if self.progress_percentage != 0 {
my_size += ::protobuf::rt::value_size(3, self.progress_percentage, ::protobuf::wire_format::WireTypeVarint);
}
if let Some(ref v) = self.start_time.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.resource_names {
os.write_string(1, &v)?;
};
for v in &self.steps {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
if self.progress_percentage != 0 {
os.write_int32(3, self.progress_percentage)?;
}
if let Some(ref v) = self.start_time.as_ref() {
os.write_tag(4, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> OperationMetadata {
OperationMetadata::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"resource_names",
|m: &OperationMetadata| { &m.resource_names },
|m: &mut OperationMetadata| { &mut m.resource_names },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<OperationMetadata_Step>>(
"steps",
|m: &OperationMetadata| { &m.steps },
|m: &mut OperationMetadata| { &mut m.steps },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt32>(
"progress_percentage",
|m: &OperationMetadata| { &m.progress_percentage },
|m: &mut OperationMetadata| { &mut m.progress_percentage },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<::protobuf::well_known_types::Timestamp>>(
"start_time",
|m: &OperationMetadata| { &m.start_time },
|m: &mut OperationMetadata| { &mut m.start_time },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<OperationMetadata>(
"OperationMetadata",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static OperationMetadata {
static instance: ::protobuf::rt::LazyV2<OperationMetadata> = ::protobuf::rt::LazyV2::INIT;
instance.get(OperationMetadata::new)
}
}
impl ::protobuf::Clear for OperationMetadata {
fn clear(&mut self) {
self.resource_names.clear();
self.steps.clear();
self.progress_percentage = 0;
self.start_time.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for OperationMetadata {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for OperationMetadata {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct OperationMetadata_Step {
// message fields
pub description: ::std::string::String,
pub status: OperationMetadata_Status,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a OperationMetadata_Step {
fn default() -> &'a OperationMetadata_Step {
<OperationMetadata_Step as ::protobuf::Message>::default_instance()
}
}
impl OperationMetadata_Step {
pub fn new() -> OperationMetadata_Step {
::std::default::Default::default()
}
// string description = 2;
pub fn get_description(&self) -> &str {
&self.description
}
pub fn clear_description(&mut self) {
self.description.clear();
}
// Param is passed by value, moved
pub fn set_description(&mut self, v: ::std::string::String) {
self.description = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_description(&mut self) -> &mut ::std::string::String {
&mut self.description
}
// Take field
pub fn take_description(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.description, ::std::string::String::new())
}
// .google.api.servicemanagement.v1.OperationMetadata.Status status = 4;
pub fn get_status(&self) -> OperationMetadata_Status {
self.status
}
pub fn clear_status(&mut self) {
self.status = OperationMetadata_Status::STATUS_UNSPECIFIED;
}
// Param is passed by value, moved
pub fn set_status(&mut self, v: OperationMetadata_Status) {
self.status = v;
}
}
impl ::protobuf::Message for OperationMetadata_Step {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
2 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.description)?;
},
4 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 4, &mut self.unknown_fields)?
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.description.is_empty() {
my_size += ::protobuf::rt::string_size(2, &self.description);
}
if self.status != OperationMetadata_Status::STATUS_UNSPECIFIED {
my_size += ::protobuf::rt::enum_size(4, self.status);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.description.is_empty() {
os.write_string(2, &self.description)?;
}
if self.status != OperationMetadata_Status::STATUS_UNSPECIFIED {
os.write_enum(4, ::protobuf::ProtobufEnum::value(&self.status))?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> OperationMetadata_Step {
OperationMetadata_Step::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"description",
|m: &OperationMetadata_Step| { &m.description },
|m: &mut OperationMetadata_Step| { &mut m.description },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<OperationMetadata_Status>>(
"status",
|m: &OperationMetadata_Step| { &m.status },
|m: &mut OperationMetadata_Step| { &mut m.status },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<OperationMetadata_Step>(
"OperationMetadata.Step",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static OperationMetadata_Step {
static instance: ::protobuf::rt::LazyV2<OperationMetadata_Step> = ::protobuf::rt::LazyV2::INIT;
instance.get(OperationMetadata_Step::new)
}
}
impl ::protobuf::Clear for OperationMetadata_Step {
fn clear(&mut self) {
self.description.clear();
self.status = OperationMetadata_Status::STATUS_UNSPECIFIED;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for OperationMetadata_Step {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for OperationMetadata_Step {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum OperationMetadata_Status {
STATUS_UNSPECIFIED = 0,
DONE = 1,
NOT_STARTED = 2,
IN_PROGRESS = 3,
FAILED = 4,
CANCELLED = 5,
}
impl ::protobuf::ProtobufEnum for OperationMetadata_Status {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<OperationMetadata_Status> {
match value {
0 => ::std::option::Option::Some(OperationMetadata_Status::STATUS_UNSPECIFIED),
1 => ::std::option::Option::Some(OperationMetadata_Status::DONE),
2 => ::std::option::Option::Some(OperationMetadata_Status::NOT_STARTED),
3 => ::std::option::Option::Some(OperationMetadata_Status::IN_PROGRESS),
4 => ::std::option::Option::Some(OperationMetadata_Status::FAILED),
5 => ::std::option::Option::Some(OperationMetadata_Status::CANCELLED),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [OperationMetadata_Status] = &[
OperationMetadata_Status::STATUS_UNSPECIFIED,
OperationMetadata_Status::DONE,
OperationMetadata_Status::NOT_STARTED,
OperationMetadata_Status::IN_PROGRESS,
OperationMetadata_Status::FAILED,
OperationMetadata_Status::CANCELLED,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<OperationMetadata_Status>("OperationMetadata.Status", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for OperationMetadata_Status {
}
impl ::std::default::Default for OperationMetadata_Status {
fn default() -> Self {
OperationMetadata_Status::STATUS_UNSPECIFIED
}
}
impl ::protobuf::reflect::ProtobufValue for OperationMetadata_Status {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Diagnostic {
// message fields
pub location: ::std::string::String,
pub kind: Diagnostic_Kind,
pub message: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Diagnostic {
fn default() -> &'a Diagnostic {
<Diagnostic as ::protobuf::Message>::default_instance()
}
}
impl Diagnostic {
pub fn new() -> Diagnostic {
::std::default::Default::default()
}
// string location = 1;
pub fn get_location(&self) -> &str {
&self.location
}
pub fn clear_location(&mut self) {
self.location.clear();
}
// Param is passed by value, moved
pub fn set_location(&mut self, v: ::std::string::String) {
self.location = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_location(&mut self) -> &mut ::std::string::String {
&mut self.location
}
// Take field
pub fn take_location(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.location, ::std::string::String::new())
}
// .google.api.servicemanagement.v1.Diagnostic.Kind kind = 2;
pub fn get_kind(&self) -> Diagnostic_Kind {
self.kind
}
pub fn clear_kind(&mut self) {
self.kind = Diagnostic_Kind::WARNING;
}
// Param is passed by value, moved
pub fn set_kind(&mut self, v: Diagnostic_Kind) {
self.kind = v;
}
// string message = 3;
pub fn get_message(&self) -> &str {
&self.message
}
pub fn clear_message(&mut self) {
self.message.clear();
}
// Param is passed by value, moved
pub fn set_message(&mut self, v: ::std::string::String) {
self.message = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_message(&mut self) -> &mut ::std::string::String {
&mut self.message
}
// Take field
pub fn take_message(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.message, ::std::string::String::new())
}
}
impl ::protobuf::Message for Diagnostic {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.location)?;
},
2 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.kind, 2, &mut self.unknown_fields)?
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.message)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.location.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.location);
}
if self.kind != Diagnostic_Kind::WARNING {
my_size += ::protobuf::rt::enum_size(2, self.kind);
}
if !self.message.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.message);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.location.is_empty() {
os.write_string(1, &self.location)?;
}
if self.kind != Diagnostic_Kind::WARNING {
os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.kind))?;
}
if !self.message.is_empty() {
os.write_string(3, &self.message)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Diagnostic {
Diagnostic::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"location",
|m: &Diagnostic| { &m.location },
|m: &mut Diagnostic| { &mut m.location },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<Diagnostic_Kind>>(
"kind",
|m: &Diagnostic| { &m.kind },
|m: &mut Diagnostic| { &mut m.kind },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"message",
|m: &Diagnostic| { &m.message },
|m: &mut Diagnostic| { &mut m.message },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Diagnostic>(
"Diagnostic",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Diagnostic {
static instance: ::protobuf::rt::LazyV2<Diagnostic> = ::protobuf::rt::LazyV2::INIT;
instance.get(Diagnostic::new)
}
}
impl ::protobuf::Clear for Diagnostic {
fn clear(&mut self) {
self.location.clear();
self.kind = Diagnostic_Kind::WARNING;
self.message.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Diagnostic {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Diagnostic {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum Diagnostic_Kind {
WARNING = 0,
ERROR = 1,
}
impl ::protobuf::ProtobufEnum for Diagnostic_Kind {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<Diagnostic_Kind> {
match value {
0 => ::std::option::Option::Some(Diagnostic_Kind::WARNING),
1 => ::std::option::Option::Some(Diagnostic_Kind::ERROR),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [Diagnostic_Kind] = &[
Diagnostic_Kind::WARNING,
Diagnostic_Kind::ERROR,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<Diagnostic_Kind>("Diagnostic.Kind", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for Diagnostic_Kind {
}
impl ::std::default::Default for Diagnostic_Kind {
fn default() -> Self {
Diagnostic_Kind::WARNING
}
}
impl ::protobuf::reflect::ProtobufValue for Diagnostic_Kind {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ConfigSource {
// message fields
pub id: ::std::string::String,
pub files: ::protobuf::RepeatedField<ConfigFile>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ConfigSource {
fn default() -> &'a ConfigSource {
<ConfigSource as ::protobuf::Message>::default_instance()
}
}
impl ConfigSource {
pub fn new() -> ConfigSource {
::std::default::Default::default()
}
// string id = 5;
pub fn get_id(&self) -> &str {
&self.id
}
pub fn clear_id(&mut self) {
self.id.clear();
}
// Param is passed by value, moved
pub fn set_id(&mut self, v: ::std::string::String) {
self.id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_id(&mut self) -> &mut ::std::string::String {
&mut self.id
}
// Take field
pub fn take_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.id, ::std::string::String::new())
}
// repeated .google.api.servicemanagement.v1.ConfigFile files = 2;
pub fn get_files(&self) -> &[ConfigFile] {
&self.files
}
pub fn clear_files(&mut self) {
self.files.clear();
}
// Param is passed by value, moved
pub fn set_files(&mut self, v: ::protobuf::RepeatedField<ConfigFile>) {
self.files = v;
}
// Mutable pointer to the field.
pub fn mut_files(&mut self) -> &mut ::protobuf::RepeatedField<ConfigFile> {
&mut self.files
}
// Take field
pub fn take_files(&mut self) -> ::protobuf::RepeatedField<ConfigFile> {
::std::mem::replace(&mut self.files, ::protobuf::RepeatedField::new())
}
}
impl ::protobuf::Message for ConfigSource {
fn is_initialized(&self) -> bool {
for v in &self.files {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
5 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.id)?;
},
2 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.files)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.id.is_empty() {
my_size += ::protobuf::rt::string_size(5, &self.id);
}
for value in &self.files {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.id.is_empty() {
os.write_string(5, &self.id)?;
}
for v in &self.files {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ConfigSource {
ConfigSource::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"id",
|m: &ConfigSource| { &m.id },
|m: &mut ConfigSource| { &mut m.id },
));
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<ConfigFile>>(
"files",
|m: &ConfigSource| { &m.files },
|m: &mut ConfigSource| { &mut m.files },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ConfigSource>(
"ConfigSource",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ConfigSource {
static instance: ::protobuf::rt::LazyV2<ConfigSource> = ::protobuf::rt::LazyV2::INIT;
instance.get(ConfigSource::new)
}
}
impl ::protobuf::Clear for ConfigSource {
fn clear(&mut self) {
self.id.clear();
self.files.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ConfigSource {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ConfigSource {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ConfigFile {
// message fields
pub file_path: ::std::string::String,
pub file_contents: ::std::vec::Vec<u8>,
pub file_type: ConfigFile_FileType,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ConfigFile {
fn default() -> &'a ConfigFile {
<ConfigFile as ::protobuf::Message>::default_instance()
}
}
impl ConfigFile {
pub fn new() -> ConfigFile {
::std::default::Default::default()
}
// string file_path = 1;
pub fn get_file_path(&self) -> &str {
&self.file_path
}
pub fn clear_file_path(&mut self) {
self.file_path.clear();
}
// Param is passed by value, moved
pub fn set_file_path(&mut self, v: ::std::string::String) {
self.file_path = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_file_path(&mut self) -> &mut ::std::string::String {
&mut self.file_path
}
// Take field
pub fn take_file_path(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.file_path, ::std::string::String::new())
}
// bytes file_contents = 3;
pub fn get_file_contents(&self) -> &[u8] {
&self.file_contents
}
pub fn clear_file_contents(&mut self) {
self.file_contents.clear();
}
// Param is passed by value, moved
pub fn set_file_contents(&mut self, v: ::std::vec::Vec<u8>) {
self.file_contents = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_file_contents(&mut self) -> &mut ::std::vec::Vec<u8> {
&mut self.file_contents
}
// Take field
pub fn take_file_contents(&mut self) -> ::std::vec::Vec<u8> {
::std::mem::replace(&mut self.file_contents, ::std::vec::Vec::new())
}
// .google.api.servicemanagement.v1.ConfigFile.FileType file_type = 4;
pub fn get_file_type(&self) -> ConfigFile_FileType {
self.file_type
}
pub fn clear_file_type(&mut self) {
self.file_type = ConfigFile_FileType::FILE_TYPE_UNSPECIFIED;
}
// Param is passed by value, moved
pub fn set_file_type(&mut self, v: ConfigFile_FileType) {
self.file_type = v;
}
}
impl ::protobuf::Message for ConfigFile {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.file_path)?;
},
3 => {
::protobuf::rt::read_singular_proto3_bytes_into(wire_type, is, &mut self.file_contents)?;
},
4 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.file_type, 4, &mut self.unknown_fields)?
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.file_path.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.file_path);
}
if !self.file_contents.is_empty() {
my_size += ::protobuf::rt::bytes_size(3, &self.file_contents);
}
if self.file_type != ConfigFile_FileType::FILE_TYPE_UNSPECIFIED {
my_size += ::protobuf::rt::enum_size(4, self.file_type);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.file_path.is_empty() {
os.write_string(1, &self.file_path)?;
}
if !self.file_contents.is_empty() {
os.write_bytes(3, &self.file_contents)?;
}
if self.file_type != ConfigFile_FileType::FILE_TYPE_UNSPECIFIED {
os.write_enum(4, ::protobuf::ProtobufEnum::value(&self.file_type))?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ConfigFile {
ConfigFile::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"file_path",
|m: &ConfigFile| { &m.file_path },
|m: &mut ConfigFile| { &mut m.file_path },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeBytes>(
"file_contents",
|m: &ConfigFile| { &m.file_contents },
|m: &mut ConfigFile| { &mut m.file_contents },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ConfigFile_FileType>>(
"file_type",
|m: &ConfigFile| { &m.file_type },
|m: &mut ConfigFile| { &mut m.file_type },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ConfigFile>(
"ConfigFile",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ConfigFile {
static instance: ::protobuf::rt::LazyV2<ConfigFile> = ::protobuf::rt::LazyV2::INIT;
instance.get(ConfigFile::new)
}
}
impl ::protobuf::Clear for ConfigFile {
fn clear(&mut self) {
self.file_path.clear();
self.file_contents.clear();
self.file_type = ConfigFile_FileType::FILE_TYPE_UNSPECIFIED;
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ConfigFile {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ConfigFile {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum ConfigFile_FileType {
FILE_TYPE_UNSPECIFIED = 0,
SERVICE_CONFIG_YAML = 1,
OPEN_API_JSON = 2,
OPEN_API_YAML = 3,
FILE_DESCRIPTOR_SET_PROTO = 4,
PROTO_FILE = 6,
}
impl ::protobuf::ProtobufEnum for ConfigFile_FileType {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<ConfigFile_FileType> {
match value {
0 => ::std::option::Option::Some(ConfigFile_FileType::FILE_TYPE_UNSPECIFIED),
1 => ::std::option::Option::Some(ConfigFile_FileType::SERVICE_CONFIG_YAML),
2 => ::std::option::Option::Some(ConfigFile_FileType::OPEN_API_JSON),
3 => ::std::option::Option::Some(ConfigFile_FileType::OPEN_API_YAML),
4 => ::std::option::Option::Some(ConfigFile_FileType::FILE_DESCRIPTOR_SET_PROTO),
6 => ::std::option::Option::Some(ConfigFile_FileType::PROTO_FILE),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [ConfigFile_FileType] = &[
ConfigFile_FileType::FILE_TYPE_UNSPECIFIED,
ConfigFile_FileType::SERVICE_CONFIG_YAML,
ConfigFile_FileType::OPEN_API_JSON,
ConfigFile_FileType::OPEN_API_YAML,
ConfigFile_FileType::FILE_DESCRIPTOR_SET_PROTO,
ConfigFile_FileType::PROTO_FILE,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<ConfigFile_FileType>("ConfigFile.FileType", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for ConfigFile_FileType {
}
impl ::std::default::Default for ConfigFile_FileType {
fn default() -> Self {
ConfigFile_FileType::FILE_TYPE_UNSPECIFIED
}
}
impl ::protobuf::reflect::ProtobufValue for ConfigFile_FileType {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
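
// Illustrative sketch (an addition; the generated message API above is normally
// driven like this from application code rather than edited here). The file path
// and contents below are hypothetical placeholders.
//
//     let mut file = ConfigFile::new();
//     file.set_file_path("openapi/service.yaml".to_string());
//     file.set_file_contents(b"swagger: \"2.0\"".to_vec());
//     file.set_file_type(ConfigFile_FileType::OPEN_API_YAML);
//
//     let mut source = ConfigSource::new();
//     source.set_id("2021-11-01r0".to_string());
//     source.mut_files().push(file);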
#[derive(PartialEq,Clone,Default)]
pub struct ConfigRef {
// message fields
pub name: ::std::string::String,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ConfigRef {
fn default() -> &'a ConfigRef {
<ConfigRef as ::protobuf::Message>::default_instance()
}
}
impl ConfigRef {
pub fn new() -> ConfigRef {
::std::default::Default::default()
}
// string name = 1;
pub fn get_name(&self) -> &str {
&self.name
}
pub fn clear_name(&mut self) {
self.name.clear();
}
// Param is passed by value, moved
pub fn set_name(&mut self, v: ::std::string::String) {
self.name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_name(&mut self) -> &mut ::std::string::String {
&mut self.name
}
// Take field
pub fn take_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.name, ::std::string::String::new())
}
}
impl ::protobuf::Message for ConfigRef {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.name)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.name.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.name);
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.name.is_empty() {
os.write_string(1, &self.name)?;
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ConfigRef {
ConfigRef::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"name",
|m: &ConfigRef| { &m.name },
|m: &mut ConfigRef| { &mut m.name },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ConfigRef>(
"ConfigRef",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ConfigRef {
static instance: ::protobuf::rt::LazyV2<ConfigRef> = ::protobuf::rt::LazyV2::INIT;
instance.get(ConfigRef::new)
}
}
impl ::protobuf::Clear for ConfigRef {
fn clear(&mut self) {
self.name.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ConfigRef {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ConfigRef {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct ChangeReport {
// message fields
pub config_changes: ::protobuf::RepeatedField<super::config_change::ConfigChange>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a ChangeReport {
fn default() -> &'a ChangeReport {
<ChangeReport as ::protobuf::Message>::default_instance()
}
}
impl ChangeReport {
pub fn new() -> ChangeReport {
::std::default::Default::default()
}
// repeated .google.api.ConfigChange config_changes = 1;
pub fn get_config_changes(&self) -> &[super::config_change::ConfigChange] {
&self.config_changes
}
pub fn clear_config_changes(&mut self) {
self.config_changes.clear();
}
// Param is passed by value, moved
pub fn set_config_changes(&mut self, v: ::protobuf::RepeatedField<super::config_change::ConfigChange>) {
self.config_changes = v;
}
// Mutable pointer to the field.
pub fn mut_config_changes(&mut self) -> &mut ::protobuf::RepeatedField<super::config_change::ConfigChange> {
&mut self.config_changes
}
// Take field
pub fn take_config_changes(&mut self) -> ::protobuf::RepeatedField<super::config_change::ConfigChange> {
::std::mem::replace(&mut self.config_changes, ::protobuf::RepeatedField::new())
}
}
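// Illustrative sketch: assembling a `ChangeReport` from individual `ConfigChange`
// values produced by the sibling generated module. `ConfigChange::new()` is assumed
// to exist because every message generated by this protobuf version gets a `new()`.
#[allow(dead_code)]
fn example_change_report() -> ChangeReport {
    let mut report = ChangeReport::new();
    // RepeatedField behaves like a Vec for the purpose of pushing elements.
    report
        .mut_config_changes()
        .push(super::config_change::ConfigChange::new());
    report
}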
impl ::protobuf::Message for ChangeReport {
fn is_initialized(&self) -> bool {
for v in &self.config_changes {
if !v.is_initialized() {
return false;
}
};
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_repeated_message_into(wire_type, is, &mut self.config_changes)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
for value in &self.config_changes {
let len = value.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
};
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
for v in &self.config_changes {
os.write_tag(1, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
};
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> ChangeReport {
ChangeReport::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_repeated_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::config_change::ConfigChange>>(
"config_changes",
|m: &ChangeReport| { &m.config_changes },
|m: &mut ChangeReport| { &mut m.config_changes },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<ChangeReport>(
"ChangeReport",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static ChangeReport {
static instance: ::protobuf::rt::LazyV2<ChangeReport> = ::protobuf::rt::LazyV2::INIT;
instance.get(ChangeReport::new)
}
}
impl ::protobuf::Clear for ChangeReport {
fn clear(&mut self) {
self.config_changes.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for ChangeReport {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for ChangeReport {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Rollout {
// message fields
pub rollout_id: ::std::string::String,
pub create_time: ::protobuf::SingularPtrField<::protobuf::well_known_types::Timestamp>,
pub created_by: ::std::string::String,
pub status: Rollout_RolloutStatus,
pub service_name: ::std::string::String,
// message oneof groups
pub strategy: ::std::option::Option<Rollout_oneof_strategy>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Rollout {
fn default() -> &'a Rollout {
<Rollout as ::protobuf::Message>::default_instance()
}
}
#[derive(Clone,PartialEq,Debug)]
pub enum Rollout_oneof_strategy {
traffic_percent_strategy(Rollout_TrafficPercentStrategy),
delete_service_strategy(Rollout_DeleteServiceStrategy),
}
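// Illustrative sketch: the `strategy` oneof is surfaced as a plain Rust enum wrapped
// in an `Option`, so matching on it is the most direct way to branch on whichever
// strategy was set (or detect that none was).
#[allow(dead_code)]
fn example_strategy_kind(rollout: &Rollout) -> &'static str {
    match rollout.strategy {
        ::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(..)) => "traffic_percent_strategy",
        ::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(..)) => "delete_service_strategy",
        ::std::option::Option::None => "unset",
    }
}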
impl Rollout {
pub fn new() -> Rollout {
::std::default::Default::default()
}
// string rollout_id = 1;
pub fn get_rollout_id(&self) -> &str {
&self.rollout_id
}
pub fn clear_rollout_id(&mut self) {
self.rollout_id.clear();
}
// Param is passed by value, moved
pub fn set_rollout_id(&mut self, v: ::std::string::String) {
self.rollout_id = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_rollout_id(&mut self) -> &mut ::std::string::String {
&mut self.rollout_id
}
// Take field
pub fn take_rollout_id(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.rollout_id, ::std::string::String::new())
}
// .google.protobuf.Timestamp create_time = 2;
pub fn get_create_time(&self) -> &::protobuf::well_known_types::Timestamp {
self.create_time.as_ref().unwrap_or_else(|| <::protobuf::well_known_types::Timestamp as ::protobuf::Message>::default_instance())
}
pub fn clear_create_time(&mut self) {
self.create_time.clear();
}
pub fn has_create_time(&self) -> bool {
self.create_time.is_some()
}
// Param is passed by value, moved
pub fn set_create_time(&mut self, v: ::protobuf::well_known_types::Timestamp) {
self.create_time = ::protobuf::SingularPtrField::some(v);
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_create_time(&mut self) -> &mut ::protobuf::well_known_types::Timestamp {
if self.create_time.is_none() {
self.create_time.set_default();
}
self.create_time.as_mut().unwrap()
}
// Take field
pub fn take_create_time(&mut self) -> ::protobuf::well_known_types::Timestamp {
self.create_time.take().unwrap_or_else(|| ::protobuf::well_known_types::Timestamp::new())
}
// string created_by = 3;
pub fn get_created_by(&self) -> &str {
&self.created_by
}
pub fn clear_created_by(&mut self) {
self.created_by.clear();
}
// Param is passed by value, moved
pub fn set_created_by(&mut self, v: ::std::string::String) {
self.created_by = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_created_by(&mut self) -> &mut ::std::string::String {
&mut self.created_by
}
// Take field
pub fn take_created_by(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.created_by, ::std::string::String::new())
}
// .google.api.servicemanagement.v1.Rollout.RolloutStatus status = 4;
pub fn get_status(&self) -> Rollout_RolloutStatus {
self.status
}
pub fn clear_status(&mut self) {
self.status = Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED;
}
// Param is passed by value, moved
pub fn set_status(&mut self, v: Rollout_RolloutStatus) {
self.status = v;
}
// .google.api.servicemanagement.v1.Rollout.TrafficPercentStrategy traffic_percent_strategy = 5;
pub fn get_traffic_percent_strategy(&self) -> &Rollout_TrafficPercentStrategy {
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(ref v)) => v,
_ => <Rollout_TrafficPercentStrategy as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_traffic_percent_strategy(&mut self) {
self.strategy = ::std::option::Option::None;
}
pub fn has_traffic_percent_strategy(&self) -> bool {
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_traffic_percent_strategy(&mut self, v: Rollout_TrafficPercentStrategy) {
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(v))
}
// Mutable pointer to the field.
pub fn mut_traffic_percent_strategy(&mut self) -> &mut Rollout_TrafficPercentStrategy {
if let ::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(_)) = self.strategy {
} else {
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(Rollout_TrafficPercentStrategy::new()));
}
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_traffic_percent_strategy(&mut self) -> Rollout_TrafficPercentStrategy {
if self.has_traffic_percent_strategy() {
match self.strategy.take() {
::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(v)) => v,
_ => panic!(),
}
} else {
Rollout_TrafficPercentStrategy::new()
}
}
// .google.api.servicemanagement.v1.Rollout.DeleteServiceStrategy delete_service_strategy = 200;
pub fn get_delete_service_strategy(&self) -> &Rollout_DeleteServiceStrategy {
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(ref v)) => v,
_ => <Rollout_DeleteServiceStrategy as ::protobuf::Message>::default_instance(),
}
}
pub fn clear_delete_service_strategy(&mut self) {
self.strategy = ::std::option::Option::None;
}
pub fn has_delete_service_strategy(&self) -> bool {
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(..)) => true,
_ => false,
}
}
// Param is passed by value, moved
pub fn set_delete_service_strategy(&mut self, v: Rollout_DeleteServiceStrategy) {
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(v))
}
// Mutable pointer to the field.
pub fn mut_delete_service_strategy(&mut self) -> &mut Rollout_DeleteServiceStrategy {
if let ::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(_)) = self.strategy {
} else {
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(Rollout_DeleteServiceStrategy::new()));
}
match self.strategy {
::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(ref mut v)) => v,
_ => panic!(),
}
}
// Take field
pub fn take_delete_service_strategy(&mut self) -> Rollout_DeleteServiceStrategy {
if self.has_delete_service_strategy() {
match self.strategy.take() {
::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(v)) => v,
_ => panic!(),
}
} else {
Rollout_DeleteServiceStrategy::new()
}
}
// string service_name = 8;
pub fn get_service_name(&self) -> &str {
&self.service_name
}
pub fn clear_service_name(&mut self) {
self.service_name.clear();
}
// Param is passed by value, moved
pub fn set_service_name(&mut self, v: ::std::string::String) {
self.service_name = v;
}
// Mutable pointer to the field.
// If field is not initialized, it is initialized with default value first.
pub fn mut_service_name(&mut self) -> &mut ::std::string::String {
&mut self.service_name
}
// Take field
pub fn take_service_name(&mut self) -> ::std::string::String {
::std::mem::replace(&mut self.service_name, ::std::string::String::new())
}
}
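// Illustrative sketch: building a `Rollout` with a traffic-percent strategy and
// pushing it through the `Message` serialization round trip. The id and config
// version strings are the examples from the descriptor comments; error handling
// is collapsed with `?` purely for brevity.
#[allow(dead_code)]
fn example_rollout_round_trip() -> ::protobuf::ProtobufResult<Rollout> {
    let mut rollout = Rollout::new();
    rollout.set_rollout_id("2016-02-16r1".to_string());
    rollout.set_status(Rollout_RolloutStatus::PENDING);
    // `mut_traffic_percent_strategy` initializes the oneof to this variant if unset.
    rollout
        .mut_traffic_percent_strategy()
        .mut_percentages()
        .insert("example.googleapis.com/20160206".to_string(), 100.0);
    let bytes = ::protobuf::Message::write_to_bytes(&rollout)?;
    ::protobuf::Message::parse_from_bytes(&bytes)
}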
impl ::protobuf::Message for Rollout {
fn is_initialized(&self) -> bool {
for v in &self.create_time {
if !v.is_initialized() {
return false;
}
};
if let Some(Rollout_oneof_strategy::traffic_percent_strategy(ref v)) = self.strategy {
if !v.is_initialized() {
return false;
}
}
if let Some(Rollout_oneof_strategy::delete_service_strategy(ref v)) = self.strategy {
if !v.is_initialized() {
return false;
}
}
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.rollout_id)?;
},
2 => {
::protobuf::rt::read_singular_message_into(wire_type, is, &mut self.create_time)?;
},
3 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.created_by)?;
},
4 => {
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.status, 4, &mut self.unknown_fields)?
},
5 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::traffic_percent_strategy(is.read_message()?));
},
200 => {
if wire_type != ::protobuf::wire_format::WireTypeLengthDelimited {
return ::std::result::Result::Err(::protobuf::rt::unexpected_wire_type(wire_type));
}
self.strategy = ::std::option::Option::Some(Rollout_oneof_strategy::delete_service_strategy(is.read_message()?));
},
8 => {
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.service_name)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
if !self.rollout_id.is_empty() {
my_size += ::protobuf::rt::string_size(1, &self.rollout_id);
}
if let Some(ref v) = self.create_time.as_ref() {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
}
if !self.created_by.is_empty() {
my_size += ::protobuf::rt::string_size(3, &self.created_by);
}
if self.status != Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED {
my_size += ::protobuf::rt::enum_size(4, self.status);
}
if !self.service_name.is_empty() {
my_size += ::protobuf::rt::string_size(8, &self.service_name);
}
if let ::std::option::Option::Some(ref v) = self.strategy {
match v {
&Rollout_oneof_strategy::traffic_percent_strategy(ref v) => {
let len = v.compute_size();
my_size += 1 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
&Rollout_oneof_strategy::delete_service_strategy(ref v) => {
let len = v.compute_size();
my_size += 2 + ::protobuf::rt::compute_raw_varint32_size(len) + len;
},
};
}
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
if !self.rollout_id.is_empty() {
os.write_string(1, &self.rollout_id)?;
}
if let Some(ref v) = self.create_time.as_ref() {
os.write_tag(2, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
}
if !self.created_by.is_empty() {
os.write_string(3, &self.created_by)?;
}
if self.status != Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED {
os.write_enum(4, ::protobuf::ProtobufEnum::value(&self.status))?;
}
if !self.service_name.is_empty() {
os.write_string(8, &self.service_name)?;
}
if let ::std::option::Option::Some(ref v) = self.strategy {
match v {
&Rollout_oneof_strategy::traffic_percent_strategy(ref v) => {
os.write_tag(5, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
&Rollout_oneof_strategy::delete_service_strategy(ref v) => {
os.write_tag(200, ::protobuf::wire_format::WireTypeLengthDelimited)?;
os.write_raw_varint32(v.get_cached_size())?;
v.write_to_with_cached_sizes(os)?;
},
};
}
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Rollout {
Rollout::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"rollout_id",
|m: &Rollout| { &m.rollout_id },
|m: &mut Rollout| { &mut m.rollout_id },
));
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<::protobuf::well_known_types::Timestamp>>(
"create_time",
|m: &Rollout| { &m.create_time },
|m: &mut Rollout| { &mut m.create_time },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"created_by",
|m: &Rollout| { &m.created_by },
|m: &mut Rollout| { &mut m.created_by },
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<Rollout_RolloutStatus>>(
"status",
|m: &Rollout| { &m.status },
|m: &mut Rollout| { &mut m.status },
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, Rollout_TrafficPercentStrategy>(
"traffic_percent_strategy",
Rollout::has_traffic_percent_strategy,
Rollout::get_traffic_percent_strategy,
));
fields.push(::protobuf::reflect::accessor::make_singular_message_accessor::<_, Rollout_DeleteServiceStrategy>(
"delete_service_strategy",
Rollout::has_delete_service_strategy,
Rollout::get_delete_service_strategy,
));
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
"service_name",
|m: &Rollout| { &m.service_name },
|m: &mut Rollout| { &mut m.service_name },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Rollout>(
"Rollout",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Rollout {
static instance: ::protobuf::rt::LazyV2<Rollout> = ::protobuf::rt::LazyV2::INIT;
instance.get(Rollout::new)
}
}
impl ::protobuf::Clear for Rollout {
fn clear(&mut self) {
self.rollout_id.clear();
self.create_time.clear();
self.created_by.clear();
self.status = Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED;
        self.strategy = ::std::option::Option::None;
self.service_name.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Rollout {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Rollout {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Rollout_TrafficPercentStrategy {
// message fields
pub percentages: ::std::collections::HashMap<::std::string::String, f64>,
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Rollout_TrafficPercentStrategy {
fn default() -> &'a Rollout_TrafficPercentStrategy {
<Rollout_TrafficPercentStrategy as ::protobuf::Message>::default_instance()
}
}
impl Rollout_TrafficPercentStrategy {
pub fn new() -> Rollout_TrafficPercentStrategy {
::std::default::Default::default()
}
// repeated .google.api.servicemanagement.v1.Rollout.TrafficPercentStrategy.PercentagesEntry percentages = 1;
pub fn get_percentages(&self) -> &::std::collections::HashMap<::std::string::String, f64> {
&self.percentages
}
pub fn clear_percentages(&mut self) {
self.percentages.clear();
}
// Param is passed by value, moved
pub fn set_percentages(&mut self, v: ::std::collections::HashMap<::std::string::String, f64>) {
self.percentages = v;
}
// Mutable pointer to the field.
pub fn mut_percentages(&mut self) -> &mut ::std::collections::HashMap<::std::string::String, f64> {
&mut self.percentages
}
// Take field
pub fn take_percentages(&mut self) -> ::std::collections::HashMap<::std::string::String, f64> {
::std::mem::replace(&mut self.percentages, ::std::collections::HashMap::new())
}
}
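// Illustrative sketch: the percentages map keys are service config IDs and the values
// are traffic shares; per the descriptor comments embedded below, the values must be
// greater than 0.0 and sum to 100.0.
#[allow(dead_code)]
fn example_traffic_split() -> Rollout_TrafficPercentStrategy {
    let mut strategy = Rollout_TrafficPercentStrategy::new();
    strategy
        .mut_percentages()
        .insert("example.googleapis.com/20160201".to_string(), 70.0);
    strategy
        .mut_percentages()
        .insert("example.googleapis.com/20160206".to_string(), 30.0);
    strategy
}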
impl ::protobuf::Message for Rollout_TrafficPercentStrategy {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
1 => {
::protobuf::rt::read_map_into::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeDouble>(wire_type, is, &mut self.percentages)?;
},
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::compute_map_size::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeDouble>(1, &self.percentages);
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
::protobuf::rt::write_map_with_cached_sizes::<::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeDouble>(1, &self.percentages, os)?;
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Rollout_TrafficPercentStrategy {
Rollout_TrafficPercentStrategy::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let mut fields = ::std::vec::Vec::new();
fields.push(::protobuf::reflect::accessor::make_map_accessor::<_, ::protobuf::types::ProtobufTypeString, ::protobuf::types::ProtobufTypeDouble>(
"percentages",
|m: &Rollout_TrafficPercentStrategy| { &m.percentages },
|m: &mut Rollout_TrafficPercentStrategy| { &mut m.percentages },
));
::protobuf::reflect::MessageDescriptor::new_pb_name::<Rollout_TrafficPercentStrategy>(
"Rollout.TrafficPercentStrategy",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Rollout_TrafficPercentStrategy {
static instance: ::protobuf::rt::LazyV2<Rollout_TrafficPercentStrategy> = ::protobuf::rt::LazyV2::INIT;
instance.get(Rollout_TrafficPercentStrategy::new)
}
}
impl ::protobuf::Clear for Rollout_TrafficPercentStrategy {
fn clear(&mut self) {
self.percentages.clear();
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Rollout_TrafficPercentStrategy {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Rollout_TrafficPercentStrategy {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(PartialEq,Clone,Default)]
pub struct Rollout_DeleteServiceStrategy {
// special fields
pub unknown_fields: ::protobuf::UnknownFields,
pub cached_size: ::protobuf::CachedSize,
}
impl<'a> ::std::default::Default for &'a Rollout_DeleteServiceStrategy {
fn default() -> &'a Rollout_DeleteServiceStrategy {
<Rollout_DeleteServiceStrategy as ::protobuf::Message>::default_instance()
}
}
impl Rollout_DeleteServiceStrategy {
pub fn new() -> Rollout_DeleteServiceStrategy {
::std::default::Default::default()
}
}
impl ::protobuf::Message for Rollout_DeleteServiceStrategy {
fn is_initialized(&self) -> bool {
true
}
fn merge_from(&mut self, is: &mut ::protobuf::CodedInputStream<'_>) -> ::protobuf::ProtobufResult<()> {
while !is.eof()? {
let (field_number, wire_type) = is.read_tag_unpack()?;
match field_number {
_ => {
::protobuf::rt::read_unknown_or_skip_group(field_number, wire_type, is, self.mut_unknown_fields())?;
},
};
}
::std::result::Result::Ok(())
}
// Compute sizes of nested messages
#[allow(unused_variables)]
fn compute_size(&self) -> u32 {
let mut my_size = 0;
my_size += ::protobuf::rt::unknown_fields_size(self.get_unknown_fields());
self.cached_size.set(my_size);
my_size
}
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
os.write_unknown_fields(self.get_unknown_fields())?;
::std::result::Result::Ok(())
}
fn get_cached_size(&self) -> u32 {
self.cached_size.get()
}
fn get_unknown_fields(&self) -> &::protobuf::UnknownFields {
&self.unknown_fields
}
fn mut_unknown_fields(&mut self) -> &mut ::protobuf::UnknownFields {
&mut self.unknown_fields
}
fn as_any(&self) -> &dyn (::std::any::Any) {
self as &dyn (::std::any::Any)
}
fn as_any_mut(&mut self) -> &mut dyn (::std::any::Any) {
self as &mut dyn (::std::any::Any)
}
fn into_any(self: ::std::boxed::Box<Self>) -> ::std::boxed::Box<dyn (::std::any::Any)> {
self
}
fn descriptor(&self) -> &'static ::protobuf::reflect::MessageDescriptor {
Self::descriptor_static()
}
fn new() -> Rollout_DeleteServiceStrategy {
Rollout_DeleteServiceStrategy::new()
}
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::MessageDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
let fields = ::std::vec::Vec::new();
::protobuf::reflect::MessageDescriptor::new_pb_name::<Rollout_DeleteServiceStrategy>(
"Rollout.DeleteServiceStrategy",
fields,
file_descriptor_proto()
)
})
}
fn default_instance() -> &'static Rollout_DeleteServiceStrategy {
static instance: ::protobuf::rt::LazyV2<Rollout_DeleteServiceStrategy> = ::protobuf::rt::LazyV2::INIT;
instance.get(Rollout_DeleteServiceStrategy::new)
}
}
impl ::protobuf::Clear for Rollout_DeleteServiceStrategy {
fn clear(&mut self) {
self.unknown_fields.clear();
}
}
impl ::std::fmt::Debug for Rollout_DeleteServiceStrategy {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
::protobuf::text_format::fmt(self, f)
}
}
impl ::protobuf::reflect::ProtobufValue for Rollout_DeleteServiceStrategy {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Message(self)
}
}
#[derive(Clone,PartialEq,Eq,Debug,Hash)]
pub enum Rollout_RolloutStatus {
ROLLOUT_STATUS_UNSPECIFIED = 0,
IN_PROGRESS = 1,
SUCCESS = 2,
CANCELLED = 3,
FAILED = 4,
PENDING = 5,
FAILED_ROLLED_BACK = 6,
}
impl ::protobuf::ProtobufEnum for Rollout_RolloutStatus {
fn value(&self) -> i32 {
*self as i32
}
fn from_i32(value: i32) -> ::std::option::Option<Rollout_RolloutStatus> {
match value {
0 => ::std::option::Option::Some(Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED),
1 => ::std::option::Option::Some(Rollout_RolloutStatus::IN_PROGRESS),
2 => ::std::option::Option::Some(Rollout_RolloutStatus::SUCCESS),
3 => ::std::option::Option::Some(Rollout_RolloutStatus::CANCELLED),
4 => ::std::option::Option::Some(Rollout_RolloutStatus::FAILED),
5 => ::std::option::Option::Some(Rollout_RolloutStatus::PENDING),
6 => ::std::option::Option::Some(Rollout_RolloutStatus::FAILED_ROLLED_BACK),
_ => ::std::option::Option::None
}
}
fn values() -> &'static [Self] {
static values: &'static [Rollout_RolloutStatus] = &[
Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED,
Rollout_RolloutStatus::IN_PROGRESS,
Rollout_RolloutStatus::SUCCESS,
Rollout_RolloutStatus::CANCELLED,
Rollout_RolloutStatus::FAILED,
Rollout_RolloutStatus::PENDING,
Rollout_RolloutStatus::FAILED_ROLLED_BACK,
];
values
}
fn enum_descriptor_static() -> &'static ::protobuf::reflect::EnumDescriptor {
static descriptor: ::protobuf::rt::LazyV2<::protobuf::reflect::EnumDescriptor> = ::protobuf::rt::LazyV2::INIT;
descriptor.get(|| {
::protobuf::reflect::EnumDescriptor::new_pb_name::<Rollout_RolloutStatus>("Rollout.RolloutStatus", file_descriptor_proto())
})
}
}
impl ::std::marker::Copy for Rollout_RolloutStatus {
}
impl ::std::default::Default for Rollout_RolloutStatus {
fn default() -> Self {
Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED
}
}
impl ::protobuf::reflect::ProtobufValue for Rollout_RolloutStatus {
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
::protobuf::reflect::ReflectValueRef::Enum(::protobuf::ProtobufEnum::descriptor(self))
}
}
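// Illustrative sketch: decoding a rollout status received as a bare integer. Values
// outside the known range decode to `None`; during parsing such values are kept in
// the unknown-fields set (see `read_proto3_enum_with_unknown_fields_into` above), so
// a defensive fallback is used here.
#[allow(dead_code)]
fn example_rollout_status(raw: i32) -> Rollout_RolloutStatus {
    <Rollout_RolloutStatus as ::protobuf::ProtobufEnum>::from_i32(raw)
        .unwrap_or(Rollout_RolloutStatus::ROLLOUT_STATUS_UNSPECIFIED)
}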
static file_descriptor_proto_data: &'static [u8] = b"\
\n/google/api/servicemanagement/v1/resources.proto\x12\x1fgoogle.api.ser\
vicemanagement.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/api/\
config_change.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x17google\
/api/metric.proto\x1a\x16google/api/quota.proto\x1a\x18google/api/servic\
e.proto\x1a#google/longrunning/operations.proto\x1a\x19google/protobuf/a\
ny.proto\x1a\x20google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/\
timestamp.proto\x1a\x17google/rpc/status.proto\"c\n\x0eManagedService\
\x12!\n\x0cservice_name\x18\x02\x20\x01(\tR\x0bserviceName\x12.\n\x13pro\
ducer_project_id\x18\x03\x20\x01(\tR\x11producerProjectId\"\xdb\x03\n\
\x11OperationMetadata\x12%\n\x0eresource_names\x18\x01\x20\x03(\tR\rreso\
urceNames\x12M\n\x05steps\x18\x02\x20\x03(\x0b27.google.api.servicemanag\
ement.v1.OperationMetadata.StepR\x05steps\x12/\n\x13progress_percentage\
\x18\x03\x20\x01(\x05R\x12progressPercentage\x129\n\nstart_time\x18\x04\
\x20\x01(\x0b2\x1a.google.protobuf.TimestampR\tstartTime\x1a{\n\x04Step\
\x12\x20\n\x0bdescription\x18\x02\x20\x01(\tR\x0bdescription\x12Q\n\x06s\
tatus\x18\x04\x20\x01(\x0e29.google.api.servicemanagement.v1.OperationMe\
tadata.StatusR\x06status\"g\n\x06Status\x12\x16\n\x12STATUS_UNSPECIFIED\
\x10\0\x12\x08\n\x04DONE\x10\x01\x12\x0f\n\x0bNOT_STARTED\x10\x02\x12\
\x0f\n\x0bIN_PROGRESS\x10\x03\x12\n\n\x06FAILED\x10\x04\x12\r\n\tCANCELL\
ED\x10\x05\"\xa8\x01\n\nDiagnostic\x12\x1a\n\x08location\x18\x01\x20\x01\
(\tR\x08location\x12D\n\x04kind\x18\x02\x20\x01(\x0e20.google.api.servic\
emanagement.v1.Diagnostic.KindR\x04kind\x12\x18\n\x07message\x18\x03\x20\
\x01(\tR\x07message\"\x1e\n\x04Kind\x12\x0b\n\x07WARNING\x10\0\x12\t\n\
\x05ERROR\x10\x01\"a\n\x0cConfigSource\x12\x0e\n\x02id\x18\x05\x20\x01(\
\tR\x02id\x12A\n\x05files\x18\x02\x20\x03(\x0b2+.google.api.servicemanag\
ement.v1.ConfigFileR\x05files\"\xb7\x02\n\nConfigFile\x12\x1b\n\tfile_pa\
th\x18\x01\x20\x01(\tR\x08filePath\x12#\n\rfile_contents\x18\x03\x20\x01\
(\x0cR\x0cfileContents\x12Q\n\tfile_type\x18\x04\x20\x01(\x0e24.google.a\
pi.servicemanagement.v1.ConfigFile.FileTypeR\x08fileType\"\x93\x01\n\x08\
FileType\x12\x19\n\x15FILE_TYPE_UNSPECIFIED\x10\0\x12\x17\n\x13SERVICE_C\
ONFIG_YAML\x10\x01\x12\x11\n\rOPEN_API_JSON\x10\x02\x12\x11\n\rOPEN_API_\
YAML\x10\x03\x12\x1d\n\x19FILE_DESCRIPTOR_SET_PROTO\x10\x04\x12\x0e\n\nP\
ROTO_FILE\x10\x06\"\x1f\n\tConfigRef\x12\x12\n\x04name\x18\x01\x20\x01(\
\tR\x04name\"O\n\x0cChangeReport\x12?\n\x0econfig_changes\x18\x01\x20\
\x03(\x0b2\x18.google.api.ConfigChangeR\rconfigChanges\"\xf8\x06\n\x07Ro\
llout\x12\"\n\nrollout_id\x18\x01\x20\x01(\tR\trolloutIdB\x03\xe0A\x01\
\x12;\n\x0bcreate_time\x18\x02\x20\x01(\x0b2\x1a.google.protobuf.Timesta\
mpR\ncreateTime\x12\x1d\n\ncreated_by\x18\x03\x20\x01(\tR\tcreatedBy\x12\
N\n\x06status\x18\x04\x20\x01(\x0e26.google.api.servicemanagement.v1.Rol\
lout.RolloutStatusR\x06status\x12{\n\x18traffic_percent_strategy\x18\x05\
\x20\x01(\x0b2?.google.api.servicemanagement.v1.Rollout.TrafficPercentSt\
rategyH\0R\x16trafficPercentStrategy\x12y\n\x17delete_service_strategy\
\x18\xc8\x01\x20\x01(\x0b2>.google.api.servicemanagement.v1.Rollout.Dele\
teServiceStrategyH\0R\x15deleteServiceStrategy\x12!\n\x0cservice_name\
\x18\x08\x20\x01(\tR\x0bserviceName\x1a\xcc\x01\n\x16TrafficPercentStrat\
egy\x12r\n\x0bpercentages\x18\x01\x20\x03(\x0b2P.google.api.servicemanag\
ement.v1.Rollout.TrafficPercentStrategy.PercentagesEntryR\x0bpercentages\
\x1a>\n\x10PercentagesEntry\x12\x10\n\x03key\x18\x01\x20\x01(\tR\x03key\
\x12\x14\n\x05value\x18\x02\x20\x01(\x01R\x05value:\x028\x01\x1a\x17\n\
\x15DeleteServiceStrategy\"\x8d\x01\n\rRolloutStatus\x12\x1e\n\x1aROLLOU\
T_STATUS_UNSPECIFIED\x10\0\x12\x0f\n\x0bIN_PROGRESS\x10\x01\x12\x0b\n\
\x07SUCCESS\x10\x02\x12\r\n\tCANCELLED\x10\x03\x12\n\n\x06FAILED\x10\x04\
\x12\x0b\n\x07PENDING\x10\x05\x12\x16\n\x12FAILED_ROLLED_BACK\x10\x06B\n\
\n\x08strategyB\xff\x01\n#com.google.api.servicemanagement.v1B\x0eResour\
cesProtoP\x01ZPgoogle.golang.org/genproto/googleapis/api/servicemanageme\
nt/v1;servicemanagement\xa2\x02\x04GASM\xaa\x02!Google.Cloud.ServiceMana\
gement.V1\xca\x02!Google\\Cloud\\ServiceManagement\\V1\xea\x02$Google::C\
loud::ServiceManagement::V1J\xe0P\n\x07\x12\x05\x0f\0\xaf\x02\x01\n\xbe\
\x04\n\x01\x0c\x12\x03\x0f\0\x122\xb3\x04\x20Copyright\x202019\x20Google\
\x20LLC.\n\n\x20Licensed\x20under\x20the\x20Apache\x20License,\x20Versio\
n\x202.0\x20(the\x20\"License\");\n\x20you\x20may\x20not\x20use\x20this\
\x20file\x20except\x20in\x20compliance\x20with\x20the\x20License.\n\x20Y\
ou\x20may\x20obtain\x20a\x20copy\x20of\x20the\x20License\x20at\n\n\x20\
\x20\x20\x20\x20http://www.apache.org/licenses/LICENSE-2.0\n\n\x20Unless\
\x20required\x20by\x20applicable\x20law\x20or\x20agreed\x20to\x20in\x20w\
riting,\x20software\n\x20distributed\x20under\x20the\x20License\x20is\
\x20distributed\x20on\x20an\x20\"AS\x20IS\"\x20BASIS,\n\x20WITHOUT\x20WA\
RRANTIES\x20OR\x20CONDITIONS\x20OF\x20ANY\x20KIND,\x20either\x20express\
\x20or\x20implied.\n\x20See\x20the\x20License\x20for\x20the\x20specific\
\x20language\x20governing\x20permissions\x20and\n\x20limitations\x20unde\
r\x20the\x20License.\n\n\n\x08\n\x01\x02\x12\x03\x11\0(\n\t\n\x02\x03\0\
\x12\x03\x13\0&\n\t\n\x02\x03\x01\x12\x03\x14\0(\n\t\n\x02\x03\x02\x12\
\x03\x15\0)\n\t\n\x02\x03\x03\x12\x03\x16\0!\n\t\n\x02\x03\x04\x12\x03\
\x17\0\x20\n\t\n\x02\x03\x05\x12\x03\x18\0\"\n\t\n\x02\x03\x06\x12\x03\
\x19\0-\n\t\n\x02\x03\x07\x12\x03\x1a\0#\n\t\n\x02\x03\x08\x12\x03\x1b\0\
*\n\t\n\x02\x03\t\x12\x03\x1c\0)\n\t\n\x02\x03\n\x12\x03\x1d\0!\n\x08\n\
\x01\x08\x12\x03\x1f\0>\n\t\n\x02\x08%\x12\x03\x1f\0>\n\x08\n\x01\x08\
\x12\x03\x20\0g\n\t\n\x02\x08\x0b\x12\x03\x20\0g\n\x08\n\x01\x08\x12\x03\
!\0\"\n\t\n\x02\x08\n\x12\x03!\0\"\n\x08\n\x01\x08\x12\x03\"\0/\n\t\n\
\x02\x08\x08\x12\x03\"\0/\n\x08\n\x01\x08\x12\x03#\0<\n\t\n\x02\x08\x01\
\x12\x03#\0<\n\x08\n\x01\x08\x12\x03$\0\"\n\t\n\x02\x08$\x12\x03$\0\"\n\
\x08\n\x01\x08\x12\x03%\0>\n\t\n\x02\x08)\x12\x03%\0>\n\x08\n\x01\x08\
\x12\x03&\0=\n\t\n\x02\x08-\x12\x03&\0=\na\n\x02\x04\0\x12\x04*\01\x01\
\x1aU\x20The\x20full\x20representation\x20of\x20a\x20Service\x20that\x20\
is\x20managed\x20by\n\x20Google\x20Service\x20Management.\n\n\n\n\x03\
\x04\0\x01\x12\x03*\x08\x16\n\x8a\x01\n\x04\x04\0\x02\0\x12\x03-\x02\x1a\
\x1a}\x20The\x20name\x20of\x20the\x20service.\x20See\x20the\x20[overview\
](https://cloud.google.com/service-management/overview)\n\x20for\x20nami\
ng\x20requirements.\n\n\x0c\n\x05\x04\0\x02\0\x05\x12\x03-\x02\x08\n\x0c\
\n\x05\x04\0\x02\0\x01\x12\x03-\t\x15\n\x0c\n\x05\x04\0\x02\0\x03\x12\
\x03-\x18\x19\nE\n\x04\x04\0\x02\x01\x12\x030\x02!\x1a8\x20ID\x20of\x20t\
he\x20project\x20that\x20produces\x20and\x20owns\x20this\x20service.\n\n\
\x0c\n\x05\x04\0\x02\x01\x05\x12\x030\x02\x08\n\x0c\n\x05\x04\0\x02\x01\
\x01\x12\x030\t\x1c\n\x0c\n\x05\x04\0\x02\x01\x03\x12\x030\x1f\x20\nM\n\
\x02\x04\x01\x12\x044\0`\x01\x1aA\x20The\x20metadata\x20associated\x20wi\
th\x20a\x20long\x20running\x20operation\x20resource.\n\n\n\n\x03\x04\x01\
\x01\x12\x034\x08\x19\n<\n\x04\x04\x01\x03\0\x12\x046\x02<\x03\x1a.\x20R\
epresents\x20the\x20status\x20of\x20one\x20operation\x20step.\n\n\x0c\n\
\x05\x04\x01\x03\0\x01\x12\x036\n\x0e\n3\n\x06\x04\x01\x03\0\x02\0\x12\
\x038\x04\x1b\x1a$\x20The\x20short\x20description\x20of\x20the\x20step.\
\n\n\x0e\n\x07\x04\x01\x03\0\x02\0\x05\x12\x038\x04\n\n\x0e\n\x07\x04\
\x01\x03\0\x02\0\x01\x12\x038\x0b\x16\n\x0e\n\x07\x04\x01\x03\0\x02\0\
\x03\x12\x038\x19\x1a\n!\n\x06\x04\x01\x03\0\x02\x01\x12\x03;\x04\x16\
\x1a\x12\x20The\x20status\x20code.\n\n\x0e\n\x07\x04\x01\x03\0\x02\x01\
\x06\x12\x03;\x04\n\n\x0e\n\x07\x04\x01\x03\0\x02\x01\x01\x12\x03;\x0b\
\x11\n\x0e\n\x07\x04\x01\x03\0\x02\x01\x03\x12\x03;\x14\x15\nQ\n\x04\x04\
\x01\x04\0\x12\x04?\x02R\x03\x1aC\x20Code\x20describes\x20the\x20status\
\x20of\x20the\x20operation\x20(or\x20one\x20of\x20its\x20steps).\n\n\x0c\
\n\x05\x04\x01\x04\0\x01\x12\x03?\x07\r\n!\n\x06\x04\x01\x04\0\x02\0\x12\
\x03A\x04\x1b\x1a\x12\x20Unspecifed\x20code.\n\n\x0e\n\x07\x04\x01\x04\0\
\x02\0\x01\x12\x03A\x04\x16\n\x0e\n\x07\x04\x01\x04\0\x02\0\x02\x12\x03A\
\x19\x1a\nD\n\x06\x04\x01\x04\0\x02\x01\x12\x03D\x04\r\x1a5\x20The\x20op\
eration\x20or\x20step\x20has\x20completed\x20without\x20errors.\n\n\x0e\
\n\x07\x04\x01\x04\0\x02\x01\x01\x12\x03D\x04\x08\n\x0e\n\x07\x04\x01\
\x04\0\x02\x01\x02\x12\x03D\x0b\x0c\n;\n\x06\x04\x01\x04\0\x02\x02\x12\
\x03G\x04\x14\x1a,\x20The\x20operation\x20or\x20step\x20has\x20not\x20st\
arted\x20yet.\n\n\x0e\n\x07\x04\x01\x04\0\x02\x02\x01\x12\x03G\x04\x0f\n\
\x0e\n\x07\x04\x01\x04\0\x02\x02\x02\x12\x03G\x12\x13\n6\n\x06\x04\x01\
\x04\0\x02\x03\x12\x03J\x04\x14\x1a'\x20The\x20operation\x20or\x20step\
\x20is\x20in\x20progress.\n\n\x0e\n\x07\x04\x01\x04\0\x02\x03\x01\x12\
\x03J\x04\x0f\n\x0e\n\x07\x04\x01\x04\0\x02\x03\x02\x12\x03J\x12\x13\n\
\x8c\x01\n\x06\x04\x01\x04\0\x02\x04\x12\x03N\x04\x0f\x1a}\x20The\x20ope\
ration\x20or\x20step\x20has\x20completed\x20with\x20errors.\x20If\x20the\
\x20operation\x20is\n\x20rollbackable,\x20the\x20rollback\x20completed\
\x20with\x20errors\x20too.\n\n\x0e\n\x07\x04\x01\x04\0\x02\x04\x01\x12\
\x03N\x04\n\n\x0e\n\x07\x04\x01\x04\0\x02\x04\x02\x12\x03N\r\x0e\nG\n\
\x06\x04\x01\x04\0\x02\x05\x12\x03Q\x04\x12\x1a8\x20The\x20operation\x20\
or\x20step\x20has\x20completed\x20with\x20cancellation.\n\n\x0e\n\x07\
\x04\x01\x04\0\x02\x05\x01\x12\x03Q\x04\r\n\x0e\n\x07\x04\x01\x04\0\x02\
\x05\x02\x12\x03Q\x10\x11\n_\n\x04\x04\x01\x02\0\x12\x03V\x02%\x1aR\x20T\
he\x20full\x20name\x20of\x20the\x20resources\x20that\x20this\x20operatio\
n\x20is\x20directly\n\x20associated\x20with.\n\n\x0c\n\x05\x04\x01\x02\0\
\x04\x12\x03V\x02\n\n\x0c\n\x05\x04\x01\x02\0\x05\x12\x03V\x0b\x11\n\x0c\
\n\x05\x04\x01\x02\0\x01\x12\x03V\x12\x20\n\x0c\n\x05\x04\x01\x02\0\x03\
\x12\x03V#$\nT\n\x04\x04\x01\x02\x01\x12\x03Y\x02\x1a\x1aG\x20Detailed\
\x20status\x20information\x20for\x20each\x20step.\x20The\x20order\x20is\
\x20undetermined.\n\n\x0c\n\x05\x04\x01\x02\x01\x04\x12\x03Y\x02\n\n\x0c\
\n\x05\x04\x01\x02\x01\x06\x12\x03Y\x0b\x0f\n\x0c\n\x05\x04\x01\x02\x01\
\x01\x12\x03Y\x10\x15\n\x0c\n\x05\x04\x01\x02\x01\x03\x12\x03Y\x18\x19\n\
Q\n\x04\x04\x01\x02\x02\x12\x03\\\x02\x20\x1aD\x20Percentage\x20of\x20co\
mpletion\x20of\x20this\x20operation,\x20ranging\x20from\x200\x20to\x2010\
0.\n\n\x0c\n\x05\x04\x01\x02\x02\x05\x12\x03\\\x02\x07\n\x0c\n\x05\x04\
\x01\x02\x02\x01\x12\x03\\\x08\x1b\n\x0c\n\x05\x04\x01\x02\x02\x03\x12\
\x03\\\x1e\x1f\n/\n\x04\x04\x01\x02\x03\x12\x03_\x02+\x1a\"\x20The\x20st\
art\x20time\x20of\x20the\x20operation.\n\n\x0c\n\x05\x04\x01\x02\x03\x06\
\x12\x03_\x02\x1b\n\x0c\n\x05\x04\x01\x02\x03\x01\x12\x03_\x1c&\n\x0c\n\
\x05\x04\x01\x02\x03\x03\x12\x03_)*\n@\n\x02\x04\x02\x12\x04c\0u\x01\x1a\
4\x20Represents\x20a\x20diagnostic\x20message\x20(error\x20or\x20warning\
)\n\n\n\n\x03\x04\x02\x01\x12\x03c\x08\x12\n<\n\x04\x04\x02\x04\0\x12\
\x04e\x02k\x03\x1a.\x20The\x20kind\x20of\x20diagnostic\x20information\
\x20possible.\n\n\x0c\n\x05\x04\x02\x04\0\x01\x12\x03e\x07\x0b\n$\n\x06\
\x04\x02\x04\0\x02\0\x12\x03g\x04\x10\x1a\x15\x20Warnings\x20and\x20erro\
rs\n\n\x0e\n\x07\x04\x02\x04\0\x02\0\x01\x12\x03g\x04\x0b\n\x0e\n\x07\
\x04\x02\x04\0\x02\0\x02\x12\x03g\x0e\x0f\n\x1c\n\x06\x04\x02\x04\0\x02\
\x01\x12\x03j\x04\x0e\x1a\r\x20Only\x20errors\n\n\x0e\n\x07\x04\x02\x04\
\0\x02\x01\x01\x12\x03j\x04\t\n\x0e\n\x07\x04\x02\x04\0\x02\x01\x02\x12\
\x03j\x0c\r\nA\n\x04\x04\x02\x02\0\x12\x03n\x02\x16\x1a4\x20File\x20name\
\x20and\x20line\x20number\x20of\x20the\x20error\x20or\x20warning.\n\n\
\x0c\n\x05\x04\x02\x02\0\x05\x12\x03n\x02\x08\n\x0c\n\x05\x04\x02\x02\0\
\x01\x12\x03n\t\x11\n\x0c\n\x05\x04\x02\x02\0\x03\x12\x03n\x14\x15\n;\n\
\x04\x04\x02\x02\x01\x12\x03q\x02\x10\x1a.\x20The\x20kind\x20of\x20diagn\
ostic\x20information\x20provided.\n\n\x0c\n\x05\x04\x02\x02\x01\x06\x12\
\x03q\x02\x06\n\x0c\n\x05\x04\x02\x02\x01\x01\x12\x03q\x07\x0b\n\x0c\n\
\x05\x04\x02\x02\x01\x03\x12\x03q\x0e\x0f\n7\n\x04\x04\x02\x02\x02\x12\
\x03t\x02\x15\x1a*\x20Message\x20describing\x20the\x20error\x20or\x20war\
ning.\n\n\x0c\n\x05\x04\x02\x02\x02\x05\x12\x03t\x02\x08\n\x0c\n\x05\x04\
\x02\x02\x02\x01\x12\x03t\t\x10\n\x0c\n\x05\x04\x02\x02\x02\x03\x12\x03t\
\x13\x14\n}\n\x02\x04\x03\x12\x05y\0\x82\x01\x01\x1ap\x20Represents\x20a\
\x20source\x20file\x20which\x20is\x20used\x20to\x20generate\x20the\x20se\
rvice\x20configuration\n\x20defined\x20by\x20`google.api.Service`.\n\n\n\
\n\x03\x04\x03\x01\x12\x03y\x08\x14\n\xb6\x01\n\x04\x04\x03\x02\0\x12\
\x03}\x02\x10\x1a\xa8\x01\x20A\x20unique\x20ID\x20for\x20a\x20specific\
\x20instance\x20of\x20this\x20message,\x20typically\x20assigned\n\x20by\
\x20the\x20client\x20for\x20tracking\x20purpose.\x20If\x20empty,\x20the\
\x20server\x20may\x20choose\x20to\n\x20generate\x20one\x20instead.\n\n\
\x0c\n\x05\x04\x03\x02\0\x05\x12\x03}\x02\x08\n\x0c\n\x05\x04\x03\x02\0\
\x01\x12\x03}\t\x0b\n\x0c\n\x05\x04\x03\x02\0\x03\x12\x03}\x0e\x0f\n|\n\
\x04\x04\x03\x02\x01\x12\x04\x81\x01\x02\x20\x1an\x20Set\x20of\x20source\
\x20configuration\x20files\x20that\x20are\x20used\x20to\x20generate\x20a\
\x20service\n\x20configuration\x20(`google.api.Service`).\n\n\r\n\x05\
\x04\x03\x02\x01\x04\x12\x04\x81\x01\x02\n\n\r\n\x05\x04\x03\x02\x01\x06\
\x12\x04\x81\x01\x0b\x15\n\r\n\x05\x04\x03\x02\x01\x01\x12\x04\x81\x01\
\x16\x1b\n\r\n\x05\x04\x03\x02\x01\x03\x12\x04\x81\x01\x1e\x1f\nD\n\x02\
\x04\x04\x12\x06\x85\x01\0\xac\x01\x01\x1a6\x20Generic\x20specification\
\x20of\x20a\x20source\x20configuration\x20file\n\n\x0b\n\x03\x04\x04\x01\
\x12\x04\x85\x01\x08\x12\n\x0e\n\x04\x04\x04\x04\0\x12\x06\x86\x01\x02\
\xa2\x01\x03\n\r\n\x05\x04\x04\x04\0\x01\x12\x04\x86\x01\x07\x0f\n$\n\
\x06\x04\x04\x04\0\x02\0\x12\x04\x88\x01\x04\x1e\x1a\x14\x20Unknown\x20f\
ile\x20type.\n\n\x0f\n\x07\x04\x04\x04\0\x02\0\x01\x12\x04\x88\x01\x04\
\x19\n\x0f\n\x07\x04\x04\x04\0\x02\0\x02\x12\x04\x88\x01\x1c\x1d\n0\n\
\x06\x04\x04\x04\0\x02\x01\x12\x04\x8b\x01\x04\x1c\x1a\x20\x20YAML-speci\
fication\x20of\x20service.\n\n\x0f\n\x07\x04\x04\x04\0\x02\x01\x01\x12\
\x04\x8b\x01\x04\x17\n\x0f\n\x07\x04\x04\x04\0\x02\x01\x02\x12\x04\x8b\
\x01\x1a\x1b\n<\n\x06\x04\x04\x04\0\x02\x02\x12\x04\x8e\x01\x04\x16\x1a,\
\x20OpenAPI\x20specification,\x20serialized\x20in\x20JSON.\n\n\x0f\n\x07\
\x04\x04\x04\0\x02\x02\x01\x12\x04\x8e\x01\x04\x11\n\x0f\n\x07\x04\x04\
\x04\0\x02\x02\x02\x12\x04\x8e\x01\x14\x15\n<\n\x06\x04\x04\x04\0\x02\
\x03\x12\x04\x91\x01\x04\x16\x1a,\x20OpenAPI\x20specification,\x20serial\
ized\x20in\x20YAML.\n\n\x0f\n\x07\x04\x04\x04\0\x02\x03\x01\x12\x04\x91\
\x01\x04\x11\n\x0f\n\x07\x04\x04\x04\0\x02\x03\x02\x12\x04\x91\x01\x14\
\x15\n\xaa\x02\n\x06\x04\x04\x04\0\x02\x04\x12\x04\x9a\x01\x04\"\x1a\x99\
\x02\x20FileDescriptorSet,\x20generated\x20by\x20protoc.\n\n\x20To\x20ge\
nerate,\x20use\x20protoc\x20with\x20imports\x20and\x20source\x20info\x20\
included.\n\x20For\x20an\x20example\x20test.proto\x20file,\x20the\x20fol\
lowing\x20command\x20would\x20put\x20the\x20value\n\x20in\x20a\x20new\
\x20file\x20named\x20out.pb.\n\n\x20$protoc\x20--include_imports\x20--in\
clude_source_info\x20test.proto\x20-o\x20out.pb\n\n\x0f\n\x07\x04\x04\
\x04\0\x02\x04\x01\x12\x04\x9a\x01\x04\x1d\n\x0f\n\x07\x04\x04\x04\0\x02\
\x04\x02\x12\x04\x9a\x01\x20!\n\xd3\x02\n\x06\x04\x04\x04\0\x02\x05\x12\
\x04\xa1\x01\x04\x13\x1a\xc2\x02\x20Uncompiled\x20Proto\x20file.\x20Used\
\x20for\x20storage\x20and\x20display\x20purposes\x20only,\n\x20currently\
\x20server-side\x20compilation\x20is\x20not\x20supported.\x20Should\x20m\
atch\x20the\n\x20inputs\x20to\x20'protoc'\x20command\x20used\x20to\x20ge\
nerated\x20FILE_DESCRIPTOR_SET_PROTO.\x20A\n\x20file\x20of\x20this\x20ty\
pe\x20can\x20only\x20be\x20included\x20if\x20at\x20least\x20one\x20file\
\x20of\x20type\n\x20FILE_DESCRIPTOR_SET_PROTO\x20is\x20included.\n\n\x0f\
\n\x07\x04\x04\x04\0\x02\x05\x01\x12\x04\xa1\x01\x04\x0e\n\x0f\n\x07\x04\
\x04\x04\0\x02\x05\x02\x12\x04\xa1\x01\x11\x12\nP\n\x04\x04\x04\x02\0\
\x12\x04\xa5\x01\x02\x17\x1aB\x20The\x20file\x20name\x20of\x20the\x20con\
figuration\x20file\x20(full\x20or\x20relative\x20path).\n\n\r\n\x05\x04\
\x04\x02\0\x05\x12\x04\xa5\x01\x02\x08\n\r\n\x05\x04\x04\x02\0\x01\x12\
\x04\xa5\x01\t\x12\n\r\n\x05\x04\x04\x02\0\x03\x12\x04\xa5\x01\x15\x16\n\
3\n\x04\x04\x04\x02\x01\x12\x04\xa8\x01\x02\x1a\x1a%\x20The\x20bytes\x20\
that\x20constitute\x20the\x20file.\n\n\r\n\x05\x04\x04\x02\x01\x05\x12\
\x04\xa8\x01\x02\x07\n\r\n\x05\x04\x04\x02\x01\x01\x12\x04\xa8\x01\x08\
\x15\n\r\n\x05\x04\x04\x02\x01\x03\x12\x04\xa8\x01\x18\x19\n?\n\x04\x04\
\x04\x02\x02\x12\x04\xab\x01\x02\x19\x1a1\x20The\x20type\x20of\x20config\
uration\x20file\x20this\x20represents.\n\n\r\n\x05\x04\x04\x02\x02\x06\
\x12\x04\xab\x01\x02\n\n\r\n\x05\x04\x04\x02\x02\x01\x12\x04\xab\x01\x0b\
\x14\n\r\n\x05\x04\x04\x02\x02\x03\x12\x04\xab\x01\x17\x18\nH\n\x02\x04\
\x05\x12\x06\xaf\x01\0\xb3\x01\x01\x1a:\x20Represents\x20a\x20service\
\x20configuration\x20with\x20its\x20name\x20and\x20id.\n\n\x0b\n\x03\x04\
\x05\x01\x12\x04\xaf\x01\x08\x11\n\x85\x01\n\x04\x04\x05\x02\0\x12\x04\
\xb2\x01\x02\x12\x1aw\x20Resource\x20name\x20of\x20a\x20service\x20confi\
g.\x20It\x20must\x20have\x20the\x20following\n\x20format:\x20\"services/\
{service\x20name}/configs/{config\x20id}\".\n\n\r\n\x05\x04\x05\x02\0\
\x05\x12\x04\xb2\x01\x02\x08\n\r\n\x05\x04\x05\x02\0\x01\x12\x04\xb2\x01\
\t\r\n\r\n\x05\x04\x05\x02\0\x03\x12\x04\xb2\x01\x10\x11\n\xb5\x01\n\x02\
\x04\x06\x12\x06\xb9\x01\0\xc0\x01\x01\x1a\xa6\x01\x20Change\x20report\
\x20associated\x20with\x20a\x20particular\x20service\x20configuration.\n\
\n\x20It\x20contains\x20a\x20list\x20of\x20ConfigChanges\x20based\x20on\
\x20the\x20comparison\x20between\n\x20two\x20service\x20configurations.\
\n\n\x0b\n\x03\x04\x06\x01\x12\x04\xb9\x01\x08\x14\n\xae\x02\n\x04\x04\
\x06\x02\0\x12\x04\xbf\x01\x026\x1a\x9f\x02\x20List\x20of\x20changes\x20\
between\x20two\x20service\x20configurations.\n\x20The\x20changes\x20will\
\x20be\x20alphabetically\x20sorted\x20based\x20on\x20the\x20identifier\n\
\x20of\x20each\x20change.\n\x20A\x20ConfigChange\x20identifier\x20is\x20\
a\x20dot\x20separated\x20path\x20to\x20the\x20configuration.\n\x20Exampl\
e:\x20visibility.rules[selector='LibraryService.CreateBook'].restriction\
\n\n\r\n\x05\x04\x06\x02\0\x04\x12\x04\xbf\x01\x02\n\n\r\n\x05\x04\x06\
\x02\0\x06\x12\x04\xbf\x01\x0b\"\n\r\n\x05\x04\x06\x02\0\x01\x12\x04\xbf\
\x01#1\n\r\n\x05\x04\x06\x02\0\x03\x12\x04\xbf\x0145\n\xeb\x01\n\x02\x04\
\x07\x12\x06\xc5\x01\0\xaf\x02\x01\x1a\xdc\x01\x20A\x20rollout\x20resour\
ce\x20that\x20defines\x20how\x20service\x20configuration\x20versions\x20\
are\x20pushed\n\x20to\x20control\x20plane\x20systems.\x20Typically,\x20y\
ou\x20create\x20a\x20new\x20version\x20of\x20the\n\x20service\x20config,\
\x20and\x20then\x20create\x20a\x20Rollout\x20to\x20push\x20the\x20servic\
e\x20config.\n\n\x0b\n\x03\x04\x07\x01\x12\x04\xc5\x01\x08\x0f\n\xbd\x06\
\n\x04\x04\x07\x03\0\x12\x06\xe4\x01\x02\xe9\x01\x03\x1a\xac\x06\x20Stra\
tegy\x20that\x20specifies\x20how\x20clients\x20of\x20Google\x20Service\
\x20Controller\x20want\x20to\n\x20send\x20traffic\x20to\x20use\x20differ\
ent\x20config\x20versions.\x20This\x20is\x20generally\n\x20used\x20by\
\x20API\x20proxy\x20to\x20split\x20traffic\x20based\x20on\x20your\x20con\
figured\x20percentage\x20for\n\x20each\x20config\x20version.\n\n\x20One\
\x20example\x20of\x20how\x20to\x20gradually\x20rollout\x20a\x20new\x20se\
rvice\x20configuration\x20using\n\x20this\n\x20strategy:\n\x20Day\x201\n\
\n\x20\x20\x20\x20\x20Rollout\x20{\n\x20\x20\x20\x20\x20\x20\x20id:\x20\
\"example.googleapis.com/rollout_20160206\"\n\x20\x20\x20\x20\x20\x20\
\x20traffic_percent_strategy\x20{\n\x20\x20\x20\x20\x20\x20\x20\x20\x20p\
ercentages:\x20{\n\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\"example.\
googleapis.com/20160201\":\x2070.00\n\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\"example.googleapis.com/20160206\":\x2030.00\n\x20\x20\x20\
\x20\x20\x20\x20\x20\x20}\n\x20\x20\x20\x20\x20\x20\x20}\n\x20\x20\x20\
\x20\x20}\n\n\x20Day\x202\n\n\x20\x20\x20\x20\x20Rollout\x20{\n\x20\x20\
\x20\x20\x20\x20\x20id:\x20\"example.googleapis.com/rollout_20160207\"\n\
\x20\x20\x20\x20\x20\x20\x20traffic_percent_strategy:\x20{\n\x20\x20\x20\
\x20\x20\x20\x20\x20\x20percentages:\x20{\n\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\"example.googleapis.com/20160206\":\x20100.00\n\x20\x20\
\x20\x20\x20\x20\x20\x20\x20}\n\x20\x20\x20\x20\x20\x20\x20}\n\x20\x20\
\x20\x20\x20}\n\n\r\n\x05\x04\x07\x03\0\x01\x12\x04\xe4\x01\n\x20\n\xe3\
\x01\n\x06\x04\x07\x03\0\x02\0\x12\x04\xe8\x01\x04(\x1a\xd2\x01\x20Maps\
\x20service\x20configuration\x20IDs\x20to\x20their\x20corresponding\x20t\
raffic\x20percentage.\n\x20Key\x20is\x20the\x20service\x20configuration\
\x20ID,\x20Value\x20is\x20the\x20traffic\x20percentage\n\x20which\x20mus\
t\x20be\x20greater\x20than\x200.0\x20and\x20the\x20sum\x20must\x20equal\
\x20to\x20100.0.\n\n\x0f\n\x07\x04\x07\x03\0\x02\0\x06\x12\x04\xe8\x01\
\x04\x17\n\x0f\n\x07\x04\x07\x03\0\x02\0\x01\x12\x04\xe8\x01\x18#\n\x0f\
\n\x07\x04\x07\x03\0\x02\0\x03\x12\x04\xe8\x01&'\n\x94\x01\n\x04\x04\x07\
\x03\x01\x12\x06\xed\x01\x02\xef\x01\x03\x1a\x83\x01\x20Strategy\x20used\
\x20to\x20delete\x20a\x20service.\x20This\x20strategy\x20is\x20a\x20plac\
eholder\x20only\n\x20used\x20by\x20the\x20system\x20generated\x20rollout\
\x20to\x20delete\x20a\x20service.\n\n\r\n\x05\x04\x07\x03\x01\x01\x12\
\x04\xed\x01\n\x1f\n&\n\x04\x04\x07\x04\0\x12\x06\xf2\x01\x02\x89\x02\
\x03\x1a\x16\x20Status\x20of\x20a\x20Rollout.\n\n\r\n\x05\x04\x07\x04\0\
\x01\x12\x04\xf2\x01\x07\x14\n&\n\x06\x04\x07\x04\0\x02\0\x12\x04\xf4\
\x01\x04#\x1a\x16\x20No\x20status\x20specified.\n\n\x0f\n\x07\x04\x07\
\x04\0\x02\0\x01\x12\x04\xf4\x01\x04\x1e\n\x0f\n\x07\x04\x07\x04\0\x02\0\
\x02\x12\x04\xf4\x01!\"\n-\n\x06\x04\x07\x04\0\x02\x01\x12\x04\xf7\x01\
\x04\x14\x1a\x1d\x20The\x20Rollout\x20is\x20in\x20progress.\n\n\x0f\n\
\x07\x04\x07\x04\0\x02\x01\x01\x12\x04\xf7\x01\x04\x0f\n\x0f\n\x07\x04\
\x07\x04\0\x02\x01\x02\x12\x04\xf7\x01\x12\x13\n9\n\x06\x04\x07\x04\0\
\x02\x02\x12\x04\xfa\x01\x04\x10\x1a)\x20The\x20Rollout\x20has\x20comple\
ted\x20successfully.\n\n\x0f\n\x07\x04\x07\x04\0\x02\x02\x01\x12\x04\xfa\
\x01\x04\x0b\n\x0f\n\x07\x04\x07\x04\0\x02\x02\x02\x12\x04\xfa\x01\x0e\
\x0f\n\x94\x01\n\x06\x04\x07\x04\0\x02\x03\x12\x04\xfe\x01\x04\x12\x1a\
\x83\x01\x20The\x20Rollout\x20has\x20been\x20cancelled.\x20This\x20can\
\x20happen\x20if\x20you\x20have\x20overlapping\n\x20Rollout\x20pushes,\
\x20and\x20the\x20previous\x20ones\x20will\x20be\x20cancelled.\n\n\x0f\n\
\x07\x04\x07\x04\0\x02\x03\x01\x12\x04\xfe\x01\x04\r\n\x0f\n\x07\x04\x07\
\x04\0\x02\x03\x02\x12\x04\xfe\x01\x10\x11\nQ\n\x06\x04\x07\x04\0\x02\
\x04\x12\x04\x81\x02\x04\x0f\x1aA\x20The\x20Rollout\x20has\x20failed\x20\
and\x20the\x20rollback\x20attempt\x20has\x20failed\x20too.\n\n\x0f\n\x07\
\x04\x07\x04\0\x02\x04\x01\x12\x04\x81\x02\x04\n\n\x0f\n\x07\x04\x07\x04\
\0\x02\x04\x02\x12\x04\x81\x02\r\x0e\nO\n\x06\x04\x07\x04\0\x02\x05\x12\
\x04\x84\x02\x04\x10\x1a?\x20The\x20Rollout\x20has\x20not\x20started\x20\
yet\x20and\x20is\x20pending\x20for\x20execution.\n\n\x0f\n\x07\x04\x07\
\x04\0\x02\x05\x01\x12\x04\x84\x02\x04\x0b\n\x0f\n\x07\x04\x07\x04\0\x02\
\x05\x02\x12\x04\x84\x02\x0e\x0f\n]\n\x06\x04\x07\x04\0\x02\x06\x12\x04\
\x88\x02\x04\x1b\x1aM\x20The\x20Rollout\x20has\x20failed\x20and\x20rolle\
d\x20back\x20to\x20the\x20previous\x20successful\n\x20Rollout.\n\n\x0f\n\
\x07\x04\x07\x04\0\x02\x06\x01\x12\x04\x88\x02\x04\x16\n\x0f\n\x07\x04\
\x07\x04\0\x02\x06\x02\x12\x04\x88\x02\x19\x1a\n\xfe\x03\n\x04\x04\x07\
\x02\0\x12\x04\x93\x02\x02A\x1a\xef\x03\x20Optional.\x20Unique\x20identi\
fier\x20of\x20this\x20Rollout.\x20Must\x20be\x20no\x20longer\x20than\x20\
63\x20characters\n\x20and\x20only\x20lower\x20case\x20letters,\x20digits\
,\x20'.',\x20'_'\x20and\x20'-'\x20are\x20allowed.\n\n\x20If\x20not\x20sp\
ecified\x20by\x20client,\x20the\x20server\x20will\x20generate\x20one.\
\x20The\x20generated\x20id\n\x20will\x20have\x20the\x20form\x20of\x20<da\
te><revision\x20number>,\x20where\x20\"date\"\x20is\x20the\x20create\n\
\x20date\x20in\x20ISO\x208601\x20format.\x20\x20\"revision\x20number\"\
\x20is\x20a\x20monotonically\x20increasing\n\x20positive\x20number\x20th\
at\x20is\x20reset\x20every\x20day\x20for\x20each\x20service.\n\x20An\x20\
example\x20of\x20the\x20generated\x20rollout_id\x20is\x20'2016-02-16r1'\
\n\n\r\n\x05\x04\x07\x02\0\x05\x12\x04\x93\x02\x02\x08\n\r\n\x05\x04\x07\
\x02\0\x01\x12\x04\x93\x02\t\x13\n\r\n\x05\x04\x07\x02\0\x03\x12\x04\x93\
\x02\x16\x17\n\r\n\x05\x04\x07\x02\0\x08\x12\x04\x93\x02\x18@\n\x10\n\
\x08\x04\x07\x02\0\x08\x9c\x08\0\x12\x04\x93\x02\x19?\n7\n\x04\x04\x07\
\x02\x01\x12\x04\x96\x02\x02,\x1a)\x20Creation\x20time\x20of\x20the\x20r\
ollout.\x20Readonly.\n\n\r\n\x05\x04\x07\x02\x01\x06\x12\x04\x96\x02\x02\
\x1b\n\r\n\x05\x04\x07\x02\x01\x01\x12\x04\x96\x02\x1c'\n\r\n\x05\x04\
\x07\x02\x01\x03\x12\x04\x96\x02*+\n;\n\x04\x04\x07\x02\x02\x12\x04\x99\
\x02\x02\x18\x1a-\x20The\x20user\x20who\x20created\x20the\x20Rollout.\
\x20Readonly.\n\n\r\n\x05\x04\x07\x02\x02\x05\x12\x04\x99\x02\x02\x08\n\
\r\n\x05\x04\x07\x02\x02\x01\x12\x04\x99\x02\t\x13\n\r\n\x05\x04\x07\x02\
\x02\x03\x12\x04\x99\x02\x16\x17\n\xa6\x01\n\x04\x04\x07\x02\x03\x12\x04\
\x9e\x02\x02\x1b\x1a\x97\x01\x20The\x20status\x20of\x20this\x20rollout.\
\x20Readonly.\x20In\x20case\x20of\x20a\x20failed\x20rollout,\n\x20the\
\x20system\x20will\x20automatically\x20rollback\x20to\x20the\x20current\
\x20Rollout\n\x20version.\x20Readonly.\n\n\r\n\x05\x04\x07\x02\x03\x06\
\x12\x04\x9e\x02\x02\x0f\n\r\n\x05\x04\x07\x02\x03\x01\x12\x04\x9e\x02\
\x10\x16\n\r\n\x05\x04\x07\x02\x03\x03\x12\x04\x9e\x02\x19\x1a\n\x8b\x01\
\n\x04\x04\x07\x08\0\x12\x06\xa3\x02\x02\xab\x02\x03\x1a{\x20Strategy\
\x20that\x20defines\x20which\x20versions\x20of\x20service\x20configurati\
ons\x20should\x20be\n\x20pushed\n\x20and\x20how\x20they\x20should\x20be\
\x20used\x20at\x20runtime.\n\n\r\n\x05\x04\x07\x08\0\x01\x12\x04\xa3\x02\
\x08\x10\nc\n\x04\x04\x07\x02\x04\x12\x04\xa6\x02\x048\x1aU\x20Google\
\x20Service\x20Control\x20selects\x20service\x20configurations\x20based\
\x20on\n\x20traffic\x20percentage.\n\n\r\n\x05\x04\x07\x02\x04\x06\x12\
\x04\xa6\x02\x04\x1a\n\r\n\x05\x04\x07\x02\x04\x01\x12\x04\xa6\x02\x1b3\
\n\r\n\x05\x04\x07\x02\x04\x03\x12\x04\xa6\x0267\n_\n\x04\x04\x07\x02\
\x05\x12\x04\xaa\x02\x048\x1aQ\x20The\x20strategy\x20associated\x20with\
\x20a\x20rollout\x20to\x20delete\x20a\x20`ManagedService`.\n\x20Readonly\
.\n\n\r\n\x05\x04\x07\x02\x05\x06\x12\x04\xaa\x02\x04\x19\n\r\n\x05\x04\
\x07\x02\x05\x01\x12\x04\xaa\x02\x1a1\n\r\n\x05\x04\x07\x02\x05\x03\x12\
\x04\xaa\x0247\nE\n\x04\x04\x07\x02\x06\x12\x04\xae\x02\x02\x1a\x1a7\x20\
The\x20name\x20of\x20the\x20service\x20associated\x20with\x20this\x20Rol\
lout.\n\n\r\n\x05\x04\x07\x02\x06\x05\x12\x04\xae\x02\x02\x08\n\r\n\x05\
\x04\x07\x02\x06\x01\x12\x04\xae\x02\t\x15\n\r\n\x05\x04\x07\x02\x06\x03\
\x12\x04\xae\x02\x18\x19b\x06proto3\
";
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
fn parse_descriptor_proto() -> ::protobuf::descriptor::FileDescriptorProto {
::protobuf::Message::parse_from_bytes(file_descriptor_proto_data).unwrap()
}
pub fn file_descriptor_proto() -> &'static ::protobuf::descriptor::FileDescriptorProto {
file_descriptor_proto_lazy.get(|| {
parse_descriptor_proto()
})
}
| 39.527402 | 174 | 0.624464 |
de8381cf249a9f6670c0a2744cfe891c391173d1 | 21,743 | // Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::{config::StoredConfiguration, Block, BlockProof, Blockchain, TransactionResult};
use crypto::{CryptoHash, Hash, PublicKey};
use helpers::{Height, Round};
use messages::{Connect, Message, Precommit, RawTransaction, Signed};
use storage::{
Entry, Fork, KeySetIndex, ListIndex, MapIndex, MapProof, ProofListIndex, ProofMapIndex,
Snapshot,
};
/// Defines `&str` constants with given name and value.
macro_rules! define_names {
(
$(
$name:ident => $value:expr;
)+
) => (
$(const $name: &str = concat!("core.", $value);)*
)
}
define_names!(
TRANSACTIONS => "transactions";
TRANSACTION_RESULTS => "transaction_results";
TRANSACTIONS_POOL => "transactions_pool";
TRANSACTIONS_POOL_LEN => "transactions_pool_len";
TRANSACTIONS_LOCATIONS => "transactions_locations";
BLOCKS => "blocks";
BLOCK_HASHES_BY_HEIGHT => "block_hashes_by_height";
BLOCK_TRANSACTIONS => "block_transactions";
PRECOMMITS => "precommits";
CONFIGS => "configs";
CONFIGS_ACTUAL_FROM => "configs_actual_from";
STATE_HASH_AGGREGATOR => "state_hash_aggregator";
PEERS_CACHE => "peers_cache";
CONSENSUS_MESSAGES_CACHE => "consensus_messages_cache";
CONSENSUS_ROUND => "consensus_round";
);
encoding_struct! {
/// Configuration index.
struct ConfigReference {
/// Height since which this configuration becomes actual.
actual_from: Height,
/// Hash of the configuration contents that serialized as raw bytes vec.
cfg_hash: &Hash,
}
}
encoding_struct! {
/// Transaction location in a block.
/// The given entity defines the block where the transaction was
/// included and the position of this transaction in that block.
struct TxLocation {
/// Height of the block where the transaction was included.
block_height: Height,
/// Zero-based position of this transaction in the block.
position_in_block: u64,
}
}
/// Information schema for indices maintained by the Exonum core logic.
///
/// Indices defined by this schema are present in the blockchain regardless of
/// the deployed services and store general-purpose information, such as
/// committed transactions.
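///
/// A minimal usage sketch (illustrative only, not from the original sources;
/// `snapshot` is assumed to come from a `Database` implementation such as
/// `MemoryDB`):
///
/// ```ignore
/// // Any value implementing `AsRef<dyn Snapshot>` can back a read-only schema.
/// let schema = Schema::new(snapshot);
/// // No transactions are pooled in a fresh database.
/// assert_eq!(schema.transactions_pool_len(), 0);
/// ```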
#[derive(Debug)]
pub struct Schema<T> {
view: T,
}
impl<T> Schema<T>
where
T: AsRef<dyn Snapshot>,
{
/// Constructs information schema for the given `snapshot`.
pub fn new(snapshot: T) -> Self {
Self { view: snapshot }
}
/// Returns a table that represents a map with a key-value pair of a
/// transaction hash and raw transaction message.
pub fn transactions(&self) -> MapIndex<&T, Hash, Signed<RawTransaction>> {
MapIndex::new(TRANSACTIONS, &self.view)
}
/// Returns a table that represents a map with a key-value pair of a transaction
/// hash and execution result.
///
/// This method can be used to retrieve a proof that a certain transaction
/// result is present in the blockchain.
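    ///
    /// Illustrative sketch (not from the original sources; `schema` and `tx_hash`
    /// are assumed to be available):
    ///
    /// ```ignore
    /// // Merkle proof of the execution result for a committed transaction.
    /// let proof = schema.transaction_results().get_proof(tx_hash);
    /// ```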
pub fn transaction_results(&self) -> ProofMapIndex<&T, Hash, TransactionResult> {
ProofMapIndex::new(TRANSACTION_RESULTS, &self.view)
}
/// Returns a table that represents a set of uncommitted transactions hashes.
pub fn transactions_pool(&self) -> KeySetIndex<&T, Hash> {
KeySetIndex::new(TRANSACTIONS_POOL, &self.view)
}
/// Returns an entry that represents count of uncommitted transactions.
pub(crate) fn transactions_pool_len_index(&self) -> Entry<&T, u64> {
Entry::new(TRANSACTIONS_POOL_LEN, &self.view)
}
/// Returns the number of transactions in the pool.
pub fn transactions_pool_len(&self) -> u64 {
let pool = self.transactions_pool_len_index();
pool.get().unwrap_or(0)
}
/// Returns a table that keeps the block height and transaction position inside the block for every
/// transaction hash.
pub fn transactions_locations(&self) -> MapIndex<&T, Hash, TxLocation> {
MapIndex::new(TRANSACTIONS_LOCATIONS, &self.view)
}
/// Returns a table that stores a block object for every block height.
pub fn blocks(&self) -> MapIndex<&T, Hash, Block> {
MapIndex::new(BLOCKS, &self.view)
}
/// Returns a table that keeps block hashes for corresponding block heights.
pub fn block_hashes_by_height(&self) -> ListIndex<&T, Hash> {
ListIndex::new(BLOCK_HASHES_BY_HEIGHT, &self.view)
}
/// Returns a table that keeps a list of transactions for each block.
pub fn block_transactions(&self, height: Height) -> ProofListIndex<&T, Hash> {
let height: u64 = height.into();
ProofListIndex::new_in_family(BLOCK_TRANSACTIONS, &height, &self.view)
}
/// Returns a table that keeps a list of precommits for the block with the given hash.
pub fn precommits(&self, hash: &Hash) -> ListIndex<&T, Signed<Precommit>> {
ListIndex::new_in_family(PRECOMMITS, hash, &self.view)
}
/// Returns a table that represents a map with a key-value pair of a
/// configuration hash and contents.
pub fn configs(&self) -> ProofMapIndex<&T, Hash, StoredConfiguration> {
// configs patricia merkle tree <block height> json
ProofMapIndex::new(CONFIGS, &self.view)
}
/// Returns an auxiliary table that keeps hash references to configurations in
/// the increasing order of their `actual_from` height.
pub fn configs_actual_from(&self) -> ListIndex<&T, ConfigReference> {
ListIndex::new(CONFIGS_ACTUAL_FROM, &self.view)
}
/// Returns the accessory `ProofMapIndex` for calculating
/// patches in the DBView layer.
///
/// The table calculates the "aggregation" of root hashes of individual
/// service tables, in effect summing the state of various entities,
/// scattered across distinct services and their tables. Sum is performed by
/// means of computing the root hash of this table.
///
/// - Table **key** is 32 bytes of normalized coordinates of a service
/// table, as returned by the `service_table_unique_key` helper function.
/// - Table **value** is the root hash of a service table, which contributes
/// to the `state_hash` of the resulting block.
///
/// Core tables participate in the resulting state_hash with `CORE_SERVICE`
/// service_id. Their vector is returned by the `core_state_hash` method.
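    ///
    /// Illustrative sketch (assumes a `schema` over a snapshot and known service
    /// table coordinates `service_id`/`table_idx`):
    ///
    /// ```ignore
    /// let key = Blockchain::service_table_unique_key(service_id, table_idx);
    /// // Aggregated root hash recorded for that service table, if present.
    /// let root = schema.state_hash_aggregator().get(&key);
    /// ```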
pub fn state_hash_aggregator(&self) -> ProofMapIndex<&T, Hash, Hash> {
ProofMapIndex::new(STATE_HASH_AGGREGATOR, &self.view)
}
/// Returns peers that have to be recovered in case of process restart
/// after abnormal termination.
pub(crate) fn peers_cache(&self) -> MapIndex<&T, PublicKey, Signed<Connect>> {
MapIndex::new(PEERS_CACHE, &self.view)
}
/// Returns consensus messages that have to be recovered in case of process restart
/// after abnormal termination.
pub(crate) fn consensus_messages_cache(&self) -> ListIndex<&T, Message> {
ListIndex::new(CONSENSUS_MESSAGES_CACHE, &self.view)
}
/// Returns the saved value of the consensus round. Returns the first round
/// if it has not been saved.
pub(crate) fn consensus_round(&self) -> Round {
Entry::new(CONSENSUS_ROUND, &self.view)
.get()
.unwrap_or_else(Round::first)
}
/// Returns the block hash for the given height.
pub fn block_hash_by_height(&self, height: Height) -> Option<Hash> {
self.block_hashes_by_height().get(height.into())
}
/// Returns the block for the given height with the proof of its inclusion.
pub fn block_and_precommits(&self, height: Height) -> Option<BlockProof> {
let block_hash = match self.block_hash_by_height(height) {
None => return None,
Some(block_hash) => block_hash,
};
let block = self.blocks().get(&block_hash).unwrap();
let precommits_table = self.precommits(&block_hash);
let precommits = precommits_table.iter().collect();
let res = BlockProof { block, precommits };
Some(res)
}
/// Returns the latest committed block.
///
/// # Panics
///
/// Panics if the "genesis block" was not created.
pub fn last_block(&self) -> Block {
let hash = self.block_hashes_by_height()
.last()
.expect("An attempt to get the `last_block` during creating the genesis block.");
self.blocks().get(&hash).unwrap()
}
/// Returns the height of the latest committed block.
///
/// # Panics
///
/// Panics if the "genesis block" was not created.
pub fn height(&self) -> Height {
let len = self.block_hashes_by_height().len();
assert!(
len > 0,
"An attempt to get the actual `height` during creating the genesis block."
);
Height(len - 1)
}
/// Returns the configuration for the latest height of the blockchain.
///
/// # Panics
///
/// Panics if the "genesis block" was not created.
pub fn actual_configuration(&self) -> StoredConfiguration {
let next_height = self.next_height();
let res = self.configuration_by_height(next_height);
trace!("Retrieved actual_config: {:?}", res);
res
}
/// Returns the nearest following configuration which will be applied after
/// the current one, if it exists.
pub fn following_configuration(&self) -> Option<StoredConfiguration> {
let next_height = self.next_height();
let idx = self.find_configurations_index_by_height(next_height);
match self.configs_actual_from().get(idx + 1) {
Some(cfg_ref) => {
let cfg_hash = cfg_ref.cfg_hash();
let cfg = self.configuration_by_hash(cfg_hash).unwrap_or_else(|| {
panic!("Config with hash {:?} is absent in configs table", cfg_hash)
});
Some(cfg)
}
None => None,
}
}
/// Returns the previous configuration if it exists.
pub fn previous_configuration(&self) -> Option<StoredConfiguration> {
let next_height = self.next_height();
let idx = self.find_configurations_index_by_height(next_height);
if idx > 0 {
let cfg_ref = self.configs_actual_from()
.get(idx - 1)
.unwrap_or_else(|| panic!("Configuration at index {} not found", idx));
let cfg_hash = cfg_ref.cfg_hash();
let cfg = self.configuration_by_hash(cfg_hash).unwrap_or_else(|| {
panic!("Config with hash {:?} is absent in configs table", cfg_hash)
});
Some(cfg)
} else {
None
}
}
/// Returns the configuration that is actual for the given height.
pub fn configuration_by_height(&self, height: Height) -> StoredConfiguration {
let idx = self.find_configurations_index_by_height(height);
let cfg_ref = self.configs_actual_from()
.get(idx)
.unwrap_or_else(|| panic!("Configuration at index {} not found", idx));
let cfg_hash = cfg_ref.cfg_hash();
self.configuration_by_hash(cfg_hash)
.unwrap_or_else(|| panic!("Config with hash {:?} is absent in configs table", cfg_hash))
}
/// Returns the configuration for the given configuration hash.
pub fn configuration_by_hash(&self, hash: &Hash) -> Option<StoredConfiguration> {
self.configs().get(hash)
}
/// Returns the `state_hash` table for core tables.
pub fn core_state_hash(&self) -> Vec<Hash> {
vec![
self.configs().merkle_root(),
self.transaction_results().merkle_root(),
]
}
/// Constructs a proof of inclusion of a root hash of a specific service
/// table into the block `state_hash`.
///
/// The `service_id` and `table_idx` are automatically combined to form the key of the
/// required service table; this key serves as a search query for the method.
/// The service table key is uniquely identified by a `(u16, u16)` tuple
/// of table coordinates.
///
/// If found, the method returns the root hash as a value of the proof leaf
/// corresponding to the required service table key. Otherwise, a partial
/// path to the service table key is returned, which proves its exclusion.
///
/// The resulting proof can be used as a component of proof of state of an
/// entity stored in the blockchain state at a specific height. The proof is
/// tied to the `state_hash` of the corresponding `Block`. State of some meta tables
/// of core and services isn't tracked.
///
/// # Arguments
///
/// * `service_id` - `service_id` as returned by instance of type of
/// `Service` trait.
/// * `table_idx` - index of the service table in `Vec`, returned by the
/// `state_hash` method of an instance of a type of the `Service` trait.
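    ///
    /// # Example (sketch)
    ///
    /// Hypothetical usage; `schema` is built over a snapshot and `SERVICE_ID` is a
    /// placeholder for a real service identifier:
    ///
    /// ```ignore
    /// let proof = schema.get_proof_to_service_table(SERVICE_ID, 0);
    /// // `proof` is a `MapProof<Hash, Hash>` that is checked against the
    /// // `state_hash` of the block at the same height.
    /// ```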
pub fn get_proof_to_service_table(
&self,
service_id: u16,
table_idx: usize,
) -> MapProof<Hash, Hash> {
let key = Blockchain::service_table_unique_key(service_id, table_idx);
let sum_table = self.state_hash_aggregator();
sum_table.get_proof(key)
}
fn find_configurations_index_by_height(&self, height: Height) -> u64 {
let actual_from = self.configs_actual_from();
for i in (0..actual_from.len()).rev() {
if actual_from.get(i).unwrap().actual_from() <= height {
return i as u64;
}
}
panic!(
"Couldn't not find any config for height {}, \
that means that genesis block was created incorrectly.",
height
)
}
/// Returns the next height of the blockchain.
/// Its value is equal to "height of the latest committed block" + 1.
fn next_height(&self) -> Height {
Height(self.block_hashes_by_height().len())
}
}
impl<'a> Schema<&'a mut Fork> {
/// Mutable reference to the [`transactions`][1] index.
///
/// [1]: struct.Schema.html#method.transactions
pub(crate) fn transactions_mut(&mut self) -> MapIndex<&mut Fork, Hash, Signed<RawTransaction>> {
MapIndex::new(TRANSACTIONS, self.view)
}
/// Mutable reference to the [`transaction_results`][1] index.
///
/// [1]: struct.Schema.html#method.transaction_results
pub(crate) fn transaction_results_mut(
&mut self,
) -> ProofMapIndex<&mut Fork, Hash, TransactionResult> {
ProofMapIndex::new(TRANSACTION_RESULTS, self.view)
}
/// Mutable reference to the [`transactions_pool`][1] index.
///
/// [1]: struct.Schema.html#method.transactions_pool
fn transactions_pool_mut(&mut self) -> KeySetIndex<&mut Fork, Hash> {
KeySetIndex::new(TRANSACTIONS_POOL, self.view)
}
/// Mutable reference to the [`transactions_pool_len_index`][1] index.
///
/// [1]: struct.Schema.html#method.transactions_pool_len_index
pub(crate) fn transactions_pool_len_index_mut(&mut self) -> Entry<&mut Fork, u64> {
Entry::new(TRANSACTIONS_POOL_LEN, self.view)
}
/// Mutable reference to the [`transactions_locations`][1] index.
///
/// [1]: struct.Schema.html#method.transactions_locations
pub(crate) fn transactions_locations_mut(&mut self) -> MapIndex<&mut Fork, Hash, TxLocation> {
MapIndex::new(TRANSACTIONS_LOCATIONS, self.view)
}
    /// Mutable reference to the [`blocks`][1] index.
///
/// [1]: struct.Schema.html#method.blocks
pub(crate) fn blocks_mut(&mut self) -> MapIndex<&mut Fork, Hash, Block> {
MapIndex::new(BLOCKS, self.view)
}
    /// Mutable reference to the [`block_hashes_by_height`][1] index.
    ///
    /// [1]: struct.Schema.html#method.block_hashes_by_height
pub(crate) fn block_hashes_by_height_mut(&mut self) -> ListIndex<&mut Fork, Hash> {
ListIndex::new(BLOCK_HASHES_BY_HEIGHT, self.view)
}
/// Mutable reference to the [`block_transactions`][1] index.
///
/// [1]: struct.Schema.html#method.block_transactions
pub(crate) fn block_transactions_mut(
&mut self,
height: Height,
) -> ProofListIndex<&mut Fork, Hash> {
let height: u64 = height.into();
ProofListIndex::new_in_family(BLOCK_TRANSACTIONS, &height, self.view)
}
/// Mutable reference to the [`precommits`][1] index.
///
/// [1]: struct.Schema.html#method.precommits
pub(crate) fn precommits_mut(
&mut self,
hash: &Hash,
) -> ListIndex<&mut Fork, Signed<Precommit>> {
ListIndex::new_in_family(PRECOMMITS, hash, self.view)
}
/// Mutable reference to the [`configs`][1] index.
///
/// [1]: struct.Schema.html#method.configs
pub(crate) fn configs_mut(&mut self) -> ProofMapIndex<&mut Fork, Hash, StoredConfiguration> {
ProofMapIndex::new(CONFIGS, self.view)
}
/// Mutable reference to the [`configs_actual_from`][1] index.
///
/// [1]: struct.Schema.html#method.configs_actual_from
pub(crate) fn configs_actual_from_mut(&mut self) -> ListIndex<&mut Fork, ConfigReference> {
ListIndex::new(CONFIGS_ACTUAL_FROM, self.view)
}
/// Mutable reference to the [`state_hash_aggregator`][1] index.
///
/// [1]: struct.Schema.html#method.state_hash_aggregator
pub(crate) fn state_hash_aggregator_mut(&mut self) -> ProofMapIndex<&mut Fork, Hash, Hash> {
ProofMapIndex::new(STATE_HASH_AGGREGATOR, self.view)
}
/// Mutable reference to the [`peers_cache`][1] index.
///
/// [1]: struct.Schema.html#method.peers_cache
pub(crate) fn peers_cache_mut(&mut self) -> MapIndex<&mut Fork, PublicKey, Signed<Connect>> {
MapIndex::new(PEERS_CACHE, self.view)
}
/// Mutable reference to the [`consensus_messages_cache`][1] index.
///
    /// [1]: struct.Schema.html#method.consensus_messages_cache
pub(crate) fn consensus_messages_cache_mut(&mut self) -> ListIndex<&mut Fork, Message> {
ListIndex::new(CONSENSUS_MESSAGES_CACHE, self.view)
}
/// Saves the given consensus round value into the storage.
pub(crate) fn set_consensus_round(&mut self, round: Round) {
let mut entry: Entry<&mut Fork, _> = Entry::new(CONSENSUS_ROUND, self.view);
entry.set(round);
}
/// Adds a new configuration to the blockchain, which will become actual at
/// the `actual_from` height in `config_data`.
pub fn commit_configuration(&mut self, config_data: StoredConfiguration) {
let actual_from = config_data.actual_from;
if let Some(last_cfg) = self.configs_actual_from().last() {
if last_cfg.cfg_hash() != &config_data.previous_cfg_hash {
// TODO: Replace panic with errors. (ECR-123)
panic!(
"Attempting to commit configuration with incorrect previous hash: {:?}, \
expected: {:?}",
config_data.previous_cfg_hash,
last_cfg.cfg_hash()
);
}
if actual_from <= last_cfg.actual_from() {
panic!(
"Attempting to commit configuration with actual_from {} less than the last \
committed the last committed actual_from {}",
actual_from,
last_cfg.actual_from()
);
}
}
info!(
"Scheduled the following configuration for acceptance: {:?}",
&config_data
);
let cfg_hash = config_data.hash();
self.configs_mut().put(&cfg_hash, config_data);
let cfg_ref = ConfigReference::new(actual_from, &cfg_hash);
self.configs_actual_from_mut().push(cfg_ref);
}
    /// Adds a transaction into the persistent pool.
    /// This method increments `transactions_pool_len_index`;
    /// be sure to decrement it when the transaction is committed.
#[doc(hidden)]
pub fn add_transaction_into_pool(&mut self, tx: Signed<RawTransaction>) {
self.transactions_pool_mut().insert(tx.hash());
let x = self.transactions_pool_len_index().get().unwrap_or(0);
self.transactions_pool_len_index_mut().set(x + 1);
self.transactions_mut().put(&tx.hash(), tx);
}
/// Changes the transaction status from `in_pool`, to `committed`.
pub(crate) fn commit_transaction(&mut self, hash: &Hash) {
self.transactions_pool_mut().remove(hash);
}
/// Removes transaction from the persistent pool.
#[cfg(test)]
pub(crate) fn reject_transaction(&mut self, hash: &Hash) -> Result<(), ()> {
let contains = self.transactions_pool_mut().contains(hash);
self.transactions_pool_mut().remove(hash);
self.transactions_mut().remove(hash);
if contains {
let x = self.transactions_pool_len_index().get().unwrap();
self.transactions_pool_len_index_mut().set(x - 1);
Ok(())
} else {
Err(())
}
}
}
| 39.106115 | 103 | 0.641724 |
2956811c33851b7813c3993156ccc9633cfa478d | 19,501 | // Copyright (C) 2016-2017 Sebastian Dröge <[email protected]>
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::borrow::{Borrow, BorrowMut, ToOwned};
use std::ffi::CStr;
use std::fmt;
use std::marker::PhantomData;
use std::mem;
use std::ops::{Deref, DerefMut};
use std::ptr;
use std::str;
use Fraction;
use ffi;
use glib;
use glib::translate::{
from_glib, from_glib_full, from_glib_none, FromGlibPtrFull, FromGlibPtrNone, GlibPtrDefault,
Stash, StashMut, ToGlib, ToGlibPtr, ToGlibPtrMut,
};
use glib::value::{FromValueOptional, SendValue, ToSendValue};
use glib_ffi::gpointer;
use gobject_ffi;
pub struct Structure(ptr::NonNull<StructureRef>, PhantomData<StructureRef>);
unsafe impl Send for Structure {}
unsafe impl Sync for Structure {}
impl Structure {
pub fn builder(name: &str) -> Builder {
assert_initialized_main_thread!();
Builder::new(name)
}
pub fn new_empty(name: &str) -> Structure {
assert_initialized_main_thread!();
unsafe {
let ptr = ffi::gst_structure_new_empty(name.to_glib_none().0) as *mut StructureRef;
assert!(!ptr.is_null());
Structure(ptr::NonNull::new_unchecked(ptr), PhantomData)
}
}
pub fn new(name: &str, values: &[(&str, &ToSendValue)]) -> Structure {
assert_initialized_main_thread!();
let mut structure = Structure::new_empty(name);
for &(f, v) in values {
structure.set_value(f, v.to_send_value());
}
structure
}
pub fn from_string(s: &str) -> Option<Structure> {
assert_initialized_main_thread!();
unsafe {
let structure = ffi::gst_structure_from_string(s.to_glib_none().0, ptr::null_mut());
if structure.is_null() {
None
} else {
Some(Structure(
ptr::NonNull::new_unchecked(structure as *mut StructureRef),
PhantomData,
))
}
}
}
pub unsafe fn into_ptr(self) -> *mut ffi::GstStructure {
let ptr = self.0.as_ptr() as *mut StructureRef as *mut ffi::GstStructure;
mem::forget(self);
ptr
}
}
impl Deref for Structure {
type Target = StructureRef;
fn deref(&self) -> &StructureRef {
unsafe { self.0.as_ref() }
}
}
impl DerefMut for Structure {
fn deref_mut(&mut self) -> &mut StructureRef {
unsafe { self.0.as_mut() }
}
}
impl AsRef<StructureRef> for Structure {
fn as_ref(&self) -> &StructureRef {
self.deref()
}
}
impl AsMut<StructureRef> for Structure {
fn as_mut(&mut self) -> &mut StructureRef {
self.deref_mut()
}
}
impl Clone for Structure {
fn clone(&self) -> Self {
unsafe {
let ptr = ffi::gst_structure_copy(&self.0.as_ref().0) as *mut StructureRef;
assert!(!ptr.is_null());
Structure(ptr::NonNull::new_unchecked(ptr), PhantomData)
}
}
}
impl Drop for Structure {
fn drop(&mut self) {
unsafe { ffi::gst_structure_free(&mut self.0.as_mut().0) }
}
}
impl fmt::Debug for Structure {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Structure").field(&self.to_string()).finish()
}
}
impl fmt::Display for Structure {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Make sure not to call ToString::to_string() here, which we get through
        // the blanket ToString impl for Display types. We need StructureRef::to_string().
f.write_str(&StructureRef::to_string(self.as_ref()))
}
}
impl PartialEq for Structure {
fn eq(&self, other: &Structure) -> bool {
self.as_ref().eq(other)
}
}
impl PartialEq<StructureRef> for Structure {
fn eq(&self, other: &StructureRef) -> bool {
self.as_ref().eq(other)
}
}
impl Eq for Structure {}
impl str::FromStr for Structure {
type Err = ();
fn from_str(s: &str) -> Result<Self, ()> {
skip_assert_initialized!();
Structure::from_string(s).ok_or(())
}
}
impl Borrow<StructureRef> for Structure {
fn borrow(&self) -> &StructureRef {
unsafe { self.0.as_ref() }
}
}
impl BorrowMut<StructureRef> for Structure {
fn borrow_mut(&mut self) -> &mut StructureRef {
unsafe { self.0.as_mut() }
}
}
impl ToOwned for StructureRef {
type Owned = Structure;
fn to_owned(&self) -> Structure {
unsafe {
let ptr = ffi::gst_structure_copy(&self.0) as *mut StructureRef;
assert!(!ptr.is_null());
Structure(ptr::NonNull::new_unchecked(ptr), PhantomData)
}
}
}
impl glib::types::StaticType for Structure {
fn static_type() -> glib::types::Type {
unsafe { from_glib(ffi::gst_structure_get_type()) }
}
}
impl<'a> ToGlibPtr<'a, *const ffi::GstStructure> for Structure {
type Storage = &'a Self;
fn to_glib_none(&'a self) -> Stash<'a, *const ffi::GstStructure, Self> {
unsafe { Stash(&self.0.as_ref().0, self) }
}
fn to_glib_full(&self) -> *const ffi::GstStructure {
unsafe { ffi::gst_structure_copy(&self.0.as_ref().0) }
}
}
impl<'a> ToGlibPtr<'a, *mut ffi::GstStructure> for Structure {
type Storage = &'a Self;
fn to_glib_none(&'a self) -> Stash<'a, *mut ffi::GstStructure, Self> {
unsafe { Stash(&self.0.as_ref().0 as *const _ as *mut _, self) }
}
fn to_glib_full(&self) -> *mut ffi::GstStructure {
unsafe { ffi::gst_structure_copy(&self.0.as_ref().0) }
}
}
impl<'a> ToGlibPtrMut<'a, *mut ffi::GstStructure> for Structure {
type Storage = &'a mut Self;
fn to_glib_none_mut(&'a mut self) -> StashMut<*mut ffi::GstStructure, Self> {
unsafe { StashMut(&mut self.0.as_mut().0, self) }
}
}
impl FromGlibPtrNone<*const ffi::GstStructure> for Structure {
unsafe fn from_glib_none(ptr: *const ffi::GstStructure) -> Self {
assert!(!ptr.is_null());
let ptr = ffi::gst_structure_copy(ptr);
assert!(!ptr.is_null());
Structure(
ptr::NonNull::new_unchecked(ptr as *mut StructureRef),
PhantomData,
)
}
}
impl FromGlibPtrNone<*mut ffi::GstStructure> for Structure {
unsafe fn from_glib_none(ptr: *mut ffi::GstStructure) -> Self {
assert!(!ptr.is_null());
let ptr = ffi::gst_structure_copy(ptr);
assert!(!ptr.is_null());
Structure(
ptr::NonNull::new_unchecked(ptr as *mut StructureRef),
PhantomData,
)
}
}
impl FromGlibPtrFull<*const ffi::GstStructure> for Structure {
unsafe fn from_glib_full(ptr: *const ffi::GstStructure) -> Self {
assert!(!ptr.is_null());
Structure(
ptr::NonNull::new_unchecked(ptr as *mut StructureRef),
PhantomData,
)
}
}
impl FromGlibPtrFull<*mut ffi::GstStructure> for Structure {
unsafe fn from_glib_full(ptr: *mut ffi::GstStructure) -> Self {
assert!(!ptr.is_null());
Structure(
ptr::NonNull::new_unchecked(ptr as *mut StructureRef),
PhantomData,
)
}
}
impl<'a> glib::value::FromValueOptional<'a> for Structure {
unsafe fn from_value_optional(v: &'a glib::Value) -> Option<Self> {
let ptr = gobject_ffi::g_value_get_boxed(v.to_glib_none().0);
assert!(!ptr.is_null());
from_glib_none(ptr as *const ffi::GstStructure)
}
}
impl glib::value::SetValue for Structure {
unsafe fn set_value(v: &mut glib::Value, s: &Self) {
gobject_ffi::g_value_set_boxed(v.to_glib_none_mut().0, s.0.as_ptr() as gpointer);
}
}
impl glib::value::SetValueOptional for Structure {
unsafe fn set_value_optional(v: &mut glib::Value, s: Option<&Self>) {
if let Some(s) = s {
gobject_ffi::g_value_set_boxed(v.to_glib_none_mut().0, s.as_ptr() as gpointer);
} else {
gobject_ffi::g_value_set_boxed(v.to_glib_none_mut().0, ptr::null_mut());
}
}
}
impl GlibPtrDefault for Structure {
type GlibType = *mut ffi::GstStructure;
}
#[repr(C)]
pub struct StructureRef(ffi::GstStructure);
unsafe impl Send for StructureRef {}
unsafe impl Sync for StructureRef {}
impl StructureRef {
pub unsafe fn from_glib_borrow<'a>(ptr: *const ffi::GstStructure) -> &'a StructureRef {
assert!(!ptr.is_null());
&*(ptr as *mut StructureRef)
}
pub unsafe fn from_glib_borrow_mut<'a>(ptr: *mut ffi::GstStructure) -> &'a mut StructureRef {
assert!(!ptr.is_null());
&mut *(ptr as *mut StructureRef)
}
pub unsafe fn as_ptr(&self) -> *const ffi::GstStructure {
self as *const Self as *const ffi::GstStructure
}
pub unsafe fn as_mut_ptr(&self) -> *mut ffi::GstStructure {
self as *const Self as *mut ffi::GstStructure
}
pub fn to_string(&self) -> String {
unsafe { from_glib_full(ffi::gst_structure_to_string(&self.0)) }
}
pub fn get<'a, T: FromValueOptional<'a>>(&'a self, name: &str) -> Option<T> {
self.get_value(name).and_then(|v| v.get())
}
pub fn get_value<'a>(&'a self, name: &str) -> Option<&SendValue> {
unsafe {
let value = ffi::gst_structure_get_value(&self.0, name.to_glib_none().0);
if value.is_null() {
return None;
}
Some(&*(value as *const SendValue))
}
}
pub fn set<T: ToSendValue>(&mut self, name: &str, value: &T) {
let value = value.to_send_value();
self.set_value(name, value);
}
pub fn set_value(&mut self, name: &str, mut value: SendValue) {
unsafe {
ffi::gst_structure_take_value(
&mut self.0,
name.to_glib_none().0,
value.to_glib_none_mut().0,
);
mem::forget(value);
}
}
pub fn get_name(&self) -> &str {
unsafe {
CStr::from_ptr(ffi::gst_structure_get_name(&self.0))
.to_str()
.unwrap()
}
}
pub fn set_name(&mut self, name: &str) {
unsafe { ffi::gst_structure_set_name(&mut self.0, name.to_glib_none().0) }
}
pub fn has_field(&self, field: &str) -> bool {
unsafe {
from_glib(ffi::gst_structure_has_field(
&self.0,
field.to_glib_none().0,
))
}
}
pub fn has_field_with_type(&self, field: &str, type_: glib::Type) -> bool {
unsafe {
from_glib(ffi::gst_structure_has_field_typed(
&self.0,
field.to_glib_none().0,
type_.to_glib(),
))
}
}
pub fn remove_field(&mut self, field: &str) {
unsafe {
ffi::gst_structure_remove_field(&mut self.0, field.to_glib_none().0);
}
}
pub fn remove_fields(&mut self, fields: &[&str]) {
for f in fields {
self.remove_field(f)
}
}
pub fn remove_all_fields(&mut self) {
unsafe {
ffi::gst_structure_remove_all_fields(&mut self.0);
}
}
pub fn fields(&self) -> FieldIterator {
FieldIterator::new(self)
}
pub fn iter(&self) -> Iter {
Iter::new(self)
}
pub fn get_nth_field_name(&self, idx: u32) -> Option<&str> {
unsafe {
let field_name = ffi::gst_structure_nth_field_name(&self.0, idx);
if field_name.is_null() {
return None;
}
Some(CStr::from_ptr(field_name).to_str().unwrap())
}
}
pub fn n_fields(&self) -> u32 {
unsafe { ffi::gst_structure_n_fields(&self.0) as u32 }
}
pub fn can_intersect(&self, other: &StructureRef) -> bool {
unsafe { from_glib(ffi::gst_structure_can_intersect(&self.0, &other.0)) }
}
pub fn intersect(&self, other: &StructureRef) -> Option<Structure> {
unsafe { from_glib_full(ffi::gst_structure_intersect(&self.0, &other.0)) }
}
pub fn is_subset(&self, superset: &StructureRef) -> bool {
unsafe { from_glib(ffi::gst_structure_is_subset(&self.0, &superset.0)) }
}
pub fn fixate(&mut self) {
unsafe { ffi::gst_structure_fixate(&mut self.0) }
}
pub fn fixate_field(&mut self, name: &str) -> bool {
unsafe {
from_glib(ffi::gst_structure_fixate_field(
&mut self.0,
name.to_glib_none().0,
))
}
}
pub fn fixate_field_bool(&mut self, name: &str, target: bool) -> bool {
unsafe {
from_glib(ffi::gst_structure_fixate_field_boolean(
&mut self.0,
name.to_glib_none().0,
target.to_glib(),
))
}
}
pub fn fixate_field_str(&mut self, name: &str, target: &str) -> bool {
unsafe {
from_glib(ffi::gst_structure_fixate_field_string(
&mut self.0,
name.to_glib_none().0,
target.to_glib_none().0,
))
}
}
pub fn fixate_field_nearest_double(&mut self, name: &str, target: f64) -> bool {
unsafe {
from_glib(ffi::gst_structure_fixate_field_nearest_double(
&mut self.0,
name.to_glib_none().0,
target,
))
}
}
pub fn fixate_field_nearest_fraction<T: Into<Fraction>>(
&mut self,
name: &str,
target: T,
) -> bool {
skip_assert_initialized!();
let target = target.into();
unsafe {
from_glib(ffi::gst_structure_fixate_field_nearest_fraction(
&mut self.0,
name.to_glib_none().0,
*target.numer(),
*target.denom(),
))
}
}
pub fn fixate_field_nearest_int(&mut self, name: &str, target: i32) -> bool {
unsafe {
from_glib(ffi::gst_structure_fixate_field_nearest_int(
&mut self.0,
name.to_glib_none().0,
target,
))
}
}
}
impl fmt::Display for StructureRef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.to_string())
}
}
impl fmt::Debug for StructureRef {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.to_string())
}
}
impl PartialEq for StructureRef {
fn eq(&self, other: &StructureRef) -> bool {
unsafe { from_glib(ffi::gst_structure_is_equal(&self.0, &other.0)) }
}
}
impl Eq for StructureRef {}
pub struct FieldIterator<'a> {
structure: &'a StructureRef,
idx: u32,
n_fields: u32,
}
impl<'a> FieldIterator<'a> {
fn new(structure: &'a StructureRef) -> FieldIterator<'a> {
skip_assert_initialized!();
let n_fields = structure.n_fields();
FieldIterator {
structure,
idx: 0,
n_fields,
}
}
}
impl<'a> Iterator for FieldIterator<'a> {
type Item = &'a str;
fn next(&mut self) -> Option<&'a str> {
if self.idx >= self.n_fields {
return None;
}
if let Some(field_name) = self.structure.get_nth_field_name(self.idx) {
self.idx += 1;
Some(field_name)
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
if self.idx == self.n_fields {
return (0, Some(0));
}
let remaining = (self.n_fields - self.idx) as usize;
(remaining, Some(remaining))
}
}
impl<'a> DoubleEndedIterator for FieldIterator<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
if self.idx == self.n_fields {
return None;
}
self.n_fields -= 1;
if let Some(field_name) = self.structure.get_nth_field_name(self.n_fields) {
Some(field_name)
} else {
None
}
}
}
impl<'a> ExactSizeIterator for FieldIterator<'a> {}
pub struct Iter<'a> {
iter: FieldIterator<'a>,
}
impl<'a> Iter<'a> {
fn new(structure: &'a StructureRef) -> Iter<'a> {
skip_assert_initialized!();
Iter {
iter: FieldIterator::new(structure),
}
}
}
impl<'a> Iterator for Iter<'a> {
type Item = (&'a str, &'a SendValue);
fn next(&mut self) -> Option<(&'a str, &'a SendValue)> {
if let Some(f) = self.iter.next() {
let v = self.iter.structure.get_value(f);
Some((f, v.unwrap()))
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
impl<'a> DoubleEndedIterator for Iter<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
if let Some(f) = self.iter.next_back() {
let v = self.iter.structure.get_value(f);
Some((f, v.unwrap()))
} else {
None
}
}
}
impl<'a> ExactSizeIterator for Iter<'a> {}
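/// Builder for `Structure` values, obtained through `Structure::builder`.
///
/// Illustrative sketch, mirroring the `test_builder` test below:
///
/// ```ignore
/// let s = Structure::builder("test")
///     .field("f1", &"abc")
///     .field("f3", &123i32)
///     .build();
/// assert_eq!(s.get::<i32>("f3"), Some(123));
/// ```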
pub struct Builder {
s: Structure,
}
impl Builder {
fn new(name: &str) -> Self {
Builder {
s: Structure::new_empty(name),
}
}
pub fn field<V: ToSendValue>(mut self, name: &str, value: &V) -> Self {
self.s.set(name, value);
self
}
pub fn build(self) -> Structure {
self.s
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn new_set_get() {
::init().unwrap();
let mut s = Structure::new_empty("test");
assert_eq!(s.get_name(), "test");
s.set("f1", &"abc");
s.set("f2", &String::from("bcd"));
s.set("f3", &123i32);
assert_eq!(s.get::<&str>("f1").unwrap(), "abc");
assert_eq!(s.get::<&str>("f2").unwrap(), "bcd");
assert_eq!(s.get::<i32>("f3").unwrap(), 123i32);
assert_eq!(s.fields().collect::<Vec<_>>(), vec!["f1", "f2", "f3"]);
let v = s.iter().map(|(f, v)| (f, v.clone())).collect::<Vec<_>>();
assert_eq!(v.len(), 3);
assert_eq!(v[0].0, "f1");
assert_eq!(v[0].1.get::<&str>().unwrap(), "abc");
assert_eq!(v[1].0, "f2");
assert_eq!(v[1].1.get::<&str>().unwrap(), "bcd");
assert_eq!(v[2].0, "f3");
assert_eq!(v[2].1.get::<i32>().unwrap(), 123i32);
let s2 = Structure::new("test", &[("f1", &"abc"), ("f2", &"bcd"), ("f3", &123i32)]);
assert_eq!(s, s2);
}
#[test]
fn test_builder() {
::init().unwrap();
let s = Structure::builder("test")
.field("f1", &"abc")
.field("f2", &String::from("bcd"))
.field("f3", &123i32)
.build();
assert_eq!(s.get_name(), "test");
assert_eq!(s.get::<&str>("f1").unwrap(), "abc");
assert_eq!(s.get::<&str>("f2").unwrap(), "bcd");
assert_eq!(s.get::<i32>("f3").unwrap(), 123i32);
}
#[test]
fn test_string_conversion() {
let a = "Test, f1=(string)abc, f2=(uint)123;";
let s = Structure::from_string(&a).unwrap();
assert_eq!(s.get::<&str>("f1").unwrap(), "abc");
assert_eq!(s.get::<u32>("f2").unwrap(), 123);
assert_eq!(a, s.to_string());
}
}
| 26.972337 | 97 | 0.559356 |
180720bf936814d027b2d7b2258145131c5d0507 | 3,363 | // Copyright 2020 Alex Dukhno
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use protocol::pgsql_types::PostgreSqlType;
use super::*;
#[rstest::rstest]
fn execute_insert_portal(sql_engine_with_schema: (QueryExecutor, ResultCollector)) {
let (mut engine, collector) = sql_engine_with_schema;
engine
.execute("create table schema_name.table_name (column_1 smallint, column_2 smallint);")
.expect("no system errors");
engine
.parse_prepared_statement(
"statement_name",
"insert into schema_name.table_name values ($1, $2);",
&[PostgreSqlType::SmallInt, PostgreSqlType::SmallInt],
)
.expect("no system errors");
engine
.bind_prepared_statement_to_portal(
"portal_name",
"statement_name",
&[PostgreSqlFormat::Binary, PostgreSqlFormat::Text],
&[Some(vec![0, 1]), Some(b"2".to_vec())],
&[],
)
.expect("no system errors");
engine.execute_portal("portal_name", 0).expect("no system errors");
collector.assert_content(vec![
Ok(QueryEvent::SchemaCreated),
Ok(QueryEvent::QueryComplete),
Ok(QueryEvent::TableCreated),
Ok(QueryEvent::QueryComplete),
Ok(QueryEvent::ParseComplete),
Ok(QueryEvent::BindComplete),
Ok(QueryEvent::RecordsInserted(1)),
]);
}
#[rstest::rstest]
fn execute_update_portal(sql_engine_with_schema: (QueryExecutor, ResultCollector)) {
let (mut engine, collector) = sql_engine_with_schema;
engine
.execute("create table schema_name.table_name (column_1 smallint, column_2 smallint);")
.expect("no system errors");
engine
.execute("insert into schema_name.table_name values (1, 2);")
.expect("no system errors");
engine
.parse_prepared_statement(
"statement_name",
"update schema_name.table_name set column_1 = $1, column_2 = $2;",
&[PostgreSqlType::SmallInt, PostgreSqlType::SmallInt],
)
.expect("no system errors");
engine
.bind_prepared_statement_to_portal(
"portal_name",
"statement_name",
&[PostgreSqlFormat::Binary, PostgreSqlFormat::Text],
&[Some(vec![0, 1]), Some(b"2".to_vec())],
&[],
)
.expect("no system errors");
engine.execute_portal("portal_name", 0).expect("no system errors");
collector.assert_content(vec![
Ok(QueryEvent::SchemaCreated),
Ok(QueryEvent::QueryComplete),
Ok(QueryEvent::TableCreated),
Ok(QueryEvent::QueryComplete),
Ok(QueryEvent::RecordsInserted(1)),
Ok(QueryEvent::QueryComplete),
Ok(QueryEvent::ParseComplete),
Ok(QueryEvent::BindComplete),
Ok(QueryEvent::RecordsUpdated(1)),
]);
}
| 36.16129 | 95 | 0.642878 |
5baf6f971197bf7fe5fa63cb881fa87c13e0e39c | 15,646 | use crate::buffer::PyBuffer;
use crate::builtins::pystr::PyStrRef;
use crate::common::hash::PyHash;
use crate::common::lock::PyRwLock;
use crate::function::{FuncArgs, OptionalArg, PyNativeFunc};
use crate::utils::Either;
use crate::VirtualMachine;
use crate::{
IdProtocol, PyComparisonValue, PyObjectRef, PyRef, PyResult, PyValue, TryFromObject,
TypeProtocol,
};
use crossbeam_utils::atomic::AtomicCell;
use std::cmp::Ordering;
bitflags! {
pub struct PyTpFlags: u64 {
const HEAPTYPE = 1 << 9;
const BASETYPE = 1 << 10;
const METHOD_DESCR = 1 << 17;
const HAS_DICT = 1 << 40;
#[cfg(debug_assertions)]
const _CREATED_WITH_FLAGS = 1 << 63;
}
}
impl PyTpFlags {
// CPython default: Py_TPFLAGS_HAVE_STACKLESS_EXTENSION | Py_TPFLAGS_HAVE_VERSION_TAG
pub const DEFAULT: Self = Self::HEAPTYPE;
pub fn has_feature(self, flag: Self) -> bool {
self.contains(flag)
}
#[cfg(debug_assertions)]
pub fn is_created_with_flags(self) -> bool {
self.contains(Self::_CREATED_WITH_FLAGS)
}
}
impl Default for PyTpFlags {
fn default() -> Self {
Self::DEFAULT
}
}
pub(crate) type GenericMethod = fn(&PyObjectRef, FuncArgs, &VirtualMachine) -> PyResult;
pub(crate) type DelFunc = fn(&PyObjectRef, &VirtualMachine) -> PyResult<()>;
pub(crate) type DescrGetFunc =
fn(PyObjectRef, Option<PyObjectRef>, Option<PyObjectRef>, &VirtualMachine) -> PyResult;
pub(crate) type DescrSetFunc =
fn(PyObjectRef, PyObjectRef, Option<PyObjectRef>, &VirtualMachine) -> PyResult<()>;
pub(crate) type HashFunc = fn(&PyObjectRef, &VirtualMachine) -> PyResult<PyHash>;
pub(crate) type RichCompareFunc = fn(
&PyObjectRef,
&PyObjectRef,
PyComparisonOp,
&VirtualMachine,
) -> PyResult<Either<PyObjectRef, PyComparisonValue>>;
pub(crate) type GetattroFunc = fn(PyObjectRef, PyStrRef, &VirtualMachine) -> PyResult;
pub(crate) type SetattroFunc =
fn(&PyObjectRef, PyStrRef, Option<PyObjectRef>, &VirtualMachine) -> PyResult<()>;
pub(crate) type BufferFunc = fn(&PyObjectRef, &VirtualMachine) -> PyResult<Box<dyn PyBuffer>>;
pub(crate) type IterFunc = fn(PyObjectRef, &VirtualMachine) -> PyResult;
pub(crate) type IterNextFunc = fn(&PyObjectRef, &VirtualMachine) -> PyResult;
#[derive(Default)]
pub struct PyTypeSlots {
pub name: PyRwLock<Option<String>>, // tp_name, not class name
// tp_basicsize, tp_itemsize
// Methods to implement standard operations
// Method suites for standard classes
// tp_as_number
// tp_as_sequence
// tp_as_mapping
// More standard operations (here for binary compatibility)
pub hash: AtomicCell<Option<HashFunc>>,
pub call: AtomicCell<Option<GenericMethod>>,
// tp_str
pub getattro: AtomicCell<Option<GetattroFunc>>,
pub setattro: AtomicCell<Option<SetattroFunc>>,
// Functions to access object as input/output buffer
pub as_buffer: Option<BufferFunc>,
// Assigned meaning in release 2.1
// rich comparisons
pub richcompare: AtomicCell<Option<RichCompareFunc>>,
// Iterators
pub iter: AtomicCell<Option<IterFunc>>,
pub iternext: AtomicCell<Option<IterNextFunc>>,
// Flags to define presence of optional/expanded features
pub flags: PyTpFlags,
// tp_doc
pub doc: Option<&'static str>,
// Strong reference on a heap type, borrowed reference on a static type
// tp_base
// tp_dict
pub descr_get: AtomicCell<Option<DescrGetFunc>>,
pub descr_set: AtomicCell<Option<DescrSetFunc>>,
// tp_dictoffset
// tp_init
// tp_alloc
pub new: Option<PyNativeFunc>,
// tp_free
// tp_is_gc
// tp_bases
// tp_mro
// tp_cache
// tp_subclasses
// tp_weaklist
pub del: AtomicCell<Option<DelFunc>>,
}
impl PyTypeSlots {
pub fn from_flags(flags: PyTpFlags) -> Self {
Self {
flags,
..Default::default()
}
}
}
impl std::fmt::Debug for PyTypeSlots {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str("PyTypeSlots")
}
}
#[pyimpl]
pub trait SlotDestructor: PyValue {
#[pyslot]
fn tp_del(zelf: &PyObjectRef, vm: &VirtualMachine) -> PyResult<()> {
if let Some(zelf) = zelf.downcast_ref() {
Self::del(zelf, vm)
} else {
Err(vm.new_type_error("unexpected payload for __del__".to_owned()))
}
}
#[pymethod(magic)]
fn __del__(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult<()> {
Self::del(&zelf, vm)
}
fn del(zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult<()>;
}
#[pyimpl]
pub trait Callable: PyValue {
#[pyslot]
fn tp_call(zelf: &PyObjectRef, args: FuncArgs, vm: &VirtualMachine) -> PyResult {
if let Some(zelf) = zelf.downcast_ref() {
Self::call(zelf, args, vm)
} else {
Err(vm.new_type_error("unexpected payload for __call__".to_owned()))
}
}
#[pymethod]
fn __call__(zelf: PyRef<Self>, args: FuncArgs, vm: &VirtualMachine) -> PyResult {
Self::call(&zelf, args, vm)
}
fn call(zelf: &PyRef<Self>, args: FuncArgs, vm: &VirtualMachine) -> PyResult;
}
#[pyimpl]
pub trait SlotDescriptor: PyValue {
#[pyslot]
fn descr_get(
zelf: PyObjectRef,
obj: Option<PyObjectRef>,
cls: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult;
#[pymethod(magic)]
fn get(
zelf: PyObjectRef,
obj: PyObjectRef,
cls: OptionalArg<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult {
Self::descr_get(zelf, Some(obj), cls.into_option(), vm)
}
fn _zelf(zelf: PyObjectRef, vm: &VirtualMachine) -> PyResult<PyRef<Self>> {
PyRef::<Self>::try_from_object(vm, zelf)
}
fn _unwrap(
zelf: PyObjectRef,
obj: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult<(PyRef<Self>, PyObjectRef)> {
let zelf = Self::_zelf(zelf, vm)?;
let obj = vm.unwrap_or_none(obj);
Ok((zelf, obj))
}
fn _check(
zelf: PyObjectRef,
obj: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> Result<(PyRef<Self>, PyObjectRef), PyResult> {
// CPython descr_check
if let Some(obj) = obj {
// if (!PyObject_TypeCheck(obj, descr->d_type)) {
// PyErr_Format(PyExc_TypeError,
// "descriptor '%V' for '%.100s' objects "
// "doesn't apply to a '%.100s' object",
// descr_name((PyDescrObject *)descr), "?",
// descr->d_type->tp_name,
// obj->ob_type->tp_name);
// *pres = NULL;
// return 1;
// } else {
Ok((Self::_zelf(zelf, vm).unwrap(), obj))
// }
} else {
Err(Ok(zelf))
}
}
fn _cls_is<T>(cls: &Option<PyObjectRef>, other: &T) -> bool
where
T: IdProtocol,
{
cls.as_ref().map_or(false, |cls| other.is(cls))
}
}
#[pyimpl]
pub trait Hashable: PyValue {
#[pyslot]
fn tp_hash(zelf: &PyObjectRef, vm: &VirtualMachine) -> PyResult<PyHash> {
if let Some(zelf) = zelf.downcast_ref() {
Self::hash(zelf, vm)
} else {
Err(vm.new_type_error("unexpected payload for __hash__".to_owned()))
}
}
#[pymethod]
fn __hash__(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult<PyHash> {
Self::hash(&zelf, vm)
}
fn hash(zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult<PyHash>;
}
pub trait Unhashable: PyValue {}
impl<T> Hashable for T
where
T: Unhashable,
{
fn hash(_zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult<PyHash> {
Err(vm.new_type_error(format!("unhashable type: '{}'", _zelf.class().name)))
}
}
#[pyimpl]
pub trait Comparable: PyValue {
#[pyslot]
fn tp_richcompare(
zelf: &PyObjectRef,
other: &PyObjectRef,
op: PyComparisonOp,
vm: &VirtualMachine,
) -> PyResult<Either<PyObjectRef, PyComparisonValue>> {
if let Some(zelf) = zelf.downcast_ref() {
Self::cmp(zelf, other, op, vm).map(Either::B)
} else {
Err(vm.new_type_error(format!("unexpected payload for {}", op.method_name())))
}
}
fn cmp(
zelf: &PyRef<Self>,
other: &PyObjectRef,
op: PyComparisonOp,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue>;
#[pymethod(magic)]
fn eq(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Eq, vm)
}
#[pymethod(magic)]
fn ne(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Ne, vm)
}
#[pymethod(magic)]
fn lt(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Lt, vm)
}
#[pymethod(magic)]
fn le(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Le, vm)
}
#[pymethod(magic)]
fn ge(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Ge, vm)
}
#[pymethod(magic)]
fn gt(
zelf: PyRef<Self>,
other: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<PyComparisonValue> {
Self::cmp(&zelf, &other, PyComparisonOp::Gt, vm)
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum PyComparisonOp {
// be intentional with bits so that we can do eval_ord with just a bitwise and
// bits: | Equal | Greater | Less |
Lt = 0b001,
Gt = 0b010,
Ne = 0b011,
Eq = 0b100,
Le = 0b101,
Ge = 0b110,
}
use PyComparisonOp::*;
impl PyComparisonOp {
pub fn eq_only(
self,
f: impl FnOnce() -> PyResult<PyComparisonValue>,
) -> PyResult<PyComparisonValue> {
match self {
Self::Eq => f(),
Self::Ne => f().map(|x| x.map(|eq| !eq)),
_ => Ok(PyComparisonValue::NotImplemented),
}
}
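    /// Evaluates this operator against a concrete `Ordering`, using the bit layout
    /// documented above. Illustrative examples (derived from the discriminants):
    ///
    /// ```ignore
    /// use std::cmp::Ordering;
    /// assert!(PyComparisonOp::Le.eval_ord(Ordering::Less));    // Le = Eq | Lt
    /// assert!(PyComparisonOp::Le.eval_ord(Ordering::Equal));
    /// assert!(!PyComparisonOp::Le.eval_ord(Ordering::Greater));
    /// ```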
pub fn eval_ord(self, ord: Ordering) -> bool {
let bit = match ord {
Ordering::Less => Lt,
Ordering::Equal => Eq,
Ordering::Greater => Gt,
};
self as u8 & bit as u8 != 0
}
pub fn swapped(self) -> Self {
match self {
Lt => Gt,
Le => Ge,
Eq => Eq,
Ne => Ne,
Ge => Le,
Gt => Lt,
}
}
pub fn method_name(self) -> &'static str {
match self {
Lt => "__lt__",
Le => "__le__",
Eq => "__eq__",
Ne => "__ne__",
Ge => "__ge__",
Gt => "__gt__",
}
}
pub fn operator_token(self) -> &'static str {
match self {
Lt => "<",
Le => "<=",
Eq => "==",
Ne => "!=",
Ge => ">=",
Gt => ">",
}
}
/// Returns an appropriate return value for the comparison when a and b are the same object, if an
/// appropriate return value exists.
pub fn identical_optimization(self, a: &impl IdProtocol, b: &impl IdProtocol) -> Option<bool> {
self.map_eq(|| a.is(b))
}
/// Returns `Some(true)` when self is `Eq` and `f()` returns true. Returns `Some(false)` when self
/// is `Ne` and `f()` returns true. Otherwise returns `None`.
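    ///
    /// Illustrative behaviour (sketch):
    ///
    /// ```ignore
    /// assert_eq!(PyComparisonOp::Eq.map_eq(|| true), Some(true));
    /// assert_eq!(PyComparisonOp::Ne.map_eq(|| true), Some(false));
    /// assert_eq!(PyComparisonOp::Lt.map_eq(|| true), None);
    /// ```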
pub fn map_eq(self, f: impl FnOnce() -> bool) -> Option<bool> {
match self {
Self::Eq => {
if f() {
Some(true)
} else {
None
}
}
Self::Ne => {
if f() {
Some(false)
} else {
None
}
}
_ => None,
}
}
}
#[pyimpl]
pub trait SlotGetattro: PyValue {
#[pyslot]
fn tp_getattro(obj: PyObjectRef, name: PyStrRef, vm: &VirtualMachine) -> PyResult {
if let Ok(zelf) = obj.downcast::<Self>() {
Self::getattro(zelf, name, vm)
} else {
Err(vm.new_type_error("unexpected payload for __getattribute__".to_owned()))
}
}
// TODO: make zelf: &PyRef<Self>
fn getattro(zelf: PyRef<Self>, name: PyStrRef, vm: &VirtualMachine) -> PyResult;
#[pymethod]
fn __getattribute__(zelf: PyRef<Self>, name: PyStrRef, vm: &VirtualMachine) -> PyResult {
Self::getattro(zelf, name, vm)
}
}
#[pyimpl]
pub trait SlotSetattro: PyValue {
#[pyslot]
fn tp_setattro(
obj: &PyObjectRef,
name: PyStrRef,
value: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult<()> {
if let Some(zelf) = obj.downcast_ref::<Self>() {
Self::setattro(zelf, name, value, vm)
} else {
Err(vm.new_type_error("unexpected payload for __setattr__".to_owned()))
}
}
fn setattro(
zelf: &PyRef<Self>,
name: PyStrRef,
value: Option<PyObjectRef>,
vm: &VirtualMachine,
) -> PyResult<()>;
#[pymethod]
fn __setattr__(
zelf: PyRef<Self>,
name: PyStrRef,
value: PyObjectRef,
vm: &VirtualMachine,
) -> PyResult<()> {
Self::setattro(&zelf, name, Some(value), vm)
}
#[pymethod]
fn __delattr__(zelf: PyRef<Self>, name: PyStrRef, vm: &VirtualMachine) -> PyResult<()> {
Self::setattro(&zelf, name, None, vm)
}
}
#[pyimpl]
pub trait AsBuffer: PyValue {
#[pyslot]
fn tp_as_buffer(zelf: &PyObjectRef, vm: &VirtualMachine) -> PyResult<Box<dyn PyBuffer>> {
if let Some(zelf) = zelf.downcast_ref() {
Self::get_buffer(zelf, vm)
} else {
Err(vm.new_type_error("unexpected payload for get_buffer".to_owned()))
}
}
fn get_buffer(zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult<Box<dyn PyBuffer>>;
}
#[pyimpl]
pub trait Iterable: PyValue {
#[pyslot]
fn tp_iter(zelf: PyObjectRef, vm: &VirtualMachine) -> PyResult {
if let Ok(zelf) = zelf.downcast() {
Self::iter(zelf, vm)
} else {
Err(vm.new_type_error("unexpected payload for __iter__".to_owned()))
}
}
#[pymethod(magic)]
fn iter(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult;
}
#[pyimpl(with(Iterable))]
pub trait PyIter: PyValue {
#[pyslot]
fn tp_iternext(zelf: &PyObjectRef, vm: &VirtualMachine) -> PyResult {
if let Some(zelf) = zelf.downcast_ref() {
Self::next(zelf, vm)
} else {
Err(vm.new_type_error("unexpected payload for __next__".to_owned()))
}
}
fn next(zelf: &PyRef<Self>, vm: &VirtualMachine) -> PyResult;
#[pymethod]
fn __next__(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult {
Self::next(&zelf, vm)
}
}
impl<T> Iterable for T
where
T: PyIter,
{
fn tp_iter(zelf: PyObjectRef, _vm: &VirtualMachine) -> PyResult {
Ok(zelf)
}
fn iter(zelf: PyRef<Self>, _vm: &VirtualMachine) -> PyResult {
Ok(zelf.into_object())
}
}
| 28.140288 | 102 | 0.568388 |
ffbbf3ff0493482a7deb42ee483d45cc77ccc874 | 2,316 | // Copyright 2020-2022 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use crate::types::metrics::NodeMetrics;
use bee_ledger::workers::event::{MilestoneConfirmed, PrunedIndex, SnapshottedIndex};
use bee_runtime::{node::Node, shutdown_stream::ShutdownStream, worker::Worker};
use async_trait::async_trait;
use futures::StreamExt;
use log::info;
use tokio::time::interval;
use tokio_stream::wrappers::IntervalStream;
use std::{convert::Infallible, time::Duration};
const METRICS_INTERVAL: Duration = Duration::from_secs(60);
pub struct MetricsWorker {}
#[async_trait]
impl<N: Node> Worker<N> for MetricsWorker {
type Config = ();
type Error = Infallible;
async fn start(node: &mut N, _config: Self::Config) -> Result<Self, Self::Error> {
node.register_resource(NodeMetrics::new());
let metrics = node.resource::<NodeMetrics>();
node.bus().add_listener::<Self, MilestoneConfirmed, _>(move |event| {
metrics.referenced_messages_inc(event.referenced_messages as u64);
metrics.excluded_no_transaction_messages_inc(event.excluded_no_transaction_messages.len() as u64);
metrics.excluded_conflicting_messages_inc(event.excluded_conflicting_messages.len() as u64);
metrics.included_messages_inc(event.included_messages.len() as u64);
metrics.created_outputs_inc(event.created_outputs as u64);
metrics.consumed_outputs_inc(event.consumed_outputs as u64);
metrics.receipts_inc(event.receipt as u64);
});
let metrics = node.resource::<NodeMetrics>();
node.bus().add_listener::<Self, SnapshottedIndex, _>(move |_| {
metrics.snapshots_inc(1);
});
let metrics = node.resource::<NodeMetrics>();
node.bus().add_listener::<Self, PrunedIndex, _>(move |_| {
metrics.prunings_inc(1);
});
let metrics = node.resource::<NodeMetrics>();
node.spawn::<Self, _, _>(|shutdown| async move {
info!("Running.");
let mut ticker = ShutdownStream::new(shutdown, IntervalStream::new(interval(METRICS_INTERVAL)));
while ticker.next().await.is_some() {
info!("{:?}", *metrics);
}
info!("Stopped.");
});
Ok(Self {})
}
}
| 34.567164 | 110 | 0.653282 |
bf019d488cb522989824ddcf3d0dafc318154e87 | 16,315 | extern crate serde;
extern crate serde_json;
extern crate toml;
#[macro_use]
extern crate serde_derive;
use serde::{Deserialize, Deserializer};
use std::collections::{BTreeMap, HashSet};
use toml::map::Map;
use toml::Value;
use toml::Value::{Array, Float, Integer, Table};
macro_rules! t {
($e:expr) => {
match $e {
Ok(t) => t,
Err(e) => panic!("{} failed with {}", stringify!($e), e),
}
};
}
macro_rules! equivalent {
($literal:expr, $toml:expr,) => {{
let toml = $toml;
let literal = $literal;
// In/out of Value is equivalent
println!("try_from");
assert_eq!(t!(Value::try_from(literal.clone())), toml);
println!("try_into");
assert_eq!(literal, t!(toml.clone().try_into()));
// Through a string equivalent
println!("to_string(literal)");
assert_eq!(t!(toml::to_string(&literal)), toml.to_string());
println!("to_string(toml)");
assert_eq!(t!(toml::to_string(&toml)), toml.to_string());
println!("literal, from_str(toml)");
assert_eq!(literal, t!(toml::from_str(&toml.to_string())));
println!("toml, from_str(toml)");
assert_eq!(toml, t!(toml::from_str(&toml.to_string())));
}};
}
macro_rules! error {
($ty:ty, $toml:expr, $error:expr) => {{
println!("attempting parsing");
match toml::from_str::<$ty>(&$toml.to_string()) {
Ok(_) => panic!("successful"),
Err(e) => {
assert!(e.to_string().contains($error), "bad error: {}", e);
}
}
println!("attempting toml decoding");
match $toml.try_into::<$ty>() {
Ok(_) => panic!("successful"),
Err(e) => {
assert!(e.to_string().contains($error), "bad error: {}", e);
}
}
}};
}
macro_rules! map( ($($k:ident: $v:expr),*) => ({
let mut _m = Map::new();
$(_m.insert(stringify!($k).to_string(), t!(Value::try_from($v)));)*
_m
}) );
#[test]
fn smoke() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: isize,
}
equivalent!(Foo { a: 2 }, Table(map! { a: Integer(2) }),);
}
#[test]
fn smoke_hyphen() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a_b: isize,
}
equivalent! {
Foo { a_b: 2 },
Table(map! { a_b: Integer(2) }),
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo2 {
#[serde(rename = "a-b")]
a_b: isize,
}
let mut m = Map::new();
m.insert("a-b".to_string(), Integer(2));
equivalent! {
Foo2 { a_b: 2 },
Table(m),
}
}
#[test]
fn nested() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: isize,
b: Bar,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Bar {
a: String,
}
equivalent! {
Foo { a: 2, b: Bar { a: "test".to_string() } },
Table(map! {
a: Integer(2),
b: Table(map! {
a: Value::String("test".to_string())
})
}),
}
}
#[test]
fn application_decode_error() {
#[derive(PartialEq, Debug)]
struct Range10(usize);
impl<'de> Deserialize<'de> for Range10 {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Range10, D::Error> {
let x: usize = Deserialize::deserialize(d)?;
if x > 10 {
Err(serde::de::Error::custom("more than 10"))
} else {
Ok(Range10(x))
}
}
}
let d_good = Integer(5);
let d_bad1 = Value::String("not an isize".to_string());
let d_bad2 = Integer(11);
assert_eq!(Range10(5), d_good.try_into().unwrap());
let err1: Result<Range10, _> = d_bad1.try_into();
assert!(err1.is_err());
let err2: Result<Range10, _> = d_bad2.try_into();
assert!(err2.is_err());
}
#[test]
fn array() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Vec<isize>,
}
equivalent! {
Foo { a: vec![1, 2, 3, 4] },
Table(map! {
a: Array(vec![
Integer(1),
Integer(2),
Integer(3),
Integer(4)
])
}),
};
}
#[test]
fn inner_structs_with_options() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Option<Box<Foo>>,
b: Bar,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Bar {
a: String,
b: f64,
}
equivalent! {
Foo {
a: Some(Box::new(Foo {
a: None,
b: Bar { a: "foo".to_string(), b: 4.5 },
})),
b: Bar { a: "bar".to_string(), b: 1.0 },
},
Table(map! {
a: Table(map! {
b: Table(map! {
a: Value::String("foo".to_string()),
b: Float(4.5)
})
}),
b: Table(map! {
a: Value::String("bar".to_string()),
b: Float(1.0)
})
}),
}
}
#[test]
fn hashmap() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
set: HashSet<char>,
map: BTreeMap<String, isize>,
}
equivalent! {
Foo {
map: {
let mut m = BTreeMap::new();
m.insert("foo".to_string(), 10);
m.insert("bar".to_string(), 4);
m
},
set: {
let mut s = HashSet::new();
s.insert('a');
s
},
},
Table(map! {
map: Table(map! {
foo: Integer(10),
bar: Integer(4)
}),
set: Array(vec![Value::String("a".to_string())])
}),
}
}
#[test]
fn table_array() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Vec<Bar>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Bar {
a: isize,
}
equivalent! {
Foo { a: vec![Bar { a: 1 }, Bar { a: 2 }] },
Table(map! {
a: Array(vec![
Table(map!{ a: Integer(1) }),
Table(map!{ a: Integer(2) }),
])
}),
}
}
#[test]
fn type_errors() {
#[derive(Deserialize)]
#[allow(dead_code)]
struct Foo {
bar: isize,
}
error! {
Foo,
Table(map! {
bar: Value::String("a".to_string())
}),
"invalid type: string \"a\", expected isize for key `bar`"
}
#[derive(Deserialize)]
#[allow(dead_code)]
struct Bar {
foo: Foo,
}
error! {
Bar,
Table(map! {
foo: Table(map! {
bar: Value::String("a".to_string())
})
}),
"invalid type: string \"a\", expected isize for key `foo.bar`"
}
}
#[test]
fn missing_errors() {
#[derive(Serialize, Deserialize, PartialEq, Debug)]
struct Foo {
bar: isize,
}
error! {
Foo,
Table(map! { }),
"missing field `bar`"
}
}
#[test]
fn parse_enum() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: E,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(untagged)]
enum E {
Bar(isize),
Baz(String),
Last(Foo2),
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo2 {
test: String,
}
equivalent! {
Foo { a: E::Bar(10) },
Table(map! { a: Integer(10) }),
}
equivalent! {
Foo { a: E::Baz("foo".to_string()) },
Table(map! { a: Value::String("foo".to_string()) }),
}
equivalent! {
Foo { a: E::Last(Foo2 { test: "test".to_string() }) },
Table(map! { a: Table(map! { test: Value::String("test".to_string()) }) }),
}
}
#[test]
fn parse_enum_string() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Sort,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
#[serde(rename_all = "lowercase")]
enum Sort {
Asc,
Desc,
}
equivalent! {
Foo { a: Sort::Desc },
Table(map! { a: Value::String("desc".to_string()) }),
}
}
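// The disabled tests below reference a `Decoder` API (with leftover-field
// tracking via `d.toml`) that is not imported in this file.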
// #[test]
// fn unused_fields() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: isize }
//
// let v = Foo { a: 2 };
// let mut d = Decoder::new(Table(map! {
// a, Integer(2),
// b, Integer(5)
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, Some(Table(map! {
// b, Integer(5)
// })));
// }
//
// #[test]
// fn unused_fields2() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: Bar }
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Bar { a: isize }
//
// let v = Foo { a: Bar { a: 2 } };
// let mut d = Decoder::new(Table(map! {
// a, Table(map! {
// a, Integer(2),
// b, Integer(5)
// })
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, Some(Table(map! {
// a, Table(map! {
// b, Integer(5)
// })
// })));
// }
//
// #[test]
// fn unused_fields3() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: Bar }
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Bar { a: isize }
//
// let v = Foo { a: Bar { a: 2 } };
// let mut d = Decoder::new(Table(map! {
// a, Table(map! {
// a, Integer(2)
// })
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, None);
// }
//
// #[test]
// fn unused_fields4() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: BTreeMap<String, String> }
//
// let v = Foo { a: map! { a, "foo".to_string() } };
// let mut d = Decoder::new(Table(map! {
// a, Table(map! {
// a, Value::String("foo".to_string())
// })
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, None);
// }
//
// #[test]
// fn unused_fields5() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: Vec<String> }
//
// let v = Foo { a: vec!["a".to_string()] };
// let mut d = Decoder::new(Table(map! {
// a, Array(vec![Value::String("a".to_string())])
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, None);
// }
//
// #[test]
// fn unused_fields6() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: Option<Vec<String>> }
//
// let v = Foo { a: Some(vec![]) };
// let mut d = Decoder::new(Table(map! {
// a, Array(vec![])
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, None);
// }
//
// #[test]
// fn unused_fields7() {
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Foo { a: Vec<Bar> }
// #[derive(Serialize, Deserialize, PartialEq, Debug)]
// struct Bar { a: isize }
//
// let v = Foo { a: vec![Bar { a: 1 }] };
// let mut d = Decoder::new(Table(map! {
// a, Array(vec![Table(map! {
// a, Integer(1),
// b, Integer(2)
// })])
// }));
// assert_eq!(v, t!(Deserialize::deserialize(&mut d)));
//
// assert_eq!(d.toml, Some(Table(map! {
// a, Array(vec![Table(map! {
// b, Integer(2)
// })])
// })));
// }
#[test]
fn empty_arrays() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Vec<Bar>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Bar;
equivalent! {
Foo { a: vec![] },
Table(map! {a: Array(Vec::new())}),
}
}
#[test]
fn empty_arrays2() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Foo {
a: Option<Vec<Bar>>,
}
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Bar;
equivalent! {
Foo { a: None },
Table(map! {}),
}
equivalent! {
Foo { a: Some(vec![]) },
Table(map! { a: Array(vec![]) }),
}
}
#[test]
fn extra_keys() {
#[derive(Serialize, Deserialize)]
struct Foo {
a: isize,
}
let toml = Table(map! { a: Integer(2), b: Integer(2) });
assert!(toml.clone().try_into::<Foo>().is_ok());
assert!(toml::from_str::<Foo>(&toml.to_string()).is_ok());
}
#[test]
fn newtypes() {
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
struct A {
b: B,
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
struct B(u32);
equivalent! {
A { b: B(2) },
Table(map! { b: Integer(2) }),
}
}
#[test]
fn newtypes2() {
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
struct A {
b: B,
}
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
struct B(Option<C>);
#[derive(Deserialize, Serialize, PartialEq, Debug, Clone)]
struct C {
x: u32,
y: u32,
z: u32,
}
equivalent! {
A { b: B(Some(C { x: 0, y: 1, z: 2 })) },
Table(map! {
b: Table(map! {
x: Integer(0),
y: Integer(1),
z: Integer(2)
})
}),
}
}
#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
struct CanBeEmpty {
a: Option<String>,
b: Option<String>,
}
#[test]
fn table_structs_empty() {
let text = "[bar]\n\n[baz]\n\n[bazv]\na = \"foo\"\n\n[foo]\n";
let value: BTreeMap<String, CanBeEmpty> = toml::from_str(text).unwrap();
let mut expected: BTreeMap<String, CanBeEmpty> = BTreeMap::new();
expected.insert("bar".to_string(), CanBeEmpty::default());
expected.insert("baz".to_string(), CanBeEmpty::default());
expected.insert(
"bazv".to_string(),
CanBeEmpty {
a: Some("foo".to_string()),
b: None,
},
);
expected.insert("foo".to_string(), CanBeEmpty::default());
assert_eq!(value, expected);
assert_eq!(toml::to_string(&value).unwrap(), text);
}
#[test]
fn fixed_size_array() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Entity {
pos: [i32; 2],
}
equivalent! {
Entity { pos: [1, 2] },
Table(map! {
pos: Array(vec![
Integer(1),
Integer(2),
])
}),
}
}
#[test]
fn homogeneous_tuple() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Collection {
elems: (i64, i64, i64),
}
equivalent! {
Collection { elems: (0, 1, 2) },
Table(map! {
elems: Array(vec![
Integer(0),
Integer(1),
Integer(2),
])
}),
}
}
#[test]
fn homogeneous_tuple_struct() {
#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
struct Object(Vec<String>, Vec<String>, Vec<String>);
equivalent! {
map! {
obj: Object(vec!["foo".to_string()], vec![], vec!["bar".to_string(), "baz".to_string()])
},
Table(map! {
obj: Array(vec![
Array(vec![
Value::String("foo".to_string()),
]),
Array(vec![]),
Array(vec![
Value::String("bar".to_string()),
Value::String("baz".to_string()),
]),
])
}),
}
}
#[test]
fn json_interoperability() {
#[derive(Serialize, Deserialize)]
struct Foo {
any: toml::Value,
}
let _foo: Foo = serde_json::from_str(
r#"
{"any":1}
"#,
)
.unwrap();
}
| 23.817518 | 100 | 0.484401 |
38c0722a98736d2fdd544493b58e23771f2c42d8 | 82,873 | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_connector_profile_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateConnectorProfileOutput,
crate::error::CreateConnectorProfileError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateConnectorProfileError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::CreateConnectorProfileError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::CreateConnectorProfileError {
meta: generic,
kind: crate::error::CreateConnectorProfileErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"ConnectorAuthenticationException" => crate::error::CreateConnectorProfileError {
meta: generic,
kind: crate::error::CreateConnectorProfileErrorKind::ConnectorAuthenticationException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InternalServerException" => crate::error::CreateConnectorProfileError {
meta: generic,
kind: crate::error::CreateConnectorProfileErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ServiceQuotaExceededException" => crate::error::CreateConnectorProfileError {
meta: generic,
kind: crate::error::CreateConnectorProfileErrorKind::ServiceQuotaExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::service_quota_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_service_quota_exceeded_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::CreateConnectorProfileError {
meta: generic,
kind: crate::error::CreateConnectorProfileErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateConnectorProfileError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_connector_profile_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::CreateConnectorProfileOutput,
crate::error::CreateConnectorProfileError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_connector_profile_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_connector_profile(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateConnectorProfileError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::CreateFlowOutput, crate::error::CreateFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::CreateFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::CreateFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"ConnectorAuthenticationException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ConnectorAuthenticationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConnectorServerException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ConnectorServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::connector_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServerException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ServiceQuotaExceededException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ServiceQuotaExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::service_quota_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_service_quota_exceeded_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::CreateFlowError {
meta: generic,
kind: crate::error::CreateFlowErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::CreateFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_create_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::CreateFlowOutput, crate::error::CreateFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::create_flow_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_create_flow(
response.body().as_ref(),
output,
)
.map_err(crate::error::CreateFlowError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_connector_profile_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteConnectorProfileOutput,
crate::error::DeleteConnectorProfileError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteConnectorProfileError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DeleteConnectorProfileError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::DeleteConnectorProfileError {
meta: generic,
kind: crate::error::DeleteConnectorProfileErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServerException" => crate::error::DeleteConnectorProfileError {
meta: generic,
kind: crate::error::DeleteConnectorProfileErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteConnectorProfileError {
meta: generic,
kind: crate::error::DeleteConnectorProfileErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteConnectorProfileError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_connector_profile_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DeleteConnectorProfileOutput,
crate::error::DeleteConnectorProfileError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_connector_profile_output::Builder::default();
let _ = response;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::DeleteFlowOutput, crate::error::DeleteFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DeleteFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::DeleteFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::DeleteFlowError {
meta: generic,
kind: crate::error::DeleteFlowErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServerException" => crate::error::DeleteFlowError {
meta: generic,
kind: crate::error::DeleteFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DeleteFlowError {
meta: generic,
kind: crate::error::DeleteFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DeleteFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DeleteFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_delete_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::DeleteFlowOutput, crate::error::DeleteFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::delete_flow_output::Builder::default();
let _ = response;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connector_entity_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorEntityOutput,
crate::error::DescribeConnectorEntityError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DescribeConnectorEntityError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConnectorAuthenticationException" => crate::error::DescribeConnectorEntityError {
meta: generic,
kind: crate::error::DescribeConnectorEntityErrorKind::ConnectorAuthenticationException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"ConnectorServerException" => crate::error::DescribeConnectorEntityError {
meta: generic,
kind: crate::error::DescribeConnectorEntityErrorKind::ConnectorServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::connector_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServerException" => crate::error::DescribeConnectorEntityError {
meta: generic,
kind: crate::error::DescribeConnectorEntityErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DescribeConnectorEntityError {
meta: generic,
kind: crate::error::DescribeConnectorEntityErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DescribeConnectorEntityError {
meta: generic,
kind: crate::error::DescribeConnectorEntityErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DescribeConnectorEntityError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connector_entity_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorEntityOutput,
crate::error::DescribeConnectorEntityError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::describe_connector_entity_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_describe_connector_entity(
response.body().as_ref(),
output,
)
.map_err(crate::error::DescribeConnectorEntityError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connector_profiles_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorProfilesOutput,
crate::error::DescribeConnectorProfilesError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DescribeConnectorProfilesError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DescribeConnectorProfilesError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::DescribeConnectorProfilesError {
meta: generic,
kind: crate::error::DescribeConnectorProfilesErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorProfilesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DescribeConnectorProfilesError {
meta: generic,
kind: crate::error::DescribeConnectorProfilesErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorProfilesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DescribeConnectorProfilesError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connector_profiles_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorProfilesOutput,
crate::error::DescribeConnectorProfilesError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::describe_connector_profiles_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_describe_connector_profiles(
response.body().as_ref(),
output,
)
.map_err(crate::error::DescribeConnectorProfilesError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connectors_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorsOutput,
crate::error::DescribeConnectorsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DescribeConnectorsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::DescribeConnectorsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::DescribeConnectorsError {
meta: generic,
kind: crate::error::DescribeConnectorsErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DescribeConnectorsError {
meta: generic,
kind: crate::error::DescribeConnectorsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeConnectorsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DescribeConnectorsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_connectors_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeConnectorsOutput,
crate::error::DescribeConnectorsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::describe_connectors_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_describe_connectors(
response.body().as_ref(),
output,
)
.map_err(crate::error::DescribeConnectorsError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::DescribeFlowOutput, crate::error::DescribeFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DescribeFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::DescribeFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::DescribeFlowError {
meta: generic,
kind: crate::error::DescribeFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DescribeFlowError {
meta: generic,
kind: crate::error::DescribeFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DescribeFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::DescribeFlowOutput, crate::error::DescribeFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::describe_flow_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_describe_flow(
response.body().as_ref(),
output,
)
.map_err(crate::error::DescribeFlowError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_flow_execution_records_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeFlowExecutionRecordsOutput,
crate::error::DescribeFlowExecutionRecordsError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::DescribeFlowExecutionRecordsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::DescribeFlowExecutionRecordsError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::DescribeFlowExecutionRecordsError {
meta: generic,
kind: crate::error::DescribeFlowExecutionRecordsErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeFlowExecutionRecordsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::DescribeFlowExecutionRecordsError {
meta: generic,
kind: crate::error::DescribeFlowExecutionRecordsErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeFlowExecutionRecordsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::DescribeFlowExecutionRecordsError {
meta: generic,
kind: crate::error::DescribeFlowExecutionRecordsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::DescribeFlowExecutionRecordsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::DescribeFlowExecutionRecordsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_describe_flow_execution_records_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::DescribeFlowExecutionRecordsOutput,
crate::error::DescribeFlowExecutionRecordsError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::describe_flow_execution_records_output::Builder::default();
let _ = response;
output =
crate::json_deser::deser_operation_crate_operation_describe_flow_execution_records(
response.body().as_ref(),
output,
)
.map_err(crate::error::DescribeFlowExecutionRecordsError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_connector_entities_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListConnectorEntitiesOutput,
crate::error::ListConnectorEntitiesError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListConnectorEntitiesError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConnectorAuthenticationException" => crate::error::ListConnectorEntitiesError {
meta: generic,
kind: crate::error::ListConnectorEntitiesErrorKind::ConnectorAuthenticationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConnectorServerException" => crate::error::ListConnectorEntitiesError {
meta: generic,
kind: crate::error::ListConnectorEntitiesErrorKind::ConnectorServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::connector_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServerException" => crate::error::ListConnectorEntitiesError {
meta: generic,
kind: crate::error::ListConnectorEntitiesErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ListConnectorEntitiesError {
meta: generic,
kind: crate::error::ListConnectorEntitiesErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListConnectorEntitiesError {
meta: generic,
kind: crate::error::ListConnectorEntitiesErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListConnectorEntitiesError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_connector_entities_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListConnectorEntitiesOutput,
crate::error::ListConnectorEntitiesError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_connector_entities_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_connector_entities(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListConnectorEntitiesError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_flows_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListFlowsOutput, crate::error::ListFlowsError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListFlowsError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListFlowsError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::ListFlowsError {
meta: generic,
kind: crate::error::ListFlowsErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListFlowsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListFlowsError {
meta: generic,
kind: crate::error::ListFlowsErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListFlowsError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListFlowsError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_flows_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::ListFlowsOutput, crate::error::ListFlowsError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_flows_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_flows(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListFlowsError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_tags_for_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::ListTagsForResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::ListTagsForResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::ListTagsForResourceError {
meta: generic,
kind: crate::error::ListTagsForResourceErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::ListTagsForResourceError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_list_tags_for_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::list_tags_for_resource_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_list_tags_for_resource(
response.body().as_ref(),
output,
)
.map_err(crate::error::ListTagsForResourceError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_start_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::StartFlowOutput, crate::error::StartFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::StartFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::StartFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::StartFlowError {
meta: generic,
kind: crate::error::StartFlowErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StartFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServerException" => crate::error::StartFlowError {
meta: generic,
kind: crate::error::StartFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StartFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::StartFlowError {
meta: generic,
kind: crate::error::StartFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StartFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ServiceQuotaExceededException" => crate::error::StartFlowError {
meta: generic,
kind: crate::error::StartFlowErrorKind::ServiceQuotaExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::service_quota_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_service_quota_exceeded_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StartFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::StartFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_start_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::StartFlowOutput, crate::error::StartFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::start_flow_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_start_flow(
response.body().as_ref(),
output,
)
.map_err(crate::error::StartFlowError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_stop_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::StopFlowOutput, crate::error::StopFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::StopFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::StopFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::StopFlowError {
meta: generic,
kind: crate::error::StopFlowErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StopFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"InternalServerException" => crate::error::StopFlowError {
meta: generic,
kind: crate::error::StopFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StopFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::StopFlowError {
meta: generic,
kind: crate::error::StopFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StopFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"UnsupportedOperationException" => crate::error::StopFlowError {
meta: generic,
kind: crate::error::StopFlowErrorKind::UnsupportedOperationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::unsupported_operation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_unsupported_operation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::StopFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::StopFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_stop_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::StopFlowOutput, crate::error::StopFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::stop_flow_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_stop_flow(
response.body().as_ref(),
output,
)
.map_err(crate::error::StopFlowError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_tag_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::TagResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::TagResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::TagResourceError {
meta: generic,
kind: crate::error::TagResourceErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::TagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::TagResourceError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_tag_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::TagResourceOutput, crate::error::TagResourceError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::tag_resource_output::Builder::default();
let _ = response;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_untag_resource_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UntagResourceError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UntagResourceError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"InternalServerException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UntagResourceError {
meta: generic,
kind: crate::error::UntagResourceErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UntagResourceError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UntagResourceError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_untag_resource_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UntagResourceOutput, crate::error::UntagResourceError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::untag_resource_output::Builder::default();
let _ = response;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_connector_profile_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateConnectorProfileOutput,
crate::error::UpdateConnectorProfileError,
> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => {
return Err(crate::error::UpdateConnectorProfileError::unhandled(
generic,
))
}
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::UpdateConnectorProfileError {
meta: generic,
kind: crate::error::UpdateConnectorProfileErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"ConnectorAuthenticationException" => crate::error::UpdateConnectorProfileError {
meta: generic,
kind: crate::error::UpdateConnectorProfileErrorKind::ConnectorAuthenticationException(
{
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
},
),
},
"InternalServerException" => crate::error::UpdateConnectorProfileError {
meta: generic,
kind: crate::error::UpdateConnectorProfileErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateConnectorProfileError {
meta: generic,
kind: crate::error::UpdateConnectorProfileErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UpdateConnectorProfileError {
meta: generic,
kind: crate::error::UpdateConnectorProfileErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateConnectorProfileError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_connector_profile_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<
crate::output::UpdateConnectorProfileOutput,
crate::error::UpdateConnectorProfileError,
> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_connector_profile_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_connector_profile(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateConnectorProfileError::unhandled)?;
output.build()
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_flow_error(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UpdateFlowOutput, crate::error::UpdateFlowError> {
let generic = crate::json_deser::parse_http_generic_error(response)
.map_err(crate::error::UpdateFlowError::unhandled)?;
let error_code = match generic.code() {
Some(code) => code,
None => return Err(crate::error::UpdateFlowError::unhandled(generic)),
};
let _error_message = generic.message().map(|msg| msg.to_owned());
Err(match error_code {
"ConflictException" => {
crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ConflictException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::conflict_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_conflict_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
}
}
"ConnectorAuthenticationException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ConnectorAuthenticationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::connector_authentication_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_authentication_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ConnectorServerException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ConnectorServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::connector_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_connector_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"InternalServerException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::InternalServerException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::internal_server_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_internal_server_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ResourceNotFoundException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ResourceNotFoundException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::resource_not_found_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_resource_not_found_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ServiceQuotaExceededException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ServiceQuotaExceededException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output =
crate::error::service_quota_exceeded_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_service_quota_exceeded_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
"ValidationException" => crate::error::UpdateFlowError {
meta: generic,
kind: crate::error::UpdateFlowErrorKind::ValidationException({
#[allow(unused_mut)]
let mut tmp = {
#[allow(unused_mut)]
let mut output = crate::error::validation_exception::Builder::default();
let _ = response;
output = crate::json_deser::deser_structure_crate_error_validation_exceptionjson_err(response.body().as_ref(), output).map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
};
if (&tmp.message).is_none() {
tmp.message = _error_message;
}
tmp
}),
},
_ => crate::error::UpdateFlowError::generic(generic),
})
}
#[allow(clippy::unnecessary_wraps)]
pub fn parse_update_flow_response(
response: &http::Response<bytes::Bytes>,
) -> std::result::Result<crate::output::UpdateFlowOutput, crate::error::UpdateFlowError> {
Ok({
#[allow(unused_mut)]
let mut output = crate::output::update_flow_output::Builder::default();
let _ = response;
output = crate::json_deser::deser_operation_crate_operation_update_flow(
response.body().as_ref(),
output,
)
.map_err(crate::error::UpdateFlowError::unhandled)?;
output.build()
})
}
| 44.531435 | 221 | 0.560557 |
9121bac96207c254b03407c89b4d8128a1b12132 | 808 | // Copyright 2019, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::object::IsA;
use glib::translate::*;
use gtk_sys;
use libc::c_char;
use Actionable;
pub trait ActionableExtManual: 'static {
fn set_action_target(&self, string: &str);
}
impl<O: IsA<Actionable>> ActionableExtManual for O {
fn set_action_target(&self, string: &str) {
let string: Stash<*const c_char, _> = string.to_glib_none();
unsafe {
gtk_sys::gtk_actionable_set_action_target(
self.as_ref().to_glib_none().0,
b"%s\0".as_ptr() as *const c_char,
string.0,
);
}
}
}
| 29.925926 | 95 | 0.639851 |
64429ebdb0e4f2650f11a604375f191633244757 | 1,097 | use std::string::FromUtf8Error;
use tokio::task::JoinError;
#[derive(Debug)]
pub enum DtsupErrorType {
GraphMissingError,
ParseFileError,
UTF8Error,
JoinError,
IOError,
}
#[derive(Debug)]
pub struct Error {
err_type: DtsupErrorType,
reason: Option<String>,
}
impl Error {
pub fn new(err_type: DtsupErrorType) -> Self {
Self {
err_type,
reason: None,
}
}
pub fn new_with_reason(err_type: DtsupErrorType, reason: &str) -> Self {
Self {
err_type,
reason: Some(reason.to_owned()),
}
}
}
impl From<JoinError> for Error {
fn from(_: JoinError) -> Self {
Error::new(DtsupErrorType::JoinError)
}
}
impl From<std::io::Error> for Error {
fn from(err: std::io::Error) -> Self {
Error::new_with_reason(DtsupErrorType::IOError, &err.to_string())
}
}
impl From<FromUtf8Error> for Error {
fn from(err: FromUtf8Error) -> Self {
Error::new_with_reason(DtsupErrorType::UTF8Error, &err.to_string())
}
}
// impl From<anyhow::Error> for Error {
// fn from(err: anyhow::Error) -> Self {
// // Error::new()
// }
// }
| 18.59322 | 74 | 0.639927 |
87d390117d4125d7dfa546797f09914f9eac41dc | 1,347 | #[cfg(test)]
pub mod zero_poly_tests {
use kzg_bench::tests::zero_poly::*;
use mcl_rust::data_types::fr::Fr;
use mcl_rust::fk20_fft::FFTSettings;
use mcl_rust::kzg10::Polynomial;
use mcl_rust::mcl_methods::init;
use mcl_rust::CurveType;
#[test]
fn test_reduce_partials_() {
assert!(init(CurveType::BLS12_381));
test_reduce_partials::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn reduce_partials_random_() {
assert!(init(CurveType::BLS12_381));
reduce_partials_random::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn check_test_data_() {
assert!(init(CurveType::BLS12_381));
check_test_data::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn zero_poly_known_() {
assert!(init(CurveType::BLS12_381));
zero_poly_known::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn zero_poly_random_() {
assert!(init(CurveType::BLS12_381));
zero_poly_random::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn zero_poly_all_but_one_() {
assert!(init(CurveType::BLS12_381));
zero_poly_all_but_one::<Fr, FFTSettings, Polynomial>();
}
#[test]
fn zero_poly_252_() {
assert!(init(CurveType::BLS12_381));
zero_poly_252::<Fr, FFTSettings, Polynomial>();
}
}
| 25.903846 | 64 | 0.616927 |
4860837fce329d5052ded0c9384c9120ebccd493 | 1,065 | /*
* Camunda BPM REST API
*
* OpenApi Spec for Camunda BPM REST API.
*
* The version of the OpenAPI document: 7.14.0
*
* Generated by: https://openapi-generator.tech
*/
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct MissingAuthorizationDto {
/// The permission name that the user is missing.
#[serde(rename = "permissionName", skip_serializing_if = "Option::is_none")]
pub permission_name: Option<String>,
/// The name of the resource that the user is missing permission for.
#[serde(rename = "resourceName", skip_serializing_if = "Option::is_none")]
pub resource_name: Option<String>,
/// The id of the resource that the user is missing permission for.
#[serde(rename = "resourceId", skip_serializing_if = "Option::is_none")]
pub resource_id: Option<String>,
}
impl MissingAuthorizationDto {
pub fn new() -> MissingAuthorizationDto {
MissingAuthorizationDto {
permission_name: None,
resource_name: None,
resource_id: None,
}
}
}
| 28.026316 | 80 | 0.673239 |
6275ab96f80e2f6bf44ef7d7ed97d7b376dc1736 | 782 | use poi::prelude::*;
fn main() {
test_binary(And, |a, b| a && b);
test_binary(Or, |a, b| a || b);
test_binary(Eqb, |a, b| a == b);
test_binary(Xor, |a, b| a ^ b);
test_binary(Nand, |a, b| !(a && b));
test_binary(Nor, |a, b| !(a || b));
test_binary(Exc, |a, b| a && !b);
test_binary(Imply, |a, b| !a || b);
test_binary(Fstb, |a, _| a);
test_binary(Sndb, |_, b| b);
}
pub fn test_binary(sym: Symbol, f: fn(bool, bool) -> bool) {
let ref std = std();
let cases = &[
(false, false),
(false, true),
(true, false),
(true, true)
];
for case in cases {
let r = f(case.0, case.1);
let a = app(sym.clone(), (case.0, case.1)).eval(std).unwrap();
assert_eq!(a, r.into());
}
}
| 26.066667 | 70 | 0.484655 |
7122e933c7793cd78f853c1f22449145add6412f | 1,644 | use fastup::{Document, Node, parse_for_first_node};
use fastup::Node::{Text, Foreground, Background, Bold};
/// Expand widgets to nodes until there are no widgets in the node tree
/// any more.
///
/// It is guaranteed that all `Node`s in the returned `Document`
/// contain no `Node::Widget(..)`.
pub fn expand(doc: Document) -> Document {
Document(doc.0.into_iter().map(expand_node).collect())
}
fn expand_node(node: Node) -> Node {
match node {
Text(..) => node,
Foreground(color, doc) => Foreground(color, expand(doc)),
Background(color, doc) => Background(color, expand(doc)),
Bold(doc) => Bold(expand(doc)),
Node::Widget(name, args) => {
let node = make(&name)
.map(|widget| widget.expand(args))
.unwrap_or_else(|| no_widget_error_node(&name));
expand_node(node)
}
}
}
fn no_widget_error_node(widget_name: &str) -> Node {
let err = format!("no such widget: (ccdd44: {})", widget_name);
error_node(&err)
}
/// Create a `Node` containing the formatted error message `err`.
/// The `err` should be a valid fastup markup text.
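///
/// A minimal usage sketch; the argument below is illustrative only and
/// mirrors the color markup used by `no_widget_error_node` above:
///
/// ```rust,ignore
/// let node = error_node("no such widget: (ccdd44: clock)");
/// ```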
pub fn error_node(err: &str) -> Node {
let source = format!("[551100: (ff6666: \
\\ (44ccdd:\\<)ERROR: {}(44ccdd:\\>) )]",
err);
parse_for_first_node(&source).unwrap()
}
factory! {
pub trait Widget {
fn expand(&self, args: Vec<String>) -> Node;
}
/// Create a widget by `name`.
make {
time;
gradient;
cpu;
mem;
net;
}
/// Get a list of widgets names
list;
}
| 27.4 | 71 | 0.565693 |
1c771ea1ba463640b3cdbac78110b92956709440 | 684 | pub mod cranelift;
pub mod singlepass;
pub use wasmer_runtime_core::backend::Compiler;
pub fn compiler_for_backend(backend: &str) -> Option<Box<dyn Compiler>> {
match backend {
#[cfg(any(feature = "cranelift", feature = "default-cranelift"))]
"cranelift" => Some(cranelift::compiler()),
#[cfg(any(feature = "singlepass", feature = "default-singlepass"))]
"singlepass" => Some(singlepass::compiler()),
_ => None,
}
}
#[cfg(feature = "default-cranelift")]
pub use cranelift::{backend, compile, get_gas_left, set_gas_limit};
#[cfg(feature = "default-singlepass")]
pub use singlepass::{backend, compile, get_gas_left, set_gas_limit};
| 29.73913 | 75 | 0.671053 |
c1b18060d76146ec9d4ae509aa278c1438533e04 | 38,376 | // Copyright 2021 The Chromium OS Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause
//! This module writes Flattened Devicetree blobs as defined here:
//! <https://devicetree-specification.readthedocs.io/en/stable/flattened-format.html>
use std::cmp::{Ord, Ordering};
use std::collections::BTreeMap;
use std::convert::TryInto;
use std::ffi::CString;
use std::fmt;
use std::mem::size_of;
use crate::{FDT_BEGIN_NODE, FDT_END, FDT_END_NODE, FDT_MAGIC, FDT_PROP};
#[derive(Debug, PartialEq)]
pub enum Error {
PropertyAfterEndNode,
PropertyValueTooLarge,
TotalSizeTooLarge,
InvalidString,
OutOfOrderEndNode,
UnclosedNode,
InvalidMemoryReservation,
OverlappingMemoryReservations,
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::PropertyAfterEndNode => {
write!(f, "Properties may not be added after a node has been ended")
}
Error::PropertyValueTooLarge => write!(f, "Property value size must fit in 32 bits"),
Error::TotalSizeTooLarge => write!(f, "Total size must fit in 32 bits"),
Error::InvalidString => write!(f, "Strings cannot contain NUL"),
Error::OutOfOrderEndNode => {
write!(f, "Attempted to end a node that was not the most recent")
}
Error::UnclosedNode => write!(f, "Attempted to call finish without ending all nodes"),
Error::InvalidMemoryReservation => write!(f, "Memory reservation is invalid"),
Error::OverlappingMemoryReservations => {
write!(f, "Memory reservations are overlapping")
}
}
}
}
/// Result of a FDT writer operation.
pub type Result<T> = std::result::Result<T, Error>;
const FDT_HEADER_SIZE: usize = 40;
const FDT_VERSION: u32 = 17;
const FDT_LAST_COMP_VERSION: u32 = 16;
/// Interface for writing a Flattened Devicetree (FDT) and emitting a Devicetree Blob (DTB).
///
/// # Example
///
/// ```rust
/// use vm_fdt::FdtWriter;
///
/// # fn fdt_example() -> vm_fdt::FdtWriterResult<()> {
/// let mut fdt = FdtWriter::new()?;
/// let root_node = fdt.begin_node("")?;
/// fdt.property_string("compatible", "linux,dummy-virt")?;
/// fdt.property_u32("#address-cells", 0x2)?;
/// fdt.property_u32("#size-cells", 0x2)?;
/// let chosen_node = fdt.begin_node("chosen")?;
/// fdt.property_u32("linux,pci-probe-only", 1)?;
/// fdt.property_string("bootargs", "panic=-1 console=hvc0 root=/dev/vda")?;
/// fdt.end_node(chosen_node)?;
/// fdt.end_node(root_node)?;
/// let dtb = fdt.finish()?;
/// # Ok(())
/// # }
/// # let _ = fdt_example().unwrap();
/// ```
#[derive(Debug)]
pub struct FdtWriter {
data: Vec<u8>,
off_mem_rsvmap: u32,
off_dt_struct: u32,
strings: Vec<u8>,
string_offsets: BTreeMap<CString, u32>,
node_depth: usize,
node_ended: bool,
boot_cpuid_phys: u32,
}
/// Reserved physical memory region.
///
/// This represents an area of physical memory reserved by the firmware and unusable by the OS.
/// For example, this could be used to preserve bootloader code or data used at runtime.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct FdtReserveEntry {
address: u64,
size: u64,
}
impl FdtReserveEntry {
/// Create a memory reservation for the FDT.
///
/// # Arguments
///
/// * address: Physical address of the beginning of the reserved region.
/// * size: Size of the reserved region in bytes.
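    ///
    /// # Example
    ///
    /// A minimal sketch; the address and size below are made-up values chosen
    /// only for illustration.
    ///
    /// ```rust,ignore
    /// // Reserve 4 KiB starting at a hypothetical physical address.
    /// let rsv = FdtReserveEntry::new(0x8000_0000, 0x1000)?;
    /// ```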
pub fn new(address: u64, size: u64) -> Result<Self> {
if address.checked_add(size).is_none() || size == 0 {
return Err(Error::InvalidMemoryReservation);
}
Ok(FdtReserveEntry { address, size })
}
}
impl Ord for FdtReserveEntry {
fn cmp(&self, other: &Self) -> Ordering {
self.address.cmp(&other.address)
}
}
impl PartialOrd for FdtReserveEntry {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.address.partial_cmp(&other.address)
}
}
// Returns an error if any of the memory reservations overlap.
fn check_overlapping(mem_reservations: &[FdtReserveEntry]) -> Result<()> {
let mut mem_rsvmap_copy = mem_reservations.to_vec();
mem_rsvmap_copy.sort();
let overlapping = mem_rsvmap_copy.windows(2).any(|w| {
// The following add cannot overflow because we can only have
// valid FdtReserveEntry (as per the constructor of the type).
w[0].address + w[0].size > w[1].address
});
if overlapping {
return Err(Error::OverlappingMemoryReservations);
}
Ok(())
}
/// Handle to an open node created by `FdtWriter::begin_node`.
///
/// This must be passed back to `FdtWriter::end_node` to close the nodes.
/// Nodes must be closed in reverse order as they were opened, matching the nesting structure
/// of the devicetree.
#[derive(Debug)]
pub struct FdtWriterNode {
depth: usize,
}
impl FdtWriter {
/// Create a new Flattened Devicetree writer instance.
pub fn new() -> Result<Self> {
FdtWriter::new_with_mem_reserv(&[])
}
/// Create a new Flattened Devicetree writer instance.
///
/// # Arguments
///
/// `mem_reservations` - reserved physical memory regions to list in the FDT header.
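    ///
    /// # Example
    ///
    /// A minimal sketch using the same illustrative address and size as in the
    /// `FdtReserveEntry::new` example; overlapping entries are rejected.
    ///
    /// ```rust,ignore
    /// let fdt = FdtWriter::new_with_mem_reserv(&[
    ///     FdtReserveEntry::new(0x8000_0000, 0x1000)?,
    /// ])?;
    /// ```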
pub fn new_with_mem_reserv(mem_reservations: &[FdtReserveEntry]) -> Result<Self> {
let data = vec![0u8; FDT_HEADER_SIZE]; // Reserve space for header.
let mut fdt = FdtWriter {
data,
off_mem_rsvmap: 0,
off_dt_struct: 0,
strings: Vec::new(),
string_offsets: BTreeMap::new(),
node_depth: 0,
node_ended: false,
boot_cpuid_phys: 0,
};
fdt.align(8);
// This conversion cannot fail since the size of the header is fixed.
fdt.off_mem_rsvmap = fdt.data.len() as u32;
        check_overlapping(mem_reservations)?;
fdt.write_mem_rsvmap(mem_reservations);
fdt.align(4);
fdt.off_dt_struct = fdt
.data
.len()
.try_into()
.map_err(|_| Error::TotalSizeTooLarge)?;
Ok(fdt)
}
fn write_mem_rsvmap(&mut self, mem_reservations: &[FdtReserveEntry]) {
for rsv in mem_reservations {
self.append_u64(rsv.address);
self.append_u64(rsv.size);
}
self.append_u64(0);
self.append_u64(0);
}
/// Set the `boot_cpuid_phys` field of the devicetree header.
pub fn set_boot_cpuid_phys(&mut self, boot_cpuid_phys: u32) {
self.boot_cpuid_phys = boot_cpuid_phys;
}
// Append `num_bytes` padding bytes (0x00).
fn pad(&mut self, num_bytes: usize) {
self.data.extend(std::iter::repeat(0).take(num_bytes));
}
// Append padding bytes (0x00) until the length of data is a multiple of `alignment`.
fn align(&mut self, alignment: usize) {
let offset = self.data.len() % alignment;
if offset != 0 {
self.pad(alignment - offset);
}
}
// Rewrite the value of a big-endian u32 within data.
fn update_u32(&mut self, offset: usize, val: u32) {
let data_slice = &mut self.data[offset..offset + 4];
data_slice.copy_from_slice(&val.to_be_bytes());
}
fn append_u32(&mut self, val: u32) {
self.data.extend_from_slice(&val.to_be_bytes());
}
fn append_u64(&mut self, val: u64) {
self.data.extend_from_slice(&val.to_be_bytes());
}
/// Open a new FDT node.
///
/// The node must be closed using `end_node`.
///
/// # Arguments
///
/// `name` - name of the node; must not contain any NUL bytes.
pub fn begin_node(&mut self, name: &str) -> Result<FdtWriterNode> {
let name_cstr = CString::new(name).map_err(|_| Error::InvalidString)?;
self.append_u32(FDT_BEGIN_NODE);
self.data.extend(name_cstr.to_bytes_with_nul());
self.align(4);
self.node_depth += 1;
self.node_ended = false;
Ok(FdtWriterNode {
depth: self.node_depth,
})
}
/// Close a node previously opened with `begin_node`.
pub fn end_node(&mut self, node: FdtWriterNode) -> Result<()> {
if node.depth != self.node_depth {
return Err(Error::OutOfOrderEndNode);
}
self.append_u32(FDT_END_NODE);
self.node_depth -= 1;
self.node_ended = true;
Ok(())
}
// Find an existing instance of a string `s`, or add it to the strings block.
// Returns the offset into the strings block.
fn intern_string(&mut self, s: CString) -> Result<u32> {
if let Some(off) = self.string_offsets.get(&s) {
Ok(*off)
} else {
let off = self
.strings
.len()
.try_into()
.map_err(|_| Error::TotalSizeTooLarge)?;
self.strings.extend_from_slice(s.to_bytes_with_nul());
self.string_offsets.insert(s, off);
Ok(off)
}
}
/// Write a property.
///
/// # Arguments
///
/// `name` - name of the property; must not contain any NUL bytes.
/// `val` - value of the property (raw byte array).
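    ///
    /// # Example
    ///
    /// A minimal sketch with a made-up property name and payload, assuming an
    /// `FdtWriter` with an open node as in the `FdtWriter` example above; the
    /// typed helpers below are usually more convenient than raw bytes.
    ///
    /// ```rust,ignore
    /// fdt.property("example,raw-bytes", &[0x12, 0x34, 0x56, 0x78])?;
    /// ```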
pub fn property(&mut self, name: &str, val: &[u8]) -> Result<()> {
if self.node_ended {
return Err(Error::PropertyAfterEndNode);
}
let name_cstr = CString::new(name).map_err(|_| Error::InvalidString)?;
let len = val
.len()
.try_into()
.map_err(|_| Error::PropertyValueTooLarge)?;
let nameoff = self.intern_string(name_cstr)?;
self.append_u32(FDT_PROP);
self.append_u32(len);
self.append_u32(nameoff);
self.data.extend_from_slice(val);
self.align(4);
Ok(())
}
/// Write an empty property.
pub fn property_null(&mut self, name: &str) -> Result<()> {
self.property(name, &[])
}
/// Write a string property.
pub fn property_string(&mut self, name: &str, val: &str) -> Result<()> {
let cstr_value = CString::new(val).map_err(|_| Error::InvalidString)?;
self.property(name, cstr_value.to_bytes_with_nul())
}
/// Write a stringlist property.
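    ///
    /// A minimal sketch; the property name and values are illustrative only:
    ///
    /// ```rust,ignore
    /// fdt.property_string_list(
    ///     "compatible",
    ///     vec!["vendor,device".to_string(), "generic-device".to_string()],
    /// )?;
    /// ```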
pub fn property_string_list(&mut self, name: &str, values: Vec<String>) -> Result<()> {
let mut bytes = Vec::new();
for s in values {
let cstr = CString::new(s).map_err(|_| Error::InvalidString)?;
            bytes.extend_from_slice(cstr.to_bytes_with_nul());
}
self.property(name, &bytes)
}
/// Write a 32-bit unsigned integer property.
pub fn property_u32(&mut self, name: &str, val: u32) -> Result<()> {
self.property(name, &val.to_be_bytes())
}
/// Write a 64-bit unsigned integer property.
pub fn property_u64(&mut self, name: &str, val: u64) -> Result<()> {
self.property(name, &val.to_be_bytes())
}
/// Write a property containing an array of 32-bit unsigned integers.
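    ///
    /// A minimal sketch with made-up cell values:
    ///
    /// ```rust,ignore
    /// fdt.property_array_u32("interrupts", &[0x0, 0x1, 0x4])?;
    /// ```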
pub fn property_array_u32(&mut self, name: &str, cells: &[u32]) -> Result<()> {
let mut arr = Vec::with_capacity(cells.len() * size_of::<u32>());
for &c in cells {
arr.extend(&c.to_be_bytes());
}
self.property(name, &arr)
}
/// Write a property containing an array of 64-bit unsigned integers.
pub fn property_array_u64(&mut self, name: &str, cells: &[u64]) -> Result<()> {
let mut arr = Vec::with_capacity(cells.len() * size_of::<u64>());
for &c in cells {
arr.extend(&c.to_be_bytes());
}
self.property(name, &arr)
}
/// Finish writing the Devicetree Blob (DTB).
///
/// Returns the DTB as a vector of bytes, consuming the `FdtWriter`.
pub fn finish(mut self) -> Result<Vec<u8>> {
if self.node_depth > 0 {
return Err(Error::UnclosedNode);
}
self.append_u32(FDT_END);
let size_dt_plus_header: u32 = self
.data
.len()
.try_into()
.map_err(|_| Error::TotalSizeTooLarge)?;
let size_dt_struct = size_dt_plus_header - self.off_dt_struct;
let totalsize = self.data.len() + self.strings.len();
let totalsize = totalsize.try_into().map_err(|_| Error::TotalSizeTooLarge)?;
let off_dt_strings = self
.data
.len()
.try_into()
.map_err(|_| Error::TotalSizeTooLarge)?;
let size_dt_strings = self
.strings
.len()
.try_into()
.map_err(|_| Error::TotalSizeTooLarge)?;
// Finalize the header.
self.update_u32(0, FDT_MAGIC);
self.update_u32(4, totalsize);
self.update_u32(2 * 4, self.off_dt_struct);
self.update_u32(3 * 4, off_dt_strings);
self.update_u32(4 * 4, self.off_mem_rsvmap);
self.update_u32(5 * 4, FDT_VERSION);
self.update_u32(6 * 4, FDT_LAST_COMP_VERSION);
self.update_u32(7 * 4, self.boot_cpuid_phys);
self.update_u32(8 * 4, size_dt_strings);
self.update_u32(9 * 4, size_dt_struct);
// Add the strings block.
self.data.append(&mut self.strings);
Ok(self.data)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn minimal() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x48, // 0004: totalsize (0x48)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x48, // 000C: off_dt_strings (0x48)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x00, // 0020: size_dt_strings (0)
0x00, 0x00, 0x00, 0x10, // 0024: size_dt_struct (0x10)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x02, // 0040: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 0044: FDT_END
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn reservemap() {
let mut fdt = FdtWriter::new_with_mem_reserv(&[
FdtReserveEntry::new(0x12345678AABBCCDD, 0x1234).unwrap(),
FdtReserveEntry::new(0x1020304050607080, 0x5678).unwrap(),
])
.unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x68, // 0004: totalsize (0x68)
0x00, 0x00, 0x00, 0x58, // 0008: off_dt_struct (0x58)
0x00, 0x00, 0x00, 0x68, // 000C: off_dt_strings (0x68)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x00, // 0020: size_dt_strings (0)
0x00, 0x00, 0x00, 0x10, // 0024: size_dt_struct (0x10)
0x12, 0x34, 0x56, 0x78, // 0028: rsvmap entry 0 address high
0xAA, 0xBB, 0xCC, 0xDD, // 002C: rsvmap entry 0 address low
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap entry 0 size high
0x00, 0x00, 0x12, 0x34, // 0034: rsvmap entry 0 size low
0x10, 0x20, 0x30, 0x40, // 0038: rsvmap entry 1 address high
0x50, 0x60, 0x70, 0x80, // 003C: rsvmap entry 1 address low
0x00, 0x00, 0x00, 0x00, // 0040: rsvmap entry 1 size high
0x00, 0x00, 0x56, 0x78, // 0044: rsvmap entry 1 size low
0x00, 0x00, 0x00, 0x00, // 0048: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 004C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0050: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0054: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0058: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 005C: node name ("") + padding
0x00, 0x00, 0x00, 0x02, // 0060: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 0064: FDT_END
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn prop_null() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_null("null").unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x59, // 0004: totalsize (0x59)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x54, // 000C: off_dt_strings (0x54)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x05, // 0020: size_dt_strings (0x05)
0x00, 0x00, 0x00, 0x1c, // 0024: size_dt_struct (0x1C)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x03, // 0040: FDT_PROP
0x00, 0x00, 0x00, 0x00, // 0044: prop len (0)
0x00, 0x00, 0x00, 0x00, // 0048: prop nameoff (0)
0x00, 0x00, 0x00, 0x02, // 004C: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 0050: FDT_END
b'n', b'u', b'l', b'l', 0x00, // 0054: strings block
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn prop_u32() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_u32("u32", 0x12345678).unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x5c, // 0004: totalsize (0x5C)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x58, // 000C: off_dt_strings (0x58)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x04, // 0020: size_dt_strings (0x04)
0x00, 0x00, 0x00, 0x20, // 0024: size_dt_struct (0x20)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x03, // 0040: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0044: prop len (4)
0x00, 0x00, 0x00, 0x00, // 0048: prop nameoff (0)
0x12, 0x34, 0x56, 0x78, // 004C: prop u32 value (0x12345678)
0x00, 0x00, 0x00, 0x02, // 0050: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 0054: FDT_END
b'u', b'3', b'2', 0x00, // 0058: strings block
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn all_props() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_null("null").unwrap();
fdt.property_u32("u32", 0x12345678).unwrap();
fdt.property_u64("u64", 0x1234567887654321).unwrap();
fdt.property_string("str", "hello").unwrap();
fdt.property_string_list("strlst", vec!["hi".into(), "bye".into()])
.unwrap();
fdt.property_array_u32("arru32", &[0x12345678, 0xAABBCCDD])
.unwrap();
fdt.property_array_u64("arru64", &[0x1234567887654321])
.unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0xee, // 0004: totalsize (0xEE)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0xc8, // 000C: off_dt_strings (0xC8)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x26, // 0020: size_dt_strings (0x26)
0x00, 0x00, 0x00, 0x90, // 0024: size_dt_struct (0x90)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x03, // 0040: FDT_PROP (null)
0x00, 0x00, 0x00, 0x00, // 0044: prop len (0)
0x00, 0x00, 0x00, 0x00, // 0048: prop nameoff (0)
0x00, 0x00, 0x00, 0x03, // 004C: FDT_PROP (u32)
0x00, 0x00, 0x00, 0x04, // 0050: prop len (4)
0x00, 0x00, 0x00, 0x05, // 0054: prop nameoff (0x05)
0x12, 0x34, 0x56, 0x78, // 0058: prop u32 value (0x12345678)
0x00, 0x00, 0x00, 0x03, // 005C: FDT_PROP (u64)
0x00, 0x00, 0x00, 0x08, // 0060: prop len (8)
0x00, 0x00, 0x00, 0x09, // 0064: prop nameoff (0x09)
0x12, 0x34, 0x56, 0x78, // 0068: prop u64 value high (0x12345678)
0x87, 0x65, 0x43, 0x21, // 006C: prop u64 value low (0x87654321)
0x00, 0x00, 0x00, 0x03, // 0070: FDT_PROP (string)
0x00, 0x00, 0x00, 0x06, // 0074: prop len (6)
0x00, 0x00, 0x00, 0x0D, // 0078: prop nameoff (0x0D)
b'h', b'e', b'l', b'l', // 007C: prop str value ("hello") + padding
b'o', 0x00, 0x00, 0x00, // 0080: "o\0" + padding
0x00, 0x00, 0x00, 0x03, // 0084: FDT_PROP (string list)
0x00, 0x00, 0x00, 0x07, // 0088: prop len (7)
0x00, 0x00, 0x00, 0x11, // 008C: prop nameoff (0x11)
b'h', b'i', 0x00, b'b', // 0090: prop value ("hi", "bye")
b'y', b'e', 0x00, 0x00, // 0094: "ye\0" + padding
0x00, 0x00, 0x00, 0x03, // 0098: FDT_PROP (u32 array)
0x00, 0x00, 0x00, 0x08, // 009C: prop len (8)
0x00, 0x00, 0x00, 0x18, // 00A0: prop nameoff (0x18)
0x12, 0x34, 0x56, 0x78, // 00A4: prop value 0
0xAA, 0xBB, 0xCC, 0xDD, // 00A8: prop value 1
0x00, 0x00, 0x00, 0x03, // 00AC: FDT_PROP (u64 array)
0x00, 0x00, 0x00, 0x08, // 00B0: prop len (8)
0x00, 0x00, 0x00, 0x1f, // 00B4: prop nameoff (0x1F)
0x12, 0x34, 0x56, 0x78, // 00B8: prop u64 value 0 high
0x87, 0x65, 0x43, 0x21, // 00BC: prop u64 value 0 low
0x00, 0x00, 0x00, 0x02, // 00C0: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 00C4: FDT_END
            b'n', b'u', b'l', b'l', 0x00, // 00C8: strings + 0x00: "null"
b'u', b'3', b'2', 0x00, // 00CD: strings + 0x05: "u32"
b'u', b'6', b'4', 0x00, // 00D1: strings + 0x09: "u64"
b's', b't', b'r', 0x00, // 00D5: strings + 0x0D: "str"
b's', b't', b'r', b'l', b's', b't', 0x00, // 00D9: strings + 0x11: "strlst"
b'a', b'r', b'r', b'u', b'3', b'2', 0x00, // 00E0: strings + 0x18: "arru32"
b'a', b'r', b'r', b'u', b'6', b'4', 0x00, // 00E7: strings + 0x1F: "arru64"
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn nested_nodes() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_u32("abc", 0x13579024).unwrap();
let nested_node = fdt.begin_node("nested").unwrap();
fdt.property_u32("def", 0x12121212).unwrap();
fdt.end_node(nested_node).unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x80, // 0004: totalsize (0x80)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x78, // 000C: off_dt_strings (0x78)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x08, // 0020: size_dt_strings (0x08)
0x00, 0x00, 0x00, 0x40, // 0024: size_dt_struct (0x40)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x03, // 0040: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0044: prop len (4)
0x00, 0x00, 0x00, 0x00, // 0048: prop nameoff (0x00)
0x13, 0x57, 0x90, 0x24, // 004C: prop u32 value (0x13579024)
0x00, 0x00, 0x00, 0x01, // 0050: FDT_BEGIN_NODE
b'n', b'e', b's', b't', // 0054: Node name ("nested")
b'e', b'd', 0x00, 0x00, // 0058: "ed\0" + pad
0x00, 0x00, 0x00, 0x03, // 005C: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0060: prop len (4)
0x00, 0x00, 0x00, 0x04, // 0064: prop nameoff (0x04)
0x12, 0x12, 0x12, 0x12, // 0068: prop u32 value (0x12121212)
0x00, 0x00, 0x00, 0x02, // 006C: FDT_END_NODE ("nested")
0x00, 0x00, 0x00, 0x02, // 0070: FDT_END_NODE ("")
0x00, 0x00, 0x00, 0x09, // 0074: FDT_END
b'a', b'b', b'c', 0x00, // 0078: strings + 0x00: "abc"
b'd', b'e', b'f', 0x00, // 007C: strings + 0x04: "def"
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn prop_name_string_reuse() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_u32("abc", 0x13579024).unwrap();
let nested_node = fdt.begin_node("nested").unwrap();
fdt.property_u32("def", 0x12121212).unwrap();
fdt.property_u32("abc", 0x12121212).unwrap(); // This should reuse the "abc" string.
fdt.end_node(nested_node).unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x90, // 0004: totalsize (0x90)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x88, // 000C: off_dt_strings (0x88)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x00, 0x00, 0x00, 0x00, // 001C: boot_cpuid_phys (0)
0x00, 0x00, 0x00, 0x08, // 0020: size_dt_strings (0x08)
0x00, 0x00, 0x00, 0x50, // 0024: size_dt_struct (0x50)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x03, // 0040: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0044: prop len (4)
0x00, 0x00, 0x00, 0x00, // 0048: prop nameoff (0x00)
0x13, 0x57, 0x90, 0x24, // 004C: prop u32 value (0x13579024)
0x00, 0x00, 0x00, 0x01, // 0050: FDT_BEGIN_NODE
b'n', b'e', b's', b't', // 0054: Node name ("nested")
b'e', b'd', 0x00, 0x00, // 0058: "ed\0" + pad
0x00, 0x00, 0x00, 0x03, // 005C: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0060: prop len (4)
0x00, 0x00, 0x00, 0x04, // 0064: prop nameoff (0x04)
0x12, 0x12, 0x12, 0x12, // 0068: prop u32 value (0x12121212)
0x00, 0x00, 0x00, 0x03, // 006C: FDT_PROP
0x00, 0x00, 0x00, 0x04, // 0070: prop len (4)
0x00, 0x00, 0x00, 0x00, // 0074: prop nameoff (0x00 - reuse)
0x12, 0x12, 0x12, 0x12, // 0078: prop u32 value (0x12121212)
0x00, 0x00, 0x00, 0x02, // 007C: FDT_END_NODE ("nested")
0x00, 0x00, 0x00, 0x02, // 0080: FDT_END_NODE ("")
0x00, 0x00, 0x00, 0x09, // 0084: FDT_END
b'a', b'b', b'c', 0x00, // 0088: strings + 0x00: "abc"
b'd', b'e', b'f', 0x00, // 008C: strings + 0x04: "def"
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn boot_cpuid() {
let mut fdt = FdtWriter::new().unwrap();
fdt.set_boot_cpuid_phys(0x12345678);
let root_node = fdt.begin_node("").unwrap();
fdt.end_node(root_node).unwrap();
let actual_fdt = fdt.finish().unwrap();
let expected_fdt = vec![
0xd0, 0x0d, 0xfe, 0xed, // 0000: magic (0xd00dfeed)
0x00, 0x00, 0x00, 0x48, // 0004: totalsize (0x48)
0x00, 0x00, 0x00, 0x38, // 0008: off_dt_struct (0x38)
0x00, 0x00, 0x00, 0x48, // 000C: off_dt_strings (0x48)
0x00, 0x00, 0x00, 0x28, // 0010: off_mem_rsvmap (0x28)
0x00, 0x00, 0x00, 0x11, // 0014: version (0x11 = 17)
0x00, 0x00, 0x00, 0x10, // 0018: last_comp_version (0x10 = 16)
0x12, 0x34, 0x56, 0x78, // 001C: boot_cpuid_phys (0x12345678)
0x00, 0x00, 0x00, 0x00, // 0020: size_dt_strings (0)
0x00, 0x00, 0x00, 0x10, // 0024: size_dt_struct (0x10)
0x00, 0x00, 0x00, 0x00, // 0028: rsvmap terminator (address = 0 high)
0x00, 0x00, 0x00, 0x00, // 002C: rsvmap terminator (address = 0 low)
0x00, 0x00, 0x00, 0x00, // 0030: rsvmap terminator (size = 0 high)
0x00, 0x00, 0x00, 0x00, // 0034: rsvmap terminator (size = 0 low)
0x00, 0x00, 0x00, 0x01, // 0038: FDT_BEGIN_NODE
0x00, 0x00, 0x00, 0x00, // 003C: node name ("") + padding
0x00, 0x00, 0x00, 0x02, // 0040: FDT_END_NODE
0x00, 0x00, 0x00, 0x09, // 0044: FDT_END
];
assert_eq!(expected_fdt, actual_fdt);
}
#[test]
fn invalid_node_name_nul() {
let mut fdt = FdtWriter::new().unwrap();
assert_eq!(
fdt.begin_node("abc\0def").unwrap_err(),
Error::InvalidString
);
}
#[test]
fn invalid_prop_name_nul() {
let mut fdt = FdtWriter::new().unwrap();
assert_eq!(
fdt.property_u32("abc\0def", 0).unwrap_err(),
Error::InvalidString
);
}
#[test]
fn invalid_prop_string_value_nul() {
let mut fdt = FdtWriter::new().unwrap();
assert_eq!(
fdt.property_string("mystr", "abc\0def").unwrap_err(),
Error::InvalidString
);
}
#[test]
fn invalid_prop_string_list_value_nul() {
let mut fdt = FdtWriter::new().unwrap();
let strs = vec!["test".into(), "abc\0def".into()];
assert_eq!(
fdt.property_string_list("mystr", strs).unwrap_err(),
Error::InvalidString
);
}
#[test]
fn invalid_prop_after_end_node() {
let mut fdt = FdtWriter::new().unwrap();
let _root_node = fdt.begin_node("").unwrap();
fdt.property_u32("ok_prop", 1234).unwrap();
let nested_node = fdt.begin_node("mynode").unwrap();
fdt.property_u32("ok_nested_prop", 5678).unwrap();
fdt.end_node(nested_node).unwrap();
assert_eq!(
fdt.property_u32("bad_prop_after_end_node", 1357)
.unwrap_err(),
Error::PropertyAfterEndNode
);
}
#[test]
fn invalid_end_node_out_of_order() {
let mut fdt = FdtWriter::new().unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.property_u32("ok_prop", 1234).unwrap();
let _nested_node = fdt.begin_node("mynode").unwrap();
assert_eq!(
fdt.end_node(root_node).unwrap_err(),
Error::OutOfOrderEndNode
);
}
#[test]
fn invalid_finish_while_node_open() {
let mut fdt = FdtWriter::new().unwrap();
let _root_node = fdt.begin_node("").unwrap();
fdt.property_u32("ok_prop", 1234).unwrap();
let _nested_node = fdt.begin_node("mynode").unwrap();
fdt.property_u32("ok_nested_prop", 5678).unwrap();
assert_eq!(fdt.finish().unwrap_err(), Error::UnclosedNode);
}
#[test]
#[cfg(feature = "long_running_test")]
fn test_overflow_subtract() {
let overflow_size = std::u32::MAX / size_of::<FdtReserveEntry>() as u32 - 3;
let too_large_mem_reserve: Vec<FdtReserveEntry> = (0..overflow_size)
.map(|i| FdtReserveEntry::new(u64::from(i) * 2, 1).unwrap())
.collect();
let mut fdt = FdtWriter::new_with_mem_reserv(&too_large_mem_reserve).unwrap();
let root_node = fdt.begin_node("").unwrap();
fdt.end_node(root_node).unwrap();
assert_eq!(fdt.finish().unwrap_err(), Error::TotalSizeTooLarge);
}
#[test]
fn test_invalid_mem_reservations() {
// Test that we cannot create an invalid FDT reserve entry where the
        // end address of the region would not fit in a u64.
assert_eq!(
FdtReserveEntry::new(0x1, u64::MAX).unwrap_err(),
Error::InvalidMemoryReservation
);
// Test that we cannot have a memory reservation with size 0.
assert_eq!(
FdtReserveEntry::new(0x1, 0).unwrap_err(),
Error::InvalidMemoryReservation
);
}
#[test]
fn test_cmp_mem_reservations() {
        // Test that only the address is taken into consideration when comparing two `FdtReserveEntry` values.
assert_eq!(
FdtReserveEntry::new(0x1, 10)
.unwrap()
.cmp(&FdtReserveEntry::new(0x1, 11).unwrap()),
Ordering::Equal
);
assert_eq!(
FdtReserveEntry::new(0x1, 10)
.unwrap()
.cmp(&FdtReserveEntry::new(0x2, 10).unwrap()),
Ordering::Less
);
assert_eq!(
FdtReserveEntry::new(0x1, 10)
.unwrap()
.cmp(&FdtReserveEntry::new(0x0, 10).unwrap()),
Ordering::Greater
);
}
#[test]
fn test_overlapping_mem_reservations() {
// Check that regions that overlap return an error on new.
// Check overlapping by one.
let overlapping = [
FdtReserveEntry::new(0x3, 1).unwrap(), // this overlaps with
FdtReserveEntry::new(0x0, 1).unwrap(),
FdtReserveEntry::new(0x2, 2).unwrap(), // this one.
];
let fdt = FdtWriter::new_with_mem_reserv(&overlapping);
assert_eq!(fdt.unwrap_err(), Error::OverlappingMemoryReservations);
// Check a larger overlap.
let overlapping = [
FdtReserveEntry::new(0x100, 100).unwrap(),
FdtReserveEntry::new(0x50, 300).unwrap(),
];
let fdt = FdtWriter::new_with_mem_reserv(&overlapping);
assert_eq!(fdt.unwrap_err(), Error::OverlappingMemoryReservations);
}
#[test]
fn test_off_by_one_mem_rsv() {
        // This test makes sure we do not introduce off-by-one errors
        // in the memory reservation checks.
let non_overlapping = [
FdtReserveEntry::new(0x0, 1).unwrap(),
FdtReserveEntry::new(0x1, 1).unwrap(),
FdtReserveEntry::new(0x2, 2).unwrap(),
];
assert!(FdtWriter::new_with_mem_reserv(&non_overlapping).is_ok());
}
}
| 41.577465 | 102 | 0.568741 |