hexsha
stringlengths 40
40
| size
int64 4
1.05M
| content
stringlengths 4
1.05M
| avg_line_length
float64 1.33
100
| max_line_length
int64 1
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
fc97ead3b7b9bab046855a7f668c2f8414af6fb9
| 3,574 |
/// A data structure for performing UnionFind (disjoint-set) queries and
/// updates over elements `0..n`, using union by rank and path compression.
pub struct DisjointSet {
    /// Parent of a given element. If 'parent[u] == u' then element 'u' has no
    /// parent, i.e. 'u' is the root of its set.
    parent: Vec<usize>,
    /// Size of a set. Only valid for the root of a set.
    size: Vec<usize>,
    /// Upper bound on the height of a set. Only valid for the root of a set.
    rank: Vec<usize>,
}

impl DisjointSet {
    /// Construct a new disjoint set with 'n' singleton sets.
    pub fn new(n: usize) -> Self {
        DisjointSet {
            // Each element starts out as its own root. Collecting the range
            // replaces the previous zero-fill-then-overwrite loop.
            parent: (0..n).collect(),
            size: vec![1; n],
            rank: vec![0; n],
        }
    }

    /// Find the root of element 'u'. If 'u' is the root return itself.
    /// Performs path compression on the way: every node visited is
    /// re-parented directly onto the root.
    fn root(&mut self, u: usize) -> usize {
        if self.parent[u] == u {
            u
        } else {
            let r = self.root(self.parent[u]);
            self.parent[u] = r;
            r
        }
    }

    /// Join the sets containing 'u' and 'v'. If they are already in the same
    /// set do nothing and return false, otherwise join the sets and return
    /// true.
    pub fn join(&mut self, u: usize, v: usize) -> bool {
        let r1 = self.root(u);
        let r2 = self.root(v);
        if r1 == r2 {
            return false;
        }
        // Union by rank: hang the shallower tree off the deeper one so the
        // resulting tree height stays logarithmic.
        if self.rank[r1] <= self.rank[r2] {
            if self.rank[r1] == self.rank[r2] {
                self.rank[r2] += 1;
            }
            self.parent[r1] = r2;
            self.size[r2] += self.size[r1];
        } else {
            self.parent[r2] = r1;
            self.size[r1] += self.size[r2];
        }
        true
    }

    /// Returns true if 'u' and 'v' are in the same set, otherwise false.
    pub fn same_set(&mut self, u: usize, v: usize) -> bool {
        self.root(u) == self.root(v)
    }

    /// Returns the size of the set which contains element 'u'.
    pub fn size(&mut self, u: usize) -> usize {
        let r = self.root(u);
        self.size[r]
    }
}
#[cfg(test)]
mod tests {
    use super::DisjointSet;

    // A zero-element set allocates no backing storage.
    #[test]
    fn it_constructs_empty() {
        let set = DisjointSet::new(0);
        assert!(set.parent.is_empty())
    }

    // Every element starts out as its own parent (a singleton set).
    #[test]
    fn it_constructs_small() {
        let n = 10;
        let set = DisjointSet::new(n);
        for u in 0..n {
            assert_eq!(set.parent[u], u)
        }
    }

    // The root of an untouched element is the element itself.
    #[test]
    fn it_finds_root() {
        let n = 10;
        let mut set = DisjointSet::new(n);
        for u in 0..n {
            assert_eq!(set.root(u), u)
        }
    }

    // `join` returns true only when it actually merges two distinct sets,
    // and membership is transitive afterwards.
    #[test]
    fn it_joins_sets() {
        let mut set = DisjointSet::new(10);
        assert!(!set.same_set(0, 1));
        assert!(set.join(0, 1));
        assert!(set.same_set(0, 1));
        assert!(!set.same_set(1, 2));
        assert!(set.join(1, 2));
        assert!(set.same_set(1, 2));
        assert!(set.same_set(0, 2));
        assert!(!set.join(0, 2));
    }

    // Set sizes are tracked through successive joins until all elements
    // end up in a single set of size `n`.
    #[test]
    fn it_maintains_set_size() {
        let n = 10;
        let mut set = DisjointSet::new(n);
        for u in 0..n {
            assert_eq!(set.size(u), 1)
        }
        set.join(0, 1);
        assert_eq!(set.size(0), 2);
        assert_eq!(set.size(1), 2);
        for u in 2..n {
            assert_eq!(set.size(u), 1)
        }
        for u in 2..n {
            set.join(u - 1, u);
        }
        for u in 0..n {
            assert_eq!(set.size(u), n)
        }
    }
}
| 27.075758 | 79 | 0.476217 |
01b82e6a38cdea8208d1e8a26f58c31754ae904b
| 146 |
//! Root module for JavaScript API bindings.

// Submodules making up the JavaScript API surface.
pub mod definition;
pub mod instance;
pub mod binding;

// Re-export the definition and instance items at this module's root for
// convenient access. NOTE(review): `binding` is not re-exported here —
// confirm that is deliberate.
pub use definition::*;
pub use instance::*;
| 16.222222 | 44 | 0.732877 |
1e5a27cdee591172b4faed943534d4853ac3434c
| 13,769 |
use std::ops::{Index,IndexMut};
use std::cell::{RefCell,RefMut};
use crate::nes::cpu;
use crate::nes::ppu;
use crate::nes::controller;
use crate::nes::address::{Address,Addressable};
use crate::nes::rom::{Rom,Bank};
use crate::nes::cpu::disassemble::OPCODES;
/// Amount of system RAM (2 KiB), mirrored three more times up to 0x1fff
/// by `read_word`/`write_word`.
const SYSTEM_RAM: usize = 2 * 1024;

/// Ties together the CPU-visible address space: system RAM, PPU registers,
/// the two controllers and the cartridge ROM.
pub struct Interconnect {
    pub ppu: ppu::Ppu,
    ram: Vec<u8>, // Make this an array at some time. I think it needs boxed
    rom: Rom,
    controller1: RefCell<controller::Controller>,
    controller2: RefCell<controller::Controller>,
    // DMA bookkeeping; `dma_high_byte` is latched by the $4014 write in
    // `write_word`, the other two fields are driven by code outside this file.
    pub dma_in_progress: bool,
    pub dma_write_iteration: u8,
    pub dma_high_byte: u8,
    // Pending interrupt waiting to be delivered to the CPU, if any.
    interrupt: Option<cpu::Interrupt>,
}
impl Interconnect {
    /// Construct an interconnect with zeroed RAM, fresh controllers and no
    /// pending interrupt or DMA state.
    pub fn new(ppu: ppu::Ppu, rom: Rom) -> Self {
        Interconnect {
            ppu,
            rom,
            ram: vec![0; SYSTEM_RAM],
            controller1: RefCell::new(controller::Controller::new()),
            controller2: RefCell::new(controller::Controller::new()),
            dma_in_progress: false,
            dma_write_iteration: 0,
            dma_high_byte: 0,
            interrupt: None,
        }
    }

    /// set_interrupt set the interrupt on the interconnect
    pub fn set_interrupt(&mut self, int: Option<cpu::Interrupt>) {
        self.interrupt = int;
    }

    /// update_interrupt sets the interrupt only if the provided option is Some(_). This is useful
    /// for ensuring a previously set Interrupt is not cleared prematurely.
    pub fn update_interrupt(&mut self, int: Option<cpu::Interrupt>) {
        if int.is_some() {
            self.interrupt = int;
        }
    }

    /// get_interrupt returns the interrupt on the interconnect
    pub fn get_interrupt(&self) -> Option<cpu::Interrupt> {
        self.interrupt
    }

    /// fetch_interrupt returns, and then resets, the interrupt on the interconnect
    pub fn fetch_interrupt(&mut self) -> Option<cpu::Interrupt> {
        let ret = self.interrupt;
        self.interrupt = None;
        ret
    }

    /// Reads the reset vector: high byte at $FFFD, low byte at $FFFC.
    pub fn find_reset_vector_address(&self) -> Address {
        Address::new(self.read_word(0xFFFD), self.read_word(0xFFFC))
    }

    /// Reads the NMI vector: high byte at $FFFB, low byte at $FFFA.
    pub fn find_nmi_vector_address(&self) -> Address {
        Address::new(self.read_word(0xFFFB), self.read_word(0xFFFA))
    }

    /// Reads the IRQ vector: high byte at $FFFF, low byte at $FFFE.
    pub fn find_irq_vector_address(&self) -> Address {
        Address::new(self.read_word(0xFFFF), self.read_word(0xFFFE))
    }

    // TODO Make this work
    // pub fn read_word<T: Addressable>(&self, addr: T) -> u8 {
    /// Read one byte from the CPU-visible address space. Panics on any
    /// address with no read mapping here (e.g. PPU registers other than
    /// $2002/$2007, and the APU range).
    pub fn read_word(&self, addr: u16) -> u8 {
        match addr {
            0x0000..=0x07ff => {
                self.ram[addr as usize] // Includes zero page, stack, and ram
            }
            0x0800..=0x0fff => {
                self.ram[(addr-0x0800) as usize] // Mirror 1
            }
            0x1000..=0x17ff => {
                self.ram[(addr-0x1000) as usize] // Mirror 2
            }
            0x1800..=0x1fff => {
                self.ram[(addr-0x1800) as usize] // Mirror 3
            }
            // PPU
            0x2002|0x2007 => {
                self.ppu.read_at(addr)
            }
            // Controllers
            0x4016 => {
                {
                    let mut controller = self.controller1.borrow_mut();
                    controller.read() as u8
                }
            }
            0x4017 => {
                {
                    let mut controller = self.controller2.borrow_mut();
                    controller.read() as u8
                }
            }
            // ROM
            0x8000..=0xFFFF => {
                self.rom[addr]
            }
            _ => {
                panic!("unknown address {:#x}", addr);
            }
        }
    }

    /// Read `count` bytes starting at `start`; for negative `count`, read
    /// the `|count|` bytes ending at (and including) `start`. Returns the
    /// bytes, the effective start address, and the number of bytes read.
    // NOTE(review): `start + (count as u16)` can overflow near the top of
    // the address space — confirm the debugger never requests that.
    pub fn read_range(&self, mut start: u16, count: i16) -> (Vec<u8>, u16, usize) {
        let end = if count >= 0 {
            start + (count as u16)
        } else {
            let e = start+1;
            start = (start + 1) - (-count as u16);
            e
        };
        let mut result: Vec<u8> = Vec::new();
        let mut real_count = 0;
        for i in start..end {
            // XXX This is super inefficient but since this is only used by the debugger I'm not
            // super concerned. Its unlikely that we'll read large amounts of memory where the
            // function call cost will be noticeable.
            result.push(self.read_word(i));
            real_count += 1;
        }
        (result, start, real_count)
    }

    /// Like `read_range`, but reads `|count|` whole instructions (opcode
    /// plus operand bytes, sized from the `OPCODES` table), stopping early
    /// at an unknown opcode. Returns the bytes, the original start address,
    /// and the number of instructions (not bytes) read.
    pub fn read_range_by_instruction(&self, start: u16, count: i16) -> (Vec<u8>, u16, usize) {
        let mut addr = start;
        let abs_count = if count > 0 {
            count as usize
        } else {
            addr = (start + 1) - (-count as u16);
            -count as usize
        };
        let mut result: Vec<u8> = Vec::new();
        let mut i = 0;
        while i < abs_count {
            let b0 = self.read_word(addr);
            addr += 1;
            match OPCODES[b0 as usize] {
                Some(op) => {
                    result.push(b0);
                    // op.1.len() selects how many operand bytes follow the
                    // opcode: 2 => one byte, 3 => two bytes, otherwise none.
                    match op.1.len() {
                        2 => {
                            result.push(self.read_word(addr));
                            addr += 1;
                        }
                        3 => {
                            result.push(self.read_word(addr));
                            addr += 1;
                            result.push(self.read_word(addr));
                            addr += 1;
                        }
                        _ => {}
                    }
                }
                None => break,
            }
            i += 1;
        }
        (result, start, i)
    }

    /// Write one byte into the CPU-visible address space. Panics on
    /// unmapped or not-yet-implemented addresses.
    pub fn write_word(&mut self, addr: u16, value: u8) {
        match addr {
            // RAM
            0x00..=0x07ff => {
                self.ram[addr as usize] = value;
            }
            0x0800..=0x0fff => {
                self.ram[(addr-0x0800) as usize] = value; // Mirror 1
            }
            0x1000..=0x17ff => {
                self.ram[(addr-0x1000) as usize] = value; // Mirror 2
            }
            0x1800..=0x1fff => {
                self.ram[(addr-0x1800) as usize] = value; // Mirror 3
            }
            // PPU Control
            0x2000 => {
                self.ppu.write_register(ppu::PpuRegister::Control, value);
            }
            // PPU Mask
            0x2001 => {
                self.ppu.write_register(ppu::PpuRegister::Mask, value);
            }
            0x2002 => {
                panic!("ppu not implemented yet. write access at {:#x}", addr);
            }
            // PPU OAMADDR
            0x2003 => {
                self.ppu.write_register(ppu::PpuRegister::Oamaddr, value);
            }
            0x2004 => {
                panic!("ppu not implemented yet. write access at {:#x}", addr);
            }
            // PPU Scroll
            0x2005 => {
                self.ppu.write_register(ppu::PpuRegister::Scroll, value);
            }
            // PPU Addr
            0x2006 => {
                self.ppu.write_register(ppu::PpuRegister::Addr, value);
            }
            // PPU Data
            0x2007 => {
                self.ppu.write_register(ppu::PpuRegister::Data, value);
            }
            // PPU
            // Latches the high byte of the DMA source address; the actual
            // transfer is driven elsewhere via the public dma_* fields.
            0x4014 => {
                self.dma_high_byte = value;
            }
            // APU
            0x4015 => {
                // println!("Write APU status not implemented. Skipping");
            }
            // Controllers
            // A single write strobes both controllers.
            0x4016 => {
                {
                    let mut controller = self.controller1.borrow_mut();
                    controller.write(value);
                }
                {
                    let mut controller = self.controller2.borrow_mut();
                    controller.write(value);
                }
            }
            // APU
            // NOTE(review): these range arms come after the specific
            // 0x4014/0x4015/0x4016 arms above, so those win; 0x4009 is in
            // neither range and falls through to the panic below — confirm
            // that is intended.
            0x4000..=0x4008 => {
                // println!("Write APU thing not implemented. Skipping");
            }
            0x400A..=0x4017 => {
                // println!("Write APU thing not implemented. Skipping");
            }
            _ => {
                panic!("unimplemented write address {:#x}", addr);
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::Interconnect;
    use crate::nes::ppu::Ppu;
    use crate::nes::rom::{Bank,Rom};

    // A RAM write reads back the written value.
    #[test]
    fn test_write_word() {
        let rom = Rom::new_double_bank(Bank::new(&[0; 16384]), Bank::new(&[0; 16384]));
        let ppu = Ppu::new();
        let mut interconnect = Interconnect::new(ppu, rom);
        let mut result: u8;
        result = interconnect.read_word(0x0010);
        assert!(result == 0x00, "expected 0x00, got {:#x}", result);
        interconnect.write_word(0x0010, 0xff);
        result = interconnect.read_word(0x0010);
        assert!(result == 0xff, "expected 0xff, got {:#x}", result);
    }

    // Reads in 0x0000..=0x07ff hit the backing RAM vector directly.
    #[test]
    fn test_read_system_ram() {
        let rom = Rom::new_double_bank(Bank::new(&[0; 16384]), Bank::new(&[0; 16384]));
        let ppu = Ppu::new();
        let mut interconnect = Interconnect::new(ppu, rom);
        interconnect.ram[0] = 0xFF;
        interconnect.ram[0x10] = 0xFF;
        interconnect.ram[0xa0] = 0xFF;
        interconnect.ram[0x7ff] = 0xFF;
        assert_eq!(0xFF, interconnect.read_word(0x00));
        assert_eq!(0xFF, interconnect.read_word(0x10));
        assert_eq!(0xFF, interconnect.read_word(0x7ff));
        assert_eq!(0, interconnect.read_word(0x01));
    }

    // read_range with a positive count reads forward from `start`; with a
    // negative count it reads the |count| bytes ending at `start`.
    #[test]
    fn test_read_range() {
        let rom = Rom::new_double_bank(Bank::new(&[0; 16384]), Bank::new(&[0; 16384]));
        let ppu = Ppu::new();
        let mut interconnect = Interconnect::new(ppu, rom);
        interconnect.ram[0x0080] = 0xFF;
        interconnect.ram[0x0081] = 0xFF;
        interconnect.ram[0x0082] = 0xFF;
        interconnect.ram[0x0083] = 0xFF;
        interconnect.ram[0x0084] = 0xFF;
        interconnect.ram[0x0085] = 0xFF;
        interconnect.ram[0x0086] = 0xFF;
        interconnect.ram[0x0087] = 0xFF;
        interconnect.ram[0x0088] = 0xFF;
        interconnect.ram[0x0089] = 0xFF;
        interconnect.ram[0x008a] = 0xAA;
        interconnect.ram[0x008b] = 0xAA;
        interconnect.ram[0x008c] = 0xAA;
        interconnect.ram[0x008d] = 0xAA;
        interconnect.ram[0x008e] = 0xAA;
        interconnect.ram[0x008f] = 0xAA;
        let (result, start, count) = interconnect.read_range(0x0080, 10);
        assert!(result.len() == 10, "expected length of 10m got {}", result.len());
        assert!(result.iter().all(|x| *x == 0xFF), "not all elements equal 0xFF: {:?}", &result);
        assert!(start == 0x0080, "starting address is wrong; expect 0x0080, got {}", start);
        assert!(count == 10, "count is wrong; expect 10, got {}", count);
        let (result2, start2, count2) = interconnect.read_range(0x008f, -6);
        assert!(result2.len() == 6, "expected length of 6, got {}", result2.len());
        assert!(result2.iter().all(|x| *x == 0xAA), "not all elements equal 0xAA: {:?}", &result2);
        assert!(start2 == 0x008a, "starting address is wrong; expect 0x008a, got {:#04x}", start2);
        assert!(count2 == 6, "count is wrong; expect 6, got {}", count2);
    }

    // The reset vector is assembled little-endian from $FFFC/$FFFD, which
    // map to offsets 0x3ffc/0x3ffd in a single mirrored 16 KiB bank.
    #[test]
    fn test_find_reset_vector_address() {
        let mut mock_rom = vec![0; 1024*16];
        mock_rom[0x3ffc] = 0xef;
        mock_rom[0x3ffd] = 0xbe;
        let rom = Rom::new_single_bank(Bank::new(&mock_rom));
        let ppu = Ppu::new();
        let interconnect = Interconnect::new(ppu, rom);
        let result = interconnect.find_reset_vector_address();
        assert!(result.to_u16() == 0xbeef, "expected 0xbeef, got: {:#x}", result.to_u16());
    }

    // A single 16 KiB bank is visible at both $8000 and $C000 (mirrored).
    #[test]
    fn test_read_rom_single_bank() {
        let mut mock_rom = vec![0; 16*1024];
        mock_rom[0] = 0xFF;
        mock_rom[0x10] = 0xFF;
        mock_rom[0xa0] = 0xFF;
        mock_rom[0x3FFF] = 0xFF;
        let rom = Rom::new_single_bank(Bank::new(&mock_rom));
        let ppu = Ppu::new();
        let mut interconnect = Interconnect::new(ppu, rom);
        // Lower bank
        assert_eq!(0xFF, interconnect.read_word(0x8000));
        assert_eq!(0xFF, interconnect.read_word(0x8010));
        assert_eq!(0xFF, interconnect.read_word(0xbfff));
        assert_eq!(0, interconnect.read_word(0x8001));
        // Upper bank
        assert_eq!(0xFF, interconnect.read_word(0xc000));
        assert_eq!(0xFF, interconnect.read_word(0xc010));
        assert_eq!(0xFF, interconnect.read_word(0xffff));
        assert_eq!(0, interconnect.read_word(0xc001));
    }

    // With two banks, the first maps to $8000-$BFFF and the second to
    // $C000-$FFFF with no mirroring.
    #[test]
    fn test_read_rom_double_bank() {
        let mut mock_rom = vec![0; 32*1024];
        mock_rom[0] = 0xFF; // beginning of bank
        mock_rom[0x10] = 0xFF;
        mock_rom[0xa0] = 0xFF;
        mock_rom[0x3FFF] = 0xFF; // end of bank
        mock_rom[0x4000] = 0xAA; // beginning of bank
        mock_rom[0x4010] = 0xAA;
        mock_rom[0x40a0] = 0xAA;
        mock_rom[0x7FFF] = 0xAA; // end of bank
        let rom = Rom::new_double_bank(Bank::new(&mock_rom[0..16 * 1024]), Bank::new(&mock_rom[16 * 1024..]));
        let ppu = Ppu::new();
        let mut interconnect = Interconnect::new(ppu, rom);
        // Lower bank
        assert_eq!(0xFF, interconnect.read_word(0x8000));
        assert_eq!(0xFF, interconnect.read_word(0x8010));
        assert_eq!(0xFF, interconnect.read_word(0xbfff));
        assert_eq!(0, interconnect.read_word(0x8001));
        // Upper bank
        assert_eq!(0xAA, interconnect.read_word(0xc000));
        assert_eq!(0xAA, interconnect.read_word(0xc010));
        assert_eq!(0xAA, interconnect.read_word(0xffff));
        assert_eq!(0, interconnect.read_word(0xc001));
    }
}
| 33.913793 | 110 | 0.519065 |
e9b9585d5986fa5ce3353d0059b1c7ea12e81ac8
| 558 |
use crate::wrapper::{ErlNifPid, NIF_ENV, NIF_TERM};
use std::mem;
pub unsafe fn get_local_pid(env: NIF_ENV, term: NIF_TERM) -> Option<ErlNifPid> {
let mut pid: ErlNifPid = mem::uninitialized();
if rustler_sys::enif_get_local_pid(env, term, &mut pid) == 0 {
return None;
}
Some(pid)
}
// pub unsafe fn is_process_alive(env: NIF_ENV, pid: &ErlNifPid) -> bool {
// rustler_sys::enif_is_process_alive(env, pid) != 0
// }
/// Wraps `pid` in an Erlang term tied to the given environment.
///
/// # Safety
/// `env` must be a valid environment handle obtained from the Erlang runtime.
pub unsafe fn make_pid(env: NIF_ENV, pid: ErlNifPid) -> NIF_TERM {
    rustler_sys::enif_make_pid(env, pid)
}
| 29.368421 | 80 | 0.673835 |
75e3328a85a5b08461faed676b68c1dfcc80241d
| 23,728 |
//! Collects OpenTelemetry spans and reports them to a given Jaeger
//! `agent` or `collector` endpoint. See the [Jaeger Docs] for details
//! about Jaeger and deployment information.
//!
//! *Compiler support: [requires `rustc` 1.46+][msrv]*
//!
//! [Jaeger Docs]: https://www.jaegertracing.io/docs/
//! [msrv]: #supported-rust-versions
//!
//! ### Quickstart
//!
//! First make sure you have a running version of the Jaeger instance
//! you want to send data to:
//!
//! ```shell
//! $ docker run -d -p6831:6831/udp -p6832:6832/udp -p16686:16686 -p14268:14268 jaegertracing/all-in-one:latest
//! ```
//!
//! Then install a new jaeger pipeline with the recommended defaults to start
//! exporting telemetry:
//!
//! ```no_run
//! use opentelemetry::trace::Tracer;
//! use opentelemetry::global;
//!
//! fn main() -> Result<(), opentelemetry::trace::TraceError> {
//! global::set_text_map_propagator(opentelemetry_jaeger::Propagator::new());
//! let tracer = opentelemetry_jaeger::new_pipeline().install_simple()?;
//!
//! tracer.in_span("doing_work", |cx| {
//! // Traced app logic here...
//! });
//!
//! global::shutdown_tracer_provider(); // export remaining spans
//!
//! Ok(())
//! }
//! ```
//!
//! ## Performance
//!
//! For optimal performance, a batch exporter is recommended as the simple exporter
//! will export each span synchronously on drop. You can enable the [`rt-tokio`],
//! [`rt-tokio-current-thread`] or [`rt-async-std`] features and specify a runtime
//! on the pipeline builder to have a batch exporter configured for you
//! automatically.
//!
//! ```toml
//! [dependencies]
//! opentelemetry = { version = "*", features = ["rt-tokio"] }
//! opentelemetry-jaeger = { version = "*", features = ["rt-tokio"] }
//! ```
//!
//! ```no_run
//! # fn main() -> Result<(), opentelemetry::trace::TraceError> {
//! let tracer = opentelemetry_jaeger::new_pipeline()
//! .install_batch(opentelemetry::runtime::Tokio)?;
//! # Ok(())
//! # }
//! ```
//!
//! [`tokio`]: https://tokio.rs
//! [`async-std`]: https://async.rs
//!
//! ### Jaeger Exporter From Environment Variables
//!
//! The jaeger pipeline builder can be configured dynamically via environment
//! variables. All variables are optional, a full list of accepted options can
//! be found in the [jaeger variables spec].
//!
//! [jaeger variables spec]: https://github.com/open-telemetry/opentelemetry-specification/blob/master/specification/sdk-environment-variables.md#jaeger-exporter
//!
//! ### Jaeger Collector Example
//!
//! If you want to skip the agent and submit spans directly to a Jaeger collector,
//! you can enable the optional `collector_client` feature for this crate. This
//! example expects a Jaeger collector running on `http://localhost:14268`.
//!
//! ```toml
//! [dependencies]
//! opentelemetry-jaeger = { version = "..", features = ["collector_client", "isahc"] }
//! ```
//!
//! Then you can use the [`with_collector_endpoint`] method to specify the endpoint:
//!
//! [`with_collector_endpoint`]: PipelineBuilder::with_collector_endpoint()
//!
//! ```ignore
//! // Note that this requires the `collector_client` feature.
//! // We enabled the `isahc` feature for a default isahc http client.
//! // You can also provide your own implementation via new_pipeline().with_http_client() method.
//! use opentelemetry::trace::{Tracer, TraceError};
//!
//! fn main() -> Result<(), TraceError> {
//! let tracer = opentelemetry_jaeger::new_pipeline()
//! .with_collector_endpoint("http://localhost:14268/api/traces")
//! // optionally set username and password as well.
//! .with_collector_username("username")
//! .with_collector_password("s3cr3t")
//! .install_simple()?;
//!
//! tracer.in_span("doing_work", |cx| {
//! // Traced app logic here...
//! });
//!
//! Ok(())
//! }
//! ```
//! ## Resource, tags and service name
//! In order to export spans in different formats, opentelemetry uses its own
//! model internally. Most Jaeger span concepts can be found in this model.
//! The full list of this mapping can be found in [OpenTelemetry to Jaeger Transformation].
//!
//! The **process tags** in jaeger spans will be mapped as resource in opentelemetry. You can
//! set it through `OTEL_RESOURCE_ATTRIBUTES` environment variable or using [`PipelineBuilder::with_trace_config`].
//!
//! Note that, to avoid copying data multiple times, the Jaeger exporter uses the resource stored in [`Exporter`].
//!
//! The **tags** in jaeger spans will be mapped as attributes in opentelemetry spans. You can
//! set it through [`set_attribute`] method.
//!
//! Each jaeger span requires a **service name**. This will be mapped as a resource with `service.name` key.
//! You can set it using one of the following methods from highest priority to lowest priority.
//! 1. [`PipelineBuilder::with_service_name`].
//! 2. include a `service.name` key value pairs when configure resource using [`PipelineBuilder::with_trace_config`].
//! 3. set the service name via the `OTEL_SERVICE_NAME` environment variable.
//! 4. set the `service.name` attributes in `OTEL_RESOURCE_ATTRIBUTES`.
//! 5. if the service name is not provided by the above method. `unknown_service` will be used.
//!
//! Based on the service name, we update/append the `service.name` process tags in jaeger spans.
//!
//! [`set_attribute`]: https://docs.rs/opentelemetry/0.16.0/opentelemetry/trace/trait.Span.html#tymethod.set_attribute
//!
//! [OpenTelemetry to Jaeger Transformation]:https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/sdk_exporters/jaeger.md
//!
//! ## Kitchen Sink Full Configuration
//!
//! Example showing how to override all configuration options. See the
//! [`PipelineBuilder`] docs for details of each option.
//!
//!
//! ```no_run
//! use opentelemetry::{KeyValue, trace::{Tracer, TraceError}};
//! use opentelemetry::sdk::{trace::{self, IdGenerator, Sampler}, Resource};
//! use opentelemetry::global;
//!
//! fn main() -> Result<(), TraceError> {
//! global::set_text_map_propagator(opentelemetry_jaeger::Propagator::new());
//! let tracer = opentelemetry_jaeger::new_pipeline()
//! .with_agent_endpoint("localhost:6831")
//! .with_service_name("my_app")
//! .with_max_packet_size(9_216)
//! .with_trace_config(
//! trace::config()
//! .with_sampler(Sampler::AlwaysOn)
//! .with_id_generator(IdGenerator::default())
//! .with_max_events_per_span(64)
//! .with_max_attributes_per_span(16)
//! .with_max_events_per_span(16)
//! .with_resource(Resource::new(vec![KeyValue::new("key", "value"),
//! KeyValue::new("process_key", "process_value")])),
//! )
//! .install_batch(opentelemetry::runtime::Tokio)?;
//!
//! tracer.in_span("doing_work", |cx| {
//! // Traced app logic here...
//! });
//!
//! global::shutdown_tracer_provider(); // export remaining spans
//!
//! Ok(())
//! }
//! ```
//!
//! ## Crate Feature Flags
//!
//! The following crate feature flags are available:
//!
//! * `collector_client`: Export span data directly to a Jaeger collector. User MUST provide the http client.
//!
//! * `surf_collector_client`: Export span data with Jaeger collector backed by a surf default http client.
//!
//! * `reqwest_collector_client`: Export span data with Jaeger collector backed by a reqwest http client.
//!
//! * `reqwest_blocking_collector_client`: Export span data with Jaeger collector backed by a reqwest blocking http client.
//!
//! * `isahc_collector_client`: Export span data with Jaeger collector backed by a isahc http client.
//!
//! * `wasm_collector_client`: Enable collector in wasm.
//!
//! Support for recording and exporting telemetry asynchronously can be added
//! via the following flags, it extends the [`opentelemetry`] feature:
//!
//! * `rt-tokio`: Enable sending UDP packets to Jaeger agent asynchronously when [`Tokio`] runtime is used.
//!
//! * `rt-tokio-current-thread`: Enable sending UDP packets to Jaeger agent asynchronously when [`TokioCurrentThread`] runtime is used.
//!
//! * `rt-async-std`: Enable sending UDP packets to Jaeger agent asynchronously when [`AsyncStd`] runtime is used.
//!
//! [`opentelemetry`]: https://crates.io/crates/opentelemetry
//!
//! ## Supported Rust Versions
//!
//! OpenTelemetry is built against the latest stable release. The minimum
//! supported version is 1.46. The current OpenTelemetry version is not
//! guaranteed to build on Rust versions earlier than the minimum supported
//! version.
//!
//! The current stable Rust compiler and the three most recent minor versions
//! before it will always be supported. For example, if the current stable
//! compiler version is 1.49, the minimum supported version will not be
//! increased past 1.46, three minor versions prior. Increasing the minimum
//! supported compiler version is not considered a semver breaking change as
//! long as doing so complies with this policy.
#![warn(
future_incompatible,
missing_debug_implementations,
missing_docs,
nonstandard_style,
rust_2018_idioms,
unreachable_pub,
unused
)]
#![cfg_attr(docsrs, feature(doc_cfg), deny(rustdoc::broken_intra_doc_links))]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/open-telemetry/opentelemetry-rust/main/assets/logo.svg"
)]
#![cfg_attr(test, deny(warnings))]
mod exporter;
mod propagator {
//! # Jaeger Propagator
//!
//! Extract and inject values from Jaeger's `uber-trace-id` header.
//!
//! See [`Jaeger documentation`] for detail of Jaeger propagation format.
//!
//! [`Jaeger documentation`]: https://www.jaegertracing.io/docs/1.18/client-libraries/#propagation-format
use opentelemetry::{
global::{self, Error},
propagation::{text_map_propagator::FieldIter, Extractor, Injector, TextMapPropagator},
trace::{
SpanContext, SpanId, TraceContextExt, TraceError, TraceFlags, TraceId, TraceState,
},
Context,
};
use std::borrow::Cow;
use std::str::FromStr;
/// Header/key under which the Jaeger trace context is propagated.
const JAEGER_HEADER: &str = "uber-trace-id";
/// Key prefix for baggage entries carried alongside the trace context.
const JAEGER_BAGGAGE_PREFIX: &str = "uberctx-";
/// The parent-span field is deprecated; it is always emitted as "0" on inject.
const DEPRECATED_PARENT_SPAN: &str = "0";
/// Internal marker flag used to remember Jaeger's debug bit in `TraceFlags`.
const TRACE_FLAG_DEBUG: TraceFlags = TraceFlags::new(0x04);
// Cached single-element field list returned by `TextMapPropagator::fields`.
lazy_static::lazy_static! {
    static ref JAEGER_HEADER_FIELD: [String; 1] = [JAEGER_HEADER.to_string()];
}
/// The Jaeger propagator propagates span contexts in jaeger's propagation format.
///
/// See [`Jaeger documentation`] for format details.
///
/// Note that jaeger header can be set in http header or encoded as url
///
/// [`Jaeger documentation`]: https://www.jaegertracing.io/docs/1.18/client-libraries/#propagation-format
#[derive(Clone, Debug, Default)]
pub struct Propagator {
    // Private zero-sized field: prevents construction via struct literal
    // outside this module; use `Propagator::new()` or `Default`.
    _private: (),
}
impl Propagator {
    /// Create a Jaeger propagator
    pub fn new() -> Self {
        Propagator::default()
    }

    /// Extract span context from header value
    ///
    /// Expects `trace-id:span-id:parent-span-id:flags`. A value containing
    /// no ':' is assumed to be URL-encoded and has "%3A" decoded first.
    fn extract_span_context(&self, extractor: &dyn Extractor) -> Result<SpanContext, ()> {
        let mut header_value = Cow::from(extractor.get(JAEGER_HEADER).unwrap_or(""));
        // if there is no :, it means header_value could be encoded as url, try decode first
        if !header_value.contains(':') {
            header_value = Cow::from(header_value.replace("%3A", ":"));
        }
        let parts = header_value.split_terminator(':').collect::<Vec<&str>>();
        if parts.len() != 4 {
            return Err(());
        }
        // extract trace id
        let trace_id = self.extract_trace_id(parts[0])?;
        let span_id = self.extract_span_id(parts[1])?;
        // Ignore parent span id since it's deprecated.
        let flags = self.extract_trace_flags(parts[3])?;
        let state = self.extract_trace_state(extractor)?;
        Ok(SpanContext::new(trace_id, span_id, flags, true, state))
    }

    /// Extract trace id from the header.
    ///
    /// Accepts up to 32 hex characters; shorter values are left-padded
    /// with zeros.
    fn extract_trace_id(&self, trace_id: &str) -> Result<TraceId, ()> {
        if trace_id.len() > 32 {
            return Err(());
        }
        // allow variable length, padding 0 when length is less than 32
        let padded_trace_id = format!("{:0>32}", trace_id);
        u128::from_str_radix(padded_trace_id.as_str(), 16)
            .map(TraceId::from_u128)
            .map_err(|_| ())
    }

    /// Extract span id from the header.
    ///
    /// Unlike the trace id, the span id must be exactly 16 hex characters.
    fn extract_span_id(&self, span_id: &str) -> Result<SpanId, ()> {
        if span_id.len() != 16 {
            return Err(());
        }
        u64::from_str_radix(span_id, 16)
            .map(SpanId::from_u64)
            .map_err(|_| ())
    }

    /// Extract flag from the header
    ///
    /// First bit control whether to sample
    /// Second bit control whether it's a debug trace
    /// Third bit is not used.
    /// Forth bit is firehose flag, which is not supported in OT now.
    // NOTE(review): the flag is parsed with `u8::from_str`, i.e. as decimal;
    // confirm senders never emit hex flag values, which would be rejected.
    fn extract_trace_flags(&self, flag: &str) -> Result<TraceFlags, ()> {
        if flag.len() > 2 {
            return Err(());
        }
        let flag = u8::from_str(flag).map_err(|_| ())?;
        if flag & 0x01 == 0x01 {
            if flag & 0x02 == 0x02 {
                Ok(TraceFlags::SAMPLED | TRACE_FLAG_DEBUG)
            } else {
                Ok(TraceFlags::SAMPLED)
            }
        } else {
            // Debug flag should only be set when sampled flag is set.
            // So if debug flag is set alone. We will just use not sampled flag
            Ok(TraceFlags::default())
        }
    }

    /// Collect all `uberctx-`-prefixed headers into a `TraceState`;
    /// reports a conversion failure via the global error handler.
    fn extract_trace_state(&self, extractor: &dyn Extractor) -> Result<TraceState, ()> {
        let uber_context_keys = extractor
            .keys()
            .into_iter()
            .filter(|key| key.starts_with(JAEGER_BAGGAGE_PREFIX))
            .filter_map(|key| {
                extractor
                    .get(key)
                    .map(|value| (key.to_string(), value.to_string()))
            });
        match TraceState::from_key_value(uber_context_keys) {
            Ok(trace_state) => Ok(trace_state),
            Err(trace_state_err) => {
                global::handle_error(Error::Trace(TraceError::Other(Box::new(
                    trace_state_err,
                ))));
                Err(()) //todo: assign an error type instead of using ()
            }
        }
    }
}
impl TextMapPropagator for Propagator {
    /// Serialize the context's span into the `uber-trace-id` header as
    /// `trace-id:span-id:0:flags`. Does nothing for an invalid span context.
    fn inject_context(&self, cx: &Context, injector: &mut dyn Injector) {
        let span = cx.span();
        let span_context = span.span_context();
        if span_context.is_valid() {
            // 0x01 = sampled, 0x03 = sampled + debug, 0x00 = not sampled.
            let flag: u8 = if span_context.is_sampled() {
                if span_context.trace_flags() & TRACE_FLAG_DEBUG == TRACE_FLAG_DEBUG {
                    0x03
                } else {
                    0x01
                }
            } else {
                0x00
            };
            let header_value = format!(
                "{:032x}:{:016x}:{:01}:{:01}",
                span_context.trace_id().to_u128(),
                span_context.span_id().to_u64(),
                DEPRECATED_PARENT_SPAN,
                flag,
            );
            injector.set(JAEGER_HEADER, header_value);
        }
    }

    /// Build a context carrying the extracted remote span context; falls
    /// back to an empty span context when the header is absent or malformed.
    fn extract_with_context(&self, cx: &Context, extractor: &dyn Extractor) -> Context {
        cx.with_remote_span_context(
            self.extract_span_context(extractor)
                .unwrap_or_else(|_| SpanContext::empty_context()),
        )
    }

    /// The single header field this propagator reads and writes.
    fn fields(&self) -> FieldIter<'_> {
        FieldIter::new(JAEGER_HEADER_FIELD.as_ref())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use opentelemetry::{
        propagation::{Injector, TextMapPropagator},
        testing::trace::TestSpan,
        trace::{SpanContext, SpanId, TraceContextExt, TraceFlags, TraceId, TraceState},
        Context,
    };
    use std::collections::HashMap;

    const LONG_TRACE_ID_STR: &str = "000000000000004d0000000000000016";
    const SHORT_TRACE_ID_STR: &str = "4d0000000000000016";
    const TRACE_ID: u128 = 0x0000_0000_0000_004d_0000_0000_0000_0016;
    const SPAN_ID_STR: &str = "0000000000017c29";
    const SPAN_ID: u64 = 0x0000_0000_0001_7c29;

    // (trace id, span id, flag, expected context) cases covering full and
    // short trace ids, the debug flag, unsampled, and malformed inputs
    // (which must yield an empty context).
    fn get_extract_data() -> Vec<(&'static str, &'static str, u8, SpanContext)> {
        vec![
            (
                LONG_TRACE_ID_STR,
                SPAN_ID_STR,
                1,
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TraceFlags::SAMPLED,
                    true,
                    TraceState::default(),
                ),
            ),
            (
                SHORT_TRACE_ID_STR,
                SPAN_ID_STR,
                1,
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TraceFlags::SAMPLED,
                    true,
                    TraceState::default(),
                ),
            ),
            (
                LONG_TRACE_ID_STR,
                SPAN_ID_STR,
                3,
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TRACE_FLAG_DEBUG | TraceFlags::SAMPLED,
                    true,
                    TraceState::default(),
                ),
            ),
            (
                LONG_TRACE_ID_STR,
                SPAN_ID_STR,
                0,
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TraceFlags::default(),
                    true,
                    TraceState::default(),
                ),
            ),
            (
                "invalidtractid",
                SPAN_ID_STR,
                0,
                SpanContext::empty_context(),
            ),
            (
                LONG_TRACE_ID_STR,
                "invalidspanID",
                0,
                SpanContext::empty_context(),
            ),
            (
                LONG_TRACE_ID_STR,
                SPAN_ID_STR,
                120,
                SpanContext::empty_context(),
            ),
        ]
    }

    // (span context, expected header value) cases for injection.
    fn get_inject_data() -> Vec<(SpanContext, String)> {
        vec![
            (
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TraceFlags::SAMPLED,
                    true,
                    TraceState::default(),
                ),
                format!("{}:{}:0:1", LONG_TRACE_ID_STR, SPAN_ID_STR),
            ),
            (
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TraceFlags::default(),
                    true,
                    TraceState::default(),
                ),
                format!("{}:{}:0:0", LONG_TRACE_ID_STR, SPAN_ID_STR),
            ),
            (
                SpanContext::new(
                    TraceId::from_u128(TRACE_ID),
                    SpanId::from_u64(SPAN_ID),
                    TRACE_FLAG_DEBUG | TraceFlags::SAMPLED,
                    true,
                    TraceState::default(),
                ),
                format!("{}:{}:0:3", LONG_TRACE_ID_STR, SPAN_ID_STR),
            ),
        ]
    }

    // A carrier without the header yields an empty span context.
    #[test]
    fn test_extract_empty() {
        let map: HashMap<String, String> = HashMap::new();
        let propagator = Propagator::new();
        let context = propagator.extract(&map);
        assert_eq!(context.span().span_context(), &SpanContext::empty_context())
    }

    // Table-driven extraction over valid and invalid header values.
    #[test]
    fn test_extract() {
        for (trace_id, span_id, flag, expected) in get_extract_data() {
            let mut map: HashMap<String, String> = HashMap::new();
            map.set(
                JAEGER_HEADER,
                format!("{}:{}:0:{}", trace_id, span_id, flag),
            );
            let propagator = Propagator::new();
            let context = propagator.extract(&map);
            assert_eq!(context.span().span_context(), &expected);
        }
    }

    // A header with five parts is rejected as malformed.
    #[test]
    fn test_extract_too_many_parts() {
        let mut map: HashMap<String, String> = HashMap::new();
        map.set(
            JAEGER_HEADER,
            format!("{}:{}:0:1:aa", LONG_TRACE_ID_STR, SPAN_ID_STR),
        );
        let propagator = Propagator::new();
        let context = propagator.extract(&map);
        assert_eq!(context.span().span_context(), &SpanContext::empty_context());
    }

    // A non-numeric flags field is rejected as malformed.
    #[test]
    fn test_extract_invalid_flag() {
        let mut map: HashMap<String, String> = HashMap::new();
        map.set(
            JAEGER_HEADER,
            format!("{}:{}:0:aa", LONG_TRACE_ID_STR, SPAN_ID_STR),
        );
        let propagator = Propagator::new();
        let context = propagator.extract(&map);
        assert_eq!(context.span().span_context(), &SpanContext::empty_context());
    }

    // URL-encoded separators ("%3A") are decoded before parsing.
    #[test]
    fn test_extract_from_url() {
        let mut map: HashMap<String, String> = HashMap::new();
        map.set(
            JAEGER_HEADER,
            format!("{}%3A{}%3A0%3A1", LONG_TRACE_ID_STR, SPAN_ID_STR),
        );
        let propagator = Propagator::new();
        let context = propagator.extract(&map);
        assert_eq!(
            context.span().span_context(),
            &SpanContext::new(
                TraceId::from_u128(TRACE_ID),
                SpanId::from_u64(SPAN_ID),
                TraceFlags::SAMPLED,
                true,
                TraceState::default(),
            )
        );
    }

    // Injection writes the expected header value for each flag combination.
    #[test]
    fn test_inject() {
        let propagator = Propagator::new();
        for (span_context, header_value) in get_inject_data() {
            let mut injector = HashMap::new();
            propagator.inject_context(
                &Context::current_with_span(TestSpan(span_context)),
                &mut injector,
            );
            assert_eq!(injector.get(JAEGER_HEADER), Some(&header_value));
        }
    }
}
}
pub use exporter::{
new_pipeline, runtime::JaegerTraceRuntime, Error, Exporter, PipelineBuilder, Process,
};
pub use propagator::Propagator;
| 38.332795 | 161 | 0.554282 |
0a26d2e5fd28395cf8000209f9ecb83ff22e1350
| 103,436 |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Defines the logical data types of Arrow arrays.
//!
//! The most important things you might be looking for are:
//! * [`Schema`](crate::datatypes::Schema) to describe a schema.
//! * [`Field`](crate::datatypes::Field) to describe one field within a schema.
//! * [`DataType`](crate::datatypes::DataType) to describe the type of a field.
use std::collections::HashMap;
use std::default::Default;
use std::fmt;
use std::mem::size_of;
#[cfg(feature = "simd")]
use std::ops::{Add, Div, Mul, Sub};
use std::slice::from_raw_parts;
use std::str::FromStr;
use std::sync::Arc;
#[cfg(feature = "simd")]
use packed_simd::*;
use serde_derive::{Deserialize, Serialize};
use serde_json::{
json, Number, Value, Value::Number as VNumber, Value::String as VString,
};
use crate::error::{ArrowError, Result};
use crate::util::bit_util;
/// The set of datatypes that are supported by this implementation of Apache Arrow.
///
/// The Arrow specification on data types includes some more types.
/// See also [`Schema.fbs`](https://github.com/apache/arrow/blob/master/format/Schema.fbs)
/// for Arrow's specification.
///
/// The variants of this enum include primitive fixed size types as well as parametric or
/// nested types.
/// Currently the Rust implementation supports the following nested types:
///  - `List<T>`
///  - `Struct<T, U, V, ...>`
///
/// Nested types can themselves be nested within other arrays.
/// For more information on these types please see
/// [the physical memory layout of Apache Arrow](https://arrow.apache.org/docs/format/Columnar.html#physical-memory-layout).
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum DataType {
    /// Null type
    Null,
    /// A boolean datatype representing the values `true` and `false`.
    Boolean,
    /// A signed 8-bit integer.
    Int8,
    /// A signed 16-bit integer.
    Int16,
    /// A signed 32-bit integer.
    Int32,
    /// A signed 64-bit integer.
    Int64,
    /// An unsigned 8-bit integer.
    UInt8,
    /// An unsigned 16-bit integer.
    UInt16,
    /// An unsigned 32-bit integer.
    UInt32,
    /// An unsigned 64-bit integer.
    UInt64,
    /// A 16-bit floating point number.
    Float16,
    /// A 32-bit floating point number.
    Float32,
    /// A 64-bit floating point number.
    Float64,
    /// A timestamp with an optional timezone.
    ///
    /// Time is measured as a Unix epoch, counting the seconds from
    /// 00:00:00.000 on 1 January 1970, excluding leap seconds,
    /// as a 64-bit integer.
    ///
    /// The time zone is a string indicating the name of a time zone, one of:
    ///
    /// * As used in the Olson time zone database (the "tz database" or
    ///   "tzdata"), such as "America/New_York"
    /// * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
    Timestamp(TimeUnit, Option<Arc<String>>),
    /// A 32-bit date representing the elapsed time since UNIX epoch (1970-01-01)
    /// in days (32 bits).
    Date32(DateUnit),
    /// A 64-bit date representing the elapsed time since UNIX epoch (1970-01-01)
    /// in milliseconds (64 bits).
    Date64(DateUnit),
    /// A 32-bit time representing the elapsed time since midnight in the unit of `TimeUnit`.
    Time32(TimeUnit),
    /// A 64-bit time representing the elapsed time since midnight in the unit of `TimeUnit`.
    Time64(TimeUnit),
    /// Measure of elapsed time in either seconds, milliseconds, microseconds or nanoseconds.
    Duration(TimeUnit),
    /// A "calendar" interval which models types that don't necessarily
    /// have a precise duration without the context of a base timestamp (e.g.
    /// days can differ in length during day light savings time transitions).
    Interval(IntervalUnit),
    /// Opaque binary data of variable length.
    Binary,
    /// Opaque binary data of fixed size.
    /// Enum parameter specifies the number of bytes per value.
    FixedSizeBinary(i32),
    /// Opaque binary data of variable length and 64-bit offsets.
    LargeBinary,
    /// A variable-length string in Unicode with UTF-8 encoding.
    Utf8,
    /// A variable-length string in Unicode with UTF-8 encoding and 64-bit offsets.
    LargeUtf8,
    /// A list of some logical data type with variable length.
    List(Box<Field>),
    /// A list of some logical data type with fixed length.
    FixedSizeList(Box<Field>, i32),
    /// A list of some logical data type with variable length and 64-bit offsets.
    LargeList(Box<Field>),
    /// A nested datatype that contains a number of sub-fields.
    Struct(Vec<Field>),
    /// A nested datatype that can represent slots of differing types.
    Union(Vec<Field>),
    /// A dictionary encoded array (`key_type`, `value_type`), where
    /// each array element is an index of `key_type` into an
    /// associated dictionary of `value_type`.
    ///
    /// Dictionary arrays are used to store columns of `value_type`
    /// that contain many repeated values using less memory, but with
    /// a higher CPU overhead for some operations.
    ///
    /// This type is mostly used to represent low cardinality string
    /// arrays or a limited set of primitive types as integers.
    Dictionary(Box<DataType>, Box<DataType>),
    /// Decimal value with precision and scale
    Decimal(usize, usize),
}
/// Date is either a 32-bit or 64-bit type representing elapsed time since UNIX
/// epoch (1970-01-01) in days or milliseconds.
///
/// Used as the parameter of [`DataType::Date32`] and [`DataType::Date64`].
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum DateUnit {
    /// Days since the UNIX epoch.
    Day,
    /// Milliseconds indicating UNIX time elapsed since the epoch (no
    /// leap seconds), where the values are evenly divisible by 86400000.
    Millisecond,
}
/// An absolute length of time in seconds, milliseconds, microseconds or nanoseconds.
///
/// Used as the resolution parameter of the timestamp, time and duration data types.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum TimeUnit {
    /// Time in seconds.
    Second,
    /// Time in milliseconds.
    Millisecond,
    /// Time in microseconds.
    Microsecond,
    /// Time in nanoseconds.
    Nanosecond,
}
/// YEAR_MONTH or DAY_TIME interval in SQL style.
///
/// Used as the parameter of [`DataType::Interval`].
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum IntervalUnit {
    /// Indicates the number of elapsed whole months, stored as 4-byte integers.
    YearMonth,
    /// Indicates the number of elapsed days and milliseconds,
    /// stored as 2 contiguous 32-bit integers (8-bytes in total).
    DayTime,
}
/// Contains the meta-data for a single relative type.
///
/// The `Schema` object is an ordered collection of `Field` objects.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Field {
    // The field's name.
    name: String,
    // The field's logical Arrow data type.
    data_type: DataType,
    // Whether the field is allowed to contain null values.
    nullable: bool,
    // Dictionary id — presumably only meaningful for dictionary-encoded
    // fields; confirm against the IPC/schema code that reads it.
    dict_id: i64,
    // Whether the dictionary keys are ordered (dictionary-encoded fields).
    dict_is_ordered: bool,
}
/// Trait implemented by the Rust native value types (`bool`, integers and
/// floats) that back Arrow primitive arrays.
pub trait ArrowNativeType:
    fmt::Debug + Send + Sync + Copy + PartialOrd + FromStr + Default + 'static
{
    /// Convert this value into a JSON [`Value`], or `None` when the value
    /// has no JSON representation (see the float implementations below).
    fn into_json_value(self) -> Option<Value>;
    /// Convert native type from usize.
    ///
    /// Returns `None` by default; integer implementations override this.
    fn from_usize(_: usize) -> Option<Self> {
        None
    }
    /// Convert native type to usize.
    ///
    /// Returns `None` by default; integer implementations override this.
    fn to_usize(&self) -> Option<usize> {
        None
    }
}
/// Trait indicating a primitive fixed-width type (bool, ints and floats).
pub trait ArrowPrimitiveType: 'static {
    /// Corresponding Rust native type for the primitive type.
    type Native: ArrowNativeType;
    /// The corresponding Arrow data type of this primitive type.
    const DATA_TYPE: DataType;
    /// Returns the bit width of this primitive type.
    ///
    /// Overridden by `BooleanType`, whose values are bit-packed.
    fn get_bit_width() -> usize {
        size_of::<Self::Native>() * 8
    }
    /// Returns a default value of this primitive type.
    ///
    /// This is useful for aggregate array ops like `sum()`, `mean()`.
    fn default_value() -> Self::Native {
        Default::default()
    }
    /// Returns a value offset from the given pointer by the given index. The default
    /// implementation (used for all non-boolean types) is simply equivalent to pointer-arithmetic.
    /// # Safety
    /// Just like array-access in C: the raw_ptr must be the start of a valid array, and the index
    /// must be less than the size of the array.
    unsafe fn index(raw_ptr: *const Self::Native, i: usize) -> Self::Native {
        *(raw_ptr.add(i))
    }
}
impl ArrowNativeType for bool {
    // `bool` converts directly into a JSON boolean; the numeric
    // `from_usize`/`to_usize` defaults (both `None`) are intentionally kept.
    fn into_json_value(self) -> Option<Value> {
        Some(self.into())
    }
}
impl ArrowNativeType for i8 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i16 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i32 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for i64 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u8 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u16 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u32 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for u64 {
fn into_json_value(self) -> Option<Value> {
Some(VNumber(Number::from(self)))
}
fn from_usize(v: usize) -> Option<Self> {
num::FromPrimitive::from_usize(v)
}
fn to_usize(&self) -> Option<usize> {
num::ToPrimitive::to_usize(self)
}
}
impl ArrowNativeType for f32 {
    // Serializes as a JSON number rounded to three decimal places.
    // NOTE(review): the rounding presumably keeps JSON output stable for
    // cross-implementation comparisons — confirm before relying on exactness.
    fn into_json_value(self) -> Option<Value> {
        Number::from_f64(f64::round(self as f64 * 1000.0) / 1000.0).map(VNumber)
    }
}
impl ArrowNativeType for f64 {
    // `Number::from_f64` returns `None` for NaN and infinities, which have
    // no JSON number representation.
    fn into_json_value(self) -> Option<Value> {
        Number::from_f64(self).map(VNumber)
    }
}
// BooleanType is special: its bit-width is not the size of the primitive type, and its `index`
// operation assumes bit-packing.
/// Marker type for boolean arrays (values are bit-packed, one bit per value).
#[derive(Debug)]
pub struct BooleanType {}
impl ArrowPrimitiveType for BooleanType {
    type Native = bool;
    const DATA_TYPE: DataType = DataType::Boolean;
    /// One bit per value, not `size_of::<bool>() * 8`.
    fn get_bit_width() -> usize {
        1
    }
    /// # Safety
    /// The pointer must be part of a bit-packed boolean array, and the index must be less than the
    /// size of the array.
    unsafe fn index(raw_ptr: *const Self::Native, i: usize) -> Self::Native {
        bit_util::get_bit_raw(raw_ptr as *const u8, i)
    }
}
/// Generates an empty marker struct named `$name` and implements
/// [`ArrowPrimitiveType`] for it with native type `$native_ty` and Arrow
/// data type `$data_ty`.
macro_rules! make_type {
    ($name:ident, $native_ty:ty, $data_ty:expr) => {
        #[derive(Debug)]
        pub struct $name {}
        impl ArrowPrimitiveType for $name {
            type Native = $native_ty;
            const DATA_TYPE: DataType = $data_ty;
        }
    };
}
// Integer marker types.
make_type!(Int8Type, i8, DataType::Int8);
make_type!(Int16Type, i16, DataType::Int16);
make_type!(Int32Type, i32, DataType::Int32);
make_type!(Int64Type, i64, DataType::Int64);
make_type!(UInt8Type, u8, DataType::UInt8);
make_type!(UInt16Type, u16, DataType::UInt16);
make_type!(UInt32Type, u32, DataType::UInt32);
make_type!(UInt64Type, u64, DataType::UInt64);
// Floating point marker types.
make_type!(Float32Type, f32, DataType::Float32);
make_type!(Float64Type, f64, DataType::Float64);
// Timestamp marker types, one per `TimeUnit`; all without a timezone (`None`).
make_type!(
    TimestampSecondType,
    i64,
    DataType::Timestamp(TimeUnit::Second, None)
);
make_type!(
    TimestampMillisecondType,
    i64,
    DataType::Timestamp(TimeUnit::Millisecond, None)
);
make_type!(
    TimestampMicrosecondType,
    i64,
    DataType::Timestamp(TimeUnit::Microsecond, None)
);
make_type!(
    TimestampNanosecondType,
    i64,
    DataType::Timestamp(TimeUnit::Nanosecond, None)
);
// Date marker types.
make_type!(Date32Type, i32, DataType::Date32(DateUnit::Day));
make_type!(Date64Type, i64, DataType::Date64(DateUnit::Millisecond));
// Time-of-day marker types: 32-bit for second/millisecond resolution,
// 64-bit for microsecond/nanosecond resolution.
make_type!(Time32SecondType, i32, DataType::Time32(TimeUnit::Second));
make_type!(
    Time32MillisecondType,
    i32,
    DataType::Time32(TimeUnit::Millisecond)
);
make_type!(
    Time64MicrosecondType,
    i64,
    DataType::Time64(TimeUnit::Microsecond)
);
make_type!(
    Time64NanosecondType,
    i64,
    DataType::Time64(TimeUnit::Nanosecond)
);
// Interval marker types.
make_type!(
    IntervalYearMonthType,
    i32,
    DataType::Interval(IntervalUnit::YearMonth)
);
make_type!(
    IntervalDayTimeType,
    i64,
    DataType::Interval(IntervalUnit::DayTime)
);
// Duration marker types, one per `TimeUnit`.
make_type!(
    DurationSecondType,
    i64,
    DataType::Duration(TimeUnit::Second)
);
make_type!(
    DurationMillisecondType,
    i64,
    DataType::Duration(TimeUnit::Millisecond)
);
make_type!(
    DurationMicrosecondType,
    i64,
    DataType::Duration(TimeUnit::Microsecond)
);
make_type!(
    DurationNanosecondType,
    i64,
    DataType::Duration(TimeUnit::Nanosecond)
);
/// A subtype of primitive type that represents legal dictionary keys.
/// See <https://arrow.apache.org/docs/format/Columnar.html>
pub trait ArrowDictionaryKeyType: ArrowPrimitiveType {}
// Every integer type (signed and unsigned) may index a dictionary.
impl ArrowDictionaryKeyType for Int8Type {}
impl ArrowDictionaryKeyType for Int16Type {}
impl ArrowDictionaryKeyType for Int32Type {}
impl ArrowDictionaryKeyType for Int64Type {}
impl ArrowDictionaryKeyType for UInt8Type {}
impl ArrowDictionaryKeyType for UInt16Type {}
impl ArrowDictionaryKeyType for UInt32Type {}
impl ArrowDictionaryKeyType for UInt64Type {}
/// A subtype of primitive type that represents numeric values.
///
/// SIMD operations are defined in this trait if available on the target system.
#[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
pub trait ArrowNumericType: ArrowPrimitiveType
where
    // The associated SIMD vector must support element-wise arithmetic.
    Self::Simd: Add<Output = Self::Simd>
        + Sub<Output = Self::Simd>
        + Mul<Output = Self::Simd>
        + Div<Output = Self::Simd>
        + Copy,
{
    /// Defines the SIMD type that should be used for this numeric type
    type Simd;
    /// Defines the SIMD Mask type that should be used for this numeric type
    type SimdMask;
    /// The number of SIMD lanes available
    fn lanes() -> usize;
    /// Initializes a SIMD register to a constant value
    fn init(value: Self::Native) -> Self::Simd;
    /// Loads a slice into a SIMD register
    fn load(slice: &[Self::Native]) -> Self::Simd;
    /// Creates a new SIMD mask for this SIMD type filling it with `value`
    fn mask_init(value: bool) -> Self::SimdMask;
    /// Creates a new SIMD mask for this SIMD type from the lower-most bits of the given `mask`
    fn mask_from_u64(mask: u64) -> Self::SimdMask;
    /// Gets the value of a single lane in a SIMD mask
    fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool;
    /// Gets the bitmask for a SimdMask as a byte slice and passes it to the closure used as the action parameter
    fn bitmask<T>(mask: &Self::SimdMask, action: T)
    where
        T: FnMut(&[u8]);
    /// Sets the value of a single lane of a SIMD mask
    fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask;
    /// Selects elements of `a` and `b` using `mask`
    fn mask_select(mask: Self::SimdMask, a: Self::Simd, b: Self::Simd) -> Self::Simd;
    /// Returns `true` if any of the lanes in the mask are `true`
    fn mask_any(mask: Self::SimdMask) -> bool;
    /// Performs a SIMD binary operation
    fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>(
        left: Self::Simd,
        right: Self::Simd,
        op: F,
    ) -> Self::Simd;
    /// SIMD version of equal
    fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// SIMD version of not equal
    fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// SIMD version of less than
    fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// SIMD version of less than or equal to
    fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// SIMD version of greater than
    fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// SIMD version of greater than or equal to
    fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask;
    /// Writes a SIMD result back to a slice
    fn write(simd_result: Self::Simd, slice: &mut [Self::Native]);
}
/// A subtype of primitive type that represents numeric values.
///
/// Fallback definition used when SIMD is unavailable: no extra methods.
#[cfg(any(
    not(any(target_arch = "x86", target_arch = "x86_64")),
    not(feature = "simd")
))]
pub trait ArrowNumericType: ArrowPrimitiveType {}
/// Implements [`ArrowNumericType`] for `$impl_ty`: when SIMD is enabled it is
/// wired to the `packed_simd` vector type `$simd_ty` and mask type
/// `$simd_mask_ty`; otherwise an empty impl is emitted.
macro_rules! make_numeric_type {
    ($impl_ty:ty, $native_ty:ty, $simd_ty:ident, $simd_mask_ty:ident) => {
        #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"), feature = "simd"))]
        impl ArrowNumericType for $impl_ty {
            type Simd = $simd_ty;
            type SimdMask = $simd_mask_ty;
            #[inline]
            fn lanes() -> usize {
                Self::Simd::lanes()
            }
            #[inline]
            fn init(value: Self::Native) -> Self::Simd {
                Self::Simd::splat(value)
            }
            #[inline]
            fn load(slice: &[Self::Native]) -> Self::Simd {
                unsafe { Self::Simd::from_slice_unaligned_unchecked(slice) }
            }
            #[inline]
            fn mask_init(value: bool) -> Self::SimdMask {
                Self::SimdMask::splat(value)
            }
            // Expands the low `lanes()` bits of `mask` into a per-lane SIMD
            // mask by AND-ing a vector of single-bit constants against a
            // broadcast of the mask and comparing for equality.
            // NOTE(review): the constant vectors place the highest bit in
            // lane 0 — confirm this ordering matches `bitmask()`'s output.
            #[inline]
            fn mask_from_u64(mask: u64) -> Self::SimdMask {
                match Self::lanes() {
                    // 8 x 64-bit lanes: one 8-bit chunk of the mask.
                    8 => {
                        let vecidx = i64x8::new(128, 64, 32, 16, 8, 4, 2, 1);
                        let vecmask = i64x8::splat((mask & 0xFF) as i64);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        // SAFETY: the comparison mask has the same lane
                        // count and element width as `Self::SimdMask`.
                        unsafe { std::mem::transmute(vecmask) }
                    }
                    // 16 x 32-bit lanes: one 16-bit chunk of the mask.
                    16 => {
                        let vecidx = i32x16::new(
                            32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32,
                            16, 8, 4, 2, 1,
                        );
                        let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        // SAFETY: same lane count and element width as
                        // `Self::SimdMask`.
                        unsafe { std::mem::transmute(vecmask) }
                    }
                    // 32 x 16-bit lanes: processed as two 16-bit chunks,
                    // narrowed to i16 and written into a temporary buffer.
                    32 => {
                        let tmp = &mut [0_i16; 32];
                        let vecidx = i32x16::new(
                            32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32,
                            16, 8, 4, 2, 1,
                        );
                        let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i16x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[0..16]);
                        let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i16x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[16..32]);
                        // SAFETY: `i16x32` and `Self::SimdMask` (m16x32) have
                        // identical size and lane layout.
                        unsafe { std::mem::transmute(i16x32::from_slice_unaligned(tmp)) }
                    }
                    // 64 x 8-bit lanes: processed as four 16-bit chunks,
                    // narrowed to i8 and written into a temporary buffer.
                    64 => {
                        let tmp = &mut [0_i8; 64];
                        let vecidx = i32x16::new(
                            32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32,
                            16, 8, 4, 2, 1,
                        );
                        let vecmask = i32x16::splat((mask & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i8x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[0..16]);
                        let vecmask = i32x16::splat(((mask >> 16) & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i8x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[16..32]);
                        let vecmask = i32x16::splat(((mask >> 32) & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i8x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[32..48]);
                        let vecmask = i32x16::splat(((mask >> 48) & 0xFFFF) as i32);
                        let vecmask = (vecidx & vecmask).eq(vecidx);
                        i8x16::from_cast(vecmask)
                            .write_to_slice_unaligned(&mut tmp[48..64]);
                        // SAFETY: `i8x64` and `Self::SimdMask` (m8x64) have
                        // identical size and lane layout.
                        unsafe { std::mem::transmute(i8x64::from_slice_unaligned(tmp)) }
                    }
                    _ => panic!("Invalid number of vector lanes"),
                }
            }
            #[inline]
            fn mask_get(mask: &Self::SimdMask, idx: usize) -> bool {
                // SAFETY: callers must pass `idx < lanes()` (unchecked extract).
                unsafe { mask.extract_unchecked(idx) }
            }
            fn bitmask<T>(mask: &Self::SimdMask, mut action: T)
            where
                T: FnMut(&[u8]),
            {
                action(mask.bitmask().to_byte_slice());
            }
            #[inline]
            fn mask_set(mask: Self::SimdMask, idx: usize, value: bool) -> Self::SimdMask {
                // SAFETY: callers must pass `idx < lanes()` (unchecked replace).
                unsafe { mask.replace_unchecked(idx, value) }
            }
            /// Selects elements of `a` and `b` using `mask`
            #[inline]
            fn mask_select(
                mask: Self::SimdMask,
                a: Self::Simd,
                b: Self::Simd,
            ) -> Self::Simd {
                mask.select(a, b)
            }
            #[inline]
            fn mask_any(mask: Self::SimdMask) -> bool {
                mask.any()
            }
            #[inline]
            fn bin_op<F: Fn(Self::Simd, Self::Simd) -> Self::Simd>(
                left: Self::Simd,
                right: Self::Simd,
                op: F,
            ) -> Self::Simd {
                op(left, right)
            }
            #[inline]
            fn eq(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.eq(right)
            }
            #[inline]
            fn ne(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.ne(right)
            }
            #[inline]
            fn lt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.lt(right)
            }
            #[inline]
            fn le(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.le(right)
            }
            #[inline]
            fn gt(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.gt(right)
            }
            #[inline]
            fn ge(left: Self::Simd, right: Self::Simd) -> Self::SimdMask {
                left.ge(right)
            }
            #[inline]
            fn write(simd_result: Self::Simd, slice: &mut [Self::Native]) {
                // SAFETY: callers must pass a slice of at least `lanes()`
                // elements (unchecked unaligned store).
                unsafe { simd_result.write_to_slice_unaligned_unchecked(slice) };
            }
        }
        #[cfg(any(
            not(any(target_arch = "x86", target_arch = "x86_64")),
            not(feature = "simd")
        ))]
        impl ArrowNumericType for $impl_ty {}
    };
}
// Wire every numeric marker type to its SIMD vector/mask pair. The vector
// width is 512 bits in all cases (e.g. 64 x 8-bit, 8 x 64-bit).
make_numeric_type!(Int8Type, i8, i8x64, m8x64);
make_numeric_type!(Int16Type, i16, i16x32, m16x32);
make_numeric_type!(Int32Type, i32, i32x16, m32x16);
make_numeric_type!(Int64Type, i64, i64x8, m64x8);
make_numeric_type!(UInt8Type, u8, u8x64, m8x64);
make_numeric_type!(UInt16Type, u16, u16x32, m16x32);
make_numeric_type!(UInt32Type, u32, u32x16, m32x16);
make_numeric_type!(UInt64Type, u64, u64x8, m64x8);
make_numeric_type!(Float32Type, f32, f32x16, m32x16);
make_numeric_type!(Float64Type, f64, f64x8, m64x8);
// Temporal types reuse the integer vectors of their native representation.
make_numeric_type!(TimestampSecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampMillisecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampMicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(TimestampNanosecondType, i64, i64x8, m64x8);
make_numeric_type!(Date32Type, i32, i32x16, m32x16);
make_numeric_type!(Date64Type, i64, i64x8, m64x8);
make_numeric_type!(Time32SecondType, i32, i32x16, m32x16);
make_numeric_type!(Time32MillisecondType, i32, i32x16, m32x16);
make_numeric_type!(Time64MicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(Time64NanosecondType, i64, i64x8, m64x8);
make_numeric_type!(IntervalYearMonthType, i32, i32x16, m32x16);
make_numeric_type!(IntervalDayTimeType, i64, i64x8, m64x8);
make_numeric_type!(DurationSecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationMillisecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationMicrosecondType, i64, i64x8, m64x8);
make_numeric_type!(DurationNanosecondType, i64, i64x8, m64x8);
/// A subtype of primitive type that represents temporal values.
pub trait ArrowTemporalType: ArrowPrimitiveType {}
impl ArrowTemporalType for TimestampSecondType {}
impl ArrowTemporalType for TimestampMillisecondType {}
impl ArrowTemporalType for TimestampMicrosecondType {}
impl ArrowTemporalType for TimestampNanosecondType {}
impl ArrowTemporalType for Date32Type {}
impl ArrowTemporalType for Date64Type {}
impl ArrowTemporalType for Time32SecondType {}
impl ArrowTemporalType for Time32MillisecondType {}
impl ArrowTemporalType for Time64MicrosecondType {}
impl ArrowTemporalType for Time64NanosecondType {}
// NOTE(review): the interval impls below are commented out — presumably an
// intentional exclusion; confirm before enabling.
// impl ArrowTemporalType for IntervalYearMonthType {}
// impl ArrowTemporalType for IntervalDayTimeType {}
/// A timestamp type allows us to create array builders that take a timestamp.
pub trait ArrowTimestampType: ArrowTemporalType {
    /// Returns the `TimeUnit` of this timestamp.
    fn get_time_unit() -> TimeUnit;
}
// One impl per timestamp resolution; each simply reports its `TimeUnit`.
impl ArrowTimestampType for TimestampSecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Second
    }
}
impl ArrowTimestampType for TimestampMillisecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Millisecond
    }
}
impl ArrowTimestampType for TimestampMicrosecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Microsecond
    }
}
impl ArrowTimestampType for TimestampNanosecondType {
    fn get_time_unit() -> TimeUnit {
        TimeUnit::Nanosecond
    }
}
/// Allows conversion from supported Arrow types to a byte slice.
pub trait ToByteSlice {
    /// Converts this instance into a byte slice
    fn to_byte_slice(&self) -> &[u8];
}
impl<T: ArrowNativeType> ToByteSlice for [T] {
    fn to_byte_slice(&self) -> &[u8] {
        let raw_ptr = self.as_ptr() as *const T as *const u8;
        // SAFETY: `raw_ptr` points at `self.len()` contiguous `T`s, so
        // reinterpreting them as `len * size_of::<T>()` bytes stays within
        // the same allocation and borrow.
        unsafe { from_raw_parts(raw_ptr, self.len() * size_of::<T>()) }
    }
}
impl<T: ArrowNativeType> ToByteSlice for T {
    fn to_byte_slice(&self) -> &[u8] {
        let raw_ptr = self as *const T as *const u8;
        // SAFETY: `raw_ptr` points at one `T`, reinterpreted as exactly
        // `size_of::<T>()` bytes of the same borrow.
        unsafe { from_raw_parts(raw_ptr, size_of::<T>()) }
    }
}
impl DataType {
    /// Parse a data type from a JSON representation.
    ///
    /// This is the inverse of `DataType::to_json`. Unknown names or
    /// malformed/missing parameters produce an [`ArrowError::ParseError`].
    pub(crate) fn from(json: &Value) -> Result<DataType> {
        // Placeholder child used for nested types whose children are not
        // encoded in this map (they travel separately in the schema JSON).
        let default_field = Field::new("", DataType::Boolean, true);
        match *json {
            Value::Object(ref map) => match map.get("name") {
                Some(s) if s == "null" => Ok(DataType::Null),
                Some(s) if s == "bool" => Ok(DataType::Boolean),
                Some(s) if s == "binary" => Ok(DataType::Binary),
                Some(s) if s == "largebinary" => Ok(DataType::LargeBinary),
                Some(s) if s == "utf8" => Ok(DataType::Utf8),
                Some(s) if s == "largeutf8" => Ok(DataType::LargeUtf8),
                Some(s) if s == "fixedsizebinary" => {
                    // the byte width is carried in a sibling "byteWidth" field
                    if let Some(Value::Number(size)) = map.get("byteWidth") {
                        Ok(DataType::FixedSizeBinary(size.as_i64().unwrap() as i32))
                    } else {
                        Err(ArrowError::ParseError(
                            "Expecting a byteWidth for fixedsizebinary".to_string(),
                        ))
                    }
                }
                Some(s) if s == "floatingpoint" => match map.get("precision") {
                    Some(p) if p == "HALF" => Ok(DataType::Float16),
                    Some(p) if p == "SINGLE" => Ok(DataType::Float32),
                    Some(p) if p == "DOUBLE" => Ok(DataType::Float64),
                    _ => Err(ArrowError::ParseError(
                        "floatingpoint precision missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "timestamp" => {
                    let unit = match map.get("unit") {
                        Some(p) if p == "SECOND" => Ok(TimeUnit::Second),
                        Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond),
                        Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond),
                        Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond),
                        _ => Err(ArrowError::ParseError(
                            "timestamp unit missing or invalid".to_string(),
                        )),
                    };
                    // timezone is optional; absence is not an error
                    let tz = match map.get("timezone") {
                        None => Ok(None),
                        Some(VString(tz)) => Ok(Some(Arc::new(tz.to_string()))),
                        _ => Err(ArrowError::ParseError(
                            "timezone must be a string".to_string(),
                        )),
                    };
                    Ok(DataType::Timestamp(unit?, tz?))
                }
                Some(s) if s == "date" => match map.get("unit") {
                    Some(p) if p == "DAY" => Ok(DataType::Date32(DateUnit::Day)),
                    Some(p) if p == "MILLISECOND" => {
                        Ok(DataType::Date64(DateUnit::Millisecond))
                    }
                    _ => Err(ArrowError::ParseError(
                        "date unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "time" => {
                    let unit = match map.get("unit") {
                        Some(p) if p == "SECOND" => Ok(TimeUnit::Second),
                        Some(p) if p == "MILLISECOND" => Ok(TimeUnit::Millisecond),
                        Some(p) if p == "MICROSECOND" => Ok(TimeUnit::Microsecond),
                        Some(p) if p == "NANOSECOND" => Ok(TimeUnit::Nanosecond),
                        _ => Err(ArrowError::ParseError(
                            "time unit missing or invalid".to_string(),
                        )),
                    };
                    // bit width selects between the Time32 and Time64 types
                    match map.get("bitWidth") {
                        Some(p) if p == 32 => Ok(DataType::Time32(unit?)),
                        Some(p) if p == 64 => Ok(DataType::Time64(unit?)),
                        _ => Err(ArrowError::ParseError(
                            "time bitWidth missing or invalid".to_string(),
                        )),
                    }
                }
                Some(s) if s == "duration" => match map.get("unit") {
                    Some(p) if p == "SECOND" => Ok(DataType::Duration(TimeUnit::Second)),
                    Some(p) if p == "MILLISECOND" => {
                        Ok(DataType::Duration(TimeUnit::Millisecond))
                    }
                    Some(p) if p == "MICROSECOND" => {
                        Ok(DataType::Duration(TimeUnit::Microsecond))
                    }
                    Some(p) if p == "NANOSECOND" => {
                        Ok(DataType::Duration(TimeUnit::Nanosecond))
                    }
                    // NOTE(review): this message says "time unit" although
                    // the branch handles the duration type — consider fixing.
                    _ => Err(ArrowError::ParseError(
                        "time unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "interval" => match map.get("unit") {
                    Some(p) if p == "DAY_TIME" => {
                        Ok(DataType::Interval(IntervalUnit::DayTime))
                    }
                    Some(p) if p == "YEAR_MONTH" => {
                        Ok(DataType::Interval(IntervalUnit::YearMonth))
                    }
                    _ => Err(ArrowError::ParseError(
                        "interval unit missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "int" => match map.get("isSigned") {
                    Some(&Value::Bool(true)) => match map.get("bitWidth") {
                        Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::Int8),
                            Some(16) => Ok(DataType::Int16),
                            Some(32) => Ok(DataType::Int32),
                            Some(64) => Ok(DataType::Int64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    Some(&Value::Bool(false)) => match map.get("bitWidth") {
                        Some(&Value::Number(ref n)) => match n.as_u64() {
                            Some(8) => Ok(DataType::UInt8),
                            Some(16) => Ok(DataType::UInt16),
                            Some(32) => Ok(DataType::UInt32),
                            Some(64) => Ok(DataType::UInt64),
                            _ => Err(ArrowError::ParseError(
                                "int bitWidth missing or invalid".to_string(),
                            )),
                        },
                        _ => Err(ArrowError::ParseError(
                            "int bitWidth missing or invalid".to_string(),
                        )),
                    },
                    _ => Err(ArrowError::ParseError(
                        "int signed missing or invalid".to_string(),
                    )),
                },
                Some(s) if s == "list" => {
                    // return a list with any type as its child isn't defined in the map
                    Ok(DataType::List(Box::new(default_field)))
                }
                Some(s) if s == "largelist" => {
                    // return a largelist with any type as its child isn't defined in the map
                    Ok(DataType::LargeList(Box::new(default_field)))
                }
                Some(s) if s == "fixedsizelist" => {
                    // return a list with any type as its child isn't defined in the map
                    if let Some(Value::Number(size)) = map.get("listSize") {
                        Ok(DataType::FixedSizeList(
                            Box::new(default_field),
                            size.as_i64().unwrap() as i32,
                        ))
                    } else {
                        Err(ArrowError::ParseError(
                            "Expecting a listSize for fixedsizelist".to_string(),
                        ))
                    }
                }
                Some(s) if s == "struct" => {
                    // return an empty `struct` type as its children aren't defined in the map
                    Ok(DataType::Struct(vec![]))
                }
                Some(other) => Err(ArrowError::ParseError(format!(
                    "invalid or unsupported type name: {} in {:?}",
                    other, json
                ))),
                None => Err(ArrowError::ParseError("type name missing".to_string())),
            },
            _ => Err(ArrowError::ParseError(
                "invalid json value type".to_string(),
            )),
        }
    }
    /// Generate a JSON representation of the data type
    ///
    /// The output follows the Arrow JSON integration format: an object with a
    /// `name` attribute plus type-specific attributes (e.g. `bitWidth`,
    /// `unit`, `timezone`). Nested types (list/struct/dictionary) emit only
    /// the type name here; their children are serialized by `Field::to_json`.
    pub fn to_json(&self) -> Value {
        match self {
            DataType::Null => json!({"name": "null"}),
            DataType::Boolean => json!({"name": "bool"}),
            // All integer types share the "int" name; width and signedness
            // are carried by "bitWidth" and "isSigned".
            DataType::Int8 => json!({"name": "int", "bitWidth": 8, "isSigned": true}),
            DataType::Int16 => json!({"name": "int", "bitWidth": 16, "isSigned": true}),
            DataType::Int32 => json!({"name": "int", "bitWidth": 32, "isSigned": true}),
            DataType::Int64 => json!({"name": "int", "bitWidth": 64, "isSigned": true}),
            DataType::UInt8 => json!({"name": "int", "bitWidth": 8, "isSigned": false}),
            DataType::UInt16 => json!({"name": "int", "bitWidth": 16, "isSigned": false}),
            DataType::UInt32 => json!({"name": "int", "bitWidth": 32, "isSigned": false}),
            DataType::UInt64 => json!({"name": "int", "bitWidth": 64, "isSigned": false}),
            DataType::Float16 => json!({"name": "floatingpoint", "precision": "HALF"}),
            DataType::Float32 => json!({"name": "floatingpoint", "precision": "SINGLE"}),
            DataType::Float64 => json!({"name": "floatingpoint", "precision": "DOUBLE"}),
            DataType::Utf8 => json!({"name": "utf8"}),
            DataType::LargeUtf8 => json!({"name": "largeutf8"}),
            DataType::Binary => json!({"name": "binary"}),
            DataType::LargeBinary => json!({"name": "largebinary"}),
            DataType::FixedSizeBinary(byte_width) => {
                json!({"name": "fixedsizebinary", "byteWidth": byte_width})
            }
            // Nested types: children are not emitted here, only the name
            // (and, for fixed-size lists, the element count).
            DataType::Struct(_) => json!({"name": "struct"}),
            DataType::Union(_) => json!({"name": "union"}),
            DataType::List(_) => json!({ "name": "list"}),
            DataType::LargeList(_) => json!({ "name": "largelist"}),
            DataType::FixedSizeList(_, length) => {
                json!({"name":"fixedsizelist", "listSize": length})
            }
            // Time32/Time64 share the "time" name, distinguished by bitWidth.
            DataType::Time32(unit) => {
                json!({"name": "time", "bitWidth": 32, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Time64(unit) => {
                json!({"name": "time", "bitWidth": 64, "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Date32(unit) | DataType::Date64(unit) => {
                json!({"name": "date", "unit": match unit {
                    DateUnit::Day => "DAY",
                    DateUnit::Millisecond => "MILLISECOND",
                }})
            }
            // The "timezone" attribute is only present when a timezone is set.
            DataType::Timestamp(unit, None) => {
                json!({"name": "timestamp", "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }})
            }
            DataType::Timestamp(unit, Some(tz)) => {
                json!({"name": "timestamp", "unit": match unit {
                    TimeUnit::Second => "SECOND",
                    TimeUnit::Millisecond => "MILLISECOND",
                    TimeUnit::Microsecond => "MICROSECOND",
                    TimeUnit::Nanosecond => "NANOSECOND",
                }, "timezone": tz})
            }
            DataType::Interval(unit) => json!({"name": "interval", "unit": match unit {
                IntervalUnit::YearMonth => "YEAR_MONTH",
                IntervalUnit::DayTime => "DAY_TIME",
            }}),
            DataType::Duration(unit) => json!({"name": "duration", "unit": match unit {
                TimeUnit::Second => "SECOND",
                TimeUnit::Millisecond => "MILLISECOND",
                TimeUnit::Microsecond => "MICROSECOND",
                TimeUnit::Nanosecond => "NANOSECOND",
            }}),
            DataType::Dictionary(_, _) => json!({ "name": "dictionary"}),
            DataType::Decimal(precision, scale) => {
                json!({"name": "decimal", "precision": precision, "scale": scale})
            }
        }
    }
    /// Returns true if this type is numeric: (UInt*, Int*, or Float*)
    ///
    /// Note that `Float16` is not included in the match below, so it is
    /// reported as non-numeric by this function.
    pub fn is_numeric(t: &DataType) -> bool {
        use DataType::*;
        matches!(
            t,
            UInt8
                | UInt16
                | UInt32
                | UInt64
                | Int8
                | Int16
                | Int32
                | Int64
                | Float32
                | Float64
        )
    }
}
impl Field {
    /// Creates a new field with the given name, data type and nullability.
    pub fn new(name: &str, data_type: DataType, nullable: bool) -> Self {
        Field {
            name: name.to_string(),
            data_type,
            nullable,
            // Non-dictionary fields carry the default dictionary settings.
            dict_id: 0,
            dict_is_ordered: false,
        }
    }
    /// Creates a new field that additionally carries dictionary-encoding
    /// properties (`dict_id`, `dict_is_ordered`).
    pub fn new_dict(
        name: &str,
        data_type: DataType,
        nullable: bool,
        dict_id: i64,
        dict_is_ordered: bool,
    ) -> Self {
        Field {
            name: name.to_string(),
            data_type,
            nullable,
            dict_id,
            dict_is_ordered,
        }
    }
    /// Returns an immutable reference to the `Field`'s name
    #[inline]
    pub const fn name(&self) -> &String {
        &self.name
    }
    /// Returns an immutable reference to the `Field`'s data-type
    #[inline]
    pub const fn data_type(&self) -> &DataType {
        &self.data_type
    }
    /// Indicates whether this `Field` supports null values
    #[inline]
    pub const fn is_nullable(&self) -> bool {
        self.nullable
    }
    /// Returns the dictionary ID
    #[inline]
    pub const fn dict_id(&self) -> i64 {
        self.dict_id
    }
    /// Indicates whether this `Field`'s dictionary is ordered
    #[inline]
    pub const fn dict_is_ordered(&self) -> bool {
        self.dict_is_ordered
    }
    /// Parse a `Field` definition from a JSON representation
    ///
    /// Expects an object with `name`, `nullable` and `type` attributes.
    /// List/struct types additionally require a `children` array, and
    /// dictionary-encoded fields a `dictionary` object.
    ///
    /// # Errors
    ///
    /// Returns `ArrowError::ParseError` when a required attribute is missing
    /// or has an unexpected JSON type.
    pub fn from(json: &Value) -> Result<Self> {
        match *json {
            Value::Object(ref map) => {
                // Mandatory attributes: name, nullable and type.
                let name = match map.get("name") {
                    Some(&Value::String(ref name)) => name.to_string(),
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'name' attribute".to_string(),
                        ));
                    }
                };
                let nullable = match map.get("nullable") {
                    Some(&Value::Bool(b)) => b,
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'nullable' attribute".to_string(),
                        ));
                    }
                };
                let data_type = match map.get("type") {
                    Some(t) => DataType::from(t)?,
                    _ => {
                        return Err(ArrowError::ParseError(
                            "Field missing 'type' attribute".to_string(),
                        ));
                    }
                };
                // if data_type is a struct or list, get its children
                // (DataType::from only parses the type name; the child
                // fields live in this object's 'children' array).
                let data_type = match data_type {
                    DataType::List(_)
                    | DataType::LargeList(_)
                    | DataType::FixedSizeList(_, _) => match map.get("children") {
                        Some(Value::Array(values)) => {
                            // A list type has exactly one child: its element field.
                            if values.len() != 1 {
                                return Err(ArrowError::ParseError(
                                    "Field 'children' must have one element for a list data type".to_string(),
                                ));
                            }
                            // Rebuild the list type around the parsed child field.
                            match data_type {
                                DataType::List(_) => DataType::List(Box::new(
                                    Self::from(&values[0])?,
                                )),
                                DataType::LargeList(_) => DataType::LargeList(Box::new(
                                    Self::from(&values[0])?,
                                )),
                                DataType::FixedSizeList(_, int) => {
                                    DataType::FixedSizeList(
                                        Box::new(Self::from(&values[0])?),
                                        int,
                                    )
                                }
                                _ => unreachable!(
                                    "Data type should be a list, largelist or fixedsizelist"
                                ),
                            }
                        }
                        Some(_) => {
                            return Err(ArrowError::ParseError(
                                "Field 'children' must be an array".to_string(),
                            ))
                        }
                        None => {
                            return Err(ArrowError::ParseError(
                                "Field missing 'children' attribute".to_string(),
                            ));
                        }
                    },
                    // Struct: parse every child and append to the (empty)
                    // field list that DataType::from produced.
                    DataType::Struct(mut fields) => match map.get("children") {
                        Some(Value::Array(values)) => {
                            let struct_fields: Result<Vec<Field>> =
                                values.iter().map(|v| Field::from(v)).collect();
                            fields.append(&mut struct_fields?);
                            DataType::Struct(fields)
                        }
                        Some(_) => {
                            return Err(ArrowError::ParseError(
                                "Field 'children' must be an array".to_string(),
                            ))
                        }
                        None => {
                            return Err(ArrowError::ParseError(
                                "Field missing 'children' attribute".to_string(),
                            ));
                        }
                    },
                    _ => data_type,
                };
                // If a 'dictionary' object is present, the type parsed so far
                // is the dictionary's value type; wrap it together with the
                // parsed index type.
                let mut dict_id = 0;
                let mut dict_is_ordered = false;
                let data_type = match map.get("dictionary") {
                    Some(dictionary) => {
                        let index_type = match dictionary.get("indexType") {
                            Some(t) => DataType::from(t)?,
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'indexType' attribute".to_string(),
                                ));
                            }
                        };
                        dict_id = match dictionary.get("id") {
                            Some(Value::Number(n)) => n.as_i64().unwrap(),
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'id' attribute".to_string(),
                                ));
                            }
                        };
                        dict_is_ordered = match dictionary.get("isOrdered") {
                            Some(&Value::Bool(n)) => n,
                            _ => {
                                return Err(ArrowError::ParseError(
                                    "Field missing 'isOrdered' attribute".to_string(),
                                ));
                            }
                        };
                        DataType::Dictionary(Box::new(index_type), Box::new(data_type))
                    }
                    _ => data_type,
                };
                Ok(Field {
                    name,
                    nullable,
                    data_type,
                    dict_id,
                    dict_is_ordered,
                })
            }
            _ => Err(ArrowError::ParseError(
                "Invalid json value type for field".to_string(),
            )),
        }
    }
    /// Generate a JSON representation of the `Field`
    ///
    /// Nested children and (for dictionary fields) the `dictionary` object
    /// are emitted here; the bare type comes from `DataType::to_json`.
    pub fn to_json(&self) -> Value {
        // Children are serialized from this field, since DataType::to_json
        // only emits the type name for nested types.
        let children: Vec<Value> = match self.data_type() {
            DataType::Struct(fields) => fields.iter().map(|f| f.to_json()).collect(),
            DataType::List(field) => vec![field.to_json()],
            DataType::LargeList(field) => vec![field.to_json()],
            DataType::FixedSizeList(field, _) => vec![field.to_json()],
            _ => vec![],
        };
        match self.data_type() {
            // Dictionary fields serialize their value type as 'type' and the
            // index type inside the 'dictionary' object.
            DataType::Dictionary(ref index_type, ref value_type) => json!({
                "name": self.name,
                "nullable": self.nullable,
                "type": value_type.to_json(),
                "children": children,
                "dictionary": {
                    "id": self.dict_id,
                    "indexType": index_type.to_json(),
                    "isOrdered": self.dict_is_ordered
                }
            }),
            _ => json!({
                "name": self.name,
                "nullable": self.nullable,
                "type": self.data_type.to_json(),
                "children": children
            }),
        }
    }
    /// Merge field into self if it is compatible. Struct will be merged recursively.
    ///
    /// Example:
    ///
    /// ```
    /// use arrow::datatypes::*;
    ///
    /// let mut field = Field::new("c1", DataType::Int64, false);
    /// assert!(field.try_merge(&Field::new("c1", DataType::Int64, true)).is_ok());
    /// assert!(field.is_nullable());
    /// ```
    pub fn try_merge(&mut self, from: &Field) -> Result<()> {
        // Dictionary properties must agree exactly; they are not merged.
        if from.dict_id != self.dict_id {
            return Err(ArrowError::SchemaError(
                "Fail to merge schema Field due to conflicting dict_id".to_string(),
            ));
        }
        if from.dict_is_ordered != self.dict_is_ordered {
            return Err(ArrowError::SchemaError(
                "Fail to merge schema Field due to conflicting dict_is_ordered"
                    .to_string(),
            ));
        }
        match &mut self.data_type {
            // Structs merge matching child fields by name (recursively) and
            // append children that only exist in `from`.
            DataType::Struct(nested_fields) => match &from.data_type {
                DataType::Struct(from_nested_fields) => {
                    for from_field in from_nested_fields {
                        let mut is_new_field = true;
                        for self_field in nested_fields.iter_mut() {
                            if self_field.name != from_field.name {
                                continue;
                            }
                            is_new_field = false;
                            self_field.try_merge(&from_field)?;
                        }
                        if is_new_field {
                            nested_fields.push(from_field.clone());
                        }
                    }
                }
                _ => {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            },
            // Unions append variants from `from` that are not already present
            // (compared by full field equality, not just name).
            DataType::Union(nested_fields) => match &from.data_type {
                DataType::Union(from_nested_fields) => {
                    for from_field in from_nested_fields {
                        let mut is_new_field = true;
                        for self_field in nested_fields.iter_mut() {
                            if from_field == self_field {
                                is_new_field = false;
                                break;
                            }
                        }
                        if is_new_field {
                            nested_fields.push(from_field.clone());
                        }
                    }
                }
                _ => {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            },
            // All other types must match exactly.
            DataType::Null
            | DataType::Boolean
            | DataType::Int8
            | DataType::Int16
            | DataType::Int32
            | DataType::Int64
            | DataType::UInt8
            | DataType::UInt16
            | DataType::UInt32
            | DataType::UInt64
            | DataType::Float16
            | DataType::Float32
            | DataType::Float64
            | DataType::Timestamp(_, _)
            | DataType::Date32(_)
            | DataType::Date64(_)
            | DataType::Time32(_)
            | DataType::Time64(_)
            | DataType::Duration(_)
            | DataType::Binary
            | DataType::LargeBinary
            | DataType::Interval(_)
            | DataType::LargeList(_)
            | DataType::List(_)
            | DataType::Dictionary(_, _)
            | DataType::FixedSizeList(_, _)
            | DataType::FixedSizeBinary(_)
            | DataType::Utf8
            | DataType::LargeUtf8
            | DataType::Decimal(_, _) => {
                if self.data_type != from.data_type {
                    return Err(ArrowError::SchemaError(
                        "Fail to merge schema Field due to conflicting datatype"
                            .to_string(),
                    ));
                }
            }
        }
        // Nullability is widened: the merged field is nullable if either is.
        if from.nullable {
            self.nullable = from.nullable;
        }
        Ok(())
    }
}
impl fmt::Display for Field {
    /// Renders the field as `name: data_type`, using the `Debug`
    /// representation of the data type.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str(&self.name)?;
        f.write_str(": ")?;
        write!(f, "{:?}", self.data_type)
    }
}
/// Describes the meta-data of an ordered sequence of relative types.
///
/// Note that this information is only part of the meta-data and not part of the physical
/// memory layout.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Schema {
    /// The ordered sequence of fields that make up the schema.
    pub(crate) fields: Vec<Field>,
    /// A map of key-value pairs containing additional meta data.
    #[serde(skip_serializing_if = "HashMap::is_empty")]
    pub(crate) metadata: HashMap<String, String>,
}
impl Schema {
    /// Creates an empty `Schema`
    pub fn empty() -> Self {
        Self {
            fields: vec![],
            metadata: HashMap::new(),
        }
    }
    /// Creates a new `Schema` from a sequence of `Field` values
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate arrow;
    /// # use arrow::datatypes::{Field, DataType, Schema};
    /// let field_a = Field::new("a", DataType::Int64, false);
    /// let field_b = Field::new("b", DataType::Boolean, false);
    ///
    /// let schema = Schema::new(vec![field_a, field_b]);
    /// ```
    pub fn new(fields: Vec<Field>) -> Self {
        Self::new_with_metadata(fields, HashMap::new())
    }
    /// Creates a new `Schema` from a sequence of `Field` values
    /// and adds additional metadata in form of key value pairs.
    ///
    /// # Example
    ///
    /// ```
    /// # extern crate arrow;
    /// # use arrow::datatypes::{Field, DataType, Schema};
    /// # use std::collections::HashMap;
    /// let field_a = Field::new("a", DataType::Int64, false);
    /// let field_b = Field::new("b", DataType::Boolean, false);
    ///
    /// let mut metadata: HashMap<String, String> = HashMap::new();
    /// metadata.insert("row_count".to_string(), "100".to_string());
    ///
    /// let schema = Schema::new_with_metadata(vec![field_a, field_b], metadata);
    /// ```
    #[inline]
    pub const fn new_with_metadata(
        fields: Vec<Field>,
        metadata: HashMap<String, String>,
    ) -> Self {
        Self { fields, metadata }
    }
    /// Merge schema into self if it is compatible. Struct fields will be merged recursively.
    ///
    /// Example:
    ///
    /// ```
    /// use arrow::datatypes::*;
    ///
    /// let merged = Schema::try_merge(&vec![
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, false),
    ///         Field::new("c2", DataType::Utf8, false),
    ///     ]),
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, true),
    ///         Field::new("c2", DataType::Utf8, false),
    ///         Field::new("c3", DataType::Utf8, false),
    ///     ]),
    /// ]).unwrap();
    ///
    /// assert_eq!(
    ///     merged,
    ///     Schema::new(vec![
    ///         Field::new("c1", DataType::Int64, true),
    ///         Field::new("c2", DataType::Utf8, false),
    ///         Field::new("c3", DataType::Utf8, false),
    ///     ]),
    /// );
    /// ```
    pub fn try_merge(schemas: &[Self]) -> Result<Self> {
        let mut merged = Self::empty();
        for schema in schemas {
            for (key, value) in schema.metadata.iter() {
                // merge metadata: keys may repeat across schemas only if the
                // values agree; otherwise the merge fails.
                match merged.metadata.get(key) {
                    Some(old_val) => {
                        if old_val != value {
                            return Err(ArrowError::SchemaError(
                                "Fail to merge schema due to conflicting metadata"
                                    .to_string(),
                            ));
                        }
                    }
                    None => {
                        merged.metadata.insert(key.clone(), value.clone());
                    }
                }
            }
            // merge fields: match by name and merge via Field::try_merge
            for field in &schema.fields {
                let mut new_field = true;
                for merged_field in &mut merged.fields {
                    if field.name != merged_field.name {
                        continue;
                    }
                    new_field = false;
                    merged_field.try_merge(field)?
                }
                // found a new field, add to field list
                if new_field {
                    merged.fields.push(field.clone());
                }
            }
        }
        Ok(merged)
    }
    /// Returns an immutable reference of the vector of `Field` instances
    #[inline]
    pub const fn fields(&self) -> &Vec<Field> {
        &self.fields
    }
    /// Returns an immutable reference of a specific `Field` instance selected using an
    /// offset within the internal `fields` vector
    pub fn field(&self, i: usize) -> &Field {
        &self.fields[i]
    }
    /// Returns an immutable reference of a specific `Field` instance selected by name
    pub fn field_with_name(&self, name: &str) -> Result<&Field> {
        Ok(&self.fields[self.index_of(name)?])
    }
    /// Find the index of the column with the given name
    ///
    /// Returns an `InvalidArgumentError` listing the valid field names when
    /// no field matches.
    pub fn index_of(&self, name: &str) -> Result<usize> {
        for i in 0..self.fields.len() {
            if self.fields[i].name == name {
                return Ok(i);
            }
        }
        let valid_fields: Vec<String> =
            self.fields.iter().map(|f| f.name().clone()).collect();
        Err(ArrowError::InvalidArgumentError(format!(
            "Unable to get field named \"{}\". Valid fields: {:?}",
            name, valid_fields
        )))
    }
    /// Returns an immutable reference to the Map of custom metadata key-value pairs.
    #[inline]
    pub const fn metadata(&self) -> &HashMap<String, String> {
        &self.metadata
    }
    /// Look up a column by name and return an immutable reference to the column along with
    /// its index
    pub fn column_with_name(&self, name: &str) -> Option<(usize, &Field)> {
        self.fields
            .iter()
            .enumerate()
            .find(|&(_, c)| c.name == name)
    }
    /// Generate a JSON representation of the `Schema`
    pub fn to_json(&self) -> Value {
        json!({
            "fields": self.fields.iter().map(|field| field.to_json()).collect::<Vec<Value>>(),
            "metadata": serde_json::to_value(&self.metadata).unwrap()
        })
    }
    /// Parse a `Schema` definition from a JSON representation
    ///
    /// Requires a `fields` array; the `metadata` attribute is optional and
    /// defaults to an empty map.
    pub fn from(json: &Value) -> Result<Self> {
        match *json {
            Value::Object(ref schema) => {
                let fields = if let Some(Value::Array(fields)) = schema.get("fields") {
                    fields
                        .iter()
                        .map(|f| Field::from(f))
                        .collect::<Result<_>>()?
                } else {
                    return Err(ArrowError::ParseError(
                        "Schema fields should be an array".to_string(),
                    ));
                };
                let metadata = if let Some(value) = schema.get("metadata") {
                    Self::from_metadata(value)?
                } else {
                    HashMap::default()
                };
                Ok(Self { fields, metadata })
            }
            _ => Err(ArrowError::ParseError(
                "Invalid json value type for schema".to_string(),
            )),
        }
    }
    /// Parse a `metadata` definition from a JSON representation
    /// The JSON can either be an Object or an Array of Objects
    fn from_metadata(json: &Value) -> Result<HashMap<String, String>> {
        match json {
            // Array form: a list of {"key": ..., "value": ...} objects.
            Value::Array(_) => {
                let mut hashmap = HashMap::new();
                let values: Vec<MetadataKeyValue> = serde_json::from_value(json.clone())
                    .map_err(|_| {
                        ArrowError::JsonError(
                            "Unable to parse object into key-value pair".to_string(),
                        )
                    })?;
                for meta in values {
                    hashmap.insert(meta.key.clone(), meta.value);
                }
                Ok(hashmap)
            }
            // Object form: a plain string-to-string map.
            Value::Object(md) => md
                .iter()
                .map(|(k, v)| {
                    if let Value::String(v) = v {
                        Ok((k.to_string(), v.to_string()))
                    } else {
                        Err(ArrowError::ParseError(
                            "metadata `value` field must be a string".to_string(),
                        ))
                    }
                })
                .collect::<Result<_>>(),
            _ => Err(ArrowError::ParseError(
                "`metadata` field must be an object".to_string(),
            )),
        }
    }
}
impl fmt::Display for Schema {
    /// Renders the schema as a comma-separated list of its fields, each
    /// formatted via `Field`'s `Display` implementation.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        for (i, field) in self.fields.iter().enumerate() {
            if i > 0 {
                f.write_str(", ")?;
            }
            write!(f, "{}", field)?;
        }
        Ok(())
    }
}
/// A reference-counted reference to a [`Schema`](crate::datatypes::Schema).
///
/// Cloning a `SchemaRef` only bumps the `Arc` reference count; the
/// underlying `Schema` is shared, not copied.
pub type SchemaRef = Arc<Schema>;
/// Deserialization helper for schema metadata expressed as a JSON array of
/// `{"key": ..., "value": ...}` objects (the array form accepted by
/// `Schema::from_metadata`).
#[derive(Deserialize)]
struct MetadataKeyValue {
    key: String,
    value: String,
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::Number;
use serde_json::Value::{Bool, Number as VNumber};
use std::f32::NAN;
#[test]
fn create_struct_type() {
let _person = DataType::Struct(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
]);
}
#[test]
fn serde_struct_type() {
let person = DataType::Struct(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
]);
let serialized = serde_json::to_string(&person).unwrap();
// NOTE that this is testing the default (derived) serialization format, not the
// JSON format specified in metadata.md
assert_eq!(
"{\"Struct\":[\
{\"name\":\"first_name\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"last_name\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"address\",\"data_type\":{\"Struct\":\
[{\"name\":\"street\",\"data_type\":\"Utf8\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false},\
{\"name\":\"zip\",\"data_type\":\"UInt16\",\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false}\
]},\"nullable\":false,\"dict_id\":0,\"dict_is_ordered\":false}]}",
serialized
);
let deserialized = serde_json::from_str(&serialized).unwrap();
assert_eq!(person, deserialized);
}
#[test]
fn struct_field_to_json() {
let f = Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
);
let value: Value = serde_json::from_str(
r#"{
"name": "address",
"nullable": false,
"type": {
"name": "struct"
},
"children": [
{
"name": "street",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "zip",
"nullable": false,
"type": {
"name": "int",
"bitWidth": 16,
"isSigned": false
},
"children": []
}
]
}"#,
)
.unwrap();
assert_eq!(value, f.to_json());
}
#[test]
fn primitive_field_to_json() {
let f = Field::new("first_name", DataType::Utf8, false);
let value: Value = serde_json::from_str(
r#"{
"name": "first_name",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}"#,
)
.unwrap();
assert_eq!(value, f.to_json());
}
#[test]
fn parse_struct_from_json() {
let json = r#"
{
"name": "address",
"type": {
"name": "struct"
},
"nullable": false,
"children": [
{
"name": "street",
"type": {
"name": "utf8"
},
"nullable": false,
"children": []
},
{
"name": "zip",
"type": {
"name": "int",
"isSigned": false,
"bitWidth": 16
},
"nullable": false,
"children": []
}
]
}
"#;
let value: Value = serde_json::from_str(json).unwrap();
let dt = Field::from(&value).unwrap();
let expected = Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
);
assert_eq!(expected, dt);
}
#[test]
fn parse_utf8_from_json() {
let json = "{\"name\":\"utf8\"}";
let value: Value = serde_json::from_str(json).unwrap();
let dt = DataType::from(&value).unwrap();
assert_eq!(DataType::Utf8, dt);
}
#[test]
fn parse_int32_from_json() {
let json = "{\"name\": \"int\", \"isSigned\": true, \"bitWidth\": 32}";
let value: Value = serde_json::from_str(json).unwrap();
let dt = DataType::from(&value).unwrap();
assert_eq!(DataType::Int32, dt);
}
#[test]
fn schema_json() {
// Add some custom metadata
let metadata: HashMap<String, String> =
[("Key".to_string(), "Value".to_string())]
.iter()
.cloned()
.collect();
let schema = Schema::new_with_metadata(
vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Binary, false),
Field::new("c3", DataType::FixedSizeBinary(3), false),
Field::new("c4", DataType::Boolean, false),
Field::new("c5", DataType::Date32(DateUnit::Day), false),
Field::new("c6", DataType::Date64(DateUnit::Millisecond), false),
Field::new("c7", DataType::Time32(TimeUnit::Second), false),
Field::new("c8", DataType::Time32(TimeUnit::Millisecond), false),
Field::new("c9", DataType::Time32(TimeUnit::Microsecond), false),
Field::new("c10", DataType::Time32(TimeUnit::Nanosecond), false),
Field::new("c11", DataType::Time64(TimeUnit::Second), false),
Field::new("c12", DataType::Time64(TimeUnit::Millisecond), false),
Field::new("c13", DataType::Time64(TimeUnit::Microsecond), false),
Field::new("c14", DataType::Time64(TimeUnit::Nanosecond), false),
Field::new("c15", DataType::Timestamp(TimeUnit::Second, None), false),
Field::new(
"c16",
DataType::Timestamp(
TimeUnit::Millisecond,
Some(Arc::new("UTC".to_string())),
),
false,
),
Field::new(
"c17",
DataType::Timestamp(
TimeUnit::Microsecond,
Some(Arc::new("Africa/Johannesburg".to_string())),
),
false,
),
Field::new(
"c18",
DataType::Timestamp(TimeUnit::Nanosecond, None),
false,
),
Field::new("c19", DataType::Interval(IntervalUnit::DayTime), false),
Field::new("c20", DataType::Interval(IntervalUnit::YearMonth), false),
Field::new(
"c21",
DataType::List(Box::new(Field::new("item", DataType::Boolean, true))),
false,
),
Field::new(
"c22",
DataType::FixedSizeList(
Box::new(Field::new("bools", DataType::Boolean, false)),
5,
),
false,
),
Field::new(
"c23",
DataType::List(Box::new(Field::new(
"inner_list",
DataType::List(Box::new(Field::new(
"struct",
DataType::Struct(vec![]),
true,
))),
false,
))),
true,
),
Field::new(
"c24",
DataType::Struct(vec![
Field::new("a", DataType::Utf8, false),
Field::new("b", DataType::UInt16, false),
]),
false,
),
Field::new("c25", DataType::Interval(IntervalUnit::YearMonth), true),
Field::new("c26", DataType::Interval(IntervalUnit::DayTime), true),
Field::new("c27", DataType::Duration(TimeUnit::Second), false),
Field::new("c28", DataType::Duration(TimeUnit::Millisecond), false),
Field::new("c29", DataType::Duration(TimeUnit::Microsecond), false),
Field::new("c30", DataType::Duration(TimeUnit::Nanosecond), false),
Field::new_dict(
"c31",
DataType::Dictionary(
Box::new(DataType::Int32),
Box::new(DataType::Utf8),
),
true,
123,
true,
),
Field::new("c32", DataType::LargeBinary, true),
Field::new("c33", DataType::LargeUtf8, true),
Field::new(
"c34",
DataType::LargeList(Box::new(Field::new(
"inner_large_list",
DataType::LargeList(Box::new(Field::new(
"struct",
DataType::Struct(vec![]),
false,
))),
true,
))),
true,
),
],
metadata,
);
let expected = schema.to_json();
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "c2",
"nullable": false,
"type": {
"name": "binary"
},
"children": []
},
{
"name": "c3",
"nullable": false,
"type": {
"name": "fixedsizebinary",
"byteWidth": 3
},
"children": []
},
{
"name": "c4",
"nullable": false,
"type": {
"name": "bool"
},
"children": []
},
{
"name": "c5",
"nullable": false,
"type": {
"name": "date",
"unit": "DAY"
},
"children": []
},
{
"name": "c6",
"nullable": false,
"type": {
"name": "date",
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c7",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "SECOND"
},
"children": []
},
{
"name": "c8",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c9",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c10",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 32,
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c11",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "SECOND"
},
"children": []
},
{
"name": "c12",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c13",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c14",
"nullable": false,
"type": {
"name": "time",
"bitWidth": 64,
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c15",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "SECOND"
},
"children": []
},
{
"name": "c16",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "MILLISECOND",
"timezone": "UTC"
},
"children": []
},
{
"name": "c17",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "MICROSECOND",
"timezone": "Africa/Johannesburg"
},
"children": []
},
{
"name": "c18",
"nullable": false,
"type": {
"name": "timestamp",
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c19",
"nullable": false,
"type": {
"name": "interval",
"unit": "DAY_TIME"
},
"children": []
},
{
"name": "c20",
"nullable": false,
"type": {
"name": "interval",
"unit": "YEAR_MONTH"
},
"children": []
},
{
"name": "c21",
"nullable": false,
"type": {
"name": "list"
},
"children": [
{
"name": "item",
"nullable": true,
"type": {
"name": "bool"
},
"children": []
}
]
},
{
"name": "c22",
"nullable": false,
"type": {
"name": "fixedsizelist",
"listSize": 5
},
"children": [
{
"name": "bools",
"nullable": false,
"type": {
"name": "bool"
},
"children": []
}
]
},
{
"name": "c23",
"nullable": true,
"type": {
"name": "list"
},
"children": [
{
"name": "inner_list",
"nullable": false,
"type": {
"name": "list"
},
"children": [
{
"name": "struct",
"nullable": true,
"type": {
"name": "struct"
},
"children": []
}
]
}
]
},
{
"name": "c24",
"nullable": false,
"type": {
"name": "struct"
},
"children": [
{
"name": "a",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
},
{
"name": "b",
"nullable": false,
"type": {
"name": "int",
"bitWidth": 16,
"isSigned": false
},
"children": []
}
]
},
{
"name": "c25",
"nullable": true,
"type": {
"name": "interval",
"unit": "YEAR_MONTH"
},
"children": []
},
{
"name": "c26",
"nullable": true,
"type": {
"name": "interval",
"unit": "DAY_TIME"
},
"children": []
},
{
"name": "c27",
"nullable": false,
"type": {
"name": "duration",
"unit": "SECOND"
},
"children": []
},
{
"name": "c28",
"nullable": false,
"type": {
"name": "duration",
"unit": "MILLISECOND"
},
"children": []
},
{
"name": "c29",
"nullable": false,
"type": {
"name": "duration",
"unit": "MICROSECOND"
},
"children": []
},
{
"name": "c30",
"nullable": false,
"type": {
"name": "duration",
"unit": "NANOSECOND"
},
"children": []
},
{
"name": "c31",
"nullable": true,
"children": [],
"type": {
"name": "utf8"
},
"dictionary": {
"id": 123,
"indexType": {
"name": "int",
"bitWidth": 32,
"isSigned": true
},
"isOrdered": true
}
},
{
"name": "c32",
"nullable": true,
"type": {
"name": "largebinary"
},
"children": []
},
{
"name": "c33",
"nullable": true,
"type": {
"name": "largeutf8"
},
"children": []
},
{
"name": "c34",
"nullable": true,
"type": {
"name": "largelist"
},
"children": [
{
"name": "inner_large_list",
"nullable": true,
"type": {
"name": "largelist"
},
"children": [
{
"name": "struct",
"nullable": false,
"type": {
"name": "struct"
},
"children": []
}
]
}
]
}
],
"metadata" : {
"Key": "Value"
}
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
assert_eq!(expected, value);
// convert back to a schema
let value: Value = serde_json::from_str(&json).unwrap();
let schema2 = Schema::from(&value).unwrap();
assert_eq!(schema, schema2);
// Check that empty metadata produces empty value in JSON and can be parsed
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}
],
"metadata": {}
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
let schema = Schema::from(&value).unwrap();
assert!(schema.metadata.is_empty());
// Check that metadata field is not required in the JSON.
let json = r#"{
"fields": [
{
"name": "c1",
"nullable": false,
"type": {
"name": "utf8"
},
"children": []
}
]
}"#;
let value: Value = serde_json::from_str(&json).unwrap();
let schema = Schema::from(&value).unwrap();
assert!(schema.metadata.is_empty());
}
#[test]
fn create_schema_string() {
let schema = person_schema();
assert_eq!(schema.to_string(), "first_name: Utf8, \
last_name: Utf8, \
address: Struct([\
Field { name: \"street\", data_type: Utf8, nullable: false, dict_id: 0, dict_is_ordered: false }, \
Field { name: \"zip\", data_type: UInt16, nullable: false, dict_id: 0, dict_is_ordered: false }])")
}
#[test]
fn schema_field_accessors() {
let schema = person_schema();
// test schema accessors
assert_eq!(schema.fields().len(), 3);
// test field accessors
let first_name = &schema.fields()[0];
assert_eq!(first_name.name(), "first_name");
assert_eq!(first_name.data_type(), &DataType::Utf8);
assert_eq!(first_name.is_nullable(), false);
}
#[test]
#[should_panic(
expected = "Unable to get field named \\\"nickname\\\". Valid fields: [\\\"first_name\\\", \\\"last_name\\\", \\\"address\\\"]"
)]
fn schema_index_of() {
let schema = person_schema();
assert_eq!(schema.index_of("first_name").unwrap(), 0);
assert_eq!(schema.index_of("last_name").unwrap(), 1);
schema.index_of("nickname").unwrap();
}
#[test]
#[should_panic(
expected = "Unable to get field named \\\"nickname\\\". Valid fields: [\\\"first_name\\\", \\\"last_name\\\", \\\"address\\\"]"
)]
fn schema_field_with_name() {
let schema = person_schema();
assert_eq!(
schema.field_with_name("first_name").unwrap().name(),
"first_name"
);
assert_eq!(
schema.field_with_name("last_name").unwrap().name(),
"last_name"
);
schema.field_with_name("nickname").unwrap();
}
#[test]
fn schema_equality() {
let schema1 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float64, true),
Field::new("c3", DataType::LargeBinary, true),
]);
let schema2 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float64, true),
Field::new("c3", DataType::LargeBinary, true),
]);
assert_eq!(schema1, schema2);
let schema3 = Schema::new(vec![
Field::new("c1", DataType::Utf8, false),
Field::new("c2", DataType::Float32, true),
]);
let schema4 = Schema::new(vec![
Field::new("C1", DataType::Utf8, false),
Field::new("C2", DataType::Float64, true),
]);
assert!(schema1 != schema3);
assert!(schema1 != schema4);
assert!(schema2 != schema3);
assert!(schema2 != schema4);
assert!(schema3 != schema4);
}
#[test]
fn test_arrow_native_type_to_json() {
assert_eq!(Some(Bool(true)), true.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i8.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i16.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i32.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1i64.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u8.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u16.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u32.into_json_value());
assert_eq!(Some(VNumber(Number::from(1))), 1u64.into_json_value());
assert_eq!(
Some(VNumber(Number::from_f64(0.01f64).unwrap())),
0.01.into_json_value()
);
assert_eq!(
Some(VNumber(Number::from_f64(0.01f64).unwrap())),
0.01f64.into_json_value()
);
assert_eq!(None, NAN.into_json_value());
}
fn person_schema() -> Schema {
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![
Field::new("street", DataType::Utf8, false),
Field::new("zip", DataType::UInt16, false),
]),
false,
),
])
}
#[test]
fn test_schema_merge() -> Result<()> {
let merged = Schema::try_merge(&[
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
Field::new(
"address",
DataType::Struct(vec![Field::new("zip", DataType::UInt16, false)]),
false,
),
]),
Schema::new_with_metadata(
vec![
// nullable merge
Field::new("last_name", DataType::Utf8, true),
Field::new(
"address",
DataType::Struct(vec![
// add new nested field
Field::new("street", DataType::Utf8, false),
// nullable merge on nested field
Field::new("zip", DataType::UInt16, true),
]),
false,
),
// new field
Field::new("number", DataType::Utf8, true),
],
[("foo".to_string(), "bar".to_string())]
.iter()
.cloned()
.collect::<HashMap<String, String>>(),
),
])?;
assert_eq!(
merged,
Schema::new_with_metadata(
vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, true),
Field::new(
"address",
DataType::Struct(vec![
Field::new("zip", DataType::UInt16, true),
Field::new("street", DataType::Utf8, false),
]),
false,
),
Field::new("number", DataType::Utf8, true),
],
[("foo".to_string(), "bar".to_string())]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
)
);
// support merge union fields
assert_eq!(
Schema::try_merge(&[
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c11", DataType::Utf8, true),
Field::new("c12", DataType::Utf8, true),
]),
false
),]),
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c12", DataType::Utf8, true),
Field::new("c13", DataType::Time64(TimeUnit::Second), true),
]),
false
),])
])?,
Schema::new(vec![Field::new(
"c1",
DataType::Union(vec![
Field::new("c11", DataType::Utf8, true),
Field::new("c12", DataType::Utf8, true),
Field::new("c13", DataType::Time64(TimeUnit::Second), true),
]),
false
),]),
);
// incompatible field should throw error
assert!(Schema::try_merge(&[
Schema::new(vec![
Field::new("first_name", DataType::Utf8, false),
Field::new("last_name", DataType::Utf8, false),
]),
Schema::new(vec![Field::new("last_name", DataType::Int64, false),])
])
.is_err());
// incompatible metadata should throw error
assert!(Schema::try_merge(&[
Schema::new_with_metadata(
vec![Field::new("first_name", DataType::Utf8, false)],
[("foo".to_string(), "bar".to_string()),]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
),
Schema::new_with_metadata(
vec![Field::new("last_name", DataType::Utf8, false)],
[("foo".to_string(), "baz".to_string()),]
.iter()
.cloned()
.collect::<HashMap<String, String>>()
)
])
.is_err());
Ok(())
}
}
#[cfg(all(
test,
any(target_arch = "x86", target_arch = "x86_64"),
feature = "simd"
))]
mod arrow_numeric_type_tests {
use crate::datatypes::{
ArrowNumericType, Float32Type, Float64Type, Int32Type, Int64Type, Int8Type,
UInt16Type,
};
use packed_simd::*;
use FromCast;
#[test]
fn test_mask_f64() {
let mask = Float64Type::mask_from_u64(0b10101010);
let expected =
m64x8::from_cast(i64x8::from_slice_unaligned(&[-1, 0, -1, 0, -1, 0, -1, 0]));
assert_eq!(expected, mask);
}
#[test]
fn test_mask_u64() {
let mask = Int64Type::mask_from_u64(0b01010101);
let expected =
m64x8::from_cast(i64x8::from_slice_unaligned(&[0, -1, 0, -1, 0, -1, 0, -1]));
assert_eq!(expected, mask);
}
#[test]
fn test_mask_f32() {
let mask = Float32Type::mask_from_u64(0b10101010_10101010);
let expected = m32x16::from_cast(i32x16::from_slice_unaligned(&[
-1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0,
]));
assert_eq!(expected, mask);
}
#[test]
fn test_mask_i32() {
let mask = Int32Type::mask_from_u64(0b01010101_01010101);
let expected = m32x16::from_cast(i32x16::from_slice_unaligned(&[
0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1,
]));
assert_eq!(expected, mask);
}
#[test]
fn test_mask_u16() {
let mask = UInt16Type::mask_from_u64(0b01010101_01010101_10101010_10101010);
let expected = m16x32::from_cast(i16x32::from_slice_unaligned(&[
-1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, 0, -1, 0, -1, 0, -1,
0, -1, 0, -1, 0, -1, 0, -1, 0, -1,
]));
assert_eq!(expected, mask);
}
#[test]
fn test_mask_i8() {
let mask = Int8Type::mask_from_u64(
0b01010101_01010101_10101010_10101010_01010101_01010101_10101010_10101010,
);
let expected = m8x64::from_cast(i8x64::from_slice_unaligned(&[
-1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, 0, -1, 0, -1, 0, -1,
0, -1, 0, -1, 0, -1, 0, -1, 0, -1, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0,
-1, 0, -1, 0, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1, 0, -1,
]));
assert_eq!(expected, mask);
}
}
| 36.511119 | 135 | 0.444507 |
871743c9e6072576bd90c44ba02ca13584431a40
| 816 |
#[macro_use]
extern crate lazy_static;
mod metrics;
mod system_time_slot_clock;
mod testing_slot_clock;
use std::time::Duration;
pub use crate::system_time_slot_clock::{Error as SystemTimeSlotClockError, SystemTimeSlotClock};
pub use crate::testing_slot_clock::{Error as TestingSlotClockError, TestingSlotClock};
pub use metrics::scrape_for_metrics;
pub use types::Slot;
pub trait SlotClock: Send + Sync + Sized {
    /// Error type returned when the clock cannot be read.
    type Error;
    /// Create a new `SlotClock`.
    ///
    /// Returns an Error if `slot_duration_seconds == 0`.
    fn new(genesis_slot: Slot, genesis_seconds: u64, slot_duration_seconds: u64) -> Self;
    /// The slot at the current time, or `None` (presumably when the clock
    /// reads before genesis — confirm against the implementors).
    fn present_slot(&self) -> Result<Option<Slot>, Self::Error>;
    /// Time remaining until the next slot begins, or `None` (presumably when
    /// before genesis — confirm against the implementors).
    fn duration_to_next_slot(&self) -> Result<Option<Duration>, Self::Error>;
    /// The configured slot duration, in milliseconds.
    fn slot_duration_millis(&self) -> u64;
}
| 28.137931 | 96 | 0.732843 |
ddde0730d84cde92beb4d161d319f37dc2a03dde
| 1,979 |
use crate::bls::Engine;
use ff_cl_gen as ffgen;
// Instead of having a very large OpenCL program written for a specific curve, with a lot of
// rudandant codes (As OpenCL doesn't have generic types or templates), this module will dynamically
// generate OpenCL codes given different PrimeFields and curves.
static FFT_SRC: &str = include_str!("fft/fft.cl");
static FIELD2_SRC: &str = include_str!("multiexp/field2.cl");
static EC_SRC: &str = include_str!("multiexp/ec.cl");
static MULTIEXP_SRC: &str = include_str!("multiexp/multiexp.cl");
/// Instantiates the quadratic-extension-field template for the given
/// base-field and extension-field type names.
fn field2(field2: &str, field: &str) -> String {
    // Order matters: "FIELD2" must be substituted before "FIELD", because the
    // former contains the latter as a prefix.
    FIELD2_SRC
        .replace("FIELD2", field2)
        .replace("FIELD", field)
}
/// Instantiates the FFT template for the given field type name.
fn fft(field: &str) -> String {
    FFT_SRC.replace("FIELD", field)
}
// Injected into the elliptic-curve template by `ec` (it replaces the
// "__BLSTRS__" placeholder), so the OpenCL source can conditionally compile
// blstrs-specific code when that feature is enabled.
#[cfg(not(feature = "blstrs"))]
const BLSTRS_DEF: &str = "";
#[cfg(feature = "blstrs")]
const BLSTRS_DEF: &str = "#define BLSTRS";
/// Instantiates the elliptic-curve template for a field/point type pair.
fn ec(field: &str, point: &str) -> String {
    // "__BLSTRS__" expands to "#define BLSTRS" only when that feature is on.
    EC_SRC
        .replace("FIELD", field)
        .replace("POINT", point)
        .replace("__BLSTRS__", BLSTRS_DEF)
}
/// Instantiates the multiexp template for a point type and exponent field.
fn multiexp(point: &str, exp: &str) -> String {
    MULTIEXP_SRC
        .replace("POINT", point)
        .replace("EXPONENT", exp)
}
// WARNING: This function works only with Short Weierstrass Jacobian curves with Fq2 extension field.
pub fn kernel<E>(limb64: bool) -> String
where
    E: Engine,
{
    // Generate each field implementation with either 64-bit or 32-bit limbs.
    let fr = if limb64 {
        ffgen::field::<E::Fr, ffgen::Limb64>("Fr")
    } else {
        ffgen::field::<E::Fr, ffgen::Limb32>("Fr")
    };
    let fq = if limb64 {
        ffgen::field::<E::Fq, ffgen::Limb64>("Fq")
    } else {
        ffgen::field::<E::Fq, ffgen::Limb32>("Fq")
    };
    // Assemble the full program: scalar-field FFT, then G1 over Fq and G2
    // over the quadratic extension Fq2, each with a multiexp kernel.
    let sections = vec![
        fr,
        fft("Fr"),
        fq,
        ec("Fq", "G1"),
        multiexp("G1", "Fr"),
        field2("Fq2", "Fq"),
        ec("Fq2", "G2"),
        multiexp("G2", "Fr"),
    ];
    sections.join("\n\n")
}
| 29.984848 | 102 | 0.574027 |
4b02d0b60b8b59a43fc9baff50df77d7fc962b4c
| 3,112 |
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use LinkerFlavor;
use std::io;
use std::process::Command;
use target::{LinkArgs, TargetOptions};
use self::Arch::*;
#[allow(non_camel_case_types)]
#[derive(Copy, Clone)]
pub enum Arch {
    Armv7,
    Armv7s,
    Arm64,
    I386,
    X86_64,
}
impl Arch {
    /// The architecture name as understood by Apple tooling (`-arch`).
    pub fn to_string(&self) -> &'static str {
        match *self {
            Arch::Armv7 => "armv7",
            Arch::Armv7s => "armv7s",
            Arch::Arm64 => "arm64",
            Arch::I386 => "i386",
            Arch::X86_64 => "x86_64",
        }
    }
}
/// Locates the named Apple SDK by asking Xcode's `xcrun` helper.
///
/// Returns the SDK path (trimmed of its trailing newline) or a descriptive
/// error string when `xcrun` cannot be spawned or exits unsuccessfully.
pub fn get_sdk_root(sdk_name: &str) -> Result<String, String> {
    let res = Command::new("xcrun")
        .arg("--show-sdk-path")
        .arg("-sdk")
        .arg(sdk_name)
        .output()
        .and_then(|output| {
            if output.status.success() {
                // The SDK path is printed on stdout.
                Ok(String::from_utf8(output.stdout).unwrap())
            } else {
                // Surface xcrun's stderr through an io::Error so both failure
                // modes (spawn failure, non-zero exit) share one channel.
                let stderr = String::from_utf8(output.stderr).unwrap();
                let msg = format!("process exit with error: {}", stderr);
                Err(io::Error::new(io::ErrorKind::Other, &msg[..]))
            }
        });
    match res {
        Ok(output) => Ok(output.trim().to_string()),
        Err(e) => Err(format!("failed to get {} SDK path: {}", sdk_name, e)),
    }
}
/// Builds the linker arguments common to every iOS target of `arch`:
/// the `-arch` selection and the SDK sysroot for the linker.
fn build_pre_link_args(arch: Arch) -> Result<LinkArgs, String> {
    // Device architectures link against the iphoneos SDK, simulator
    // architectures against iphonesimulator.
    let sdk_name = match arch {
        Armv7 | Armv7s | Arm64 => "iphoneos",
        I386 | X86_64 => "iphonesimulator",
    };
    let sdk_root = get_sdk_root(sdk_name)?;
    let mut args = LinkArgs::new();
    args.insert(
        LinkerFlavor::Gcc,
        vec![
            "-arch".to_string(),
            arch.to_string().to_string(),
            "-Wl,-syslibroot".to_string(),
            sdk_root,
        ],
    );
    Ok(args)
}
/// The baseline CPU model targeted for each supported architecture.
fn target_cpu(arch: Arch) -> String {
    let cpu = match arch {
        Armv7 => "cortex-a8", // iOS7 is supported on iPhone 4 and higher
        Armv7s => "cortex-a9",
        Arm64 => "cyclone",
        I386 => "yonah",
        X86_64 => "core2",
    };
    cpu.to_string()
}
/// Base `TargetOptions` shared by the iOS targets for the given architecture.
///
/// Fails when the matching SDK root cannot be located via `xcrun`.
pub fn opts(arch: Arch) -> Result<TargetOptions, String> {
    let pre_link_args = build_pre_link_args(arch)?;
    Ok(TargetOptions {
        cpu: target_cpu(arch),
        dynamic_linking: false,
        executables: true,
        pre_link_args,
        // ELF-style TLS disabled for these targets (presumably an Apple
        // platform restriction — confirm).
        has_elf_tls: false,
        .. super::apple_base::opts()
    })
}
| 29.638095 | 80 | 0.512532 |
9b076131e8115e48d56bb06c1981fe82a9ad4ecf
| 6,668 |
//! Environment Mapping
#![cfg_attr(feature = "cargo-clippy", allow(use_self))]
#![feature(try_from)]
#[macro_use]
extern crate derive_builder;
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate getset;
extern crate dirs;
mod error;
pub use error::{Error, ErrorKind, Result};
use std::collections::HashMap;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use std::{env, fmt};
/// Suffix for environment variables file name.
const ENV_SUFFIX: &str = ".env";
/// Environment Kinds
///
/// The short string names for these variants ("common", "dev", "test",
/// "int", "stage", "prod") are defined by the conversion impls that follow
/// and double as the `.env` file base names.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum Kind {
    /// `Common` or shared environment variables.
    Common,
    /// `Development` specific environment variables.
    Development,
    /// `Test` specific environment variables.
    Test,
    /// `Integration` specific environment variables.
    Integration,
    /// `Staging` specific environment variables.
    Staging,
    /// `Production` specific environment variables.
    Production,
}
impl fmt::Display for Kind {
    /// Writes the kind's short name (e.g. "dev").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Reuse the `From<Kind> for &str` impl; writing the &str directly
        // avoids allocating an intermediate String on every format call.
        let kind_str: &str = (*self).into();
        f.write_str(kind_str)
    }
}
impl<'a> TryFrom<&'a str> for Kind {
    type Error = Error;
    /// Parses the short environment name (e.g. "dev"); any other string
    /// yields an `InvalidKind` error carrying the offending name.
    fn try_from(name: &'a str) -> Result<Kind> {
        match name {
            "common" => Ok(Kind::Common),
            "dev" => Ok(Kind::Development),
            "test" => Ok(Kind::Test),
            "int" => Ok(Kind::Integration),
            "stage" => Ok(Kind::Staging),
            "prod" => Ok(Kind::Production),
            unknown => Err(ErrorKind::InvalidKind(unknown.to_string()).into()),
        }
    }
}
/// The canonical short name for each `Kind`.
///
/// Must stay in sync with the `TryFrom<&str>` parsing impl above.
impl<'a> From<Kind> for &'a str {
    fn from(kind: Kind) -> &'a str {
        match kind {
            Kind::Common => "common",
            Kind::Development => "dev",
            Kind::Test => "test",
            Kind::Integration => "int",
            Kind::Staging => "stage",
            Kind::Production => "prod",
        }
    }
}
impl From<Kind> for String {
fn from(kind: Kind) -> String {
String::from(match kind {
Kind::Common => "common",
Kind::Development => "dev",
Kind::Test => "test",
Kind::Integration => "int",
Kind::Staging => "stage",
Kind::Production => "prod",
})
}
}
/// A `Config` used when loading environment properties.
#[derive(Builder, Clone, Debug, Eq, Getters, PartialEq, Setters)]
#[builder(setter(into))]
pub struct Config {
    /// The environment `Kind` we are loading.
    #[get = "pub"]
    #[set = "pub"]
    #[builder(default = "self.default_kind()")]
    kind: Kind,
    /// The application name.
    ///
    /// Also the directory (under the config path) that holds the `.env`
    /// files.
    #[get = "pub"]
    #[set = "pub"]
    app_name: String,
    /// Should we read from a `common.env` file?
    #[get = "pub"]
    #[set = "pub"]
    #[builder(default = "false")]
    common: bool,
    /// Does the property file have comments?
    #[get = "pub"]
    #[set = "pub"]
    #[builder(default = "false")]
    comments: bool,
    /// The comment character.
    ///
    /// Only consulted when `comments` is `true`; defaults to `'#'`.
    #[get = "pub"]
    #[set = "pub"]
    #[builder(default = "'#'")]
    comment_char: char,
    /// Should we pull the OS environment into our props?
    #[get = "pub"]
    #[set = "pub"]
    #[builder(default = "false")]
    os: bool,
}
impl ConfigBuilder {
    /// Setup the default `Kind` for a `Config`.
    ///
    /// Referenced by the `#[builder(default = "self.default_kind()")]`
    /// attribute on `Config::kind`.
    fn default_kind(&self) -> Kind {
        Kind::Development
    }
}
/// The `Environment` of the given kind.
///
/// Built via the `TryFrom<Config>` impl below; read through the derived
/// getters.
#[derive(Clone, Debug, Eq, Getters, PartialEq, Setters)]
pub struct Environment {
    /// The `Kind` of this environment.
    #[get = "pub"]
    current: Kind,
    /// The key-value pairs for this environment (common + kind).
    #[get = "pub"]
    props: HashMap<String, String>,
}
// Intentionally empty: construction happens through `TryFrom<Config>` and
// accessors are generated by the `Getters` derive on the struct.
impl Environment {}
/// Get the default file path.
///
/// Honors `$XDG_CONFIG_HOME` when set; otherwise falls back to
/// `<home>/.config`. Fails only when no home directory can be determined.
#[cfg(unix)]
pub fn get_config_path() -> Result<PathBuf> {
    if let Ok(xdg) = env::var("XDG_CONFIG_HOME") {
        return Ok(PathBuf::from(xdg));
    }
    match dirs::home_dir() {
        Some(mut home) => {
            home.push(".config");
            Ok(home)
        }
        None => Err(ErrorKind::ConfigPath.into()),
    }
}
/// Get the default file path.
///
/// Prefers `%APPDATA%`; otherwise falls back to `<home>\.config`, mirroring
/// the unix implementation above.
#[cfg(windows)]
pub fn get_config_path() -> Result<PathBuf> {
    let mut config_path = PathBuf::new();
    match env::var("APPDATA") {
        Ok(val) => {
            config_path.push(val);
        }
        Err(_e) => {
            // `std::env::home_dir` is deprecated (its Windows behavior is
            // documented as unreliable); use the `dirs` crate instead, the
            // same way the unix version of this function does.
            if let Some(home_dir) = dirs::home_dir() {
                config_path.push(home_dir);
                config_path.push(".config");
            } else {
                return Err(ErrorKind::ConfigPath.into());
            }
        }
    }
    Ok(config_path)
}
/// Read a property file into a `HashMap`.
///
/// Loads `<config dir>/<app_name>/<kind>.env` and inserts every `key=value`
/// line into `props`, overwriting any existing keys.
fn read_props_file(config: &Config, props: &mut HashMap<String, String>) -> Result<()> {
    let mut path = get_config_path()?;
    path.push(config.app_name());
    // The file name is the kind's short name plus ".env", e.g. "dev.env".
    let mut file_name: String = (*config.kind()).into();
    file_name.push_str(ENV_SUFFIX);
    path.push(file_name);
    let reader = BufReader::new(File::open(path)?);
    for line in reader.lines() {
        let line = line?;
        // Optionally skip comment lines.
        if *config.comments() && line.starts_with(*config.comment_char()) {
            continue;
        }
        // Every remaining line must be exactly `key=value`; a line with no
        // '=' or with '=' inside the value is rejected.
        let parts: Vec<&str> = line.split('=').collect();
        if parts.len() != 2 {
            return Err(ErrorKind::InvalidProperty.into());
        }
        props.insert(parts[0].to_string(), parts[1].to_string());
    }
    Ok(())
}
impl TryFrom<Config> for Environment {
    type Error = Error;
    /// Build an `Environment` by layering property sources.
    ///
    /// Precedence (later wins, since inserts overwrite): the OS environment
    /// (if `config.os`), then the shared `common.env` file (if
    /// `config.common`), then the kind-specific file for `config.kind`.
    fn try_from(config: Config) -> Result<Environment> {
        let mut props: HashMap<String, String> = HashMap::new();
        // Seed with the process environment when requested.
        if *config.os() {
            props.extend(env::vars());
        }
        // Overlay shared values from `common.env`.
        if *config.common() {
            let common_config = ConfigBuilder::default()
                .app_name(config.app_name().to_string())
                .kind(Kind::Common)
                .build()?;
            read_props_file(&common_config, &mut props)?;
        }
        // Finally the kind-specific file, overriding everything before it.
        read_props_file(&config, &mut props)?;
        Ok(Environment {
            current: *config.kind(),
            props,
        })
    }
}
| 27.105691 | 88 | 0.541992 |
0352419e3f04089077bfd69916a45406139e6511
| 2,990 |
use log::warn;
use serde::Deserialize;
use serde::Serialize;
const MAX_ATTRIBUTE: u8 = 5;
const MAX_SKILL: u8 = 5;
const MAX_DISCIPLINE: u8 = 5;
// Character-sheet data (Vampire: the Masquerade, judging by the fields —
// clan, sire, hunger, predator type; confirm against the game rules used).
#[derive(Serialize, Deserialize, Debug, Default)]
pub(crate) struct Character {
    name: String,
    concept: String,
    predator_type: String,
    chronicle: String,
    ambition: String,
    clan: String,
    sire: String,
    desire: String,
    generation: u8,
    // Maps are presumably keyed by the contained `id` fields — TODO confirm
    // against the serialized data files.
    attributes: std::collections::HashMap<String, Attribute>,
    skills: std::collections::HashMap<String, Skill>,
    disciplines: std::collections::HashMap<String, Discipline>,
    hunger: u8,
    humanity: u8,
    // Trackers: a rating plus separate normal and aggravated damage tallies.
    health: u8,
    health_damage: u8,
    aggravated_health_damage: u8,
    willpower: u8,
    willpower_damage: u8,
    aggravated_willpower_damage: u8,
}
#[derive(Serialize, Deserialize, Debug, Default)]
pub(crate) struct Attribute {
    id: String,
    // Identifier used to look up the localized display name.
    text_id: String,
    points: u8,
}
impl Attribute {
    pub(crate) fn id(&self) -> &str {
        &self.id
    }
    pub(crate) fn text_id(&self) -> &str {
        &self.text_id
    }
    /// Points in this attribute, validated against `MAX_ATTRIBUTE`.
    ///
    /// Out-of-range values are logged and reported as 0. `points` is
    /// unsigned, so only the upper bound needs checking (the previous
    /// `0 <= self.points` test was always true and triggered the
    /// `unused_comparisons` warning).
    pub(crate) fn points(&self) -> u8 {
        if self.points <= MAX_ATTRIBUTE {
            self.points
        } else {
            warn!(
                "The attribute \"{}\" has the unreasonable value of \"{}\".",
                self.id, self.points
            );
            0
        }
    }
}
#[derive(Serialize, Deserialize, Debug, Default)]
pub(crate) struct Skill {
    id: String,
    // Identifier used to look up the localized display name.
    text_id: String,
    points: u8,
}
impl Skill {
    pub(crate) fn id(&self) -> &str {
        &self.id
    }
    pub(crate) fn text_id(&self) -> &str {
        &self.text_id
    }
    /// Points in this skill, validated against `MAX_SKILL`.
    ///
    /// Out-of-range values are logged and reported as 0. `points` is
    /// unsigned, so only the upper bound needs checking (the previous
    /// `0 <= self.points` test was always true and triggered the
    /// `unused_comparisons` warning).
    pub(crate) fn points(&self) -> u8 {
        if self.points <= MAX_SKILL {
            self.points
        } else {
            warn!(
                "The skill \"{}\" has the unreasonable value of \"{}\".",
                self.id, self.points
            );
            0
        }
    }
}
#[derive(Serialize, Deserialize, Debug, Default)]
pub(crate) struct Discipline {
    id: String,
    // Identifier used to look up the localized display name.
    text_id: String,
    points: u8,
    powers: std::collections::HashMap<String, Power>,
}
impl Discipline {
    pub(crate) fn id(&self) -> &str {
        &self.id
    }
    pub(crate) fn text_id(&self) -> &str {
        &self.text_id
    }
    /// Points in this discipline, validated against `MAX_DISCIPLINE`.
    ///
    /// Out-of-range values are logged and reported as 0. `points` is
    /// unsigned, so only the upper bound needs checking (the previous
    /// `0 <= self.points` test was always true and triggered the
    /// `unused_comparisons` warning).
    pub(crate) fn points(&self) -> u8 {
        if self.points <= MAX_DISCIPLINE {
            self.points
        } else {
            warn!(
                "The discipline \"{}\" has the unreasonable value of \"{}\".",
                self.id, self.points
            );
            0
        }
    }
    /// Look up a power by id; panics if the id is unknown.
    ///
    /// `unwrap_or_else` defers building the panic message, so the happy path
    /// does no formatting/allocation (the previous `expect(&format!(...))`
    /// built it on every call — clippy `expect_fun_call`).
    pub(crate) fn power_by_id(&self, id: String) -> &Power {
        self.powers
            .get(&id)
            .unwrap_or_else(|| panic!("Unknown power with id \"{}\"", id))
    }
}
// A single power belonging to a `Discipline` (see `Discipline::powers`).
#[derive(Serialize, Deserialize, Debug, Default)]
pub(crate) struct Power {
    id: String,
    // Identifier used to look up the localized display name (same convention
    // as `Attribute::text_id`).
    text_id: String,
}
| 25.12605 | 78 | 0.549498 |
ccff040d228fad003f135b48b6612d0db8209710
| 1,399 |
// Copyright © 2020-2022 The Smelling Salts Contributors.
//
// Licensed under any of:
// - Apache License, Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0)
// - Boost Software License, Version 1.0 (https://www.boost.org/LICENSE_1_0.txt)
// - MIT License (https://mit-license.org/)
// At your choosing (See accompanying files LICENSE_APACHE_2_0.txt,
// LICENSE_MIT.txt and LICENSE_BOOST_1_0.txt).
//!
//! ## Getting Started
//! Add the following to your `Cargo.toml`.
//!
//! ```toml
//! ##################
//! ## For Libraries #
//! ##################
//!
//! [dependencies.smelling_salts]
//! version = "0.6"
//!
//! #####################
//! ## For Applications #
//! #####################
//!
//! [dependencies.pasts]
//! version = "0.8"
//! ```
#![doc(
html_logo_url = "https://libcala.github.io/logo.svg",
html_favicon_url = "https://libcala.github.io/icon.svg",
html_root_url = "https://docs.rs/smelling_salts"
)]
#![deny(unsafe_code)]
#![warn(
anonymous_parameters,
missing_copy_implementations,
missing_debug_implementations,
missing_docs,
nonstandard_style,
rust_2018_idioms,
single_use_lifetimes,
trivial_casts,
trivial_numeric_casts,
unreachable_pub,
unused_extern_crates,
unused_qualifications,
variant_size_differences
)]
#[cfg(target_os = "linux")]
mod watcher;
#[cfg(target_os = "linux")]
pub mod linux;
| 24.982143 | 81 | 0.634739 |
dd08f238c7fc6787afd334519f364a2fd85021f5
| 24,649 |
#![doc = "generated by AutoRust"]
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Amount {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub currency: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub value: Option<f64>,
}
impl Amount {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscription {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<BillingSubscriptionProperties>,
}
impl BillingSubscription {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionAlias {
#[serde(flatten)]
pub resource: Resource,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub properties: Option<BillingSubscriptionAliasProperties>,
}
impl BillingSubscriptionAlias {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionAliasListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<BillingSubscriptionAlias>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl BillingSubscriptionAliasListResult {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionAliasProperties {
#[serde(flatten)]
pub billing_subscription_properties: BillingSubscriptionProperties,
#[serde(rename = "billingSubscriptionId", default, skip_serializing_if = "Option::is_none")]
pub billing_subscription_id: Option<String>,
}
impl BillingSubscriptionAliasProperties {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionMergeRequest {
#[serde(rename = "targetBillingSubscriptionName", default, skip_serializing_if = "Option::is_none")]
pub target_billing_subscription_name: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub quantity: Option<i32>,
}
impl BillingSubscriptionMergeRequest {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionProperties {
#[serde(rename = "autoRenew", default, skip_serializing_if = "Option::is_none")]
pub auto_renew: Option<billing_subscription_properties::AutoRenew>,
#[serde(rename = "beneficiaryTenantId", default, skip_serializing_if = "Option::is_none")]
pub beneficiary_tenant_id: Option<String>,
#[serde(rename = "billingFrequency", default, skip_serializing_if = "Option::is_none")]
pub billing_frequency: Option<String>,
#[serde(rename = "billingProfileId", default, skip_serializing_if = "Option::is_none")]
pub billing_profile_id: Option<String>,
#[serde(rename = "billingPolicies", default, skip_serializing_if = "Option::is_none")]
pub billing_policies: Option<serde_json::Value>,
#[serde(rename = "billingProfileDisplayName", default, skip_serializing_if = "Option::is_none")]
pub billing_profile_display_name: Option<String>,
#[serde(rename = "billingProfileName", default, skip_serializing_if = "Option::is_none")]
pub billing_profile_name: Option<String>,
#[serde(rename = "consumptionCostCenter", default, skip_serializing_if = "Option::is_none")]
pub consumption_cost_center: Option<String>,
#[serde(rename = "customerId", default, skip_serializing_if = "Option::is_none")]
pub customer_id: Option<String>,
#[serde(rename = "customerDisplayName", default, skip_serializing_if = "Option::is_none")]
pub customer_display_name: Option<String>,
#[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
pub display_name: Option<String>,
#[serde(rename = "enrollmentAccountId", default, skip_serializing_if = "Option::is_none")]
pub enrollment_account_id: Option<String>,
#[serde(rename = "enrollmentAccountDisplayName", default, skip_serializing_if = "Option::is_none")]
pub enrollment_account_display_name: Option<String>,
#[serde(rename = "enrollmentAccountSubscriptionDetails", default, skip_serializing_if = "Option::is_none")]
pub enrollment_account_subscription_details: Option<EnrollmentAccountSubscriptionDetails>,
#[serde(rename = "invoiceSectionId", default, skip_serializing_if = "Option::is_none")]
pub invoice_section_id: Option<String>,
#[serde(rename = "invoiceSectionDisplayName", default, skip_serializing_if = "Option::is_none")]
pub invoice_section_display_name: Option<String>,
#[serde(rename = "invoiceSectionName", default, skip_serializing_if = "Option::is_none")]
pub invoice_section_name: Option<String>,
#[serde(rename = "lastMonthCharges", default, skip_serializing_if = "Option::is_none")]
pub last_month_charges: Option<Amount>,
#[serde(rename = "monthToDateCharges", default, skip_serializing_if = "Option::is_none")]
pub month_to_date_charges: Option<Amount>,
#[serde(rename = "nextBillingCycleDetails", default, skip_serializing_if = "Option::is_none")]
pub next_billing_cycle_details: Option<NextBillingCycleDetails>,
#[serde(rename = "offerId", default, skip_serializing_if = "Option::is_none")]
pub offer_id: Option<String>,
#[serde(rename = "productCategory", default, skip_serializing_if = "Option::is_none")]
pub product_category: Option<String>,
#[serde(rename = "productType", default, skip_serializing_if = "Option::is_none")]
pub product_type: Option<String>,
#[serde(rename = "productTypeId", default, skip_serializing_if = "Option::is_none")]
pub product_type_id: Option<String>,
#[serde(rename = "purchaseDate", default, skip_serializing_if = "Option::is_none")]
pub purchase_date: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub quantity: Option<i64>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub reseller: Option<Reseller>,
#[serde(rename = "renewalTermDetails", default, skip_serializing_if = "Option::is_none")]
pub renewal_term_details: Option<RenewalTermDetails>,
#[serde(rename = "skuDescription", default, skip_serializing_if = "Option::is_none")]
pub sku_description: Option<String>,
#[serde(rename = "skuId", default, skip_serializing_if = "Option::is_none")]
pub sku_id: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub status: Option<billing_subscription_properties::Status>,
#[serde(rename = "subscriptionId", default, skip_serializing_if = "Option::is_none")]
pub subscription_id: Option<String>,
#[serde(rename = "suspensionReasons", default, skip_serializing_if = "Vec::is_empty")]
pub suspension_reasons: Vec<String>,
#[serde(rename = "termDuration", default, skip_serializing_if = "Option::is_none")]
pub term_duration: Option<String>,
#[serde(rename = "termStartDate", default, skip_serializing_if = "Option::is_none")]
pub term_start_date: Option<String>,
#[serde(rename = "termEndDate", default, skip_serializing_if = "Option::is_none")]
pub term_end_date: Option<String>,
}
impl BillingSubscriptionProperties {
pub fn new() -> Self {
Self::default()
}
}
pub mod billing_subscription_properties {
use super::*;
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum AutoRenew {
Off,
On,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Status {
Unknown,
Active,
Disabled,
Deleted,
Warned,
Expiring,
Expired,
AutoRenew,
Cancelled,
Suspended,
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionSplitRequest {
#[serde(rename = "billingFrequency", default, skip_serializing_if = "Option::is_none")]
pub billing_frequency: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub quantity: Option<i32>,
#[serde(rename = "targetProductTypeId", default, skip_serializing_if = "Option::is_none")]
pub target_product_type_id: Option<String>,
#[serde(rename = "targetSkuId", default, skip_serializing_if = "Option::is_none")]
pub target_sku_id: Option<String>,
#[serde(rename = "termDuration", default, skip_serializing_if = "Option::is_none")]
pub term_duration: Option<String>,
}
impl BillingSubscriptionSplitRequest {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct BillingSubscriptionsListResult {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub value: Vec<BillingSubscription>,
#[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
pub next_link: Option<String>,
}
impl BillingSubscriptionsListResult {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct DetachPaymentMethodEligibilityError {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub code: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub message: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub details: Vec<DetachPaymentMethodErrorDetails>,
}
impl DetachPaymentMethodEligibilityError {
pub fn new() -> Self {
Self::default()
}
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum DetachPaymentMethodEligibilityErrorCode {
AzureSubscriptions,
RecurringCharges,
ReservedInstances,
OutstandingCharges,
PendingCharges,
}
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct DetachPaymentMethodEligibilityResult {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub error: Option<DetachPaymentMethodEligibilityError>,
}
impl DetachPaymentMethodEligibilityResult {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Error information from a detach-payment-method eligibility check: an
/// optional machine-readable `code` plus an optional human-readable `message`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct DetachPaymentMethodErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<DetachPaymentMethodEligibilityErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
}
impl DetachPaymentMethodErrorDetails {
    /// Creates an empty instance; equivalent to `Default::default()`.
    pub fn new() -> Self {
        Self::default()
    }
}
/// Subscription details scoped to an enrollment account: the enrollment
/// status and the start date (serialized as a string).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct EnrollmentAccountSubscriptionDetails {
    #[serde(rename = "subscriptionEnrollmentAccountStatus", default, skip_serializing_if = "Option::is_none")]
    pub subscription_enrollment_account_status: Option<enrollment_account_subscription_details::SubscriptionEnrollmentAccountStatus>,
    #[serde(rename = "enrollmentAccountStartDate", default, skip_serializing_if = "Option::is_none")]
    pub enrollment_account_start_date: Option<String>,
}
impl EnrollmentAccountSubscriptionDetails {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Nested types for [`EnrollmentAccountSubscriptionDetails`].
pub mod enrollment_account_subscription_details {
    use super::*;
    /// Status values of a subscription under an enrollment account.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum SubscriptionEnrollmentAccountStatus {
        Active,
        Cancelled,
        Expired,
        Deleted,
        TransferredOut,
        Transferring,
    }
}
/// Generic error body: `code`, `message`, optional `target` and sub-`details`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorDetails {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub target: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<ErrorSubDetails>,
}
impl ErrorDetails {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Top-level error envelope wrapping [`ErrorDetails`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ErrorResponse {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub error: Option<ErrorDetails>,
}
impl ErrorResponse {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Free-form list of additional error detail objects.
pub type ErrorSubDetails = Vec<serde_json::Value>;
/// Reasons a billing subscription may be ineligible for a move between
/// invoice sections / billing profiles.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum MoveBillingSubscriptionEligibilityErrorCode {
    AccountIsLocked,
    AssetNotActive,
    AssetHasCap,
    BillingAccountInactive,
    BillingProfilePastDue,
    CrossBillingAccountNotAllowed,
    DestinationBillingProfileNotFound,
    DestinationBillingProfileInactive,
    DestinationBillingProfilePastDue,
    DestinationInvoiceSectionNotFound,
    DestinationInvoiceSectionInactive,
    InvalidDestination,
    InvalidSource,
    InvoiceSectionIsRestricted,
    InsufficientPermissionOnDestination,
    InsufficientPermissionOnSource,
    MarketplaceNotEnabledOnDestination,
    ProductNotFound,
    ProductInactive,
    ProductTypeNotSupported,
    SourceBillingProfilePastDue,
    SourceInvoiceSectionInactive,
    SubscriptionNotActive,
    SubscriptionTypeNotSupported,
}
/// Request body for moving a billing subscription: the destination
/// enrollment account id and/or invoice section id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct MoveBillingSubscriptionRequest {
    #[serde(rename = "destinationEnrollmentAccountId", default, skip_serializing_if = "Option::is_none")]
    pub destination_enrollment_account_id: Option<String>,
    #[serde(rename = "destinationInvoiceSectionId", default, skip_serializing_if = "Option::is_none")]
    pub destination_invoice_section_id: Option<String>,
}
impl MoveBillingSubscriptionRequest {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Details of the next billing cycle (currently only the billing frequency).
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct NextBillingCycleDetails {
    #[serde(rename = "billingFrequency", default, skip_serializing_if = "Option::is_none")]
    pub billing_frequency: Option<String>,
}
impl NextBillingCycleDetails {
    pub fn new() -> Self {
        Self::default()
    }
}
/// A REST API operation exposed by the service, with optional display
/// metadata and a data-action flag.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Operation {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "isDataAction", default, skip_serializing_if = "Option::is_none")]
    pub is_data_action: Option<bool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub display: Option<operation::Display>,
}
impl Operation {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Nested types for [`Operation`].
pub mod operation {
    use super::*;
    /// Human-readable metadata for an operation (provider, resource,
    /// operation name, description).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
    pub struct Display {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub provider: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub resource: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub operation: Option<String>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub description: Option<String>,
    }
    impl Display {
        pub fn new() -> Self {
            Self::default()
        }
    }
}
/// One page of [`Operation`]s plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct OperationListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<Operation>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl OperationListResult {
    pub fn new() -> Self {
        Self::default()
    }
}
/// A payment method resource: common ARM [`Resource`] fields (flattened)
/// plus the payment-method properties.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethod {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PaymentMethodProperties>,
}
impl PaymentMethod {
    pub fn new() -> Self {
        Self::default()
    }
}
/// A link between a scope and a payment method, again flattening the
/// common ARM [`Resource`] fields.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodLink {
    #[serde(flatten)]
    pub resource: Resource,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub properties: Option<PaymentMethodLinkProperties>,
}
impl PaymentMethodLink {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Properties of a [`PaymentMethodLink`]: the projected payment method.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodLinkProperties {
    #[serde(rename = "paymentMethod", default, skip_serializing_if = "Option::is_none")]
    pub payment_method: Option<PaymentMethodProjectionProperties>,
}
impl PaymentMethodLinkProperties {
    pub fn new() -> Self {
        Self::default()
    }
}
/// One page of [`PaymentMethodLink`]s plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodLinksListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PaymentMethodLink>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl PaymentMethodLinksListResult {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Logo for a payment method: MIME type plus URL.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodLogo {
    #[serde(rename = "mimeType", default, skip_serializing_if = "Option::is_none")]
    pub mime_type: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
}
impl PaymentMethodLogo {
    pub fn new() -> Self {
        Self::default()
    }
}
/// A projected (embedded) view of a payment method, including its own `id`;
/// otherwise mirrors [`PaymentMethodProperties`].
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodProjectionProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub family: Option<payment_method_projection_properties::Family>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "accountHolderName", default, skip_serializing_if = "Option::is_none")]
    pub account_holder_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiration: Option<String>,
    #[serde(rename = "lastFourDigits", default, skip_serializing_if = "Option::is_none")]
    pub last_four_digits: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub logos: Vec<PaymentMethodLogo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<payment_method_projection_properties::Status>,
}
impl PaymentMethodProjectionProperties {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Nested enums for [`PaymentMethodProjectionProperties`].
pub mod payment_method_projection_properties {
    use super::*;
    /// Payment method family.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Family {
        CreditCard,
        CheckWire,
    }
    /// Payment method status (wire format is lowercase).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "active")]
        Active,
        #[serde(rename = "inactive")]
        Inactive,
    }
}
/// Properties of a standalone [`PaymentMethod`] resource.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodProperties {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub family: Option<payment_method_properties::Family>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
    #[serde(rename = "accountHolderName", default, skip_serializing_if = "Option::is_none")]
    pub account_holder_name: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub expiration: Option<String>,
    #[serde(rename = "lastFourDigits", default, skip_serializing_if = "Option::is_none")]
    pub last_four_digits: Option<String>,
    #[serde(rename = "displayName", default, skip_serializing_if = "Option::is_none")]
    pub display_name: Option<String>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub logos: Vec<PaymentMethodLogo>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub status: Option<payment_method_properties::Status>,
}
impl PaymentMethodProperties {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Nested enums for [`PaymentMethodProperties`].
pub mod payment_method_properties {
    use super::*;
    /// Payment method family.
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Family {
        CreditCard,
        CheckWire,
    }
    /// Payment method status (wire format is lowercase).
    #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
    pub enum Status {
        #[serde(rename = "active")]
        Active,
        #[serde(rename = "inactive")]
        Inactive,
    }
}
/// Reference to a payment method by id.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodResource {
    #[serde(rename = "paymentMethodId", default, skip_serializing_if = "Option::is_none")]
    pub payment_method_id: Option<String>,
}
impl PaymentMethodResource {
    pub fn new() -> Self {
        Self::default()
    }
}
/// One page of [`PaymentMethod`]s plus an optional continuation link.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct PaymentMethodsListResult {
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub value: Vec<PaymentMethod>,
    #[serde(rename = "nextLink", default, skip_serializing_if = "Option::is_none")]
    pub next_link: Option<String>,
}
impl PaymentMethodsListResult {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Terms that apply when a subscription renews: frequency, product type,
/// quantity, SKU and term duration.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct RenewalTermDetails {
    #[serde(rename = "billingFrequency", default, skip_serializing_if = "Option::is_none")]
    pub billing_frequency: Option<String>,
    #[serde(rename = "productTypeId", default, skip_serializing_if = "Option::is_none")]
    pub product_type_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub quantity: Option<i64>,
    #[serde(rename = "skuId", default, skip_serializing_if = "Option::is_none")]
    pub sku_id: Option<String>,
    #[serde(rename = "termDuration", default, skip_serializing_if = "Option::is_none")]
    pub term_duration: Option<String>,
}
impl RenewalTermDetails {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Reseller identification: id plus description.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Reseller {
    #[serde(rename = "resellerId", default, skip_serializing_if = "Option::is_none")]
    pub reseller_id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
}
impl Reseller {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Common ARM resource envelope: `id`, `name` and `type`.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct Resource {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "type", default, skip_serializing_if = "Option::is_none")]
    pub type_: Option<String>,
}
impl Resource {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Error information for a move-subscription eligibility validation.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ValidateMoveBillingSubscriptionEligibilityError {
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub code: Option<MoveBillingSubscriptionEligibilityErrorCode>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub message: Option<String>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub details: Option<String>,
}
impl ValidateMoveBillingSubscriptionEligibilityError {
    pub fn new() -> Self {
        Self::default()
    }
}
/// Outcome of a move-subscription eligibility validation: a flag plus
/// error details when the move is not eligible.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Default)]
pub struct ValidateMoveBillingSubscriptionEligibilityResult {
    #[serde(rename = "isMoveEligible", default, skip_serializing_if = "Option::is_none")]
    pub is_move_eligible: Option<bool>,
    #[serde(rename = "errorDetails", default, skip_serializing_if = "Option::is_none")]
    pub error_details: Option<ValidateMoveBillingSubscriptionEligibilityError>,
}
impl ValidateMoveBillingSubscriptionEligibilityResult {
    pub fn new() -> Self {
        Self::default()
    }
}
| 39.3126 | 133 | 0.70441 |
899a5c83b54435da261ecfa9e7439242221772fa
| 1,428 |
#[doc = "Reader of register STATICWAITOEN%s"]
pub type R = crate::R<u32, super::STATICWAITOEN>;
#[doc = "Writer for register STATICWAITOEN%s"]
pub type W = crate::W<u32, super::STATICWAITOEN>;
#[doc = "Register STATICWAITOEN%s `reset()`'s with value 0"]
impl crate::ResetValue for super::STATICWAITOEN {
    type Type = u32;
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `WAITOEN`"]
pub type WAITOEN_R = crate::R<u8, u8>;
#[doc = "Write proxy for field `WAITOEN`"]
pub struct WAITOEN_W<'a> {
    w: &'a mut W,
}
impl<'a> WAITOEN_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u8) -> &'a mut W {
        // Clear the 4-bit field (mask 0x0f at bit offset 0), then insert the
        // new value; all other register bits are left untouched.
        self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f);
        self.w
    }
}
impl R {
    #[doc = "Bits 0:3 - Wait output enable. Delay from chip select assertion to output enable. 0x0 = No delay (POR reset value). 0x1 - 0xF = n cycle delay. The delay is WAITOEN x tCCLK."]
    #[inline(always)]
    pub fn waitoen(&self) -> WAITOEN_R {
        // Extract the low 4 bits of the register value.
        WAITOEN_R::new((self.bits & 0x0f) as u8)
    }
}
impl W {
    #[doc = "Bits 0:3 - Wait output enable. Delay from chip select assertion to output enable. 0x0 = No delay (POR reset value). 0x1 - 0xF = n cycle delay. The delay is WAITOEN x tCCLK."]
    #[inline(always)]
    pub fn waitoen(&mut self) -> WAITOEN_W {
        WAITOEN_W { w: self }
    }
}
| 34.829268 | 187 | 0.616246 |
2946613840134c6d8ef7d2943d95b45337c8b03a
| 1,755 |
extern crate walkdir;
use std::env;
use std::fs::{self, DirBuilder};
use std::path::{Path, PathBuf};
use walkdir::WalkDir;
fn main() {
    // Cargo-provided locations: build output dir and crate root.
    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
    // Resolve the final executable directory, e.g. <workspace>/target/<profile>.
    // Going via the target dir makes this work inside workspaces too.
    let profile = env::var("PROFILE").unwrap();
    let target_dir = locate_target_dir_from_output_dir(&out_dir)
        .expect("failed to find target dir");
    let executable_path = target_dir.join(profile);
    // Mirror the crate's `assets` directory next to the built executable.
    copy(
        &manifest_dir.join("assets"),
        &executable_path.join("assets"),
    );
}
/// Walks up from Cargo's `OUT_DIR` until a directory named `target` is found.
///
/// Returns `None` when neither the path itself nor any of its ancestors ends
/// with `target` (i.e. a non-standard build layout).
fn locate_target_dir_from_output_dir(target_dir_search: &Path) -> Option<&Path> {
    // `ancestors()` yields the path itself first, then each parent in turn,
    // which is exactly the manual parent-walking loop this replaces.
    target_dir_search
        .ancestors()
        .find(|dir| dir.ends_with("target"))
}
/// Recursively mirrors the directory tree at `from` into `to`.
fn copy(from: &Path, to: &Path) {
    let src_root: PathBuf = from.into();
    let dst_root: PathBuf = to.into();
    for entry in WalkDir::new(&src_root) {
        let entry = entry.unwrap();
        // Entries whose path is not under the source root are skipped.
        let rel_path = match entry.path().strip_prefix(&src_root) {
            Ok(rel) => rel,
            Err(_) => continue,
        };
        let dest = dst_root.join(rel_path);
        if entry.file_type().is_dir() {
            // Create the directory (and any missing parents) on the target side.
            DirBuilder::new()
                .recursive(true)
                .create(&dest)
                .expect("failed to create target dir");
        } else {
            fs::copy(entry.path(), &dest).expect("failed to copy");
        }
    }
}
| 29.25 | 85 | 0.589174 |
d6cb92072bc69fc56815c4ebd713f0ba1d29b562
| 31,690 |
use std::error;
use std::fmt;
use std::fmt::{Display, Formatter};
use std::num::ParseIntError;
use std::str;
use std::str::FromStr;
use bech32;
use bech32::{u5, FromBase32};
use bitcoin_hashes::Hash;
use bitcoin_hashes::sha256;
use num_traits::{CheckedAdd, CheckedMul};
use secp256k1;
use secp256k1::recovery::{RecoveryId, RecoverableSignature};
use secp256k1::key::PublicKey;
use super::*;
use self::hrp_sm::parse_hrp;
/// State machine to parse the hrp into its `(currency, amount, si-prefix)`
/// components. The hrp has the shape `ln{currency}{amount}{si}` where the
/// amount and the SI prefix are optional.
mod hrp_sm {
    use std::ops::Range;
    /// Parser states; one per component currently being read.
    #[derive(PartialEq, Eq, Debug)]
    enum States {
        Start,
        ParseL,
        ParseN,
        ParseCurrencyPrefix,
        ParseAmountNumber,
        ParseAmountSiPrefix,
    }
    impl States {
        /// Returns the state reached after consuming `read_symbol`, or
        /// `MalformedHRP` if no transition is allowed from the current state.
        fn next_state(&self, read_symbol: char) -> Result<States, super::ParseError> {
            match *self {
                States::Start => {
                    if read_symbol == 'l' {
                        Ok(States::ParseL)
                    } else {
                        Err(super::ParseError::MalformedHRP)
                    }
                }
                States::ParseL => {
                    if read_symbol == 'n' {
                        Ok(States::ParseN)
                    } else {
                        Err(super::ParseError::MalformedHRP)
                    }
                },
                States::ParseN => {
                    // After the mandatory "ln": a non-digit starts the
                    // currency prefix, a digit starts the amount directly
                    // (i.e. an empty currency prefix).
                    if !read_symbol.is_numeric() {
                        Ok(States::ParseCurrencyPrefix)
                    } else {
                        Ok(States::ParseAmountNumber)
                    }
                },
                States::ParseCurrencyPrefix => {
                    if !read_symbol.is_numeric() {
                        Ok(States::ParseCurrencyPrefix)
                    } else {
                        Ok(States::ParseAmountNumber)
                    }
                },
                States::ParseAmountNumber => {
                    if read_symbol.is_numeric() {
                        Ok(States::ParseAmountNumber)
                    } else if ['m', 'u', 'n', 'p'].contains(&read_symbol) {
                        Ok(States::ParseAmountSiPrefix)
                    } else {
                        Err(super::ParseError::MalformedHRP)
                    }
                },
                // Nothing may follow the single SI prefix character.
                States::ParseAmountSiPrefix => Err(super::ParseError::MalformedHRP),
            }
        }
        /// States in which the input may legitimately end, i.e. anything but
        /// the middle of the "ln" prefix.
        fn is_final(&self) -> bool {
            !(*self == States::ParseL || *self == States::ParseN)
        }
    }
    /// Runs the state transitions over the input while accumulating the
    /// index ranges of the hrp's components.
    struct StateMachine {
        state: States,
        position: usize,
        currency_prefix: Option<Range<usize>>,
        amount_number: Option<Range<usize>>,
        amount_si_prefix: Option<Range<usize>>,
    }
    impl StateMachine {
        fn new() -> StateMachine {
            StateMachine {
                state: States::Start,
                position: 0,
                currency_prefix: None,
                amount_number: None,
                amount_si_prefix: None,
            }
        }
        /// Extends `range` by one position, or starts it at `position` if it
        /// hasn't been started yet.
        fn update_range(range: &mut Option<Range<usize>>, position: usize) {
            let new_range = match *range {
                None => Range {start: position, end: position + 1},
                Some(ref r) => Range {start: r.start, end: r.end + 1},
            };
            *range = Some(new_range);
        }
        /// Consumes one character: advances the state and records which
        /// component range the character belongs to.
        fn step(&mut self, c: char) -> Result<(), super::ParseError> {
            let next_state = self.state.next_state(c)?;
            match next_state {
                States::ParseCurrencyPrefix => {
                    StateMachine::update_range(&mut self.currency_prefix, self.position)
                }
                States::ParseAmountNumber => {
                    StateMachine::update_range(&mut self.amount_number, self.position)
                },
                States::ParseAmountSiPrefix => {
                    StateMachine::update_range(&mut self.amount_si_prefix, self.position)
                },
                _ => {}
            }
            self.position += 1;
            self.state = next_state;
            Ok(())
        }
        fn is_final(&self) -> bool {
            self.state.is_final()
        }
        fn currency_prefix(&self) -> &Option<Range<usize>> {
            &self.currency_prefix
        }
        fn amount_number(&self) -> &Option<Range<usize>> {
            &self.amount_number
        }
        fn amount_si_prefix(&self) -> &Option<Range<usize>> {
            &self.amount_si_prefix
        }
    }
    /// Splits an hrp into its `(currency, amount, si_prefix)` substrings;
    /// each may be empty when the corresponding component is absent.
    pub fn parse_hrp(input: &str) -> Result<(&str, &str, &str), super::ParseError> {
        let mut sm = StateMachine::new();
        for c in input.chars() {
            sm.step(c)?;
        }
        if !sm.is_final() {
            return Err(super::ParseError::MalformedHRP);
        }
        let currency = sm.currency_prefix().clone()
            .map(|r| &input[r]).unwrap_or("");
        let amount = sm.amount_number().clone()
            .map(|r| &input[r]).unwrap_or("");
        let si = sm.amount_si_prefix().clone()
            .map(|r| &input[r]).unwrap_or("");
        Ok((currency, amount, si))
    }
}
impl FromStr for super::Currency {
    type Err = ParseError;

    /// Maps a BOLT11 hrp currency prefix to its `Currency` variant.
    fn from_str(currency_prefix: &str) -> Result<Self, ParseError> {
        Ok(match currency_prefix {
            "bc" => Currency::Bitcoin,
            "tb" => Currency::BitcoinTestnet,
            "bcrt" => Currency::Regtest,
            "sb" => Currency::Simnet,
            _ => return Err(ParseError::UnknownCurrency),
        })
    }
}
impl FromStr for SiPrefix {
    type Err = ParseError;

    /// Parses a single-character amount multiplier ("m"/"u"/"n"/"p").
    fn from_str(currency_prefix: &str) -> Result<Self, ParseError> {
        use SiPrefix::*;
        Ok(match currency_prefix {
            "m" => Milli,
            "u" => Micro,
            "n" => Nano,
            "p" => Pico,
            _ => return Err(ParseError::UnknownSiPrefix),
        })
    }
}
/// ```
/// use lightning_invoice::Invoice;
///
/// let invoice = "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdp\
/// l2pkx2ctnv5sxxmmwwd5kgetjypeh2ursdae8g6twvus8g6rfwvs8qun0dfjkxaq8rkx3yf5tcsyz3d7\
/// 3gafnh3cax9rn449d9p5uxz9ezhhypd0elx87sjle52x86fux2ypatgddc6k63n7erqz25le42c4u4ec\
/// ky03ylcqca784w";
///
/// assert!(invoice.parse::<Invoice>().is_ok());
/// ```
impl FromStr for Invoice {
type Err = ParseOrSemanticError;
fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {
let signed = s.parse::<SignedRawInvoice>()?;
Ok(Invoice::from_signed(signed)?)
}
}
/// ```
/// use lightning_invoice::*;
///
/// let invoice = "lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdp\
/// l2pkx2ctnv5sxxmmwwd5kgetjypeh2ursdae8g6twvus8g6rfwvs8qun0dfjkxaq8rkx3yf5tcsyz3d7\
/// 3gafnh3cax9rn449d9p5uxz9ezhhypd0elx87sjle52x86fux2ypatgddc6k63n7erqz25le42c4u4ec\
/// ky03ylcqca784w";
///
/// let parsed_1 = invoice.parse::<Invoice>();
///
/// let parsed_2 = match invoice.parse::<SignedRawInvoice>() {
///     Ok(signed) => match Invoice::from_signed(signed) {
///         Ok(invoice) => Ok(invoice),
///         Err(e) => Err(ParseOrSemanticError::SemanticError(e)),
///     },
///     Err(e) => Err(ParseOrSemanticError::ParseError(e)),
/// };
///
/// assert!(parsed_1.is_ok());
/// assert_eq!(parsed_1, parsed_2);
/// ```
impl FromStr for SignedRawInvoice {
    type Err = ParseError;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (hrp, data) = bech32::decode(s)?;
        // The signature occupies the final 104 u5 symbols (65 bytes: a 64
        // byte compact signature plus 1 recovery-id byte), so anything
        // shorter cannot be a signed invoice.
        if data.len() < 104 {
            return Err(ParseError::TooShortDataPart);
        }
        let raw_hrp: RawHrp = hrp.parse()?;
        // Everything before the trailing signature is the invoice data part.
        let data_part = RawDataPart::from_base32(&data[..data.len()-104])?;
        Ok(SignedRawInvoice {
            raw_invoice: RawInvoice {
                hrp: raw_hrp,
                data: data_part,
            },
            // Hash over exactly the parts that were signed: hrp bytes plus
            // the data part (signature excluded).
            hash: RawInvoice::hash_from_parts(
                hrp.as_bytes(),
                &data[..data.len()-104]
            ),
            signature: Signature::from_base32(&data[data.len()-104..])?,
        })
    }
}
impl FromStr for RawHrp {
    type Err = ParseError;
    fn from_str(hrp: &str) -> Result<Self, <Self as FromStr>::Err> {
        // (currency, amount digits, si prefix) substrings; each may be empty.
        let parts = parse_hrp(hrp)?;
        let currency = parts.0.parse::<Currency>()?;
        let amount = if !parts.1.is_empty() {
            Some(parts.1.parse::<u64>()?)
        } else {
            None
        };
        let si_prefix: Option<SiPrefix> = if parts.2.is_empty() {
            None
        } else {
            let si: SiPrefix = parts.2.parse()?;
            // Reject amounts that would overflow a u64 once scaled by the
            // SI prefix's multiplier.
            if let Some(amt) = amount {
                if amt.checked_mul(si.multiplier()).is_none() {
                    return Err(ParseError::IntegerOverflowError);
                }
            }
            Some(si)
        };
        Ok(RawHrp {
            currency: currency,
            raw_amount: amount,
            si_prefix: si_prefix,
        })
    }
}
impl FromBase32 for RawDataPart {
    type Err = ParseError;

    /// Decodes the data part: a fixed 7-symbol timestamp followed by the
    /// tagged fields.
    fn from_base32(data: &[u5]) -> Result<Self, Self::Err> {
        // At least the 7 u5 symbols of the timestamp must be present.
        if data.len() < 7 {
            return Err(ParseError::TooShortDataPart);
        }
        let (timestamp_part, tagged_part) = data.split_at(7);
        let timestamp = PositiveTimestamp::from_base32(timestamp_part)?;
        let tagged_fields = parse_tagged_parts(tagged_part)?;
        Ok(RawDataPart {
            timestamp: timestamp,
            tagged_fields: tagged_fields,
        })
    }
}
impl FromBase32 for PositiveTimestamp {
    type Err = ParseError;
    fn from_base32(b32: &[u5]) -> Result<Self, Self::Err> {
        // The timestamp is fixed-width: exactly 7 u5 symbols (35 bits).
        if b32.len() != 7 {
            return Err(ParseError::InvalidSliceLength("PositiveTimestamp::from_base32()".into()));
        }
        let timestamp: u64 = parse_int_be(b32, 32)
            .expect("7*5bit < 64bit, no overflow possible");
        match PositiveTimestamp::from_unix_timestamp(timestamp) {
            Ok(t) => Ok(t),
            // Map the only possible creation error onto a parse error.
            Err(CreationError::TimestampOutOfBounds) => Err(ParseError::TimestampOverflow),
            Err(_) => unreachable!(),
        }
    }
}
impl FromBase32 for Signature {
    type Err = ParseError;
    fn from_base32(signature: &[u5]) -> Result<Self, Self::Err> {
        // A recoverable signature is always 104 u5 symbols (65 bytes).
        if signature.len() != 104 {
            return Err(ParseError::InvalidSliceLength("Signature::from_base32()".into()));
        }
        let recoverable_signature_bytes = Vec::<u8>::from_base32(signature)?;
        // First 64 bytes: compact signature; final byte: recovery id.
        let signature = &recoverable_signature_bytes[0..64];
        let recovery_id = RecoveryId::from_i32(recoverable_signature_bytes[64] as i32)?;
        Ok(Signature(RecoverableSignature::from_compact(
            signature,
            recovery_id
        )?))
    }
}
/// Interprets `digits` as a big-endian number in the given `base`, returning
/// `None` if the accumulated value would overflow `T`.
fn parse_int_be<T, U>(digits: &[U], base: T) -> Option<T>
	where T: CheckedAdd + CheckedMul + From<u8> + Default,
	      U: Into<u8> + Copy
{
	let mut acc: T = Default::default();
	for digit in digits {
		// acc = acc * base + digit, bailing out on overflow.
		acc = acc.checked_mul(&base)?;
		acc = acc.checked_add(&T::from((*digit).into()))?;
	}
	Some(acc)
}
/// Splits `data` into its tagged fields (each: 1 u5 tag, 2 u5 big-endian
/// length, then `length` u5 of payload) and parses every field. Fields with
/// unknown tags are preserved as raw u5 data instead of being dropped.
fn parse_tagged_parts(data: &[u5]) -> Result<Vec<RawTaggedField>, ParseError> {
    let mut parts = Vec::<RawTaggedField>::new();
    let mut data = data;
    while !data.is_empty() {
        // Tag + length need at least 3 u5 symbols.
        if data.len() < 3 {
            return Err(ParseError::UnexpectedEndOfTaggedFields);
        }
        // Ignore tag at data[0], it will be handled in the TaggedField parsers and
        // parse the length to find the end of the tagged field's data
        let len = parse_int_be(&data[1..3], 32).expect("can't overflow");
        let last_element = 3 + len;
        if data.len() < last_element {
            return Err(ParseError::UnexpectedEndOfTaggedFields);
        }
        // Get the tagged field's data slice
        let field = &data[0..last_element];
        // Set data slice to remaining data
        data = &data[last_element..];
        match TaggedField::from_base32(field) {
            Ok(field) => {
                parts.push(RawTaggedField::KnownSemantics(field))
            },
            // `Skip` is not an error: the field is kept verbatim.
            Err(ParseError::Skip) => {
                parts.push(RawTaggedField::UnknownSemantics(field.into()))
            },
            Err(e) => {return Err(e)}
        }
    }
    Ok(parts)
}
impl FromBase32 for TaggedField {
    type Err = ParseError;
    fn from_base32(field: &[u5]) -> Result<TaggedField, ParseError> {
        if field.len() < 3 {
            return Err(ParseError::UnexpectedEndOfTaggedFields);
        }
        // field[0] is the tag, field[1..3] the length (already validated by
        // the caller), field[3..] the payload.
        let tag = field[0];
        let field_data =  &field[3..];
        // Dispatch on the BOLT11 tag constant.
        match tag.to_u8() {
            constants::TAG_PAYMENT_HASH =>
                Ok(TaggedField::PaymentHash(Sha256::from_base32(field_data)?)),
            constants::TAG_DESCRIPTION =>
                Ok(TaggedField::Description(Description::from_base32(field_data)?)),
            constants::TAG_PAYEE_PUB_KEY =>
                Ok(TaggedField::PayeePubKey(PayeePubKey::from_base32(field_data)?)),
            constants::TAG_DESCRIPTION_HASH =>
                Ok(TaggedField::DescriptionHash(Sha256::from_base32(field_data)?)),
            constants::TAG_EXPIRY_TIME =>
                Ok(TaggedField::ExpiryTime(ExpiryTime::from_base32(field_data)?)),
            constants::TAG_MIN_FINAL_CLTV_EXPIRY =>
                Ok(TaggedField::MinFinalCltvExpiry(MinFinalCltvExpiry::from_base32(field_data)?)),
            constants::TAG_FALLBACK =>
                Ok(TaggedField::Fallback(Fallback::from_base32(field_data)?)),
            constants::TAG_ROUTE =>
                Ok(TaggedField::Route(Route::from_base32(field_data)?)),
            constants::TAG_PAYMENT_SECRET =>
                Ok(TaggedField::PaymentSecret(PaymentSecret::from_base32(field_data)?)),
            _ => {
                // "A reader MUST skip over unknown fields"
                Err(ParseError::Skip)
            }
        }
    }
}
impl FromBase32 for Sha256 {
    type Err = ParseError;

    fn from_base32(field_data: &[u5]) -> Result<Sha256, ParseError> {
        // BOLT11: "A reader MUST skip over […] a p, [or] h […] field that
        // does not have data_length 52 […]." (52 u5 decode to 32 bytes.)
        if field_data.len() == 52 {
            let bytes = Vec::<u8>::from_base32(field_data)?;
            let hash = sha256::Hash::from_slice(&bytes)
                .expect("length was checked before (52 u5 -> 32 u8)");
            Ok(Sha256(hash))
        } else {
            Err(ParseError::Skip)
        }
    }
}
impl FromBase32 for Description {
    type Err = ParseError;

    fn from_base32(field_data: &[u5]) -> Result<Description, ParseError> {
        // Decode to raw bytes first, then require valid UTF-8.
        let bytes = Vec::<u8>::from_base32(field_data)?;
        let text = str::from_utf8(&bytes)?.to_string();
        Ok(Description::new(text).expect(
            "Max len is 639=floor(1023*5/8) since the len field is only 10bits long"
        ))
    }
}
impl FromBase32 for PayeePubKey {
    type Err = ParseError;

    fn from_base32(field_data: &[u5]) -> Result<PayeePubKey, ParseError> {
        // BOLT11: a `n` field must be exactly 53 u5 symbols (33 bytes, a
        // compressed public key); any other length has to be skipped.
        if field_data.len() != 53 {
            return Err(ParseError::Skip);
        }
        let bytes = Vec::<u8>::from_base32(field_data)?;
        Ok(PublicKey::from_slice(&bytes)?.into())
    }
}
impl FromBase32 for PaymentSecret {
    type Err = ParseError;

    fn from_base32(field_data: &[u5]) -> Result<PaymentSecret, ParseError> {
        // A payment secret is exactly 32 bytes, i.e. 52 u5 symbols.
        if field_data.len() != 52 {
            return Err(ParseError::Skip);
        }
        let bytes = Vec::<u8>::from_base32(field_data)?;
        let mut secret = [0; 32];
        secret.copy_from_slice(&bytes);
        Ok(PaymentSecret(secret))
    }
}
impl FromBase32 for ExpiryTime {
    type Err = ParseError;

    fn from_base32(field_data: &[u5]) -> Result<ExpiryTime, ParseError> {
        // `.ok()` is safe here as ExpiryTime::from_seconds only fails on
        // out-of-bounds values, which we report as an overflow.
        parse_int_be::<u64, u5>(field_data, 32)
            .and_then(|seconds| ExpiryTime::from_seconds(seconds).ok())
            .ok_or(ParseError::IntegerOverflowError)
    }
}
impl FromBase32 for MinFinalCltvExpiry {
type Err = ParseError;
fn from_base32(field_data: &[u5]) -> Result<MinFinalCltvExpiry, ParseError> {
let expiry = parse_int_be::<u64, u5>(field_data, 32);
if let Some(expiry) = expiry {
Ok(MinFinalCltvExpiry(expiry))
} else {
Err(ParseError::IntegerOverflowError)
}
}
}
impl FromBase32 for Fallback {
    type Err = ParseError;
    fn from_base32(field_data: &[u5]) -> Result<Fallback, ParseError> {
        if field_data.len() < 1 {
            return Err(ParseError::UnexpectedEndOfTaggedFields);
        }
        // First symbol is the address version; the rest is the program/hash.
        let version = field_data[0];
        let bytes = Vec::<u8>::from_base32(&field_data[1..])?;
        match version.to_u8() {
            0..=16 => {
                // SegWit versions: witness program must be 2..=40 bytes.
                if bytes.len() < 2 || bytes.len() > 40 {
                    return Err(ParseError::InvalidSegWitProgramLength);
                }
                Ok(Fallback::SegWitProgram {
                    version: version,
                    program: bytes
                })
            },
            17 => {
                // Version 17: a 20-byte public key hash.
                if bytes.len() != 20 {
                    return Err(ParseError::InvalidPubKeyHashLength);
                }
                //TODO: refactor once const generics are available
                let mut pkh = [0u8; 20];
                pkh.copy_from_slice(&bytes);
                Ok(Fallback::PubKeyHash(pkh))
            }
            18 => {
                // Version 18: a 20-byte script hash.
                if bytes.len() != 20 {
                    return Err(ParseError::InvalidScriptHashLength);
                }
                let mut sh = [0u8; 20];
                sh.copy_from_slice(&bytes);
                Ok(Fallback::ScriptHash(sh))
            }
            _ => Err(ParseError::Skip)
        }
    }
}
impl FromBase32 for Route {
    type Err = ParseError;
    fn from_base32(field_data: &[u5]) -> Result<Route, ParseError> {
        let bytes = Vec::<u8>::from_base32(field_data)?;
        // Each hop is a fixed 51-byte record: 33 byte pubkey, 8 byte short
        // channel id, 4 byte base fee, 4 byte proportional fee, 2 byte cltv
        // expiry delta — so the payload must be a multiple of 51.
        if bytes.len() % 51 != 0 {
            return Err(ParseError::UnexpectedEndOfTaggedFields);
        }
        let mut route_hops = Vec::<RouteHop>::new();
        let mut bytes = bytes.as_slice();
        while !bytes.is_empty() {
            let hop_bytes = &bytes[0..51];
            bytes = &bytes[51..];
            let mut channel_id: [u8; 8] = Default::default();
            channel_id.copy_from_slice(&hop_bytes[33..41]);
            let hop = RouteHop {
                pubkey: PublicKey::from_slice(&hop_bytes[0..33])?,
                short_channel_id: channel_id,
                fee_base_msat: parse_int_be(&hop_bytes[41..45], 256).expect("slice too big?"),
                fee_proportional_millionths: parse_int_be(&hop_bytes[45..49], 256).expect("slice too big?"),
                cltv_expiry_delta: parse_int_be(&hop_bytes[49..51], 256).expect("slice too big?")
            };
            route_hops.push(hop);
        }
        Ok(Route(route_hops))
    }
}
/// Errors that indicate what is wrong with the invoice. They have some granularity for debug
/// reasons, but should generally result in an "invalid BOLT11 invoice" message for the user.
#[allow(missing_docs)]
#[derive(PartialEq, Debug, Clone)]
pub enum ParseError {
    /// Error bubbled up from the bech32 decoder.
    Bech32Error(bech32::Error),
    /// The hrp's amount could not be parsed as an integer.
    ParseAmountError(ParseIntError),
    /// The recoverable signature could not be reconstructed.
    MalformedSignature(secp256k1::Error),
    BadPrefix,
    UnknownCurrency,
    UnknownSiPrefix,
    MalformedHRP,
    /// The data part is shorter than the mandatory signature (104 u5).
    TooShortDataPart,
    UnexpectedEndOfTaggedFields,
    /// The description field is not valid UTF-8.
    DescriptionDecodeError(str::Utf8Error),
    PaddingError,
    IntegerOverflowError,
    InvalidSegWitProgramLength,
    InvalidPubKeyHashLength,
    InvalidScriptHashLength,
    InvalidRecoveryId,
    /// A fixed-width slice had the wrong length; carries the function name.
    InvalidSliceLength(String),
    /// Not an error, but used internally to signal that a part of the invoice should be ignored
    /// according to BOLT11
    Skip,
    TimestampOverflow,
}
/// Indicates that something went wrong while parsing or validating the invoice. Parsing errors
/// should be mostly seen as opaque and are only there for debugging reasons. Semantic errors
/// like wrong signatures, missing fields etc. could mean that someone tampered with the invoice.
#[derive(PartialEq, Debug, Clone)]
pub enum ParseOrSemanticError {
    /// The invoice couldn't be decoded (syntactically invalid bech32/BOLT11)
    ParseError(ParseError),
    /// The invoice could be decoded but violates the BOLT11 standard
    SemanticError(::SemanticError),
}
impl Display for ParseError {
    /// Human-readable description of the parse failure.
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        match *self {
            // TODO: find a way to combine the first three arms (e as error::Error?)
            ParseError::Bech32Error(ref e) => {
                write!(f, "Invalid bech32: {}", e)
            }
            ParseError::ParseAmountError(ref e) => {
                write!(f, "Invalid amount in hrp ({})", e)
            }
            ParseError::MalformedSignature(ref e) => {
                write!(f, "Invalid secp256k1 signature: {}", e)
            }
            ParseError::DescriptionDecodeError(ref e) => {
                write!(f, "Description is not a valid utf-8 string: {}", e)
            }
            ParseError::InvalidSliceLength(ref function) => {
                write!(f, "Slice in function {} had the wrong length", function)
            }
            ParseError::BadPrefix => f.write_str("did not begin with 'ln'"),
            ParseError::UnknownCurrency => f.write_str("currency code unknown"),
            ParseError::UnknownSiPrefix => f.write_str("unknown SI prefix"),
            ParseError::MalformedHRP => f.write_str("malformed human readable part"),
            ParseError::TooShortDataPart => {
                f.write_str("data part too short (should be at least 111 bech32 chars long)")
            },
            ParseError::UnexpectedEndOfTaggedFields => {
                f.write_str("tagged fields part ended unexpectedly")
            },
            ParseError::PaddingError => f.write_str("some data field had bad padding"),
            ParseError::IntegerOverflowError => {
                f.write_str("parsed integer doesn't fit into receiving type")
            },
            ParseError::InvalidSegWitProgramLength => {
                f.write_str("fallback SegWit program is too long or too short")
            },
            ParseError::InvalidPubKeyHashLength => {
                f.write_str("fallback public key hash has a length unequal 20 bytes")
            },
            ParseError::InvalidScriptHashLength => {
                // Fixed: `Fallback::from_base32` rejects script hashes whose
                // length != 20 bytes, so the message must say 20, not 32.
                f.write_str("fallback script hash has a length unequal 20 bytes")
            },
            ParseError::InvalidRecoveryId => {
                f.write_str("recovery id is out of range (should be in [0,3])")
            },
            ParseError::Skip => {
                f.write_str("the tagged field has to be skipped because of an unexpected, but allowed property")
            },
            ParseError::TimestampOverflow => {
                f.write_str("the invoice's timestamp could not be represented as SystemTime")
            },
        }
    }
}
impl Display for ParseOrSemanticError {
    /// Delegates to the wrapped error's `Display` implementation.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match *self {
            ParseOrSemanticError::ParseError(ref err) => err.fmt(f),
            ParseOrSemanticError::SemanticError(ref err) => err.fmt(f),
        }
    }
}
// Both error enums carry their message via `Display`, so the default
// `Error` trait impls are sufficient.
impl error::Error for ParseError {}
impl error::Error for ParseOrSemanticError {}
/// Generates a `From<$extern_error> for ParseError` impl that wraps the
/// foreign error in the given `ParseError` variant.
macro_rules! from_error {
    ($my_error:expr, $extern_error:ty) => {
        impl From<$extern_error> for ParseError {
            fn from(e: $extern_error) -> Self {
                $my_error(e)
            }
        }
    }
}
from_error!(ParseError::MalformedSignature, secp256k1::Error);
from_error!(ParseError::ParseAmountError, ParseIntError);
from_error!(ParseError::DescriptionDecodeError, str::Utf8Error);
impl From<bech32::Error> for ParseError {
fn from(e: bech32::Error) -> Self {
match e {
bech32::Error::InvalidPadding => ParseError::PaddingError,
_ => ParseError::Bech32Error(e)
}
}
}
// Allows `?` to lift a low-level `ParseError` into the combined error type.
impl From<ParseError> for ParseOrSemanticError {
	fn from(e: ParseError) -> Self {
		ParseOrSemanticError::ParseError(e)
	}
}
// Allows `?` to lift a `SemanticError` into the combined error type.
impl From<::SemanticError> for ParseOrSemanticError {
	fn from(e: SemanticError) -> Self {
		ParseOrSemanticError::SemanticError(e)
	}
}
#[cfg(test)]
mod test {
	use de::ParseError;
	use secp256k1::PublicKey;
	use bech32::u5;
	use bitcoin_hashes::hex::FromHex;
	use bitcoin_hashes::sha256;
	// Reverse bech32 alphabet: maps an ASCII byte to its 5-bit value, or -1
	// for bytes outside the charset. Note rows 5/7 and 6/8 are identical so
	// that upper- and lower-case letters decode to the same values.
	const CHARSET_REV: [i8; 128] = [
		-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
		-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
		-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
		15, -1, 10, 17, 21, 20, 26, 30, 7, 5, -1, -1, -1, -1, -1, -1,
		-1, 29, -1, 24, 13, 25, 9, 8, 23, -1, 18, 22, 31, 27, 19, -1,
		1, 0, 3, 16, 11, 28, 12, 14, 6, 4, 2, -1, -1, -1, -1, -1,
		-1, 29, -1, 24, 13, 25, 9, 8, 23, -1, 18, 22, 31, 27, 19, -1,
		1, 0, 3, 16, 11, 28, 12, 14, 6, 4, 2, -1, -1, -1, -1, -1
	];
	// Test helper: decodes an ASCII bech32 data-part string into 5-bit
	// values. Panics (via `unwrap`) on characters outside the charset, which
	// is acceptable for hard-coded test fixtures.
	fn from_bech32(bytes_5b: &[u8]) -> Vec<u5> {
		bytes_5b
			.iter()
			.map(|c| u5::try_from_u8(CHARSET_REV[*c as usize] as u8).unwrap())
			.collect()
	}
	// `FromStr` on `Currency` must accept every known prefix and reject
	// anything else with `UnknownCurrency`.
	#[test]
	fn test_parse_currency_prefix() {
		use Currency;
		assert_eq!("bc".parse::<Currency>(), Ok(Currency::Bitcoin));
		assert_eq!("tb".parse::<Currency>(), Ok(Currency::BitcoinTestnet));
		assert_eq!("bcrt".parse::<Currency>(), Ok(Currency::Regtest));
		assert_eq!("sb".parse::<Currency>(), Ok(Currency::Simnet));
		assert_eq!("something_else".parse::<Currency>(), Err(ParseError::UnknownCurrency))
	}
	// Big-endian integer parsing with arbitrary base; overflow of the target
	// type must yield `None`.
	#[test]
	fn test_parse_int_from_bytes_be() {
		use de::parse_int_be;
		assert_eq!(parse_int_be::<u32, u8>(&[1, 2, 3, 4], 256), Some(16909060));
		assert_eq!(parse_int_be::<u32, u8>(&[1, 3], 32), Some(35));
		assert_eq!(parse_int_be::<u32, u8>(&[255, 255, 255, 255], 256), Some(4294967295));
		assert_eq!(parse_int_be::<u32, u8>(&[1, 0, 0, 0, 0], 256), None);
	}
	#[test]
	fn test_parse_sha256_hash() {
		use Sha256;
		use bech32::FromBase32;
		let input = from_bech32(
			"qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypq".as_bytes()
		);
		let hash = sha256::Hash::from_hex(
			"0001020304050607080900010203040506070809000102030405060708090102"
		).unwrap();
		let expected = Ok(Sha256(hash));
		assert_eq!(Sha256::from_base32(&input), expected);
		// make sure hashes of unknown length get skipped
		let input_unexpected_length = from_bech32(
			"qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypyq".as_bytes()
		);
		assert_eq!(Sha256::from_base32(&input_unexpected_length), Err(ParseError::Skip));
	}
	#[test]
	fn test_parse_description() {
		use ::Description;
		use bech32::FromBase32;
		let input = from_bech32("xysxxatsyp3k7enxv4js".as_bytes());
		let expected = Ok(Description::new("1 cup coffee".to_owned()).unwrap());
		assert_eq!(Description::from_base32(&input), expected);
	}
	#[test]
	fn test_parse_payee_pub_key() {
		use ::PayeePubKey;
		use bech32::FromBase32;
		let input = from_bech32("q0n326hr8v9zprg8gsvezcch06gfaqqhde2aj730yg0durunfhv66".as_bytes());
		let pk_bytes = [
			0x03, 0xe7, 0x15, 0x6a, 0xe3, 0x3b, 0x0a, 0x20, 0x8d, 0x07, 0x44, 0x19, 0x91, 0x63,
			0x17, 0x7e, 0x90, 0x9e, 0x80, 0x17, 0x6e, 0x55, 0xd9, 0x7a, 0x2f, 0x22, 0x1e, 0xde,
			0x0f, 0x93, 0x4d, 0xd9, 0xad
		];
		let expected = Ok(PayeePubKey(
			PublicKey::from_slice(&pk_bytes[..]).unwrap()
		));
		assert_eq!(PayeePubKey::from_base32(&input), expected);
		// expects 33 bytes
		let input_unexpected_length = from_bech32(
			"q0n326hr8v9zprg8gsvezcch06gfaqqhde2aj730yg0durunfhvq".as_bytes()
		);
		assert_eq!(PayeePubKey::from_base32(&input_unexpected_length), Err(ParseError::Skip));
	}
	#[test]
	fn test_parse_expiry_time() {
		use ::ExpiryTime;
		use bech32::FromBase32;
		let input = from_bech32("pu".as_bytes());
		let expected = Ok(ExpiryTime::from_seconds(60).unwrap());
		assert_eq!(ExpiryTime::from_base32(&input), expected);
		// an expiry value too large for the receiving type must error,
		// not wrap
		let input_too_large = from_bech32("sqqqqqqqqqqqq".as_bytes());
		assert_eq!(ExpiryTime::from_base32(&input_too_large), Err(ParseError::IntegerOverflowError));
	}
	#[test]
	fn test_parse_min_final_cltv_expiry() {
		use ::MinFinalCltvExpiry;
		use bech32::FromBase32;
		let input = from_bech32("pr".as_bytes());
		let expected = Ok(MinFinalCltvExpiry(35));
		assert_eq!(MinFinalCltvExpiry::from_base32(&input), expected);
	}
	// Table-driven test for the fallback on-chain address field: P2PKH,
	// P2SH and SegWit success cases plus every length/version error path.
	#[test]
	fn test_parse_fallback() {
		use Fallback;
		use bech32::FromBase32;
		let cases = vec![
			(
				from_bech32("3x9et2e20v6pu37c5d9vax37wxq72un98".as_bytes()),
				Ok(Fallback::PubKeyHash([
					0x31, 0x72, 0xb5, 0x65, 0x4f, 0x66, 0x83, 0xc8, 0xfb, 0x14, 0x69, 0x59, 0xd3,
					0x47, 0xce, 0x30, 0x3c, 0xae, 0x4c, 0xa7
				]))
			),
			(
				from_bech32("j3a24vwu6r8ejrss3axul8rxldph2q7z9".as_bytes()),
				Ok(Fallback::ScriptHash([
					0x8f, 0x55, 0x56, 0x3b, 0x9a, 0x19, 0xf3, 0x21, 0xc2, 0x11, 0xe9, 0xb9, 0xf3,
					0x8c, 0xdf, 0x68, 0x6e, 0xa0, 0x78, 0x45
				]))
			),
			(
				from_bech32("qw508d6qejxtdg4y5r3zarvary0c5xw7k".as_bytes()),
				Ok(Fallback::SegWitProgram {
					version: u5::try_from_u8(0).unwrap(),
					program: Vec::from(&[
						0x75u8, 0x1e, 0x76, 0xe8, 0x19, 0x91, 0x96, 0xd4, 0x54, 0x94, 0x1c, 0x45,
						0xd1, 0xb3, 0xa3, 0x23, 0xf1, 0x43, 0x3b, 0xd6
					][..])
				})
			),
			(
				vec![u5::try_from_u8(21).unwrap(); 41],
				Err(ParseError::Skip)
			),
			(
				vec![],
				Err(ParseError::UnexpectedEndOfTaggedFields)
			),
			(
				vec![u5::try_from_u8(1).unwrap(); 81],
				Err(ParseError::InvalidSegWitProgramLength)
			),
			(
				vec![u5::try_from_u8(17).unwrap(); 1],
				Err(ParseError::InvalidPubKeyHashLength)
			),
			(
				vec![u5::try_from_u8(18).unwrap(); 1],
				Err(ParseError::InvalidScriptHashLength)
			)
		];
		for (input, expected) in cases.into_iter() {
			assert_eq!(Fallback::from_base32(&input), expected);
		}
	}
	// A route field must decode into an ordered list of hops; truncated
	// input (not a multiple of the per-hop length) must error.
	#[test]
	fn test_parse_route() {
		use RouteHop;
		use ::Route;
		use bech32::FromBase32;
		let input = from_bech32(
			"q20q82gphp2nflc7jtzrcazrra7wwgzxqc8u7754cdlpfrmccae92qgzqvzq2ps8pqqqqqqpqqqqq9qqqvpeuqa\
			fqxu92d8lr6fvg0r5gv0heeeqgcrqlnm6jhphu9y00rrhy4grqszsvpcgpy9qqqqqqgqqqqq7qqzq".as_bytes()
		);
		let mut expected = Vec::<RouteHop>::new();
		expected.push(RouteHop {
			pubkey: PublicKey::from_slice(
				&[
					0x02u8, 0x9e, 0x03, 0xa9, 0x01, 0xb8, 0x55, 0x34, 0xff, 0x1e, 0x92, 0xc4, 0x3c,
					0x74, 0x43, 0x1f, 0x7c, 0xe7, 0x20, 0x46, 0x06, 0x0f, 0xcf, 0x7a, 0x95, 0xc3,
					0x7e, 0x14, 0x8f, 0x78, 0xc7, 0x72, 0x55
				][..]
			).unwrap(),
			short_channel_id: [0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08],
			fee_base_msat: 1,
			fee_proportional_millionths: 20,
			cltv_expiry_delta: 3
		});
		expected.push(RouteHop {
			pubkey: PublicKey::from_slice(
				&[
					0x03u8, 0x9e, 0x03, 0xa9, 0x01, 0xb8, 0x55, 0x34, 0xff, 0x1e, 0x92, 0xc4, 0x3c,
					0x74, 0x43, 0x1f, 0x7c, 0xe7, 0x20, 0x46, 0x06, 0x0f, 0xcf, 0x7a, 0x95, 0xc3,
					0x7e, 0x14, 0x8f, 0x78, 0xc7, 0x72, 0x55
				][..]
			).unwrap(),
			short_channel_id: [0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a],
			fee_base_msat: 2,
			fee_proportional_millionths: 30,
			cltv_expiry_delta: 4
		});
		assert_eq!(Route::from_base32(&input), Ok(Route(expected)));
		assert_eq!(
			Route::from_base32(&[u5::try_from_u8(0).unwrap(); 40][..]),
			Err(ParseError::UnexpectedEndOfTaggedFields)
		);
	}
	// End-to-end parse of a full BOLT 11 invoice containing a payment
	// secret and feature bits; unknown tagged fields must round-trip into
	// `RawTaggedField::UnknownSemantics` rather than failing the parse.
	#[test]
	fn test_payment_secret_deserialization() {
		use bech32::CheckBase32;
		use secp256k1::recovery::{RecoveryId, RecoverableSignature};
		use TaggedField::*;
		use {SiPrefix, SignedRawInvoice, Signature, RawInvoice, RawTaggedField, RawHrp, RawDataPart,
			 Currency, Sha256, PositiveTimestamp};
		assert_eq!( // BOLT 11 payment secret invoice. The unknown fields are invoice features.
			"lnbc25m1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdq5vdhkven9v5sxyetpdeessp5zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zyg3zygs9q5sqqqqqqqqqqqqqqqpqsq67gye39hfg3zd8rgc80k32tvy9xk2xunwm5lzexnvpx6fd77en8qaq424dxgt56cag2dpt359k3ssyhetktkpqh24jqnjyw6uqd08sgptq44qu".parse(),
			Ok(SignedRawInvoice {
				raw_invoice: RawInvoice {
					hrp: RawHrp {
						currency: Currency::Bitcoin,
						raw_amount: Some(25),
						si_prefix: Some(SiPrefix::Milli)
					},
					data: RawDataPart {
						timestamp: PositiveTimestamp::from_unix_timestamp(1496314658).unwrap(),
						tagged_fields: vec ! [
							PaymentHash(Sha256(sha256::Hash::from_hex(
								"0001020304050607080900010203040506070809000102030405060708090102"
							).unwrap())).into(),
							Description(::Description::new("coffee beans".to_owned()).unwrap()).into(),
							PaymentSecret(::PaymentSecret([17; 32])).into(),
							RawTaggedField::UnknownSemantics(vec![5, 0, 20, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
																  0, 0, 0, 0, 1, 0, 16,
																  0].check_base32().unwrap())],
						}
					},
				hash: [0xb1, 0x96, 0x46, 0xc3, 0xbc, 0x56, 0x76, 0x1d, 0x20, 0x65, 0x6e, 0x0e, 0x32,
						0xec, 0xd2, 0x69, 0x27, 0xb7, 0x62, 0x6e, 0x2a, 0x8b, 0xe6, 0x97, 0x71, 0x9f,
						0xf8, 0x7e, 0x44, 0x54, 0x55, 0xb9],
				signature: Signature(RecoverableSignature::from_compact(
					&[0xd7, 0x90, 0x4c, 0xc4, 0xb7, 0x4a, 0x22, 0x26, 0x9c, 0x68, 0xc1, 0xdf, 0x68,
					  0xa9, 0x6c, 0x21, 0x4d, 0x65, 0x1b, 0x93, 0x76, 0xe9, 0xf1, 0x64, 0xd3, 0x60,
					  0x4d, 0xa4, 0xb7, 0xde, 0xcc, 0xce, 0x0e, 0x82, 0xaa, 0xab, 0x4c, 0x85, 0xd3,
					  0x58, 0xea, 0x14, 0xd0, 0xae, 0x34, 0x2d, 0xa3, 0x08, 0x12, 0xf9, 0x5d, 0x97,
					  0x60, 0x82, 0xea, 0xac, 0x81, 0x39, 0x11, 0xda, 0xe0, 0x1a, 0xf3, 0xc1],
					RecoveryId::from_i32(1).unwrap()
				).unwrap()),
			})
		)
	}
	// End-to-end parse of the classic BOLT 11 example invoice, checking the
	// HRP, data part, signing hash and recoverable signature all match.
	#[test]
	fn test_raw_signed_invoice_deserialization() {
		use TaggedField::*;
		use secp256k1::recovery::{RecoveryId, RecoverableSignature};
		use {SignedRawInvoice, Signature, RawInvoice, RawHrp, RawDataPart, Currency, Sha256,
			 PositiveTimestamp};
		assert_eq!(
			"lnbc1pvjluezpp5qqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqqqsyqcyq5rqwzqfqypqdpl2pkx2ctnv5sxxmmw\
			wd5kgetjypeh2ursdae8g6twvus8g6rfwvs8qun0dfjkxaq8rkx3yf5tcsyz3d73gafnh3cax9rn449d9p5uxz9\
			ezhhypd0elx87sjle52x86fux2ypatgddc6k63n7erqz25le42c4u4ecky03ylcqca784w".parse(),
			Ok(SignedRawInvoice {
				raw_invoice: RawInvoice {
					hrp: RawHrp {
						currency: Currency::Bitcoin,
						raw_amount: None,
						si_prefix: None,
					},
					data: RawDataPart {
					timestamp: PositiveTimestamp::from_unix_timestamp(1496314658).unwrap(),
					tagged_fields: vec ! [
						PaymentHash(Sha256(sha256::Hash::from_hex(
							"0001020304050607080900010203040506070809000102030405060708090102"
						).unwrap())).into(),
						Description(
							::Description::new(
								"Please consider supporting this project".to_owned()
							).unwrap()
						).into(),
					],
					},
					},
				hash: [
					0xc3, 0xd4, 0xe8, 0x3f, 0x64, 0x6f, 0xa7, 0x9a, 0x39, 0x3d, 0x75, 0x27,
					0x7b, 0x1d, 0x85, 0x8d, 0xb1, 0xd1, 0xf7, 0xab, 0x71, 0x37, 0xdc, 0xb7,
					0x83, 0x5d, 0xb2, 0xec, 0xd5, 0x18, 0xe1, 0xc9
				],
				signature: Signature(RecoverableSignature::from_compact(
					& [
						0x38u8, 0xec, 0x68, 0x91, 0x34, 0x5e, 0x20, 0x41, 0x45, 0xbe, 0x8a,
						0x3a, 0x99, 0xde, 0x38, 0xe9, 0x8a, 0x39, 0xd6, 0xa5, 0x69, 0x43,
						0x4e, 0x18, 0x45, 0xc8, 0xaf, 0x72, 0x05, 0xaf, 0xcf, 0xcc, 0x7f,
						0x42, 0x5f, 0xcd, 0x14, 0x63, 0xe9, 0x3c, 0x32, 0x88, 0x1e, 0xad,
						0x0d, 0x6e, 0x35, 0x6d, 0x46, 0x7e, 0xc8, 0xc0, 0x25, 0x53, 0xf9,
						0xaa, 0xb1, 0x5e, 0x57, 0x38, 0xb1, 0x1f, 0x12, 0x7f
					],
					RecoveryId::from_i32(0).unwrap()
				).unwrap()),
				}
			)
		)
	}
}
| 29.424327 | 295 | 0.664752 |
67c0e52d6649a6da95e5abb27b5f8bd1e7f77418
| 33,982 |
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This file actually contains two passes related to regions. The first
//! pass builds up the `scope_map`, which describes the parent links in
//! the region hierarchy. The second pass infers which types must be
//! region parameterized.
//!
//! Most of the documentation on regions can be found in
//! `middle/typeck/infer/region_inference.rs`
use session::Session;
use middle::ty::{self, Ty, FreeRegion};
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
use util::common::can_reach;
use std::cell::RefCell;
use syntax::codemap::{self, Span};
use syntax::{ast, visit};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
use syntax::ast_util::{stmt_id};
use syntax::visit::{Visitor, FnKind};
/// CodeExtent represents a statically-describable extent that can be
/// used to bound the lifetime/region for values.
///
/// FIXME (pnkfelix): This currently derives `PartialOrd` and `Ord` to
/// placate the same deriving in `ty::FreeRegion`, but we may want to
/// actually attach a more meaningful ordering to scopes than the one
/// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
           RustcDecodable, Show, Copy)]
pub enum CodeExtent {
    /// An extent identified solely by the AST node id it covers; the only
    /// extent kind defined at present.
    Misc(ast::NodeId)
}
impl CodeExtent {
    /// Creates a scope that represents the dynamic extent associated
    /// with `node_id`.
    pub fn from_node_id(node_id: ast::NodeId) -> CodeExtent {
        CodeExtent::Misc(node_id)
    }
    /// Returns a node id associated with this scope.
    ///
    /// NB: likely to be replaced as API is refined; e.g. pnkfelix
    /// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
    pub fn node_id(&self) -> ast::NodeId {
        match *self {
            CodeExtent::Misc(node_id) => node_id,
        }
    }
    /// Maps this scope to a potentially new one according to the
    /// NodeId transformer `f_id`.
    pub fn map_id<F>(&self, f_id: F) -> CodeExtent where
        F: FnOnce(ast::NodeId) -> ast::NodeId,
    {
        match *self {
            // Rebuild the same variant around the transformed id.
            CodeExtent::Misc(node_id) => CodeExtent::Misc(f_id(node_id)),
        }
    }
}
/// The region maps encode information about region relationships.
///
/// - `scope_map` maps from a scope id to the enclosing scope id; this is
/// usually corresponding to the lexical nesting, though in the case of
/// closures the parent scope is the innermost conditional expression or repeating
/// block
///
/// - `var_map` maps from a variable or binding id to the block in which
/// that variable is declared.
///
/// - `free_region_map` maps from a free region `a` to a list of free
/// regions `bs` such that `a <= b for all b in bs`
/// - the free region map is populated during type check as we check
/// each function. See the function `relate_free_regions` for
/// more information.
///
/// - `rvalue_scopes` includes entries for those expressions whose cleanup
/// scope is larger than the default. The map goes from the expression
/// id to the cleanup scope id. For rvalues not present in this table,
/// the appropriate cleanup scope is the innermost enclosing statement,
/// conditional expression, or repeating block (see `terminating_scopes`).
///
/// - `terminating_scopes` is a set containing the ids of each statement,
/// or conditional/repeating expression. These scopes are calling "terminating
/// scopes" because, when attempting to find the scope of a temporary, by
/// default we search up the enclosing scopes until we encounter the
/// terminating scope. A conditional/repeating
/// expression is one which is not guaranteed to execute exactly once
/// upon entering the parent scope. This could be because the expression
/// only executes conditionally, such as the expression `b` in `a && b`,
/// or because the expression may execute many times, such as a loop
/// body. The reason that we distinguish such expressions is that, upon
/// exiting the parent scope, we cannot statically know how many times
/// the expression executed, and thus if the expression creates
/// temporaries we cannot know statically how many such temporaries we
/// would have to cleanup. Therefore we ensure that the temporaries never
/// outlast the conditional/repeating expression, preventing the need
/// for dynamic checks and/or arbitrary amounts of stack space.
pub struct RegionMaps {
    /// Maps a scope to its immediately enclosing scope (see the block
    /// comment above for the full semantics).
    scope_map: RefCell<FnvHashMap<CodeExtent, CodeExtent>>,
    /// Maps a variable or binding id to the extent in which it is declared.
    var_map: RefCell<NodeMap<CodeExtent>>,
    /// Maps a free region `a` to the free regions `b` with `a <= b`;
    /// populated during type check via `relate_free_regions`.
    free_region_map: RefCell<FnvHashMap<FreeRegion, Vec<FreeRegion>>>,
    /// Expressions whose temporary cleanup scope is larger than the default
    /// (expression id -> cleanup scope).
    rvalue_scopes: RefCell<NodeMap<CodeExtent>>,
    /// Extents of statements and conditional/repeating expressions, past
    /// which temporaries may not live.
    terminating_scopes: RefCell<FnvHashSet<CodeExtent>>,
}
#[derive(Copy)]
pub struct Context {
    // Scope that local variables encountered here are attributed to
    // (set by blocks and by `match`/`for` expressions during the walk).
    var_parent: Option<ast::NodeId>,
    // Innermost enclosing expression
    parent: Option<ast::NodeId>,
}
/// AST visitor that populates a `RegionMaps` by walking the crate.
struct RegionResolutionVisitor<'a> {
    sess: &'a Session,
    // Generated maps:
    region_maps: &'a RegionMaps,
    // Current parent/var-parent state, saved and restored around each
    // nested construct.
    cx: Context
}
impl RegionMaps {
    /// Records that free region `sub` is outlived by free region `sup`
    /// (i.e. `sub <= sup`), avoiding duplicate entries.
    pub fn relate_free_regions(&self, sub: FreeRegion, sup: FreeRegion) {
        match self.free_region_map.borrow_mut().get_mut(&sub) {
            Some(sups) => {
                if !sups.iter().any(|x| x == &sup) {
                    sups.push(sup);
                }
                return;
            }
            None => {}
        }
        // First relation recorded for `sub`; the earlier borrow has been
        // released by the `return` above, so re-borrowing here is safe.
        debug!("relate_free_regions(sub={:?}, sup={:?})", sub, sup);
        self.free_region_map.borrow_mut().insert(sub, vec!(sup));
    }
    /// Records that `sup` is the scope immediately enclosing `sub`.
    pub fn record_encl_scope(&self, sub: CodeExtent, sup: CodeExtent) {
        debug!("record_encl_scope(sub={:?}, sup={:?})", sub, sup);
        assert!(sub != sup);
        self.scope_map.borrow_mut().insert(sub, sup);
    }
    /// Records that variable `var` is declared within extent `lifetime`.
    pub fn record_var_scope(&self, var: ast::NodeId, lifetime: CodeExtent) {
        debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
        assert!(var != lifetime.node_id());
        self.var_map.borrow_mut().insert(var, lifetime);
    }
    /// Records an extended cleanup scope `lifetime` for the rvalue `var`.
    pub fn record_rvalue_scope(&self, var: ast::NodeId, lifetime: CodeExtent) {
        debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
        assert!(var != lifetime.node_id());
        self.rvalue_scopes.borrow_mut().insert(var, lifetime);
    }
    /// Records that a scope is a TERMINATING SCOPE. Whenever we create automatic temporaries --
    /// e.g. by an expression like `a().f` -- they will be freed within the innermost terminating
    /// scope.
    pub fn mark_as_terminating_scope(&self, scope_id: CodeExtent) {
        debug!("record_terminating_scope(scope_id={:?})", scope_id);
        self.terminating_scopes.borrow_mut().insert(scope_id);
    }
    pub fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
        //! Returns the narrowest scope that encloses `id`, if any.
        self.scope_map.borrow().get(&id).map(|x| *x)
    }
    #[allow(dead_code)] // used in middle::cfg
    pub fn encl_scope(&self, id: CodeExtent) -> CodeExtent {
        //! Returns the narrowest scope that encloses `id`, if any.
        // Panics when `id` has no recorded parent.
        match self.scope_map.borrow().get(&id) {
            Some(&r) => r,
            None => { panic!("no enclosing scope for id {:?}", id); }
        }
    }
    /// Returns the lifetime of the local variable `var_id`
    pub fn var_scope(&self, var_id: ast::NodeId) -> CodeExtent {
        // Panics when `var_id` was never recorded via `record_var_scope`.
        match self.var_map.borrow().get(&var_id) {
            Some(&r) => r,
            None => { panic!("no enclosing scope for id {:?}", var_id); }
        }
    }
    pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option<CodeExtent> {
        //! Returns the scope when temp created by expr_id will be cleaned up
        // check for a designated rvalue scope
        match self.rvalue_scopes.borrow().get(&expr_id) {
            Some(&s) => {
                debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s);
                return Some(s);
            }
            None => { }
        }
        // else, locate the innermost terminating scope
        // if there's one. Static items, for instance, won't
        // have an enclosing scope, hence no scope will be
        // returned.
        let mut id = match self.opt_encl_scope(CodeExtent::from_node_id(expr_id)) {
            Some(i) => i,
            None => { return None; }
        };
        // Walk up parent scopes until a terminating scope is found.
        while !self.terminating_scopes.borrow().contains(&id) {
            match self.opt_encl_scope(id) {
                Some(p) => {
                    id = p;
                }
                None => {
                    debug!("temporary_scope({:?}) = None", expr_id);
                    return None;
                }
            }
        }
        debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id);
        return Some(id);
    }
    pub fn var_region(&self, id: ast::NodeId) -> ty::Region {
        //! Returns the lifetime of the variable `id`.
        let scope = ty::ReScope(self.var_scope(id));
        debug!("var_region({:?}) = {:?}", id, scope);
        scope
    }
    /// Returns true if either scope lexically contains the other.
    pub fn scopes_intersect(&self, scope1: CodeExtent, scope2: CodeExtent)
                            -> bool {
        self.is_subscope_of(scope1, scope2) ||
        self.is_subscope_of(scope2, scope1)
    }
    /// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false
    /// otherwise.
    pub fn is_subscope_of(&self,
                          subscope: CodeExtent,
                          superscope: CodeExtent)
                          -> bool {
        // Walk the parent chain from `subscope` upward looking for
        // `superscope`; absence of a parent means we reached a root.
        let mut s = subscope;
        while superscope != s {
            match self.scope_map.borrow().get(&s) {
                None => {
                    debug!("is_subscope_of({:?}, {:?}, s={:?})=false",
                           subscope, superscope, s);
                    return false;
                }
                Some(&scope) => s = scope
            }
        }
        debug!("is_subscope_of({:?}, {:?})=true",
               subscope, superscope);
        return true;
    }
    /// Determines whether two free regions have a subregion relationship
    /// by walking the graph encoded in `free_region_map`. Note that
    /// it is possible that `sub != sup` and `sub <= sup` and `sup <= sub`
    /// (that is, the user can give two different names to the same lifetime).
    pub fn sub_free_region(&self, sub: FreeRegion, sup: FreeRegion) -> bool {
        can_reach(&*self.free_region_map.borrow(), sub, sup)
    }
    /// Determines whether one region is a subregion of another. This is intended to run *after
    /// inference* and sadly the logic is somewhat duplicated with the code in infer.rs.
    pub fn is_subregion_of(&self,
                           sub_region: ty::Region,
                           super_region: ty::Region)
                           -> bool {
        debug!("is_subregion_of(sub_region={:?}, super_region={:?})",
               sub_region, super_region);
        sub_region == super_region || {
            match (sub_region, super_region) {
                // The empty region is below everything; 'static is above
                // everything.
                (ty::ReEmpty, _) |
                (_, ty::ReStatic) => {
                    true
                }
                (ty::ReScope(sub_scope), ty::ReScope(super_scope)) => {
                    self.is_subscope_of(sub_scope, super_scope)
                }
                (ty::ReScope(sub_scope), ty::ReFree(ref fr)) => {
                    self.is_subscope_of(sub_scope, fr.scope)
                }
                (ty::ReFree(sub_fr), ty::ReFree(super_fr)) => {
                    self.sub_free_region(sub_fr, super_fr)
                }
                (ty::ReEarlyBound(param_id_a, param_space_a, index_a, _),
                 ty::ReEarlyBound(param_id_b, param_space_b, index_b, _)) => {
                    // This case is used only to make sure that explicitly-
                    // specified `Self` types match the real self type in
                    // implementations.
                    param_id_a == param_id_b &&
                        param_space_a == param_space_b &&
                        index_a == index_b
                }
                _ => {
                    false
                }
            }
        }
    }
    /// Finds the nearest common ancestor (if any) of two scopes. That is, finds the smallest
    /// scope which is greater than or equal to both `scope_a` and `scope_b`.
    pub fn nearest_common_ancestor(&self,
                                   scope_a: CodeExtent,
                                   scope_b: CodeExtent)
                                   -> Option<CodeExtent> {
        if scope_a == scope_b { return Some(scope_a); }
        let a_ancestors = ancestors_of(self, scope_a);
        let b_ancestors = ancestors_of(self, scope_b);
        let mut a_index = a_ancestors.len() - 1u;
        let mut b_index = b_ancestors.len() - 1u;
        // Here, ~[ab]_ancestors is a vector going from narrow to broad.
        // The end of each vector will be the item where the scope is
        // defined; if there are any common ancestors, then the tails of
        // the vector will be the same.  So basically we want to walk
        // backwards from the tail of each vector and find the first point
        // where they diverge.  If one vector is a suffix of the other,
        // then the corresponding scope is a superscope of the other.
        if a_ancestors[a_index] != b_ancestors[b_index] {
            return None;
        }
        loop {
            // Loop invariant: a_ancestors[a_index] == b_ancestors[b_index]
            // for all indices between a_index and the end of the array
            if a_index == 0u { return Some(scope_a); }
            if b_index == 0u { return Some(scope_b); }
            a_index -= 1u;
            b_index -= 1u;
            if a_ancestors[a_index] != b_ancestors[b_index] {
                return Some(a_ancestors[a_index + 1]);
            }
        }
        // Collects `scope` and every transitive parent, narrowest first.
        fn ancestors_of(this: &RegionMaps, scope: CodeExtent)
            -> Vec<CodeExtent> {
            // debug!("ancestors_of(scope={:?})", scope);
            let mut result = vec!(scope);
            let mut scope = scope;
            loop {
                match this.scope_map.borrow().get(&scope) {
                    None => return result,
                    Some(&superscope) => {
                        result.push(superscope);
                        scope = superscope;
                    }
                }
                // debug!("ancestors_of_loop(scope={:?})", scope);
            }
        }
    }
}
/// Records the current parent (if any) as the enclosing scope of
/// `child_id`; a missing parent (e.g. at the crate root) records nothing.
fn record_superlifetime(visitor: &mut RegionResolutionVisitor,
                        child_id: ast::NodeId,
                        _sp: Span) {
    if let Some(parent_id) = visitor.cx.parent {
        let child_scope = CodeExtent::from_node_id(child_id);
        let parent_scope = CodeExtent::from_node_id(parent_id);
        visitor.region_maps.record_encl_scope(child_scope, parent_scope);
    }
}
/// Records the lifetime of the variable `var_id` as the current
/// `cx.var_parent` scope, if one is set.
fn record_var_lifetime(visitor: &mut RegionResolutionVisitor,
                       var_id: ast::NodeId,
                       _sp: Span) {
    if let Some(parent_id) = visitor.cx.var_parent {
        let parent_scope = CodeExtent::from_node_id(parent_id);
        visitor.region_maps.record_var_scope(var_id, parent_scope);
    }
    // No var parent can legitimately happen in extern fn declarations like
    //
    //     extern fn isalnum(c: c_int) -> c_int
    //
    // in which case there is nothing to record.
}
fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) {
    debug!("resolve_block(blk.id={:?})", blk.id);
    // Record the parent of this block.
    record_superlifetime(visitor, blk.id, blk.span);
    // We treat the tail expression in the block (if any) somewhat
    // differently from the statements. The issue has to do with
    // temporary lifetimes. If the user writes:
    //
    //   {
    //     ... (&foo()) ...
    //   }
    //
    // NOTE(review): the original explanation stops here; presumably it
    // continued to describe tail-expression temporary lifetimes — see the
    // rules spelled out in `resolve_local` below.
    // The block becomes both the expression parent and the variable parent
    // for everything inside it; the previous context is restored on exit.
    let prev_cx = visitor.cx;
    visitor.cx = Context {var_parent: Some(blk.id), parent: Some(blk.id)};
    visit::walk_block(visitor, blk);
    visitor.cx = prev_cx;
}
/// Marks a match arm's body — and its guard, if present — as terminating
/// scopes before walking the arm's children.
fn resolve_arm(visitor: &mut RegionResolutionVisitor, arm: &ast::Arm) {
    let body_scope = CodeExtent::from_node_id(arm.body.id);
    visitor.region_maps.mark_as_terminating_scope(body_scope);
    if let Some(ref guard) = arm.guard {
        let guard_scope = CodeExtent::from_node_id(guard.id);
        visitor.region_maps.mark_as_terminating_scope(guard_scope);
    }
    visit::walk_arm(visitor, arm);
}
/// Records the enclosing scope of a pattern and, for identifier patterns
/// (which may be bindings), the lifetime of the binding.
fn resolve_pat(visitor: &mut RegionResolutionVisitor, pat: &ast::Pat) {
    record_superlifetime(visitor, pat.id, pat.span);
    // If this is a binding (or maybe a binding — checking the def map would
    // settle it) then record the lifetime of that binding.
    if let ast::PatIdent(..) = pat.node {
        record_var_lifetime(visitor, pat.id, pat.span);
    }
    visit::walk_pat(visitor, pat);
}
fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) {
    let stmt_id = stmt_id(stmt);
    debug!("resolve_stmt(stmt.id={:?})", stmt_id);
    // Every statement is a terminating scope: temporaries created while
    // evaluating it are cleaned up no later than the end of the statement.
    let stmt_scope = CodeExtent::from_node_id(stmt_id);
    visitor.region_maps.mark_as_terminating_scope(stmt_scope);
    record_superlifetime(visitor, stmt_id, stmt.span);
    // Subexpressions of the statement are parented to it; the previous
    // parent is restored afterwards. `var_parent` is left unchanged.
    let prev_parent = visitor.cx.parent;
    visitor.cx.parent = Some(stmt_id);
    visit::walk_stmt(visitor, stmt);
    visitor.cx.parent = prev_parent;
}
fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) {
    debug!("resolve_expr(expr.id={:?})", expr.id);
    record_superlifetime(visitor, expr.id, expr.span);
    let prev_cx = visitor.cx;
    visitor.cx.parent = Some(expr.id);
    {
        let region_maps = &mut visitor.region_maps;
        // Helper: mark the extent of node `id` as a terminating scope.
        let terminating = |&: id| {
            let scope = CodeExtent::from_node_id(id);
            region_maps.mark_as_terminating_scope(scope)
        };
        match expr.node {
            // Conditional or repeating scopes are always terminating
            // scopes, meaning that temporaries cannot outlive them.
            // This ensures fixed size stacks.
            ast::ExprBinary(codemap::Spanned { node: ast::BiAnd, .. }, _, ref r) |
            ast::ExprBinary(codemap::Spanned { node: ast::BiOr, .. }, _, ref r) => {
                // For shortcircuiting operators, mark the RHS as a terminating
                // scope since it only executes conditionally.
                terminating(r.id);
            }
            ast::ExprIf(_, ref then, Some(ref otherwise)) => {
                terminating(then.id);
                terminating(otherwise.id);
            }
            ast::ExprIf(ref expr, ref then, None) => {
                // NOTE(review): the condition is marked terminating here but
                // not in the if/else arm above — confirm whether that
                // asymmetry is intentional.
                terminating(expr.id);
                terminating(then.id);
            }
            ast::ExprLoop(ref body, _) => {
                terminating(body.id);
            }
            ast::ExprWhile(ref expr, ref body, _) => {
                terminating(expr.id);
                terminating(body.id);
            }
            ast::ExprForLoop(ref _pat, ref _head, ref body, _) => {
                terminating(body.id);
                // The variable parent of everything inside (most importantly, the
                // pattern) is the body.
                visitor.cx.var_parent = Some(body.id);
            }
            ast::ExprMatch(..) => {
                visitor.cx.var_parent = Some(expr.id);
            }
            ast::ExprAssignOp(..) | ast::ExprIndex(..) |
            ast::ExprUnary(..) | ast::ExprCall(..) | ast::ExprMethodCall(..) => {
                // FIXME(#6268) Nested method calls
                //
                // The lifetimes for a call or method call look as follows:
                //
                // call.id
                // - arg0.id
                // - ...
                // - argN.id
                // - call.callee_id
                //
                // The idea is that call.callee_id represents *the time when
                // the invoked function is actually running* and call.id
                // represents *the time to prepare the arguments and make the
                // call*.  See the section "Borrows in Calls" borrowck/doc.rs
                // for an extended explanation of why this distinction is
                // important.
                //
                // record_superlifetime(new_cx, expr.callee_id);
            }
            _ => {}
        }
    }
    visit::walk_expr(visitor, expr);
    visitor.cx = prev_cx;
}
fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
debug!("resolve_local(local.id={:?},local.init={:?})",
local.id,local.init.is_some());
let blk_id = match visitor.cx.var_parent {
Some(id) => id,
None => {
visitor.sess.span_bug(
local.span,
"local without enclosing block");
}
};
// For convenience in trans, associate with the local-id the var
// scope that will be used for any bindings declared in this
// pattern.
let blk_scope = CodeExtent::from_node_id(blk_id);
visitor.region_maps.record_var_scope(local.id, blk_scope);
// As an exception to the normal rules governing temporary
// lifetimes, initializers in a let have a temporary lifetime
// of the enclosing block. This means that e.g. a program
// like the following is legal:
//
// let ref x = HashMap::new();
//
// Because the hash map will be freed in the enclosing block.
//
// We express the rules more formally based on 3 grammars (defined
// fully in the helpers below that implement them):
//
// 1. `E&`, which matches expressions like `&<rvalue>` that
// own a pointer into the stack.
//
// 2. `P&`, which matches patterns like `ref x` or `(ref x, ref
// y)` that produce ref bindings into the value they are
// matched against or something (at least partially) owned by
// the value they are matched against. (By partially owned,
// I mean that creating a binding into a ref-counted or managed value
// would still count.)
//
// 3. `ET`, which matches both rvalues like `foo()` as well as lvalues
// based on rvalues like `foo().x[2].y`.
//
// A subexpression `<rvalue>` that appears in a let initializer
// `let pat [: ty] = expr` has an extended temporary lifetime if
// any of the following conditions are met:
//
// A. `pat` matches `P&` and `expr` matches `ET`
// (covers cases where `pat` creates ref bindings into an rvalue
// produced by `expr`)
// B. `ty` is a borrowed pointer and `expr` matches `ET`
// (covers cases where coercion creates a borrow)
// C. `expr` matches `E&`
// (covers cases `expr` borrows an rvalue that is then assigned
// to memory (at least partially) owned by the binding)
//
// Here are some examples hopefully giving an intuition where each
// rule comes into play and why:
//
// Rule A. `let (ref x, ref y) = (foo().x, 44)`. The rvalue `(22, 44)`
// would have an extended lifetime, but not `foo()`.
//
// Rule B. `let x: &[...] = [foo().x]`. The rvalue `[foo().x]`
// would have an extended lifetime, but not `foo()`.
//
// Rule C. `let x = &foo().x`. The rvalue ``foo()` would have extended
// lifetime.
//
// In some cases, multiple rules may apply (though not to the same
// rvalue). For example:
//
// let ref x = [&a(), &b()];
//
// Here, the expression `[...]` has an extended lifetime due to rule
// A, but the inner rvalues `a()` and `b()` have an extended lifetime
// due to rule C.
//
// FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST.
match local.init {
Some(ref expr) => {
record_rvalue_scope_if_borrow_expr(visitor, &**expr, blk_scope);
let is_borrow =
if let Some(ref ty) = local.ty { is_borrowed_ty(&**ty) } else { false };
if is_binding_pat(&*local.pat) || is_borrow {
record_rvalue_scope(visitor, &**expr, blk_scope);
}
}
None => { }
}
visit::walk_local(visitor, local);
/// True if `pat` match the `P&` nonterminal:
///
/// P& = ref X
/// | StructName { ..., P&, ... }
/// | VariantName(..., P&, ...)
/// | [ ..., P&, ... ]
/// | ( ..., P&, ... )
/// | box P&
fn is_binding_pat(pat: &ast::Pat) -> bool {
match pat.node {
ast::PatIdent(ast::BindByRef(_), _, _) => true,
ast::PatStruct(_, ref field_pats, _) => {
field_pats.iter().any(|fp| is_binding_pat(&*fp.node.pat))
}
ast::PatVec(ref pats1, ref pats2, ref pats3) => {
pats1.iter().any(|p| is_binding_pat(&**p)) ||
pats2.iter().any(|p| is_binding_pat(&**p)) ||
pats3.iter().any(|p| is_binding_pat(&**p))
}
ast::PatEnum(_, Some(ref subpats)) |
ast::PatTup(ref subpats) => {
subpats.iter().any(|p| is_binding_pat(&**p))
}
ast::PatBox(ref subpat) => {
is_binding_pat(&**subpat)
}
_ => false,
}
}
    /// True if `ty` is a borrowed pointer type like `&int` or `&[...]`.
    /// Used to detect `let x: &T = <rvalue>` declarations (rule C above).
    fn is_borrowed_ty(ty: &ast::Ty) -> bool {
        match ty.node {
            // Only an explicit reference type (`&`/`&mut`) counts here.
            ast::TyRptr(..) => true,
            _ => false
        }
    }
    /// If `expr` matches the `E&` grammar, then records an extended rvalue scope as appropriate:
    ///
    ///     E& = & ET
    ///        | StructName { ..., f: E&, ... }
    ///        | [ ..., E&, ... ]
    ///        | ( ..., E&, ... )
    ///        | {...; E&}
    ///        | box E&
    ///        | E& as ...
    ///        | ( E& )
    ///
    /// In other words: walk through aggregates, casts, parens and block
    /// tails, and when a borrow (`&ET`) is found, extend the lifetime of
    /// the borrowed rvalue to `blk_id`.
    fn record_rvalue_scope_if_borrow_expr(visitor: &mut RegionResolutionVisitor,
                                          expr: &ast::Expr,
                                          blk_id: CodeExtent) {
        match expr.node {
            // `&ET`: the borrowed subexpression gets the extended scope
            // (and may itself contain further borrows, hence the recursion).
            ast::ExprAddrOf(_, ref subexpr) => {
                record_rvalue_scope_if_borrow_expr(visitor, &**subexpr, blk_id);
                record_rvalue_scope(visitor, &**subexpr, blk_id);
            }
            ast::ExprStruct(_, ref fields, _) => {
                for field in fields.iter() {
                    record_rvalue_scope_if_borrow_expr(
                        visitor, &*field.expr, blk_id);
                }
            }
            ast::ExprVec(ref subexprs) |
            ast::ExprTup(ref subexprs) => {
                for subexpr in subexprs.iter() {
                    record_rvalue_scope_if_borrow_expr(
                        visitor, &**subexpr, blk_id);
                }
            }
            // `box E&`
            ast::ExprUnary(ast::UnUniq, ref subexpr) => {
                record_rvalue_scope_if_borrow_expr(visitor, &**subexpr, blk_id);
            }
            ast::ExprCast(ref subexpr, _) |
            ast::ExprParen(ref subexpr) => {
                record_rvalue_scope_if_borrow_expr(visitor, &**subexpr, blk_id)
            }
            // `{ ...; E& }`: only the block's tail expression matters.
            ast::ExprBlock(ref block) => {
                match block.expr {
                    Some(ref subexpr) => {
                        record_rvalue_scope_if_borrow_expr(
                            visitor, &**subexpr, blk_id);
                    }
                    None => { }
                }
            }
            // Not part of the E& grammar: no borrow to extend.
            _ => {
            }
        }
    }
    /// Applied to an expression `expr` if `expr` -- or something owned or partially owned by
    /// `expr` -- is going to be indirectly referenced by a variable in a let statement. In that
    /// case, the "temporary lifetime" or `expr` is extended to be the block enclosing the `let`
    /// statement.
    ///
    /// More formally, if `expr` matches the grammar `ET`, record the rvalue scope of the matching
    /// `<rvalue>` as `blk_id`:
    ///
    ///     ET = *ET
    ///        | ET[...]
    ///        | ET.f
    ///        | (ET)
    ///        | <rvalue>
    ///
    /// Note: ET is intended to match "rvalues or lvalues based on rvalues".
    fn record_rvalue_scope<'a>(visitor: &mut RegionResolutionVisitor,
                               expr: &'a ast::Expr,
                               blk_scope: CodeExtent) {
        let mut expr = expr;
        // Iteratively peel deref/index/field/paren layers, recording the
        // extended scope for every node on the way down.
        loop {
            // Note: give all the expressions matching `ET` with the
            // extended temporary lifetime, not just the innermost rvalue,
            // because in trans if we must compile e.g. `*rvalue()`
            // into a temporary, we request the temporary scope of the
            // outer expression.
            visitor.region_maps.record_rvalue_scope(expr.id, blk_scope);
            match expr.node {
                ast::ExprAddrOf(_, ref subexpr) |
                ast::ExprUnary(ast::UnDeref, ref subexpr) |
                ast::ExprField(ref subexpr, _) |
                ast::ExprTupField(ref subexpr, _) |
                ast::ExprIndex(ref subexpr, _) |
                ast::ExprParen(ref subexpr) => {
                    expr = &**subexpr;
                }
                _ => {
                    // Reached the `<rvalue>` leaf of the ET grammar; stop.
                    return;
                }
            }
        }
    }
}
/// Visit an item with a fresh, empty region context: items are region
/// barriers, so nothing inside one is parented to outer scopes.
fn resolve_item(visitor: &mut RegionResolutionVisitor, item: &ast::Item) {
    // Items create a new outer block scope as far as we're concerned.
    let saved_cx = visitor.cx;
    visitor.cx = Context { parent: None, var_parent: None };
    visit::walk_item(visitor, item);
    visitor.cx = saved_cx;
}
fn resolve_fn(visitor: &mut RegionResolutionVisitor,
              fk: FnKind,
              decl: &ast::FnDecl,
              body: &ast::Block,
              sp: Span,
              id: ast::NodeId) {
    debug!("region::resolve_fn(id={:?}, \
            span={:?}, \
            body.id={:?}, \
            cx.parent={:?})",
           id,
           visitor.sess.codemap().span_to_string(sp),
           body.id,
           visitor.cx.parent);
    // The function body is a terminating scope: temporaries do not
    // outlive it.
    let body_scope = CodeExtent::from_node_id(body.id);
    visitor.region_maps.mark_as_terminating_scope(body_scope);
    let outer_cx = visitor.cx;
    // The arguments and `self` are parented to the body of the fn.
    visitor.cx = Context { parent: Some(body.id),
                           var_parent: Some(body.id) };
    visit::walk_fn_decl(visitor, decl);
    // The body of the fn itself is either a root scope (top-level fn)
    // or it continues with the inherited scope (closures).
    match fk {
        visit::FkItemFn(..) | visit::FkMethod(..) => {
            // Top-level fns and methods: body starts from an empty context.
            visitor.cx = Context { parent: None, var_parent: None };
            visitor.visit_block(body);
            visitor.cx = outer_cx;
        }
        visit::FkFnBlock(..) => {
            // FIXME(#3696) -- at present we are place the closure body
            // within the region hierarchy exactly where it appears lexically.
            // This is wrong because the closure may live longer
            // than the enclosing expression. We should probably fix this,
            // but the correct fix is a bit subtle, and I am also not sure
            // that the present approach is unsound -- it may not permit
            // any illegal programs. See issue for more details.
            visitor.cx = outer_cx;
            visitor.visit_block(body);
        }
    }
}
impl<'a, 'v> Visitor<'v> for RegionResolutionVisitor<'a> {
    // Every override simply forwards to the matching free `resolve_*`
    // function; those functions handle context save/restore themselves.
    fn visit_item(&mut self, item: &Item) {
        resolve_item(self, item);
    }
    fn visit_fn(&mut self, kind: FnKind<'v>, decl: &'v FnDecl,
                body: &'v Block, span: Span, id: NodeId) {
        resolve_fn(self, kind, decl, body, span, id);
    }
    fn visit_block(&mut self, block: &Block) {
        resolve_block(self, block);
    }
    fn visit_stmt(&mut self, stmt: &Stmt) {
        resolve_stmt(self, stmt);
    }
    fn visit_expr(&mut self, expr: &Expr) {
        resolve_expr(self, expr);
    }
    fn visit_local(&mut self, local: &Local) {
        resolve_local(self, local);
    }
    fn visit_arm(&mut self, arm: &Arm) {
        resolve_arm(self, arm);
    }
    fn visit_pat(&mut self, pat: &Pat) {
        resolve_pat(self, pat);
    }
}
/// Walk the whole crate and build the region maps (scope parents,
/// variable scopes, rvalue scopes, free regions, terminating scopes).
pub fn resolve_crate(sess: &Session, krate: &ast::Crate) -> RegionMaps {
    // Fresh, empty tables for the whole crate.
    let maps = RegionMaps {
        scope_map: RefCell::new(FnvHashMap()),
        var_map: RefCell::new(NodeMap()),
        free_region_map: RefCell::new(FnvHashMap()),
        rvalue_scopes: RefCell::new(NodeMap()),
        terminating_scopes: RefCell::new(FnvHashSet()),
    };
    {
        // Inner scope so the visitor's borrow of `maps` ends before the
        // tables are returned.
        let mut visitor = RegionResolutionVisitor {
            sess: sess,
            region_maps: &maps,
            cx: Context { parent: None, var_parent: None }
        };
        visit::walk_crate(&mut visitor, krate);
    }
    // Tail expression instead of the unidiomatic explicit `return`.
    maps
}
/// Resolve regions for an item inlined from another crate, starting from
/// an empty context exactly as `resolve_crate` does for local items.
pub fn resolve_inlined_item(sess: &Session,
                            region_maps: &RegionMaps,
                            item: &ast::InlinedItem) {
    let root_cx = Context { parent: None, var_parent: None };
    let mut visitor = RegionResolutionVisitor {
        sess: sess,
        region_maps: region_maps,
        cx: root_cx,
    };
    visit::walk_inlined_item(&mut visitor, item);
}
| 37.017429 | 99 | 0.557472 |
61469ba83352f75c4fd2541b7683c649818aea2b
| 2,432 |
pub use self::asm::*;
mod asm;
pub mod cidr;
pub const PAGE_SIZE: usize = 4096; // Page size in bytes; huge pages are not used here.
/// Round `buffer_size` up to a whole number of pages.
///
/// `PAGE_SIZE` is a power of two, so rounding up is a mask operation:
/// add `PAGE_SIZE - 1`, then clear the low-order bits.
#[inline]
pub fn round_to_pages(buffer_size: usize) -> usize {
    let mask = PAGE_SIZE - 1;
    (buffer_size + mask) & !mask
}
/// Round a 64-bit integer up to its nearest power of 2.
///
/// Uses the classic "bit smearing" trick: subtract one, propagate the
/// highest set bit into every lower position, then add one. Exact powers
/// of two map to themselves, and 0 maps back to 0 (via wrapping
/// arithmetic: 0 - 1 smears to all-ones, and +1 wraps to 0).
#[inline]
pub fn round_to_power_of_2(size: usize) -> usize {
    let mut v = size.wrapping_sub(1);
    // Doubling shift distances smear the top bit across all 64 positions.
    for &shift in [1usize, 2, 4, 8, 16, 32].iter() {
        v |= v >> shift;
    }
    v.wrapping_add(1)
}
/// Clears a bit in a byte; `pos` is the zero-based position counted from
/// the left (most-significant bit = position 0).
#[inline]
pub fn clear_bit(original: u8, pos: u8) -> u8 {
    // 0x80 >> pos puts a single 1 at the target position; AND with its
    // complement zeroes exactly that bit.
    original & !(0x80u8 >> pos)
}
/// Sets a bit in a byte; `pos` is the zero-based position counted from
/// the left (most-significant bit = position 0).
#[inline]
pub fn set_bit(original: u8, pos: u8) -> u8 {
    // 0x80 >> pos places a single 1 at the target position.
    original | (0x80u8 >> pos)
}
/// Reads a bit as a bool; `pos` is the zero-based position counted from
/// the left (most-significant bit = position 0).
#[inline]
pub fn get_bit(original: u8, pos: u8) -> bool {
    // Shift the target bit into the least-significant position and test it.
    (original >> (7 - pos)) & 1 == 1
}
/// Flips a bit in a byte to on or off.
///
/// # Arguments
///
/// * `original` - the byte to flip a bit on
/// * `pos` - the zero-based position of the bit, counted from the left
/// * `on` - whether the bit should be set (true) or cleared (false)
#[inline]
pub fn flip_bit(original: u8, pos: u8, on: bool) -> u8 {
    // Single-bit mask at the target position (bit 0 = most significant).
    let mask = 0x80u8 >> pos;
    if on {
        original | mask
    } else {
        original & !mask
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Round-trip a single bit through `flip_bit` and confirm `get_bit`
    /// agrees with the resulting byte at every step.
    #[test]
    fn flippin_bits() {
        let start: u8 = 0b0110_1000;
        let expected_cleared: u8 = 0b0100_1000;
        // Turn the bit at position 2 off and verify it reads back as clear.
        let cleared = flip_bit(start, 2, false);
        assert_eq!(cleared, expected_cleared);
        assert_eq!(get_bit(cleared, 2), false);
        // Turn it back on and verify the original byte is recovered.
        let restored = flip_bit(cleared, 2, true);
        assert_eq!(restored, start);
        assert_eq!(get_bit(restored, 2), true);
    }
}
| 27.022222 | 103 | 0.608141 |
fcd4f4691a2f6eba302491226e861bc18a543a58
| 621 |
use super::super::{AstName, AstNodePayload, AstType, LexLocation};
#[derive(Clone)]
pub struct TypeReference {
    // Optional qualifier before the type name (e.g. a module prefix) —
    // presumably; confirm against the parser that constructs these.
    prefix: Option<AstName>,
    // The referenced type's name.
    name: AstName,
    // Generic arguments of the reference; `None` when no parameter list
    // was written.
    parameters: Option<Vec<Box<AstType>>>,
}
impl TypeReference {
    /// Builds an `AstType` node whose payload is this type reference.
    pub fn new(
        location: LexLocation,
        prefix: Option<AstName>,
        name: AstName,
        parameters: Option<Vec<Box<AstType>>>,
    ) -> Box<AstType> {
        let reference = TypeReference {
            prefix,
            name,
            parameters,
        };
        AstType::new(location, AstNodePayload::TypeReference(Box::new(reference)))
    }
}
| 23 | 66 | 0.549114 |
8987222023dfd8c1ed343247d1c4445c9b141614
| 6,907 |
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
use anyhow::{ensure, Error, Result};
use bytes::Bytes;
use libra_crypto::{
hash::{CryptoHash, CryptoHasher},
HashValue,
};
use libra_crypto_derive::CryptoHasher;
#[cfg(any(test, feature = "fuzzing"))]
use proptest_derive::Arbitrary;
use rand::{rngs::OsRng, Rng};
use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer};
use std::{convert::TryFrom, fmt, str::FromStr};
// Number of leading address bytes shown by `short_str` in log output.
const SHORT_STRING_LENGTH: usize = 4;
/// A struct that represents an account address.
/// Internally a fixed 16-byte array (see `AccountAddress::LENGTH`).
#[derive(Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy, CryptoHasher)]
#[cfg_attr(any(test, feature = "fuzzing"), derive(Arbitrary))]
pub struct AccountAddress([u8; AccountAddress::LENGTH]);
impl AccountAddress {
    /// Wraps a raw 16-byte array as an address.
    pub const fn new(address: [u8; Self::LENGTH]) -> Self {
        AccountAddress(address)
    }
    /// The number of bytes in an address.
    pub const LENGTH: usize = 16;
    /// The all-zero address.
    pub const DEFAULT: Self = Self([0u8; AccountAddress::LENGTH]);
    /// Generates a fresh address from OS-provided randomness.
    pub fn random() -> Self {
        let mut rng = OsRng;
        let buf: [u8; Self::LENGTH] = rng.gen();
        AccountAddress::new(buf)
    }
    // Helpful in log messages
    /// Hex encoding of only the first `SHORT_STRING_LENGTH` (4) bytes.
    pub fn short_str(&self) -> String {
        hex::encode(&self.0[..SHORT_STRING_LENGTH])
    }
    pub fn to_vec(&self) -> Vec<u8> {
        self.0.to_vec()
    }
    /// Parses a `0x`-prefixed hex literal. Accepts an odd number of hex
    /// digits (a leading `0` is prepended) and fewer than 16 bytes (the
    /// value is left-padded with zero bytes). Literals longer than
    /// 16 bytes are rejected by the final `try_from` length check.
    pub fn from_hex_literal(literal: &str) -> Result<Self> {
        ensure!(literal.starts_with("0x"), "literal must start with 0x.");
        let hex_len = literal.len() - 2;
        // Odd digit count: prepend '0' so hex::decode sees whole bytes.
        let mut result = if hex_len % 2 != 0 {
            let mut hex_str = String::with_capacity(hex_len + 1);
            hex_str.push('0');
            hex_str.push_str(&literal[2..]);
            hex::decode(&hex_str)?
        } else {
            hex::decode(&literal[2..])?
        };
        let len = result.len();
        // Left-pad short values with zero bytes up to the full length.
        let padded_result = if len < Self::LENGTH {
            let mut padded = Vec::with_capacity(Self::LENGTH);
            padded.resize(Self::LENGTH - len, 0u8);
            padded.append(&mut result);
            padded
        } else {
            result
        };
        AccountAddress::try_from(padded_result)
    }
}
impl Default for AccountAddress {
    /// The all-zero address.
    fn default() -> AccountAddress {
        AccountAddress::DEFAULT
    }
}
impl CryptoHash for AccountAddress {
    type Hasher = AccountAddressHasher;
    /// Hashes the raw 16 address bytes with the address-specific hasher.
    fn hash(&self) -> HashValue {
        let mut state = Self::Hasher::default();
        state.write(&self.0);
        state.finish()
    }
}
impl AsRef<[u8]> for AccountAddress {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}
impl fmt::Display for AccountAddress {
    fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
        // Forward to the LowerHex impl with a "0x" prepended (the # flag).
        write!(f, "{:#x}", self)
    }
}
impl fmt::Debug for AccountAddress {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Forward to the LowerHex impl with a "0x" prepended (the # flag).
        write!(f, "{:#x}", self)
    }
}
impl fmt::LowerHex for AccountAddress {
    // NOTE(review): this impl ignores the alternate (`#`) flag, so the
    // `{:#x}` calls above do NOT actually emit a "0x" prefix — the auto
    // prefix only applies to primitive integers. The comments above appear
    // stale. Do not "fix" without checking: `Serialize` goes through
    // `to_string()`, so changing this changes the serialized format.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", hex::encode(&self.0))
    }
}
// --- Byte-oriented conversions -------------------------------------------
// All fallible conversions funnel through `TryFrom<&[u8]>`, which enforces
// the exact 16-byte length; the infallible `From` impls just expose the
// inner byte array in various container shapes.
impl TryFrom<&[u8]> for AccountAddress {
    type Error = Error;
    /// Tries to convert the provided byte array into Address.
    fn try_from(bytes: &[u8]) -> Result<AccountAddress> {
        ensure!(
            bytes.len() == Self::LENGTH,
            "The Address {:?} is of invalid length",
            bytes
        );
        let mut addr = [0u8; Self::LENGTH];
        addr.copy_from_slice(bytes);
        Ok(AccountAddress(addr))
    }
}
impl TryFrom<&[u8; AccountAddress::LENGTH]> for AccountAddress {
    type Error = Error;
    /// Tries to convert the provided byte array into Address.
    fn try_from(bytes: &[u8; Self::LENGTH]) -> Result<AccountAddress> {
        AccountAddress::try_from(&bytes[..])
    }
}
impl TryFrom<Vec<u8>> for AccountAddress {
    type Error = Error;
    /// Tries to convert the provided byte buffer into Address.
    fn try_from(bytes: Vec<u8>) -> Result<AccountAddress> {
        AccountAddress::try_from(&bytes[..])
    }
}
impl From<AccountAddress> for Vec<u8> {
    fn from(addr: AccountAddress) -> Vec<u8> {
        addr.0.to_vec()
    }
}
impl From<&AccountAddress> for Vec<u8> {
    fn from(addr: &AccountAddress) -> Vec<u8> {
        addr.0.to_vec()
    }
}
impl From<AccountAddress> for [u8; AccountAddress::LENGTH] {
    fn from(addr: AccountAddress) -> Self {
        addr.0
    }
}
impl From<&AccountAddress> for [u8; AccountAddress::LENGTH] {
    fn from(addr: &AccountAddress) -> Self {
        addr.0
    }
}
impl TryFrom<Bytes> for AccountAddress {
    type Error = Error;
    fn try_from(bytes: Bytes) -> Result<AccountAddress> {
        AccountAddress::try_from(bytes.as_ref())
    }
}
impl From<AccountAddress> for Bytes {
    fn from(addr: AccountAddress) -> Bytes {
        Bytes::copy_from_slice(addr.0.as_ref())
    }
}
// Lowercase hex encoding without a "0x" prefix.
impl From<&AccountAddress> for String {
    fn from(addr: &AccountAddress) -> String {
        ::hex::encode(addr.as_ref())
    }
}
impl TryFrom<String> for AccountAddress {
    type Error = Error;
    /// Parses a hex-encoded string (no "0x" prefix) into an address.
    ///
    /// Previously this asserted `!s.is_empty()`, panicking on empty input;
    /// a fallible conversion should return `Err` instead. The assert was
    /// also redundant: an empty string decodes to zero bytes, which the
    /// length check in `TryFrom<&[u8]>` already rejects with an error.
    fn try_from(s: String) -> Result<AccountAddress> {
        let bytes_out = ::hex::decode(s)?;
        AccountAddress::try_from(bytes_out.as_slice())
    }
}
impl FromStr for AccountAddress {
    type Err = Error;
    /// Parses a hex-encoded string (no "0x" prefix) into an address.
    ///
    /// Previously this asserted `!s.is_empty()`, panicking on empty input;
    /// `FromStr` is a fallible API and should return `Err`. The assert was
    /// redundant anyway: an empty string decodes to zero bytes, which the
    /// length check in `TryFrom<&[u8]>` already rejects with an error.
    fn from_str(s: &str) -> Result<Self> {
        let bytes_out = ::hex::decode(s)?;
        AccountAddress::try_from(bytes_out.as_slice())
    }
}
// Serde round-trip: human-readable formats (JSON, YAML, ...) use the hex
// string form; binary formats use the raw 16-byte array wrapped in a
// newtype named "AccountAddress" to preserve the Serde data model.
impl<'de> Deserialize<'de> for AccountAddress {
    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        if deserializer.is_human_readable() {
            let s = <String>::deserialize(deserializer)?;
            AccountAddress::try_from(s).map_err(D::Error::custom)
        } else {
            // In order to preserve the Serde data model and help analysis tools,
            // make sure to wrap our value in a container with the same name
            // as the original type.
            #[derive(::serde::Deserialize)]
            #[serde(rename = "AccountAddress")]
            struct Value([u8; AccountAddress::LENGTH]);
            let value = Value::deserialize(deserializer)?;
            Ok(AccountAddress::new(value.0))
        }
    }
}
impl Serialize for AccountAddress {
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        if serializer.is_human_readable() {
            // Hex via Display; note this carries no "0x" prefix (see the
            // LowerHex impl above).
            self.to_string().serialize(serializer)
        } else {
            // See comment in deserialize.
            serializer.serialize_newtype_struct("AccountAddress", &self.0)
        }
    }
}
| 27.517928 | 81 | 0.59751 |
8a9a4e6586823ab9b4bd09cf778dd6613007ed5c
| 18,475 |
use graph::runtime::{
AscIndexId, AscPtr, AscType, AscValue, DeterministicHostError, IndexForAscTypeId,
};
use graph::semver::Version;
use graph_runtime_derive::AscType;
use graph_runtime_wasm::asc_abi::class::{Array, AscEnum, AscString, Uint8Array};
// Type aliases: on the AssemblyScript side both raw byte blobs and hashes
// are plain `Uint8Array`s, and gas amounts are signed 64-bit integers.
pub(crate) type AscBytes = Uint8Array;
pub(crate) type AscHash = Uint8Array;
pub(crate) type AscGas = i64;
// Newtype over `Array<AscPtr<AscEventTx>>`: the wrapper exists so the array
// type can carry its own `IndexForAscTypeId`; the `AscType` impl simply
// delegates (de)serialization to the inner `Array`.
pub struct AscEventTxArray(pub(crate) Array<AscPtr<AscEventTx>>);
impl AscType for AscEventTxArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscEventTxArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayEventTx;
}
// The remaining typed-array wrappers repeat the exact delegation pattern of
// `AscEventTxArray` above, differing only in element type and type id.
pub struct AscCommitSigArray(pub(crate) Array<AscPtr<AscCommitSig>>);
impl AscType for AscCommitSigArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscCommitSigArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayCommitSig;
}
pub struct AscBytesArray(pub(crate) Array<AscPtr<AscBytes>>);
impl AscType for AscBytesArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscBytesArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayBytes;
}
pub struct AscEventAttributeArray(pub(crate) Array<AscPtr<AscEventAttribute>>);
impl AscType for AscEventAttributeArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscEventAttributeArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayEventAttribute;
}
pub struct AscValidatorArray(pub(crate) Array<AscPtr<AscValidator>>);
impl AscType for AscValidatorArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscValidatorArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayValidator;
}
pub struct AscEvidenceArray(pub(crate) Array<AscPtr<AscEvidence>>);
impl AscType for AscEvidenceArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscEvidenceArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayEvidence;
}
pub struct AscEventArray(pub(crate) Array<AscPtr<AscEvent>>);
impl AscType for AscEventArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscEventArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayEvent;
}
pub struct AscValidatorUpdateArray(pub(crate) Array<AscPtr<AscValidatorUpdate>>);
impl AscType for AscValidatorUpdateArray {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(Array::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscValidatorUpdateArray {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintArrayValidatorUpdate;
}
// Enum wrappers follow the same newtype-for-a-type-id pattern as the array
// wrappers above, delegating to the inner `AscEnum`.
pub struct AscBlockIDFlagEnum(pub(crate) AscEnum<AscBlockIDFlag>);
impl AscType for AscBlockIDFlagEnum {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(AscEnum::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscBlockIDFlagEnum {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintBlockIDFlagEnum;
}
pub struct AscSignedMsgTypeEnum(pub(crate) AscEnum<AscSignedMsgType>);
impl AscType for AscSignedMsgTypeEnum {
    fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {
        self.0.to_asc_bytes()
    }
    fn from_asc_bytes(
        asc_obj: &[u8],
        api_version: &Version,
    ) -> Result<Self, DeterministicHostError> {
        Ok(Self(AscEnum::from_asc_bytes(asc_obj, api_version)?))
    }
}
impl AscIndexId for AscSignedMsgTypeEnum {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintSignedMsgTypeEnum;
}
#[repr(u32)]
#[derive(AscType, Copy, Clone)]
pub(crate) enum AscSignedMsgType {
    // Discriminants follow declaration order (0..=3), so `Unknown` is 0 —
    // consistent with the proto3 convention of 0 meaning "unspecified".
    SignedMsgTypeUnknown,
    SignedMsgTypePrevote,
    SignedMsgTypePrecommit,
    SignedMsgTypeProposal,
}
impl AscValue for AscSignedMsgType {}
impl Default for AscSignedMsgType {
    // Default to the "unknown" variant (discriminant 0).
    fn default() -> Self {
        Self::SignedMsgTypeUnknown
    }
}
#[repr(u32)]
#[derive(AscType, Copy, Clone)]
pub(crate) enum AscBlockIDFlag {
    // Discriminants follow declaration order (0..=3); `Unknown` is 0.
    BlockIdFlagUnknown,
    BlockIdFlagAbsent,
    BlockIdFlagCommit,
    BlockIdFlagNil,
}
impl AscValue for AscBlockIDFlag {}
impl Default for AscBlockIDFlag {
    // Default to the "unknown" variant (discriminant 0).
    fn default() -> Self {
        Self::BlockIdFlagUnknown
    }
}
// ---------------------------------------------------------------------------
// Plain-old-data mirrors of Tendermint messages for the AssemblyScript host.
//
// Every struct is `#[repr(C)]` with a derived `AscType`, so field order and
// sizes define the wire/memory layout shared with the AssemblyScript side.
// Heap-allocated members are referenced through `AscPtr`s. Fields named
// `_padding`/`_padding2` appear to exist purely to keep that layout aligned
// (assumption — confirm against graph-node's Asc layout rules).
// ---------------------------------------------------------------------------
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventData {
    pub event: AscPtr<AscEvent>,
    pub block: AscPtr<AscEventBlock>,
}
impl AscIndexId for AscEventData {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventData;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventList {
    pub new_block: AscPtr<AscEventBlock>,
    pub transaction: AscPtr<AscEventTxArray>,
    pub validator_set_updates: AscPtr<AscEventValidatorSetUpdates>,
}
impl AscIndexId for AscEventList {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventList;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscBlock {
    pub header: AscPtr<AscHeader>,
    pub data: AscPtr<AscData>,
    pub evidence: AscPtr<AscEvidenceList>,
    pub last_commit: AscPtr<AscCommit>,
}
impl AscIndexId for AscBlock {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintBlock;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscBlockID {
    pub hash: AscPtr<AscHash>,
    pub part_set_header: AscPtr<AscPartSetHeader>,
}
impl AscIndexId for AscBlockID {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintBlockID;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscBlockParams {
    pub max_bytes: i64,
    pub max_gas: AscGas,
}
impl AscIndexId for AscBlockParams {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintBlockParams;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscCommit {
    pub height: i64,
    pub round: i32,
    pub block_id: AscPtr<AscBlockID>,
    pub signatures: AscPtr<AscCommitSigArray>,
    pub _padding: u32,
}
impl AscIndexId for AscCommit {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintCommit;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscCommitSig {
    pub block_id_flag: u32,
    pub validator_address: AscPtr<AscHash>,
    pub timestamp: AscPtr<AscTimestamp>,
    pub signature: AscPtr<AscBytes>,
}
impl AscIndexId for AscCommitSig {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintCommitSig;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscConsensus {
    pub block: u64,
    pub app: u64,
}
impl AscIndexId for AscConsensus {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintConsensus;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscConsensusParams {
    pub block: AscPtr<AscBlockParams>,
    pub evidence: AscPtr<AscEvidenceParams>,
    pub validator: AscPtr<AscValidatorParams>,
    pub version: AscPtr<AscVersionParams>,
}
impl AscIndexId for AscConsensusParams {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintConsensusParams;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscData {
    pub txs: AscPtr<AscBytesArray>,
}
impl AscIndexId for AscData {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintData;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscDuration {
    pub seconds: i64,
    pub nanos: i32,
    pub _padding: u32,
}
impl AscIndexId for AscDuration {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintDuration;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscDuplicateVoteEvidence {
    pub vote_a: AscPtr<AscEventVote>,
    pub vote_b: AscPtr<AscEventVote>,
    pub total_voting_power: i64,
    pub validator_power: i64,
    pub timestamp: AscPtr<AscTimestamp>,
}
impl AscIndexId for AscDuplicateVoteEvidence {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintDuplicateVoteEvidence;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEvent {
    pub event_type: AscPtr<AscString>,
    pub attributes: AscPtr<AscEventAttributeArray>,
}
impl AscIndexId for AscEvent {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEvent;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventAttribute {
    pub key: AscPtr<AscString>,
    pub value: AscPtr<AscString>,
    pub index: bool,
    pub _padding: u8,
    pub _padding2: u16,
}
impl AscIndexId for AscEventAttribute {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventAttribute;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventBlock {
    pub block: AscPtr<AscBlock>,
    pub block_id: AscPtr<AscBlockID>,
    pub result_begin_block: AscPtr<AscResponseBeginBlock>,
    pub result_end_block: AscPtr<AscResponseEndBlock>,
}
impl AscIndexId for AscEventBlock {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventBlock;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventTx {
    pub tx_result: AscPtr<AscTxResult>,
}
impl AscIndexId for AscEventTx {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventTx;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventValidatorSetUpdates {
    pub validator_updates: AscPtr<AscValidatorArray>,
}
impl AscIndexId for AscEventValidatorSetUpdates {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId =
        IndexForAscTypeId::TendermintEventValidatorSetUpdates;
}
// More `#[repr(C)]` + derived-`AscType` Tendermint mirrors: field order and
// sizes define the layout shared with the AssemblyScript side, `AscPtr`
// fields reference heap objects, and `_padding` fields appear to exist only
// to keep the layout aligned (assumption — confirm against graph-node's
// Asc layout rules).
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEventVote {
    pub event_vote_type: u32,
    pub height: u64,
    pub round: i32,
    pub block_id: AscPtr<AscBlockID>,
    pub timestamp: AscPtr<AscTimestamp>,
    pub validator_address: AscPtr<AscHash>,
    pub validator_index: i32,
    pub signature: AscPtr<AscBytes>,
}
impl AscIndexId for AscEventVote {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEventVote;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEvidence {
    // Exactly one of these is expected to be set (a protobuf oneof) —
    // presumably; confirm against the conversion code.
    pub duplicate_vote_evidence: AscPtr<AscDuplicateVoteEvidence>,
    pub light_client_attack_evidence: AscPtr<AscLightClientAttackEvidence>,
}
impl AscIndexId for AscEvidence {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEvidence;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEvidenceList {
    pub evidence: AscPtr<AscEvidenceArray>,
}
impl AscIndexId for AscEvidenceList {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEvidenceList;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscEvidenceParams {
    pub max_age_num_blocks: i64,
    pub max_age_duration: AscPtr<AscDuration>,
    pub max_bytes: i64,
}
impl AscIndexId for AscEvidenceParams {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintEvidenceParams;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscHeader {
    pub version: AscPtr<AscConsensus>,
    pub chain_id: AscPtr<AscString>,
    pub height: u64,
    pub time: AscPtr<AscTimestamp>,
    pub last_block_id: AscPtr<AscBlockID>,
    pub last_commit_hash: AscPtr<AscHash>,
    pub data_hash: AscPtr<AscHash>,
    pub validators_hash: AscPtr<AscHash>,
    pub next_validators_hash: AscPtr<AscHash>,
    pub consensus_hash: AscPtr<AscHash>,
    pub app_hash: AscPtr<AscHash>,
    pub last_results_hash: AscPtr<AscHash>,
    pub evidence_hash: AscPtr<AscHash>,
    pub proposer_address: AscPtr<AscHash>,
    pub _padding: u32,
}
impl AscIndexId for AscHeader {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintHeader;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscLightBlock {
    pub signed_header: AscPtr<AscSignedHeader>,
    pub validator_set: AscPtr<AscValidatorSet>,
}
impl AscIndexId for AscLightBlock {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintLightBlock;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscLightClientAttackEvidence {
    pub conflicting_block: AscPtr<AscLightBlock>,
    pub common_height: i64,
    pub byzantine_validators: AscPtr<AscValidatorArray>,
    pub total_voting_power: i64,
    pub timestamp: AscPtr<AscTimestamp>,
}
impl AscIndexId for AscLightClientAttackEvidence {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId =
        IndexForAscTypeId::TendermintLightClientAttackEvidence;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscPublicKey {
    pub ed25519: AscPtr<AscBytes>,
    pub secp256k1: AscPtr<AscBytes>,
}
impl AscIndexId for AscPublicKey {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintPublicKey;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscPartSetHeader {
    pub total: u32,
    pub hash: AscPtr<AscHash>,
}
impl AscIndexId for AscPartSetHeader {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintPartSetHeader;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscResponseBeginBlock {
    pub events: AscPtr<AscEventArray>,
}
impl AscIndexId for AscResponseBeginBlock {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintResponseBeginBlock;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscResponseEndBlock {
    pub validator_updates: AscPtr<AscValidatorUpdateArray>,
    pub consensus_param_updates: AscPtr<AscConsensusParams>,
    pub events: AscPtr<AscEventArray>,
}
impl AscIndexId for AscResponseEndBlock {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintResponseEndBlock;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscResponseDeliverTx {
    pub code: u32,
    pub data: AscPtr<AscBytes>,
    pub log: AscPtr<AscString>,
    pub info: AscPtr<AscString>,
    pub gas_wanted: AscGas,
    pub gas_used: AscGas,
    pub events: AscPtr<AscEventArray>,
    pub codespace: AscPtr<AscString>,
}
impl AscIndexId for AscResponseDeliverTx {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintResponseDeliverTx;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscSignedHeader {
    pub header: AscPtr<AscHeader>,
    pub commit: AscPtr<AscCommit>,
}
impl AscIndexId for AscSignedHeader {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintSignedHeader;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscTimestamp {
    pub seconds: i64,
    pub nanos: i32,
    pub _padding: u32,
}
impl AscIndexId for AscTimestamp {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintTimestamp;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscTxResult {
    pub height: u64,
    pub index: u32,
    pub tx: AscPtr<AscBytes>,
    pub result: AscPtr<AscResponseDeliverTx>,
    pub _padding: u32,
}
impl AscIndexId for AscTxResult {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintTxResult;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscValidator {
    pub address: AscPtr<AscHash>,
    pub pub_key: AscPtr<AscPublicKey>,
    pub voting_power: i64,
    pub proposer_priority: i64,
}
impl AscIndexId for AscValidator {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintValidator;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscValidatorParams {
    pub pub_key_types: AscPtr<Array<AscPtr<AscString>>>,
}
impl AscIndexId for AscValidatorParams {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintValidatorParams;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscValidatorSet {
    pub validators: AscPtr<AscValidatorArray>,
    pub proposer: AscPtr<AscValidator>,
    pub total_voting_power: i64,
}
impl AscIndexId for AscValidatorSet {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintValidatorSet;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscValidatorUpdate {
    pub address: AscPtr<Uint8Array>,
    pub pub_key: AscPtr<AscPublicKey>,
    pub power: i64,
}
impl AscIndexId for AscValidatorUpdate {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintValidatorUpdate;
}
#[repr(C)]
#[derive(AscType)]
pub(crate) struct AscVersionParams {
    pub app_version: u64,
}
impl AscIndexId for AscVersionParams {
    const INDEX_ASC_TYPE_ID: IndexForAscTypeId = IndexForAscTypeId::TendermintVersionParams;
}
2f78295276b3ff24e7bceb800ce81081b3d68de1
| 10,977 |
use std::{
collections::HashSet,
env,
ffi::OsStr,
fs::File,
io::{BufRead, BufReader},
path::{Path, PathBuf},
process::Command,
};
use glob::glob;
use once_cell::sync::{Lazy, OnceCell};
use semver::{Version, VersionReq};
use library::Library;
#[path = "build/cmake_probe.rs"]
mod cmake_probe;
#[path = "build/generator.rs"]
mod generator;
#[path = "build/library.rs"]
mod library;
/// All fallible build-script functions return a boxed error for simplicity.
type Result<T, E = Box<dyn std::error::Error>> = std::result::Result<T, E>;
/// Names of the OpenCV modules that bindings will be generated for;
/// populated once by `make_modules`.
static MODULES: OnceCell<Vec<String>> = OnceCell::new();
// Standard cargo build-script directories, resolved lazily from the environment.
static OUT_DIR: Lazy<PathBuf> = Lazy::new(|| PathBuf::from(env::var_os("OUT_DIR").expect("Can't read OUT_DIR env var")));
static MANIFEST_DIR: Lazy<PathBuf> = Lazy::new(|| PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").expect("Can't read CARGO_MANIFEST_DIR env var")));
static SRC_DIR: Lazy<PathBuf> = Lazy::new(|| MANIFEST_DIR.join("src"));
static SRC_CPP_DIR: Lazy<PathBuf> = Lazy::new(|| MANIFEST_DIR.join("src_cpp"));
// Host triple for cross-compilation of the binding generator; absent when not set.
static HOST_TRIPLE: Lazy<Option<String>> = Lazy::new(|| env::var("HOST_TRIPLE").ok());
// Version requirements used to classify the discovered OpenCV installation
// into one of the three supported branches.
static OPENCV_BRANCH_32: Lazy<VersionReq> = Lazy::new(|| VersionReq::parse("~3.2").expect("Can't parse OpenCV 3.2 version requirement"));
static OPENCV_BRANCH_34: Lazy<VersionReq> = Lazy::new(|| VersionReq::parse("~3.4").expect("Can't parse OpenCV 3.4 version requirement"));
static OPENCV_BRANCH_4: Lazy<VersionReq> = Lazy::new(|| VersionReq::parse("~4").expect("Can't parse OpenCV 4 version requirement"));
/// Environment variables that influence library discovery; each one is
/// registered with `cargo:rerun-if-env-changed` in `setup_rerun`.
static ENV_VARS: [&str; 17] = [
	"OPENCV_PACKAGE_NAME",
	"OPENCV_PKGCONFIG_NAME",
	"OPENCV_CMAKE_NAME",
	"OPENCV_CMAKE_BIN",
	"OPENCV_VCPKG_NAME",
	"OPENCV_LINK_LIBS",
	"OPENCV_LINK_PATHS",
	"OPENCV_INCLUDE_PATHS",
	"OPENCV_DISABLE_PROBES",
	"OPENCV_MODULE_WHITELIST",
	"OPENCV_MODULE_BLACKLIST",
	"OPENCV_CLANG_TARGET",
	"CMAKE_PREFIX_PATH",
	"OpenCV_DIR",
	"PKG_CONFIG_PATH",
	"VCPKG_ROOT",
	"VCPKGRS_DYNAMIC",
];
/// Reduces a library file name to its bare link name.
///
/// Two independent strips are attempted: a trailing library extension
/// (matched case-insensitively, e.g. `libfoo.so` -> `libfoo`) and then a
/// `lib` prefix plus any embedded versioned extension (matched
/// case-sensitively, e.g. `libfoo.so.4.5` -> `foo`).
///
/// Returns `Some(cleaned_name)` when at least one strip happened, `None`
/// when the name was left untouched (i.e. it doesn't look like a library).
fn cleanup_lib_filename(filename: &OsStr) -> Option<&OsStr> {
	// Extensions are stored dot-delimited on both sides so they can serve both
	// as exact-extension matches (with the dots trimmed) and as infix markers.
	const LIB_EXTS: [&str; 7] = [".so.", ".a.", ".dll.", ".lib.", ".dylib.", ".framework.", ".tbd."];
	let mut path = Path::new(filename);
	let mut changed = false;
	// Stage 1: drop a trailing library extension, case-insensitively.
	let has_lib_ext = path
		.extension()
		.and_then(OsStr::to_str)
		.map_or(false, |ext| {
			LIB_EXTS.iter().any(|known| known.trim_matches('.').eq_ignore_ascii_case(ext))
		});
	if has_lib_ext {
		if let Some(stem) = path.file_stem() {
			path = Path::new(stem);
			changed = true;
		}
	}
	// Stage 2: drop a leading "lib" and anything from an embedded versioned
	// extension (e.g. ".so.4.5") onwards.
	if let Some(name) = path.file_name().and_then(OsStr::to_str) {
		let mut trimmed = name.strip_prefix("lib").unwrap_or(name);
		for marker in LIB_EXTS.iter() {
			if let Some(pos) = trimmed.find(marker) {
				trimmed = &trimmed[..pos];
			}
		}
		if trimmed.len() != name.len() {
			changed = true;
			path = Path::new(trimmed);
		}
	}
	if changed {
		Some(path.as_os_str())
	} else {
		None
	}
}
/// Locates OpenCV's `version.hpp` under the supplied include directory.
///
/// The regular layout (`opencv2/core/version.hpp`) is preferred; the macOS
/// framework layout (`Headers/core/version.hpp`) is tried as a fallback.
/// Returns `None` when neither candidate exists as a file.
fn get_version_header(header_dir: &Path) -> Option<PathBuf> {
	["opencv2/core/version.hpp", "Headers/core/version.hpp"]
		.iter()
		.map(|relative| header_dir.join(relative))
		.find(|candidate| candidate.is_file())
}
/// Extracts the OpenCV version by scanning `version.hpp` for the
/// `#define CV_VERSION_{MAJOR,MINOR,REVISION}` lines.
///
/// Returns `None` when the header can't be located or opened. When the header
/// opens but the three defines aren't all found/parsable, `0.0.0` is returned
/// instead of `None` — NOTE(review): this fallback looks deliberate (a 0.0.0
/// version will later fail the branch check with a clearer message), but
/// worth confirming.
fn get_version_from_headers(header_dir: &Path) -> Option<Version> {
	let version_hpp = get_version_header(header_dir)?;
	let mut major = None;
	let mut minor = None;
	let mut revision = None;
	// Single reusable line buffer, cleared at the end of every iteration.
	let mut line = String::with_capacity(256);
	let mut reader = BufReader::new(File::open(version_hpp).ok()?);
	while let Ok(bytes_read) = reader.read_line(&mut line) {
		if bytes_read == 0 {
			// EOF
			break;
		}
		if let Some(line) = line.strip_prefix("#define CV_VERSION_") {
			// Expected shape: "<SPEC> <NUMBER>", e.g. "MAJOR 4".
			let mut parts = line.split_whitespace();
			if let (Some(ver_spec), Some(version)) = (parts.next(), parts.next()) {
				match ver_spec {
					"MAJOR" => {
						major = Some(version.to_string());
					}
					"MINOR" => {
						minor = Some(version.to_string());
					}
					"REVISION" => {
						revision = Some(version.to_string());
					}
					_ => {}
				}
			}
			// Stop scanning as soon as all three components were seen.
			if major.is_some() && minor.is_some() && revision.is_some() {
				break;
			}
		}
		line.clear();
	}
	if let (Some(major), Some(minor), Some(revision)) = (major, minor, revision) {
		Some(Version::new(major.parse().ok()?, minor.parse().ok()?, revision.parse().ok()?))
	} else {
		Some(Version::new(0, 0, 0))
	}
}
/// Discovers the OpenCV modules to generate bindings for by listing the
/// `*.hpp` files of the given `opencv2` include directory, then stores the
/// resulting names in the global `MODULES` cell.
///
/// The set can be narrowed via the `OPENCV_MODULE_WHITELIST` and
/// `OPENCV_MODULE_BLACKLIST` environment variables (comma-separated module
/// names); a few internal/unsupported modules are always excluded.
///
/// Returns an error when the directory path is not valid UTF-8 or globbing
/// fails; panics if `MODULES` was already set.
fn make_modules(opencv_dir: &Path) -> Result<()> {
	// Modules that must never get bindings generated.
	let ignore_modules: HashSet<&'static str> = [
		"core_detect",
		"cudalegacy",
		"cudev",
		"gapi",
		"opencv",
		"opencv_modules",
	].iter().copied().collect();
	let env_whitelist = env::var("OPENCV_MODULE_WHITELIST").ok();
	let env_whitelist = env_whitelist.as_ref()
		.map(|wl| wl.split(',')
			.map(|e| e.trim())
			.collect::<HashSet<_>>()
		);
	let env_blacklist = env::var("OPENCV_MODULE_BLACKLIST").ok();
	let env_blacklist = env_blacklist.as_ref()
		.map(|bl| bl.split(',')
			.map(|e| e.trim())
			.collect::<HashSet<_>>()
		);
	// Every `<module>.hpp` directly inside the directory names a module.
	let modules: Vec<String> = glob(&format!("{}/*.hpp", opencv_dir.to_str().ok_or("Can't convert OpenCV header directory to UTF-8 string")?))?
		.filter_map(|entry| {
			let entry = entry.expect("Can't get path for module file");
			let module = entry.file_stem()
				.and_then(OsStr::to_str).expect("Can't calculate file stem");
			Some(module)
				.filter(|m| !ignore_modules.contains(m))
				.filter(|m| env_blacklist.as_ref().map_or(true, |bl| !bl.contains(m)))
				.filter(|m| env_whitelist.as_ref().map_or(true, |wl| wl.contains(m)))
				.map(str::to_string)
		})
		.collect();
	MODULES.set(modules).expect("Can't set MODULES cache");
	Ok(())
}
/// Creates a pre-configured C++ compiler invocation for building the glue
/// code, with include paths for the generated sources, the hand-written
/// `src_cpp` sources and the discovered OpenCV headers.
///
/// Warning-suppression and language-standard flags are added per toolchain;
/// flag order is kept as-is since it can matter to the compiler driver.
fn build_compiler(opencv: &Library) -> cc::Build {
	let mut out = cc::Build::new();
	out.cpp(true)
		.include(&*SRC_CPP_DIR)
		.include(&*OUT_DIR)
		.include(".")
		.flag_if_supported("-Wno-class-memaccess")
		.flag_if_supported("-Wno-deprecated-declarations")
		.flag_if_supported("-Wno-deprecated-copy")
		.flag_if_supported("-Wno-unused-variable")
		.flag_if_supported("-Wno-return-type-c-linkage")
	;
	// Every include path reported by the library probe is forwarded.
	opencv.include_paths.iter().for_each(|p| { out.include(p); });
	if cfg!(target_env = "msvc") {
		out.flag_if_supported("-std:c++latest")
			.flag_if_supported("-wd4996")
			.flag_if_supported("-wd5054") // deprecated between enumerations of different types
			.flag_if_supported("-wd4190") // has C-linkage specified, but returns UDT 'Result<cv::Rect_<int>>' which is incompatible with C
			.flag_if_supported("-EHsc")
			.flag_if_supported("-bigobj")
			.pic(false)
		;
	} else {
		out.flag("-std=c++11")
			.flag_if_supported("-Wa,-mbig-obj")
		;
	}
	out
}
/// Registers rerun triggers with cargo: every relevant environment variable
/// and every `.cpp`/`.hpp` file in `src_cpp`.
///
/// Errors from reading the `src_cpp` directory are now propagated via `?`
/// instead of panicking with `unwrap` — the function already returns
/// `Result`, so a transient I/O failure surfaces as a proper build error.
fn setup_rerun() -> Result<()> {
	for &v in ENV_VARS.iter() {
		println!("cargo:rerun-if-env-changed={}", v);
	}
	let include_exts = &[OsStr::new("cpp"), OsStr::new("hpp")];
	for entry in SRC_CPP_DIR.read_dir()? {
		let path = entry?.path();
		if path.is_file() && path.extension().map_or(false, |e| include_exts.contains(&e)) {
			// Non-UTF-8 paths are silently skipped; cargo directives are text.
			if let Some(path) = path.to_str() {
				println!("cargo:rerun-if-changed={}", path);
			}
		}
	}
	Ok(())
}
/// Compiles the generated C++ glue code into the `ocvrs` static library and
/// emits an `ocvrs_has_module_*` cfg flag for every enabled module.
fn build_wrapper(opencv: &Library) {
	let mut cc = build_compiler(opencv);
	let modules = MODULES.get().expect("MODULES not initialized");
	// The "sys" and "types" pseudo-modules are always present.
	for special in ["sys", "types"].iter() {
		println!("cargo:rustc-cfg=ocvrs_has_module_{}", special);
	}
	for module in modules.iter() {
		println!("cargo:rustc-cfg=ocvrs_has_module_{}", module);
		cc.file(OUT_DIR.join(format!("{}.cpp", module)));
		// Hand-written additions to the generated code, when present.
		let manual_cpp = SRC_CPP_DIR.join(format!("manual-{}.cpp", module));
		if manual_cpp.exists() {
			cc.file(manual_cpp);
		}
	}
	cc.compile("ocvrs");
}
/// Build-script entry point: probes the installed OpenCV, selects the branch
/// cfg, discovers modules, generates bindings, compiles the glue code and
/// emits linker metadata.
fn main() -> Result<()> {
	if cfg!(feature = "docs-only") { // fake setup for docs.rs
		// docs.rs has no OpenCV installed; pretend to be branch 4 and enable
		// a module cfg for every pre-generated hub file shipped in-tree.
		println!(r#"cargo:rustc-cfg=ocvrs_opencv_branch_4"#);
		for entry in SRC_DIR.join("opencv/hub").read_dir().expect("Can't read hub dir") {
			let entry = entry.expect("Can't read directory entry");
			let path = entry.path();
			if entry.file_type().map(|f| f.is_file()).unwrap_or(false)
				&& path.extension().map_or(false, |e| e == "rs") {
				if let Some(module) = path.file_stem().and_then(OsStr::to_str) {
					println!("cargo:rustc-cfg=ocvrs_has_module_{}", module);
				}
			}
		}
		return Ok(());
	}
	let generator_build = if cfg!(feature = "clang-runtime") { // start building binding generator as early as possible
		let cargo_bin = PathBuf::from(env::var_os("CARGO").unwrap_or_else(|| "cargo".into()));
		let mut cargo = Command::new(cargo_bin);
		// generator script is quite slow in debug mode, so we force it to be built in release mode
		cargo.args(&["build", "--release", "--package", "opencv-binding-generator", "--bin", "binding-generator"])
			.env("CARGO_TARGET_DIR", &*OUT_DIR);
		if let Some(host_triple) = HOST_TRIPLE.as_ref() {
			cargo.args(&["--target", host_triple]);
		}
		println!("running: {:?}", &cargo);
		// The child process runs concurrently with the probing below.
		Some(cargo.spawn()?)
	} else {
		None
	};
	// Diagnostic dump of everything that influences the probe outcome.
	eprintln!("=== Crate version: {:?}", env::var_os("CARGO_PKG_VERSION"));
	eprintln!("=== Environment configuration:");
	for &v in ENV_VARS.iter() {
		eprintln!("=== {} = {:?}", v, env::var_os(v));
	}
	eprintln!("=== Enabled features:");
	// Cargo exposes enabled features as CARGO_FEATURE_<NAME>=1 variables.
	let features = env::vars()
		.filter_map(|(mut name, val)| {
			if val != "1" {
				return None;
			}
			const PREFIX: &str = "CARGO_FEATURE_";
			if name.starts_with(PREFIX) {
				name.drain(..PREFIX.len());
				Some(name)
			} else {
				None
			}
		});
	for feature in features {
		eprintln!("=== {}", feature);
	}
	let opencv = Library::probe()?;
	eprintln!("=== OpenCV library configuration: {:#?}", opencv);
	// Translate the discovered version into exactly one branch cfg flag.
	if OPENCV_BRANCH_4.matches(&opencv.version) {
		println!("cargo:rustc-cfg=ocvrs_opencv_branch_4");
	} else if OPENCV_BRANCH_34.matches(&opencv.version) {
		println!("cargo:rustc-cfg=ocvrs_opencv_branch_34");
	} else if OPENCV_BRANCH_32.matches(&opencv.version) {
		println!("cargo:rustc-cfg=ocvrs_opencv_branch_32");
	} else {
		panic!("Unsupported OpenCV version: {}, must be from 3.2, 3.4 or 4.x branch", opencv.version);
	}
	// The first include path that actually contains version.hpp wins.
	let opencv_header_dir = opencv.include_paths.iter()
		.find(|p| get_version_header(p).is_some())
		.expect("Discovered OpenCV include paths is empty or contains non-existent paths");
	make_modules(&opencv_header_dir.join("opencv2"))?;
	// Sanity check: the headers must describe the same version as the
	// library we link against, otherwise the bindings would be mismatched.
	if let Some(header_version) = get_version_from_headers(opencv_header_dir) {
		if header_version != opencv.version {
			panic!(
				"Version from the headers: {} (at {}) doesn't match version of the OpenCV library: {} (include paths: {:?})",
				header_version,
				opencv_header_dir.display(),
				opencv.version,
				opencv.include_paths,
			);
		}
		eprintln!("=== Found OpenCV version: {} in headers located at: {}", header_version, opencv_header_dir.display());
	} else {
		panic!("Unable to find OpenCV version in headers located at: {}", opencv_header_dir.display())
	}
	setup_rerun()?;
	generator::gen_wrapper(opencv_header_dir, &opencv, generator_build)?;
	build_wrapper(&opencv);
	// -l linker args should be emitted after -l static
	opencv.emit_cargo_metadata();
	Ok(())
}
| 32.190616 | 148 | 0.663478 |
d7aa71c877e419f1378918d6e900cddfc1d3d614
| 16,678 |
use std::cell::RefCell;
use std::cmp;
use std::convert::TryInto;
use std::vec::IntoIter as VecIntoIter;
extern crate oboe;
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
PlayStreamError, Sample, SampleFormat, SampleRate, StreamConfig, StreamError,
SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
SupportedStreamConfigsError,
};
mod android_media;
mod convert;
mod input_callback;
mod output_callback;
use self::android_media::{get_audio_record_min_buffer_size, get_audio_track_min_buffer_size};
use self::input_callback::CpalInputCallback;
use self::output_callback::CpalOutputCallback;
// Android Java API supports up to 8 channels, but oboe API
// only exposes mono and stereo.
/// Java-side channel masks, indexed by channel count minus one
/// (index 0 = mono, index 1 = stereo).
const CHANNEL_MASKS: [i32; 2] = [
    android_media::CHANNEL_OUT_MONO,
    android_media::CHANNEL_OUT_STEREO,
];
/// Sample rates probed when a device doesn't report its own supported rates.
const SAMPLE_RATES: [i32; 13] = [
    5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176400, 192000,
];
/// The Android (oboe-backed) cpal host.
pub struct Host;
/// An audio device; `None` stands for the platform default device.
pub struct Device(Option<oboe::AudioDeviceInfo>);
/// An open oboe audio stream; `RefCell` because the trait methods take `&self`.
pub struct Stream(Box<RefCell<dyn oboe::AudioStream>>);
pub type SupportedInputConfigs = VecIntoIter<SupportedStreamConfigRange>;
pub type SupportedOutputConfigs = VecIntoIter<SupportedStreamConfigRange>;
pub type Devices = VecIntoIter<Device>;
impl Host {
pub fn new() -> Result<Self, crate::HostUnavailable> {
Ok(Host)
}
}
impl HostTrait for Host {
    type Devices = Devices;
    type Device = Device;
    // Oboe ships with every Android system image, so the host always exists.
    fn is_available() -> bool {
        true
    }
    /// Enumerates devices via the Java `AudioDeviceInfo` API; when
    /// enumeration is unavailable, falls back to the single default device.
    fn devices(&self) -> Result<Self::Devices, DevicesError> {
        if let Ok(devices) = oboe::AudioDeviceInfo::request(oboe::AudioDeviceDirection::InputOutput)
        {
            Ok(devices
                .into_iter()
                .map(|d| Device(Some(d)))
                .collect::<Vec<_>>()
                .into_iter())
        } else {
            Ok(vec![Device(None)].into_iter())
        }
    }
    /// First enumerated input device, or the unnamed default device.
    fn default_input_device(&self) -> Option<Self::Device> {
        if let Ok(devices) = oboe::AudioDeviceInfo::request(oboe::AudioDeviceDirection::Input) {
            devices.into_iter().map(|d| Device(Some(d))).next()
        } else {
            Some(Device(None))
        }
    }
    /// Always the unnamed default device. The enumeration variant is kept
    /// commented out deliberately — NOTE(review): presumably picking the
    /// first enumerated output device misbehaved; confirm before re-enabling.
    fn default_output_device(&self) -> Option<Self::Device> {
        //if let Ok(devices) = oboe::AudioDeviceInfo::request(oboe::AudioDeviceDirection::Output) {
        //    devices.into_iter().map(|d| Device(Some(d))).next()
        //} else {
        Some(Device(None))
        //}
    }
}
/// Queries the Android `getMinBufferSize` API for the given stream
/// parameters and converts the answer into a `SupportedBufferSize`.
///
/// A non-positive result from the Java side means the combination is
/// unsupported or the minimum is unknown, which maps to `Unknown`.
fn buffer_size_range_for_params(
    is_output: bool,
    sample_rate: i32,
    channel_mask: i32,
    android_format: i32,
) -> SupportedBufferSize {
    let min_size = if is_output {
        get_audio_track_min_buffer_size(sample_rate, channel_mask, android_format)
    } else {
        get_audio_record_min_buffer_size(sample_rate, channel_mask, android_format)
    };
    if min_size <= 0 {
        return SupportedBufferSize::Unknown;
    }
    SupportedBufferSize::Range {
        min: min_size as u32,
        max: i32::MAX as u32,
    }
}
/// Builds the supported-config list for the default device by brute-forcing
/// every format/channel/sample-rate combination through `getMinBufferSize`
/// (Android exposes no direct capability query for the default device).
fn default_supported_configs(is_output: bool) -> VecIntoIter<SupportedStreamConfigRange> {
    const FORMATS: [SampleFormat; 2] = [SampleFormat::I16, SampleFormat::F32];
    let mut supported = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_MASKS.len() * FORMATS.len());
    for &sample_format in FORMATS.iter() {
        let android_format = match sample_format {
            SampleFormat::I16 => android_media::ENCODING_PCM_16BIT,
            _ => android_media::ENCODING_PCM_FLOAT,
        };
        // CHANNEL_MASKS is indexed by channel count minus one.
        for (mask_idx, &channel_mask) in CHANNEL_MASKS.iter().enumerate() {
            let channels = (mask_idx + 1) as u16;
            for &sample_rate in SAMPLE_RATES.iter() {
                let probed = buffer_size_range_for_params(
                    is_output,
                    sample_rate,
                    channel_mask,
                    android_format,
                );
                // Combinations whose minimum buffer size is unknown are
                // treated as unsupported and skipped.
                if let SupportedBufferSize::Range { min, max } = probed {
                    supported.push(SupportedStreamConfigRange {
                        channels,
                        min_sample_rate: SampleRate(sample_rate as u32),
                        max_sample_rate: SampleRate(sample_rate as u32),
                        buffer_size: SupportedBufferSize::Range { min, max },
                        sample_format,
                    });
                }
            }
        }
    }
    supported.into_iter()
}
/// Builds the supported-config list for a concrete enumerated device,
/// preferring the capability lists the device reports and falling back to
/// the generic probe tables when a list is empty.
fn device_supported_configs(
    device: &oboe::AudioDeviceInfo,
    is_output: bool,
) -> VecIntoIter<SupportedStreamConfigRange> {
    let sample_rates = if !device.sample_rates.is_empty() {
        device.sample_rates.as_slice()
    } else {
        &SAMPLE_RATES
    };
    const ALL_CHANNELS: [i32; 2] = [1, 2];
    let channel_counts = if !device.channel_counts.is_empty() {
        device.channel_counts.as_slice()
    } else {
        &ALL_CHANNELS
    };
    const ALL_FORMATS: [oboe::AudioFormat; 2] = [oboe::AudioFormat::I16, oboe::AudioFormat::F32];
    let formats = if !device.formats.is_empty() {
        device.formats.as_slice()
    } else {
        &ALL_FORMATS
    };
    let mut output = Vec::with_capacity(sample_rates.len() * channel_counts.len() * formats.len());
    for sample_rate in sample_rates {
        for channel_count in channel_counts {
            // A zero/negative channel count from the device would be a bug.
            assert!(*channel_count > 0);
            if *channel_count > 2 {
                // could be supported by the device, but oboe does not support more than 2 channels
                continue;
            }
            let channel_mask = CHANNEL_MASKS[*channel_count as usize - 1];
            for format in formats {
                // Map the oboe format onto the matching Java encoding
                // constant and cpal sample format.
                let (android_format, sample_format) = match format {
                    oboe::AudioFormat::I16 => {
                        (android_media::ENCODING_PCM_16BIT, SampleFormat::I16)
                    }
                    oboe::AudioFormat::F32 => {
                        (android_media::ENCODING_PCM_FLOAT, SampleFormat::F32)
                    }
                    _ => panic!("Unexpected format"),
                };
                let buffer_size = buffer_size_range_for_params(
                    is_output,
                    *sample_rate,
                    channel_mask,
                    android_format,
                );
                output.push(SupportedStreamConfigRange {
                    channels: cmp::min(*channel_count as u16, 2u16),
                    min_sample_rate: SampleRate(*sample_rate as u32),
                    max_sample_rate: SampleRate(*sample_rate as u32),
                    buffer_size,
                    sample_format,
                });
            }
        }
    }
    output.into_iter()
}
/// Applies the device id, sample rate and requested buffer size from a cpal
/// `StreamConfig` onto an oboe stream builder, returning the updated builder.
fn configure_for_device<D, C, I>(
    builder: oboe::AudioStreamBuilder<D, C, I>,
    device: &Device,
    config: &StreamConfig,
) -> oboe::AudioStreamBuilder<D, C, I> {
    // `None` means the default device, for which no id is set.
    let mut builder = if let Some(info) = &device.0 {
        builder.set_device_id(info.id)
    } else {
        builder
    };
    // NOTE(review): panics if the sample rate exceeds i32::MAX — presumably
    // unreachable for real audio rates, but worth confirming.
    builder = builder.set_sample_rate(config.sample_rate.0.try_into().unwrap());
    match &config.buffer_size {
        BufferSize::Default => builder,
        BufferSize::Fixed(size) => builder.set_buffer_capacity_in_frames(*size as i32),
    }
}
/// Opens an oboe input stream for the given device/config, wiring the user's
/// data and error callbacks through `CpalInputCallback`.
fn build_input_stream<D, E, C, T>(
    device: &Device,
    config: &StreamConfig,
    data_callback: D,
    error_callback: E,
    builder: oboe::AudioStreamBuilder<oboe::Input, C, T>,
) -> Result<Stream, BuildStreamError>
where
    T: Sample + oboe::IsFormat + Send + 'static,
    C: oboe::IsChannelCount + Send + 'static,
    (T, C): oboe::IsFrameType,
    D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
    E: FnMut(StreamError) + Send + 'static,
{
    let builder = configure_for_device(builder, device, config);
    // `?` converts the oboe open error into a `BuildStreamError`.
    let stream = builder
        .set_callback(CpalInputCallback::<T, C>::new(
            data_callback,
            error_callback,
            config.sample_rate,
        ))
        .open_stream()?;
    Ok(Stream(Box::new(RefCell::new(stream))))
}
/// Opens an oboe output stream for the given device/config, wiring the
/// user's data and error callbacks through `CpalOutputCallback`.
/// Mirrors `build_input_stream` with the output direction and callbacks.
fn build_output_stream<D, E, C, T>(
    device: &Device,
    config: &StreamConfig,
    data_callback: D,
    error_callback: E,
    builder: oboe::AudioStreamBuilder<oboe::Output, C, T>,
) -> Result<Stream, BuildStreamError>
where
    T: Sample + oboe::IsFormat + Send + 'static,
    C: oboe::IsChannelCount + Send + 'static,
    (T, C): oboe::IsFrameType,
    D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
    E: FnMut(StreamError) + Send + 'static,
{
    let builder = configure_for_device(builder, device, config);
    // `?` converts the oboe open error into a `BuildStreamError`.
    let stream = builder
        .set_callback(CpalOutputCallback::<T, C>::new(
            data_callback,
            error_callback,
            config.sample_rate,
        ))
        .open_stream()?;
    Ok(Stream(Box::new(RefCell::new(stream))))
}
impl DeviceTrait for Device {
    type SupportedInputConfigs = SupportedInputConfigs;
    type SupportedOutputConfigs = SupportedOutputConfigs;
    type Stream = Stream;
    /// The device's product name, or "default" for the unnamed default device.
    fn name(&self) -> Result<String, DeviceNameError> {
        match &self.0 {
            None => Ok("default".to_owned()),
            Some(info) => Ok(info.product_name.clone()),
        }
    }
    fn supported_input_configs(
        &self,
    ) -> Result<Self::SupportedInputConfigs, SupportedStreamConfigsError> {
        if let Some(info) = &self.0 {
            Ok(device_supported_configs(info, false))
        } else {
            Ok(default_supported_configs(false))
        }
    }
    fn supported_output_configs(
        &self,
    ) -> Result<Self::SupportedOutputConfigs, SupportedStreamConfigsError> {
        if let Some(info) = &self.0 {
            Ok(device_supported_configs(info, true))
        } else {
            Ok(default_supported_configs(true))
        }
    }
    /// Picks the "best" input config by cpal's default heuristics, with the
    /// maximum sample rate of the winning range.
    fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
        let mut configs: Vec<_> = self.supported_input_configs().unwrap().collect();
        configs.sort_by(|a, b| b.cmp_default_heuristics(a));
        let config = configs
            .into_iter()
            .next()
            .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?
            .with_max_sample_rate();
        Ok(config)
    }
    /// Output counterpart of `default_input_config`.
    fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError> {
        let mut configs: Vec<_> = self.supported_output_configs().unwrap().collect();
        configs.sort_by(|a, b| b.cmp_default_heuristics(a));
        let config = configs
            .into_iter()
            .next()
            .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?
            .with_max_sample_rate();
        Ok(config)
    }
    /// Dispatches on sample format and channel count to the statically-typed
    /// oboe builders. Only I16/F32 and mono/stereo are representable in oboe,
    /// so everything else is rejected with a backend-specific error.
    fn build_input_stream_raw<D, E>(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        match sample_format {
            SampleFormat::I16 => {
                let builder = oboe::AudioStreamBuilder::default()
                    .set_input()
                    .set_format::<i16>();
                if config.channels == 1 {
                    build_input_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_mono(),
                    )
                } else if config.channels == 2 {
                    build_input_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_stereo(),
                    )
                } else {
                    Err(BackendSpecificError {
                        description: "More than 2 channels are not supported by Oboe.".to_owned(),
                    }
                    .into())
                }
            }
            // NOTE(review): this branch duplicates the I16 one except for the
            // format type parameter; the duplication is forced by oboe's
            // type-level format encoding.
            SampleFormat::F32 => {
                let builder = oboe::AudioStreamBuilder::default()
                    .set_input()
                    .set_format::<f32>();
                if config.channels == 1 {
                    build_input_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_mono(),
                    )
                } else if config.channels == 2 {
                    build_input_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_stereo(),
                    )
                } else {
                    Err(BackendSpecificError {
                        description: "More than 2 channels are not supported by Oboe.".to_owned(),
                    }
                    .into())
                }
            }
            SampleFormat::U16 => Err(BackendSpecificError {
                description: "U16 format is not supported on Android.".to_owned(),
            }
            .into()),
        }
    }
    /// Output counterpart of `build_input_stream_raw`; same dispatch shape.
    fn build_output_stream_raw<D, E>(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        match sample_format {
            SampleFormat::I16 => {
                let builder = oboe::AudioStreamBuilder::default()
                    .set_output()
                    .set_format::<i16>();
                if config.channels == 1 {
                    build_output_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_mono(),
                    )
                } else if config.channels == 2 {
                    build_output_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_stereo(),
                    )
                } else {
                    Err(BackendSpecificError {
                        description: "More than 2 channels are not supported by Oboe.".to_owned(),
                    }
                    .into())
                }
            }
            SampleFormat::F32 => {
                let builder = oboe::AudioStreamBuilder::default()
                    .set_output()
                    .set_format::<f32>();
                if config.channels == 1 {
                    build_output_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_mono(),
                    )
                } else if config.channels == 2 {
                    build_output_stream(
                        self,
                        config,
                        data_callback,
                        error_callback,
                        builder.set_stereo(),
                    )
                } else {
                    Err(BackendSpecificError {
                        description: "More than 2 channels are not supported by Oboe.".to_owned(),
                    }
                    .into())
                }
            }
            SampleFormat::U16 => Err(BackendSpecificError {
                description: "U16 format is not supported on Android.".to_owned(),
            }
            .into()),
        }
    }
}
impl StreamTrait for Stream {
    /// Asks oboe to start (or resume) the underlying audio stream.
    fn play(&self) -> Result<(), PlayStreamError> {
        let mut stream = self.0.borrow_mut();
        stream.request_start().map_err(PlayStreamError::from)
    }
    /// Asks oboe to stop the underlying audio stream.
    fn pause(&self) -> Result<(), PauseStreamError> {
        let mut stream = self.0.borrow_mut();
        stream.request_stop().map_err(PauseStreamError::from)
    }
}
| 33.967413 | 100 | 0.537475 |
0165b89d5d6fba826f4df8899d0d4e25f58edf95
| 1,738 |
//! FBX DOM library.
//!
//! # Creating DOM
//!
//! ## Easy setup (recommended)
//!
//! If you don't care about low-level features (such as precise FBX version and
//! parser warning handling), you can use easy setup using [`any`] module.
//!
//! ```no_run
//! use fbxcel_dom::any::AnyDocument;
//!
//! let file = std::fs::File::open("sample.fbx").expect("Failed to open file");
//! // You can also use raw `file`, but do buffering for better efficiency.
//! let reader = std::io::BufReader::new(file);
//!
//! // Use `from_seekable_reader` for readers implementing `std::io::Seek`.
//! // To use readers without `std::io::Seek` implementation, use `from_reader`
//! // instead.
//! match AnyDocument::from_seekable_reader(reader).expect("Failed to load document") {
//! AnyDocument::V7400(fbx_ver, doc) => {
//! // You got a document. You can do what you want.
//! }
//! // `AnyDocument` is nonexhaustive.
//! // You should handle unknown document versions case.
//! _ => panic!("Got FBX document of unsupported version"),
//! }
//! ```
//!
//! ## Manual setup
//!
//! You can create a parser or a tree by yourself, and use appropriate loader to
//! load the document from it.
//!
//! For example:
//!
//! * From `tree: fbxcel::tree::v7400::Tree`, you can create the document by
//! `fbxcel_dom::v7400::Loader::load_from_tree(tree)`.
//! * From `parser: fbxcel::pull_parser::v7400::Parser`, you can create the
//! document by `fbxcel_dom::v7400::Loader::load_from_parser(&mut parser)`.
//!
//! For details, see the documentation of the respective loaders.
//!
//! [`any`]: any/index.html
#![forbid(unsafe_code)]
#![warn(missing_docs)]
#![warn(clippy::missing_docs_in_private_items)]
pub use fbxcel;
pub mod any;
pub mod v7400;
| 32.792453 | 87 | 0.649597 |
011a0e637d59e9d41fee699b116c883b83736c7d
| 1,750 |
use is_macro::Is;
use swc_common::{ast_node, Span};
pub use self::{
charset::*, color_profile::*, counter_style::*, document::*, font_face::*, import::*,
keyframe::*, layer::*, media::*, namespace::*, page::*, property::*, support::*, viewport::*,
};
use crate::{DashedIdent, Ident, SimpleBlock, Value};
mod charset;
mod color_profile;
mod counter_style;
mod document;
mod font_face;
mod import;
mod keyframe;
mod layer;
mod media;
mod namespace;
mod page;
mod property;
mod support;
mod viewport;
/// A parsed CSS at-rule (`@charset`, `@media`, `@supports`, …).
///
/// Each variant wraps the dedicated AST node for one recognized rule; rules
/// the parser doesn't recognize are preserved in [`AtRule::Unknown`].
#[ast_node]
#[derive(Is)]
pub enum AtRule {
    #[tag("CharsetRule")]
    Charset(CharsetRule),
    #[tag("ImportRule")]
    Import(ImportRule),
    #[tag("FontFaceRule")]
    FontFace(FontFaceRule),
    #[tag("KeyframesRule")]
    Keyframes(KeyframesRule),
    #[tag("LayerRule")]
    Layer(LayerRule),
    #[tag("MediaRule")]
    Media(MediaRule),
    #[tag("SupportsRule")]
    Supports(SupportsRule),
    #[tag("PageRule")]
    Page(PageRule),
    #[tag("PageMarginRule")]
    PageMargin(PageMarginRule),
    #[tag("NamespaceRule")]
    Namespace(NamespaceRule),
    #[tag("ViewportRule")]
    Viewport(ViewportRule),
    #[tag("DocumentRule")]
    Document(DocumentRule),
    #[tag("ColorProfileRule")]
    ColorProfile(ColorProfileRule),
    #[tag("CounterStyleRule")]
    CounterStyle(CounterStyleRule),
    #[tag("PropertyRule")]
    Property(PropertyRule),
    #[tag("UnknownAtRule")]
    Unknown(UnknownAtRule),
}
/// The name of an at-rule: either a `--dashed-ident` or a plain identifier.
#[ast_node]
pub enum AtRuleName {
    #[tag("DashedIdent")]
    DashedIdent(DashedIdent),
    #[tag("Ident")]
    Ident(Ident),
}
/// An at-rule the parser doesn't recognize, kept verbatim: its name, the
/// raw prelude values, and the optional `{ … }` block.
#[ast_node("UnknownAtRule")]
pub struct UnknownAtRule {
    pub span: Span,
    pub name: AtRuleName,
    pub prelude: Vec<Value>,
    pub block: Option<SimpleBlock>,
}
| 18.817204 | 97 | 0.645714 |
e65e8ca5fbd837220c7b69fd66ca9a5b283a91fe
| 58,838 |
reactions {
# (1) no decrement, no increment
{ { p0; },{ dec; inc; } -> { p0; } };
{ { p1; },{ dec; inc; } -> { p1; } };
{ { p2; },{ dec; inc; } -> { p2; } };
{ { p3; },{ dec; inc; } -> { p3; } };
{ { p4; },{ dec; inc; } -> { p4; } };
{ { p5; },{ dec; inc; } -> { p5; } };
{ { p6; },{ dec; inc; } -> { p6; } };
{ { p7; },{ dec; inc; } -> { p7; } };
{ { p8; },{ dec; inc; } -> { p8; } };
{ { p9; },{ dec; inc; } -> { p9; } };
{ { p10; },{ dec; inc; } -> { p10; } };
{ { p11; },{ dec; inc; } -> { p11; } };
{ { p12; },{ dec; inc; } -> { p12; } };
{ { p13; },{ dec; inc; } -> { p13; } };
{ { p14; },{ dec; inc; } -> { p14; } };
{ { p15; },{ dec; inc; } -> { p15; } };
{ { p16; },{ dec; inc; } -> { p16; } };
{ { p17; },{ dec; inc; } -> { p17; } };
{ { p18; },{ dec; inc; } -> { p18; } };
{ { p19; },{ dec; inc; } -> { p19; } };
{ { p20; },{ dec; inc; } -> { p20; } };
{ { p21; },{ dec; inc; } -> { p21; } };
{ { p22; },{ dec; inc; } -> { p22; } };
{ { p23; },{ dec; inc; } -> { p23; } };
{ { p24; },{ dec; inc; } -> { p24; } };
{ { p25; },{ dec; inc; } -> { p25; } };
{ { p26; },{ dec; inc; } -> { p26; } };
{ { p27; },{ dec; inc; } -> { p27; } };
{ { p28; },{ dec; inc; } -> { p28; } };
{ { p29; },{ dec; inc; } -> { p29; } };
{ { p30; },{ dec; inc; } -> { p30; } };
{ { p31; },{ dec; inc; } -> { p31; } };
# (2) increment operation
{ { inc; },{ dec; p0; } -> { p0; } };
# the more significant bits remain (inc)
{ { inc; p0; },{ dec; p0; } -> { p0; } };
{ { inc; p1; },{ dec; p0; } -> { p1; } };
{ { inc; p2; },{ dec; p0; } -> { p2; } };
{ { inc; p3; },{ dec; p0; } -> { p3; } };
{ { inc; p4; },{ dec; p0; } -> { p4; } };
{ { inc; p5; },{ dec; p0; } -> { p5; } };
{ { inc; p6; },{ dec; p0; } -> { p6; } };
{ { inc; p7; },{ dec; p0; } -> { p7; } };
{ { inc; p8; },{ dec; p0; } -> { p8; } };
{ { inc; p9; },{ dec; p0; } -> { p9; } };
{ { inc; p10; },{ dec; p0; } -> { p10; } };
{ { inc; p11; },{ dec; p0; } -> { p11; } };
{ { inc; p12; },{ dec; p0; } -> { p12; } };
{ { inc; p13; },{ dec; p0; } -> { p13; } };
{ { inc; p14; },{ dec; p0; } -> { p14; } };
{ { inc; p15; },{ dec; p0; } -> { p15; } };
{ { inc; p16; },{ dec; p0; } -> { p16; } };
{ { inc; p17; },{ dec; p0; } -> { p17; } };
{ { inc; p18; },{ dec; p0; } -> { p18; } };
{ { inc; p19; },{ dec; p0; } -> { p19; } };
{ { inc; p20; },{ dec; p0; } -> { p20; } };
{ { inc; p21; },{ dec; p0; } -> { p21; } };
{ { inc; p22; },{ dec; p0; } -> { p22; } };
{ { inc; p23; },{ dec; p0; } -> { p23; } };
{ { inc; p24; },{ dec; p0; } -> { p24; } };
{ { inc; p25; },{ dec; p0; } -> { p25; } };
{ { inc; p26; },{ dec; p0; } -> { p26; } };
{ { inc; p27; },{ dec; p0; } -> { p27; } };
{ { inc; p28; },{ dec; p0; } -> { p28; } };
{ { inc; p29; },{ dec; p0; } -> { p29; } };
{ { inc; p30; },{ dec; p0; } -> { p30; } };
{ { inc; p31; },{ dec; p0; } -> { p31; } };
{ { inc; p0; },{ dec; p1; } -> { p1; } };
# the more significant bits remain (inc)
{ { inc; p1; },{ dec; p1; } -> { p1; } };
{ { inc; p2; },{ dec; p1; } -> { p2; } };
{ { inc; p3; },{ dec; p1; } -> { p3; } };
{ { inc; p4; },{ dec; p1; } -> { p4; } };
{ { inc; p5; },{ dec; p1; } -> { p5; } };
{ { inc; p6; },{ dec; p1; } -> { p6; } };
{ { inc; p7; },{ dec; p1; } -> { p7; } };
{ { inc; p8; },{ dec; p1; } -> { p8; } };
{ { inc; p9; },{ dec; p1; } -> { p9; } };
{ { inc; p10; },{ dec; p1; } -> { p10; } };
{ { inc; p11; },{ dec; p1; } -> { p11; } };
{ { inc; p12; },{ dec; p1; } -> { p12; } };
{ { inc; p13; },{ dec; p1; } -> { p13; } };
{ { inc; p14; },{ dec; p1; } -> { p14; } };
{ { inc; p15; },{ dec; p1; } -> { p15; } };
{ { inc; p16; },{ dec; p1; } -> { p16; } };
{ { inc; p17; },{ dec; p1; } -> { p17; } };
{ { inc; p18; },{ dec; p1; } -> { p18; } };
{ { inc; p19; },{ dec; p1; } -> { p19; } };
{ { inc; p20; },{ dec; p1; } -> { p20; } };
{ { inc; p21; },{ dec; p1; } -> { p21; } };
{ { inc; p22; },{ dec; p1; } -> { p22; } };
{ { inc; p23; },{ dec; p1; } -> { p23; } };
{ { inc; p24; },{ dec; p1; } -> { p24; } };
{ { inc; p25; },{ dec; p1; } -> { p25; } };
{ { inc; p26; },{ dec; p1; } -> { p26; } };
{ { inc; p27; },{ dec; p1; } -> { p27; } };
{ { inc; p28; },{ dec; p1; } -> { p28; } };
{ { inc; p29; },{ dec; p1; } -> { p29; } };
{ { inc; p30; },{ dec; p1; } -> { p30; } };
{ { inc; p31; },{ dec; p1; } -> { p31; } };
{ { inc; p0; p1; },{ dec; p2; } -> { p2; } };
# the more significant bits remain (inc)
{ { inc; p2; },{ dec; p2; } -> { p2; } };
{ { inc; p3; },{ dec; p2; } -> { p3; } };
{ { inc; p4; },{ dec; p2; } -> { p4; } };
{ { inc; p5; },{ dec; p2; } -> { p5; } };
{ { inc; p6; },{ dec; p2; } -> { p6; } };
{ { inc; p7; },{ dec; p2; } -> { p7; } };
{ { inc; p8; },{ dec; p2; } -> { p8; } };
{ { inc; p9; },{ dec; p2; } -> { p9; } };
{ { inc; p10; },{ dec; p2; } -> { p10; } };
{ { inc; p11; },{ dec; p2; } -> { p11; } };
{ { inc; p12; },{ dec; p2; } -> { p12; } };
{ { inc; p13; },{ dec; p2; } -> { p13; } };
{ { inc; p14; },{ dec; p2; } -> { p14; } };
{ { inc; p15; },{ dec; p2; } -> { p15; } };
{ { inc; p16; },{ dec; p2; } -> { p16; } };
{ { inc; p17; },{ dec; p2; } -> { p17; } };
{ { inc; p18; },{ dec; p2; } -> { p18; } };
{ { inc; p19; },{ dec; p2; } -> { p19; } };
{ { inc; p20; },{ dec; p2; } -> { p20; } };
{ { inc; p21; },{ dec; p2; } -> { p21; } };
{ { inc; p22; },{ dec; p2; } -> { p22; } };
{ { inc; p23; },{ dec; p2; } -> { p23; } };
{ { inc; p24; },{ dec; p2; } -> { p24; } };
{ { inc; p25; },{ dec; p2; } -> { p25; } };
{ { inc; p26; },{ dec; p2; } -> { p26; } };
{ { inc; p27; },{ dec; p2; } -> { p27; } };
{ { inc; p28; },{ dec; p2; } -> { p28; } };
{ { inc; p29; },{ dec; p2; } -> { p29; } };
{ { inc; p30; },{ dec; p2; } -> { p30; } };
{ { inc; p31; },{ dec; p2; } -> { p31; } };
{ { inc; p0; p1; p2; },{ dec; p3; } -> { p3; } };
# the more significant bits remain (inc)
{ { inc; p3; },{ dec; p3; } -> { p3; } };
{ { inc; p4; },{ dec; p3; } -> { p4; } };
{ { inc; p5; },{ dec; p3; } -> { p5; } };
{ { inc; p6; },{ dec; p3; } -> { p6; } };
{ { inc; p7; },{ dec; p3; } -> { p7; } };
{ { inc; p8; },{ dec; p3; } -> { p8; } };
{ { inc; p9; },{ dec; p3; } -> { p9; } };
{ { inc; p10; },{ dec; p3; } -> { p10; } };
{ { inc; p11; },{ dec; p3; } -> { p11; } };
{ { inc; p12; },{ dec; p3; } -> { p12; } };
{ { inc; p13; },{ dec; p3; } -> { p13; } };
{ { inc; p14; },{ dec; p3; } -> { p14; } };
{ { inc; p15; },{ dec; p3; } -> { p15; } };
{ { inc; p16; },{ dec; p3; } -> { p16; } };
{ { inc; p17; },{ dec; p3; } -> { p17; } };
{ { inc; p18; },{ dec; p3; } -> { p18; } };
{ { inc; p19; },{ dec; p3; } -> { p19; } };
{ { inc; p20; },{ dec; p3; } -> { p20; } };
{ { inc; p21; },{ dec; p3; } -> { p21; } };
{ { inc; p22; },{ dec; p3; } -> { p22; } };
{ { inc; p23; },{ dec; p3; } -> { p23; } };
{ { inc; p24; },{ dec; p3; } -> { p24; } };
{ { inc; p25; },{ dec; p3; } -> { p25; } };
{ { inc; p26; },{ dec; p3; } -> { p26; } };
{ { inc; p27; },{ dec; p3; } -> { p27; } };
{ { inc; p28; },{ dec; p3; } -> { p28; } };
{ { inc; p29; },{ dec; p3; } -> { p29; } };
{ { inc; p30; },{ dec; p3; } -> { p30; } };
{ { inc; p31; },{ dec; p3; } -> { p31; } };
{ { inc; p0; p1; p2; p3; },{ dec; p4; } -> { p4; } };
# the more significant bits remain (inc)
{ { inc; p4; },{ dec; p4; } -> { p4; } };
{ { inc; p5; },{ dec; p4; } -> { p5; } };
{ { inc; p6; },{ dec; p4; } -> { p6; } };
{ { inc; p7; },{ dec; p4; } -> { p7; } };
{ { inc; p8; },{ dec; p4; } -> { p8; } };
{ { inc; p9; },{ dec; p4; } -> { p9; } };
{ { inc; p10; },{ dec; p4; } -> { p10; } };
{ { inc; p11; },{ dec; p4; } -> { p11; } };
{ { inc; p12; },{ dec; p4; } -> { p12; } };
{ { inc; p13; },{ dec; p4; } -> { p13; } };
{ { inc; p14; },{ dec; p4; } -> { p14; } };
{ { inc; p15; },{ dec; p4; } -> { p15; } };
{ { inc; p16; },{ dec; p4; } -> { p16; } };
{ { inc; p17; },{ dec; p4; } -> { p17; } };
{ { inc; p18; },{ dec; p4; } -> { p18; } };
{ { inc; p19; },{ dec; p4; } -> { p19; } };
{ { inc; p20; },{ dec; p4; } -> { p20; } };
{ { inc; p21; },{ dec; p4; } -> { p21; } };
{ { inc; p22; },{ dec; p4; } -> { p22; } };
{ { inc; p23; },{ dec; p4; } -> { p23; } };
{ { inc; p24; },{ dec; p4; } -> { p24; } };
{ { inc; p25; },{ dec; p4; } -> { p25; } };
{ { inc; p26; },{ dec; p4; } -> { p26; } };
{ { inc; p27; },{ dec; p4; } -> { p27; } };
{ { inc; p28; },{ dec; p4; } -> { p28; } };
{ { inc; p29; },{ dec; p4; } -> { p29; } };
{ { inc; p30; },{ dec; p4; } -> { p30; } };
{ { inc; p31; },{ dec; p4; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; },{ dec; p5; } -> { p5; } };
# the more significant bits remain (inc)
{ { inc; p5; },{ dec; p5; } -> { p5; } };
{ { inc; p6; },{ dec; p5; } -> { p6; } };
{ { inc; p7; },{ dec; p5; } -> { p7; } };
{ { inc; p8; },{ dec; p5; } -> { p8; } };
{ { inc; p9; },{ dec; p5; } -> { p9; } };
{ { inc; p10; },{ dec; p5; } -> { p10; } };
{ { inc; p11; },{ dec; p5; } -> { p11; } };
{ { inc; p12; },{ dec; p5; } -> { p12; } };
{ { inc; p13; },{ dec; p5; } -> { p13; } };
{ { inc; p14; },{ dec; p5; } -> { p14; } };
{ { inc; p15; },{ dec; p5; } -> { p15; } };
{ { inc; p16; },{ dec; p5; } -> { p16; } };
{ { inc; p17; },{ dec; p5; } -> { p17; } };
{ { inc; p18; },{ dec; p5; } -> { p18; } };
{ { inc; p19; },{ dec; p5; } -> { p19; } };
{ { inc; p20; },{ dec; p5; } -> { p20; } };
{ { inc; p21; },{ dec; p5; } -> { p21; } };
{ { inc; p22; },{ dec; p5; } -> { p22; } };
{ { inc; p23; },{ dec; p5; } -> { p23; } };
{ { inc; p24; },{ dec; p5; } -> { p24; } };
{ { inc; p25; },{ dec; p5; } -> { p25; } };
{ { inc; p26; },{ dec; p5; } -> { p26; } };
{ { inc; p27; },{ dec; p5; } -> { p27; } };
{ { inc; p28; },{ dec; p5; } -> { p28; } };
{ { inc; p29; },{ dec; p5; } -> { p29; } };
{ { inc; p30; },{ dec; p5; } -> { p30; } };
{ { inc; p31; },{ dec; p5; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; },{ dec; p6; } -> { p6; } };
# the more significant bits remain (inc)
{ { inc; p6; },{ dec; p6; } -> { p6; } };
{ { inc; p7; },{ dec; p6; } -> { p7; } };
{ { inc; p8; },{ dec; p6; } -> { p8; } };
{ { inc; p9; },{ dec; p6; } -> { p9; } };
{ { inc; p10; },{ dec; p6; } -> { p10; } };
{ { inc; p11; },{ dec; p6; } -> { p11; } };
{ { inc; p12; },{ dec; p6; } -> { p12; } };
{ { inc; p13; },{ dec; p6; } -> { p13; } };
{ { inc; p14; },{ dec; p6; } -> { p14; } };
{ { inc; p15; },{ dec; p6; } -> { p15; } };
{ { inc; p16; },{ dec; p6; } -> { p16; } };
{ { inc; p17; },{ dec; p6; } -> { p17; } };
{ { inc; p18; },{ dec; p6; } -> { p18; } };
{ { inc; p19; },{ dec; p6; } -> { p19; } };
{ { inc; p20; },{ dec; p6; } -> { p20; } };
{ { inc; p21; },{ dec; p6; } -> { p21; } };
{ { inc; p22; },{ dec; p6; } -> { p22; } };
{ { inc; p23; },{ dec; p6; } -> { p23; } };
{ { inc; p24; },{ dec; p6; } -> { p24; } };
{ { inc; p25; },{ dec; p6; } -> { p25; } };
{ { inc; p26; },{ dec; p6; } -> { p26; } };
{ { inc; p27; },{ dec; p6; } -> { p27; } };
{ { inc; p28; },{ dec; p6; } -> { p28; } };
{ { inc; p29; },{ dec; p6; } -> { p29; } };
{ { inc; p30; },{ dec; p6; } -> { p30; } };
{ { inc; p31; },{ dec; p6; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; },{ dec; p7; } -> { p7; } };
# the more significant bits remain (inc)
{ { inc; p7; },{ dec; p7; } -> { p7; } };
{ { inc; p8; },{ dec; p7; } -> { p8; } };
{ { inc; p9; },{ dec; p7; } -> { p9; } };
{ { inc; p10; },{ dec; p7; } -> { p10; } };
{ { inc; p11; },{ dec; p7; } -> { p11; } };
{ { inc; p12; },{ dec; p7; } -> { p12; } };
{ { inc; p13; },{ dec; p7; } -> { p13; } };
{ { inc; p14; },{ dec; p7; } -> { p14; } };
{ { inc; p15; },{ dec; p7; } -> { p15; } };
{ { inc; p16; },{ dec; p7; } -> { p16; } };
{ { inc; p17; },{ dec; p7; } -> { p17; } };
{ { inc; p18; },{ dec; p7; } -> { p18; } };
{ { inc; p19; },{ dec; p7; } -> { p19; } };
{ { inc; p20; },{ dec; p7; } -> { p20; } };
{ { inc; p21; },{ dec; p7; } -> { p21; } };
{ { inc; p22; },{ dec; p7; } -> { p22; } };
{ { inc; p23; },{ dec; p7; } -> { p23; } };
{ { inc; p24; },{ dec; p7; } -> { p24; } };
{ { inc; p25; },{ dec; p7; } -> { p25; } };
{ { inc; p26; },{ dec; p7; } -> { p26; } };
{ { inc; p27; },{ dec; p7; } -> { p27; } };
{ { inc; p28; },{ dec; p7; } -> { p28; } };
{ { inc; p29; },{ dec; p7; } -> { p29; } };
{ { inc; p30; },{ dec; p7; } -> { p30; } };
{ { inc; p31; },{ dec; p7; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; },{ dec; p8; } -> { p8; } };
# the more significant bits remain (inc)
{ { inc; p8; },{ dec; p8; } -> { p8; } };
{ { inc; p9; },{ dec; p8; } -> { p9; } };
{ { inc; p10; },{ dec; p8; } -> { p10; } };
{ { inc; p11; },{ dec; p8; } -> { p11; } };
{ { inc; p12; },{ dec; p8; } -> { p12; } };
{ { inc; p13; },{ dec; p8; } -> { p13; } };
{ { inc; p14; },{ dec; p8; } -> { p14; } };
{ { inc; p15; },{ dec; p8; } -> { p15; } };
{ { inc; p16; },{ dec; p8; } -> { p16; } };
{ { inc; p17; },{ dec; p8; } -> { p17; } };
{ { inc; p18; },{ dec; p8; } -> { p18; } };
{ { inc; p19; },{ dec; p8; } -> { p19; } };
{ { inc; p20; },{ dec; p8; } -> { p20; } };
{ { inc; p21; },{ dec; p8; } -> { p21; } };
{ { inc; p22; },{ dec; p8; } -> { p22; } };
{ { inc; p23; },{ dec; p8; } -> { p23; } };
{ { inc; p24; },{ dec; p8; } -> { p24; } };
{ { inc; p25; },{ dec; p8; } -> { p25; } };
{ { inc; p26; },{ dec; p8; } -> { p26; } };
{ { inc; p27; },{ dec; p8; } -> { p27; } };
{ { inc; p28; },{ dec; p8; } -> { p28; } };
{ { inc; p29; },{ dec; p8; } -> { p29; } };
{ { inc; p30; },{ dec; p8; } -> { p30; } };
{ { inc; p31; },{ dec; p8; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; },{ dec; p9; } -> { p9; } };
# the more significant bits remain (inc)
{ { inc; p9; },{ dec; p9; } -> { p9; } };
{ { inc; p10; },{ dec; p9; } -> { p10; } };
{ { inc; p11; },{ dec; p9; } -> { p11; } };
{ { inc; p12; },{ dec; p9; } -> { p12; } };
{ { inc; p13; },{ dec; p9; } -> { p13; } };
{ { inc; p14; },{ dec; p9; } -> { p14; } };
{ { inc; p15; },{ dec; p9; } -> { p15; } };
{ { inc; p16; },{ dec; p9; } -> { p16; } };
{ { inc; p17; },{ dec; p9; } -> { p17; } };
{ { inc; p18; },{ dec; p9; } -> { p18; } };
{ { inc; p19; },{ dec; p9; } -> { p19; } };
{ { inc; p20; },{ dec; p9; } -> { p20; } };
{ { inc; p21; },{ dec; p9; } -> { p21; } };
{ { inc; p22; },{ dec; p9; } -> { p22; } };
{ { inc; p23; },{ dec; p9; } -> { p23; } };
{ { inc; p24; },{ dec; p9; } -> { p24; } };
{ { inc; p25; },{ dec; p9; } -> { p25; } };
{ { inc; p26; },{ dec; p9; } -> { p26; } };
{ { inc; p27; },{ dec; p9; } -> { p27; } };
{ { inc; p28; },{ dec; p9; } -> { p28; } };
{ { inc; p29; },{ dec; p9; } -> { p29; } };
{ { inc; p30; },{ dec; p9; } -> { p30; } };
{ { inc; p31; },{ dec; p9; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; },{ dec; p10; } -> { p10; } };
# the more significant bits remain (inc)
{ { inc; p10; },{ dec; p10; } -> { p10; } };
{ { inc; p11; },{ dec; p10; } -> { p11; } };
{ { inc; p12; },{ dec; p10; } -> { p12; } };
{ { inc; p13; },{ dec; p10; } -> { p13; } };
{ { inc; p14; },{ dec; p10; } -> { p14; } };
{ { inc; p15; },{ dec; p10; } -> { p15; } };
{ { inc; p16; },{ dec; p10; } -> { p16; } };
{ { inc; p17; },{ dec; p10; } -> { p17; } };
{ { inc; p18; },{ dec; p10; } -> { p18; } };
{ { inc; p19; },{ dec; p10; } -> { p19; } };
{ { inc; p20; },{ dec; p10; } -> { p20; } };
{ { inc; p21; },{ dec; p10; } -> { p21; } };
{ { inc; p22; },{ dec; p10; } -> { p22; } };
{ { inc; p23; },{ dec; p10; } -> { p23; } };
{ { inc; p24; },{ dec; p10; } -> { p24; } };
{ { inc; p25; },{ dec; p10; } -> { p25; } };
{ { inc; p26; },{ dec; p10; } -> { p26; } };
{ { inc; p27; },{ dec; p10; } -> { p27; } };
{ { inc; p28; },{ dec; p10; } -> { p28; } };
{ { inc; p29; },{ dec; p10; } -> { p29; } };
{ { inc; p30; },{ dec; p10; } -> { p30; } };
{ { inc; p31; },{ dec; p10; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; },{ dec; p11; } -> { p11; } };
# the more significant bits remain (inc)
{ { inc; p11; },{ dec; p11; } -> { p11; } };
{ { inc; p12; },{ dec; p11; } -> { p12; } };
{ { inc; p13; },{ dec; p11; } -> { p13; } };
{ { inc; p14; },{ dec; p11; } -> { p14; } };
{ { inc; p15; },{ dec; p11; } -> { p15; } };
{ { inc; p16; },{ dec; p11; } -> { p16; } };
{ { inc; p17; },{ dec; p11; } -> { p17; } };
{ { inc; p18; },{ dec; p11; } -> { p18; } };
{ { inc; p19; },{ dec; p11; } -> { p19; } };
{ { inc; p20; },{ dec; p11; } -> { p20; } };
{ { inc; p21; },{ dec; p11; } -> { p21; } };
{ { inc; p22; },{ dec; p11; } -> { p22; } };
{ { inc; p23; },{ dec; p11; } -> { p23; } };
{ { inc; p24; },{ dec; p11; } -> { p24; } };
{ { inc; p25; },{ dec; p11; } -> { p25; } };
{ { inc; p26; },{ dec; p11; } -> { p26; } };
{ { inc; p27; },{ dec; p11; } -> { p27; } };
{ { inc; p28; },{ dec; p11; } -> { p28; } };
{ { inc; p29; },{ dec; p11; } -> { p29; } };
{ { inc; p30; },{ dec; p11; } -> { p30; } };
{ { inc; p31; },{ dec; p11; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; },{ dec; p12; } -> { p12; } };
# the more significant bits remain (inc)
{ { inc; p12; },{ dec; p12; } -> { p12; } };
{ { inc; p13; },{ dec; p12; } -> { p13; } };
{ { inc; p14; },{ dec; p12; } -> { p14; } };
{ { inc; p15; },{ dec; p12; } -> { p15; } };
{ { inc; p16; },{ dec; p12; } -> { p16; } };
{ { inc; p17; },{ dec; p12; } -> { p17; } };
{ { inc; p18; },{ dec; p12; } -> { p18; } };
{ { inc; p19; },{ dec; p12; } -> { p19; } };
{ { inc; p20; },{ dec; p12; } -> { p20; } };
{ { inc; p21; },{ dec; p12; } -> { p21; } };
{ { inc; p22; },{ dec; p12; } -> { p22; } };
{ { inc; p23; },{ dec; p12; } -> { p23; } };
{ { inc; p24; },{ dec; p12; } -> { p24; } };
{ { inc; p25; },{ dec; p12; } -> { p25; } };
{ { inc; p26; },{ dec; p12; } -> { p26; } };
{ { inc; p27; },{ dec; p12; } -> { p27; } };
{ { inc; p28; },{ dec; p12; } -> { p28; } };
{ { inc; p29; },{ dec; p12; } -> { p29; } };
{ { inc; p30; },{ dec; p12; } -> { p30; } };
{ { inc; p31; },{ dec; p12; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; },{ dec; p13; } -> { p13; } };
# the more significant bits remain (inc)
{ { inc; p13; },{ dec; p13; } -> { p13; } };
{ { inc; p14; },{ dec; p13; } -> { p14; } };
{ { inc; p15; },{ dec; p13; } -> { p15; } };
{ { inc; p16; },{ dec; p13; } -> { p16; } };
{ { inc; p17; },{ dec; p13; } -> { p17; } };
{ { inc; p18; },{ dec; p13; } -> { p18; } };
{ { inc; p19; },{ dec; p13; } -> { p19; } };
{ { inc; p20; },{ dec; p13; } -> { p20; } };
{ { inc; p21; },{ dec; p13; } -> { p21; } };
{ { inc; p22; },{ dec; p13; } -> { p22; } };
{ { inc; p23; },{ dec; p13; } -> { p23; } };
{ { inc; p24; },{ dec; p13; } -> { p24; } };
{ { inc; p25; },{ dec; p13; } -> { p25; } };
{ { inc; p26; },{ dec; p13; } -> { p26; } };
{ { inc; p27; },{ dec; p13; } -> { p27; } };
{ { inc; p28; },{ dec; p13; } -> { p28; } };
{ { inc; p29; },{ dec; p13; } -> { p29; } };
{ { inc; p30; },{ dec; p13; } -> { p30; } };
{ { inc; p31; },{ dec; p13; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; },{ dec; p14; } -> { p14; } };
# the more significant bits remain (inc)
{ { inc; p14; },{ dec; p14; } -> { p14; } };
{ { inc; p15; },{ dec; p14; } -> { p15; } };
{ { inc; p16; },{ dec; p14; } -> { p16; } };
{ { inc; p17; },{ dec; p14; } -> { p17; } };
{ { inc; p18; },{ dec; p14; } -> { p18; } };
{ { inc; p19; },{ dec; p14; } -> { p19; } };
{ { inc; p20; },{ dec; p14; } -> { p20; } };
{ { inc; p21; },{ dec; p14; } -> { p21; } };
{ { inc; p22; },{ dec; p14; } -> { p22; } };
{ { inc; p23; },{ dec; p14; } -> { p23; } };
{ { inc; p24; },{ dec; p14; } -> { p24; } };
{ { inc; p25; },{ dec; p14; } -> { p25; } };
{ { inc; p26; },{ dec; p14; } -> { p26; } };
{ { inc; p27; },{ dec; p14; } -> { p27; } };
{ { inc; p28; },{ dec; p14; } -> { p28; } };
{ { inc; p29; },{ dec; p14; } -> { p29; } };
{ { inc; p30; },{ dec; p14; } -> { p30; } };
{ { inc; p31; },{ dec; p14; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; },{ dec; p15; } -> { p15; } };
# the more significant bits remain (inc)
{ { inc; p15; },{ dec; p15; } -> { p15; } };
{ { inc; p16; },{ dec; p15; } -> { p16; } };
{ { inc; p17; },{ dec; p15; } -> { p17; } };
{ { inc; p18; },{ dec; p15; } -> { p18; } };
{ { inc; p19; },{ dec; p15; } -> { p19; } };
{ { inc; p20; },{ dec; p15; } -> { p20; } };
{ { inc; p21; },{ dec; p15; } -> { p21; } };
{ { inc; p22; },{ dec; p15; } -> { p22; } };
{ { inc; p23; },{ dec; p15; } -> { p23; } };
{ { inc; p24; },{ dec; p15; } -> { p24; } };
{ { inc; p25; },{ dec; p15; } -> { p25; } };
{ { inc; p26; },{ dec; p15; } -> { p26; } };
{ { inc; p27; },{ dec; p15; } -> { p27; } };
{ { inc; p28; },{ dec; p15; } -> { p28; } };
{ { inc; p29; },{ dec; p15; } -> { p29; } };
{ { inc; p30; },{ dec; p15; } -> { p30; } };
{ { inc; p31; },{ dec; p15; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; },{ dec; p16; } -> { p16; } };
# the more significant bits remain (inc)
{ { inc; p16; },{ dec; p16; } -> { p16; } };
{ { inc; p17; },{ dec; p16; } -> { p17; } };
{ { inc; p18; },{ dec; p16; } -> { p18; } };
{ { inc; p19; },{ dec; p16; } -> { p19; } };
{ { inc; p20; },{ dec; p16; } -> { p20; } };
{ { inc; p21; },{ dec; p16; } -> { p21; } };
{ { inc; p22; },{ dec; p16; } -> { p22; } };
{ { inc; p23; },{ dec; p16; } -> { p23; } };
{ { inc; p24; },{ dec; p16; } -> { p24; } };
{ { inc; p25; },{ dec; p16; } -> { p25; } };
{ { inc; p26; },{ dec; p16; } -> { p26; } };
{ { inc; p27; },{ dec; p16; } -> { p27; } };
{ { inc; p28; },{ dec; p16; } -> { p28; } };
{ { inc; p29; },{ dec; p16; } -> { p29; } };
{ { inc; p30; },{ dec; p16; } -> { p30; } };
{ { inc; p31; },{ dec; p16; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; },{ dec; p17; } -> { p17; } };
# the more significant bits remain (inc)
{ { inc; p17; },{ dec; p17; } -> { p17; } };
{ { inc; p18; },{ dec; p17; } -> { p18; } };
{ { inc; p19; },{ dec; p17; } -> { p19; } };
{ { inc; p20; },{ dec; p17; } -> { p20; } };
{ { inc; p21; },{ dec; p17; } -> { p21; } };
{ { inc; p22; },{ dec; p17; } -> { p22; } };
{ { inc; p23; },{ dec; p17; } -> { p23; } };
{ { inc; p24; },{ dec; p17; } -> { p24; } };
{ { inc; p25; },{ dec; p17; } -> { p25; } };
{ { inc; p26; },{ dec; p17; } -> { p26; } };
{ { inc; p27; },{ dec; p17; } -> { p27; } };
{ { inc; p28; },{ dec; p17; } -> { p28; } };
{ { inc; p29; },{ dec; p17; } -> { p29; } };
{ { inc; p30; },{ dec; p17; } -> { p30; } };
{ { inc; p31; },{ dec; p17; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; },{ dec; p18; } -> { p18; } };
# the more significant bits remain (inc)
{ { inc; p18; },{ dec; p18; } -> { p18; } };
{ { inc; p19; },{ dec; p18; } -> { p19; } };
{ { inc; p20; },{ dec; p18; } -> { p20; } };
{ { inc; p21; },{ dec; p18; } -> { p21; } };
{ { inc; p22; },{ dec; p18; } -> { p22; } };
{ { inc; p23; },{ dec; p18; } -> { p23; } };
{ { inc; p24; },{ dec; p18; } -> { p24; } };
{ { inc; p25; },{ dec; p18; } -> { p25; } };
{ { inc; p26; },{ dec; p18; } -> { p26; } };
{ { inc; p27; },{ dec; p18; } -> { p27; } };
{ { inc; p28; },{ dec; p18; } -> { p28; } };
{ { inc; p29; },{ dec; p18; } -> { p29; } };
{ { inc; p30; },{ dec; p18; } -> { p30; } };
{ { inc; p31; },{ dec; p18; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; },{ dec; p19; } -> { p19; } };
# the more significant bits remain (inc)
{ { inc; p19; },{ dec; p19; } -> { p19; } };
{ { inc; p20; },{ dec; p19; } -> { p20; } };
{ { inc; p21; },{ dec; p19; } -> { p21; } };
{ { inc; p22; },{ dec; p19; } -> { p22; } };
{ { inc; p23; },{ dec; p19; } -> { p23; } };
{ { inc; p24; },{ dec; p19; } -> { p24; } };
{ { inc; p25; },{ dec; p19; } -> { p25; } };
{ { inc; p26; },{ dec; p19; } -> { p26; } };
{ { inc; p27; },{ dec; p19; } -> { p27; } };
{ { inc; p28; },{ dec; p19; } -> { p28; } };
{ { inc; p29; },{ dec; p19; } -> { p29; } };
{ { inc; p30; },{ dec; p19; } -> { p30; } };
{ { inc; p31; },{ dec; p19; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; },{ dec; p20; } -> { p20; } };
# the more significant bits remain (inc)
{ { inc; p20; },{ dec; p20; } -> { p20; } };
{ { inc; p21; },{ dec; p20; } -> { p21; } };
{ { inc; p22; },{ dec; p20; } -> { p22; } };
{ { inc; p23; },{ dec; p20; } -> { p23; } };
{ { inc; p24; },{ dec; p20; } -> { p24; } };
{ { inc; p25; },{ dec; p20; } -> { p25; } };
{ { inc; p26; },{ dec; p20; } -> { p26; } };
{ { inc; p27; },{ dec; p20; } -> { p27; } };
{ { inc; p28; },{ dec; p20; } -> { p28; } };
{ { inc; p29; },{ dec; p20; } -> { p29; } };
{ { inc; p30; },{ dec; p20; } -> { p30; } };
{ { inc; p31; },{ dec; p20; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; },{ dec; p21; } -> { p21; } };
# the more significant bits remain (inc)
{ { inc; p21; },{ dec; p21; } -> { p21; } };
{ { inc; p22; },{ dec; p21; } -> { p22; } };
{ { inc; p23; },{ dec; p21; } -> { p23; } };
{ { inc; p24; },{ dec; p21; } -> { p24; } };
{ { inc; p25; },{ dec; p21; } -> { p25; } };
{ { inc; p26; },{ dec; p21; } -> { p26; } };
{ { inc; p27; },{ dec; p21; } -> { p27; } };
{ { inc; p28; },{ dec; p21; } -> { p28; } };
{ { inc; p29; },{ dec; p21; } -> { p29; } };
{ { inc; p30; },{ dec; p21; } -> { p30; } };
{ { inc; p31; },{ dec; p21; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; },{ dec; p22; } -> { p22; } };
# the more significant bits remain (inc)
{ { inc; p22; },{ dec; p22; } -> { p22; } };
{ { inc; p23; },{ dec; p22; } -> { p23; } };
{ { inc; p24; },{ dec; p22; } -> { p24; } };
{ { inc; p25; },{ dec; p22; } -> { p25; } };
{ { inc; p26; },{ dec; p22; } -> { p26; } };
{ { inc; p27; },{ dec; p22; } -> { p27; } };
{ { inc; p28; },{ dec; p22; } -> { p28; } };
{ { inc; p29; },{ dec; p22; } -> { p29; } };
{ { inc; p30; },{ dec; p22; } -> { p30; } };
{ { inc; p31; },{ dec; p22; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; },{ dec; p23; } -> { p23; } };
# the more significant bits remain (inc)
{ { inc; p23; },{ dec; p23; } -> { p23; } };
{ { inc; p24; },{ dec; p23; } -> { p24; } };
{ { inc; p25; },{ dec; p23; } -> { p25; } };
{ { inc; p26; },{ dec; p23; } -> { p26; } };
{ { inc; p27; },{ dec; p23; } -> { p27; } };
{ { inc; p28; },{ dec; p23; } -> { p28; } };
{ { inc; p29; },{ dec; p23; } -> { p29; } };
{ { inc; p30; },{ dec; p23; } -> { p30; } };
{ { inc; p31; },{ dec; p23; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; },{ dec; p24; } -> { p24; } };
# the more significant bits remain (inc)
{ { inc; p24; },{ dec; p24; } -> { p24; } };
{ { inc; p25; },{ dec; p24; } -> { p25; } };
{ { inc; p26; },{ dec; p24; } -> { p26; } };
{ { inc; p27; },{ dec; p24; } -> { p27; } };
{ { inc; p28; },{ dec; p24; } -> { p28; } };
{ { inc; p29; },{ dec; p24; } -> { p29; } };
{ { inc; p30; },{ dec; p24; } -> { p30; } };
{ { inc; p31; },{ dec; p24; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; },{ dec; p25; } -> { p25; } };
# the more significant bits remain (inc)
{ { inc; p25; },{ dec; p25; } -> { p25; } };
{ { inc; p26; },{ dec; p25; } -> { p26; } };
{ { inc; p27; },{ dec; p25; } -> { p27; } };
{ { inc; p28; },{ dec; p25; } -> { p28; } };
{ { inc; p29; },{ dec; p25; } -> { p29; } };
{ { inc; p30; },{ dec; p25; } -> { p30; } };
{ { inc; p31; },{ dec; p25; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; },{ dec; p26; } -> { p26; } };
# the more significant bits remain (inc)
{ { inc; p26; },{ dec; p26; } -> { p26; } };
{ { inc; p27; },{ dec; p26; } -> { p27; } };
{ { inc; p28; },{ dec; p26; } -> { p28; } };
{ { inc; p29; },{ dec; p26; } -> { p29; } };
{ { inc; p30; },{ dec; p26; } -> { p30; } };
{ { inc; p31; },{ dec; p26; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; },{ dec; p27; } -> { p27; } };
# the more significant bits remain (inc)
{ { inc; p27; },{ dec; p27; } -> { p27; } };
{ { inc; p28; },{ dec; p27; } -> { p28; } };
{ { inc; p29; },{ dec; p27; } -> { p29; } };
{ { inc; p30; },{ dec; p27; } -> { p30; } };
{ { inc; p31; },{ dec; p27; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; },{ dec; p28; } -> { p28; } };
# the more significant bits remain (inc)
{ { inc; p28; },{ dec; p28; } -> { p28; } };
{ { inc; p29; },{ dec; p28; } -> { p29; } };
{ { inc; p30; },{ dec; p28; } -> { p30; } };
{ { inc; p31; },{ dec; p28; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; },{ dec; p29; } -> { p29; } };
# the more significant bits remain (inc)
{ { inc; p29; },{ dec; p29; } -> { p29; } };
{ { inc; p30; },{ dec; p29; } -> { p30; } };
{ { inc; p31; },{ dec; p29; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; p29; },{ dec; p30; } -> { p30; } };
# the more significant bits remain (inc)
{ { inc; p30; },{ dec; p30; } -> { p30; } };
{ { inc; p31; },{ dec; p30; } -> { p31; } };
{ { inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; p29; p30; },{ dec; p31; } -> { p31; } };
# the more significant bits remain (inc)
{ { inc; p31; },{ dec; p31; } -> { p31; } };
# (3) decrement operation
{ { dec; },{ inc; } -> { p0; } };
# the more significant bits remain (dec)
{ { dec; p0; p0; },{ inc; } -> { p0; } };
{ { dec; p0; p1; },{ inc; } -> { p1; } };
{ { dec; p0; p2; },{ inc; } -> { p2; } };
{ { dec; p0; p3; },{ inc; } -> { p3; } };
{ { dec; p0; p4; },{ inc; } -> { p4; } };
{ { dec; p0; p5; },{ inc; } -> { p5; } };
{ { dec; p0; p6; },{ inc; } -> { p6; } };
{ { dec; p0; p7; },{ inc; } -> { p7; } };
{ { dec; p0; p8; },{ inc; } -> { p8; } };
{ { dec; p0; p9; },{ inc; } -> { p9; } };
{ { dec; p0; p10; },{ inc; } -> { p10; } };
{ { dec; p0; p11; },{ inc; } -> { p11; } };
{ { dec; p0; p12; },{ inc; } -> { p12; } };
{ { dec; p0; p13; },{ inc; } -> { p13; } };
{ { dec; p0; p14; },{ inc; } -> { p14; } };
{ { dec; p0; p15; },{ inc; } -> { p15; } };
{ { dec; p0; p16; },{ inc; } -> { p16; } };
{ { dec; p0; p17; },{ inc; } -> { p17; } };
{ { dec; p0; p18; },{ inc; } -> { p18; } };
{ { dec; p0; p19; },{ inc; } -> { p19; } };
{ { dec; p0; p20; },{ inc; } -> { p20; } };
{ { dec; p0; p21; },{ inc; } -> { p21; } };
{ { dec; p0; p22; },{ inc; } -> { p22; } };
{ { dec; p0; p23; },{ inc; } -> { p23; } };
{ { dec; p0; p24; },{ inc; } -> { p24; } };
{ { dec; p0; p25; },{ inc; } -> { p25; } };
{ { dec; p0; p26; },{ inc; } -> { p26; } };
{ { dec; p0; p27; },{ inc; } -> { p27; } };
{ { dec; p0; p28; },{ inc; } -> { p28; } };
{ { dec; p0; p29; },{ inc; } -> { p29; } };
{ { dec; p0; p30; },{ inc; } -> { p30; } };
{ { dec; p0; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; } -> { p1; } };
# the more significant bits remain (dec)
{ { dec; p1; p1; },{ inc; } -> { p1; } };
{ { dec; p1; p2; },{ inc; } -> { p2; } };
{ { dec; p1; p3; },{ inc; } -> { p3; } };
{ { dec; p1; p4; },{ inc; } -> { p4; } };
{ { dec; p1; p5; },{ inc; } -> { p5; } };
{ { dec; p1; p6; },{ inc; } -> { p6; } };
{ { dec; p1; p7; },{ inc; } -> { p7; } };
{ { dec; p1; p8; },{ inc; } -> { p8; } };
{ { dec; p1; p9; },{ inc; } -> { p9; } };
{ { dec; p1; p10; },{ inc; } -> { p10; } };
{ { dec; p1; p11; },{ inc; } -> { p11; } };
{ { dec; p1; p12; },{ inc; } -> { p12; } };
{ { dec; p1; p13; },{ inc; } -> { p13; } };
{ { dec; p1; p14; },{ inc; } -> { p14; } };
{ { dec; p1; p15; },{ inc; } -> { p15; } };
{ { dec; p1; p16; },{ inc; } -> { p16; } };
{ { dec; p1; p17; },{ inc; } -> { p17; } };
{ { dec; p1; p18; },{ inc; } -> { p18; } };
{ { dec; p1; p19; },{ inc; } -> { p19; } };
{ { dec; p1; p20; },{ inc; } -> { p20; } };
{ { dec; p1; p21; },{ inc; } -> { p21; } };
{ { dec; p1; p22; },{ inc; } -> { p22; } };
{ { dec; p1; p23; },{ inc; } -> { p23; } };
{ { dec; p1; p24; },{ inc; } -> { p24; } };
{ { dec; p1; p25; },{ inc; } -> { p25; } };
{ { dec; p1; p26; },{ inc; } -> { p26; } };
{ { dec; p1; p27; },{ inc; } -> { p27; } };
{ { dec; p1; p28; },{ inc; } -> { p28; } };
{ { dec; p1; p29; },{ inc; } -> { p29; } };
{ { dec; p1; p30; },{ inc; } -> { p30; } };
{ { dec; p1; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; } -> { p2; } };
# the more significant bits remain (dec)
{ { dec; p2; p2; },{ inc; } -> { p2; } };
{ { dec; p2; p3; },{ inc; } -> { p3; } };
{ { dec; p2; p4; },{ inc; } -> { p4; } };
{ { dec; p2; p5; },{ inc; } -> { p5; } };
{ { dec; p2; p6; },{ inc; } -> { p6; } };
{ { dec; p2; p7; },{ inc; } -> { p7; } };
{ { dec; p2; p8; },{ inc; } -> { p8; } };
{ { dec; p2; p9; },{ inc; } -> { p9; } };
{ { dec; p2; p10; },{ inc; } -> { p10; } };
{ { dec; p2; p11; },{ inc; } -> { p11; } };
{ { dec; p2; p12; },{ inc; } -> { p12; } };
{ { dec; p2; p13; },{ inc; } -> { p13; } };
{ { dec; p2; p14; },{ inc; } -> { p14; } };
{ { dec; p2; p15; },{ inc; } -> { p15; } };
{ { dec; p2; p16; },{ inc; } -> { p16; } };
{ { dec; p2; p17; },{ inc; } -> { p17; } };
{ { dec; p2; p18; },{ inc; } -> { p18; } };
{ { dec; p2; p19; },{ inc; } -> { p19; } };
{ { dec; p2; p20; },{ inc; } -> { p20; } };
{ { dec; p2; p21; },{ inc; } -> { p21; } };
{ { dec; p2; p22; },{ inc; } -> { p22; } };
{ { dec; p2; p23; },{ inc; } -> { p23; } };
{ { dec; p2; p24; },{ inc; } -> { p24; } };
{ { dec; p2; p25; },{ inc; } -> { p25; } };
{ { dec; p2; p26; },{ inc; } -> { p26; } };
{ { dec; p2; p27; },{ inc; } -> { p27; } };
{ { dec; p2; p28; },{ inc; } -> { p28; } };
{ { dec; p2; p29; },{ inc; } -> { p29; } };
{ { dec; p2; p30; },{ inc; } -> { p30; } };
{ { dec; p2; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; } -> { p3; } };
# the more significant bits remain (dec)
{ { dec; p3; p3; },{ inc; } -> { p3; } };
{ { dec; p3; p4; },{ inc; } -> { p4; } };
{ { dec; p3; p5; },{ inc; } -> { p5; } };
{ { dec; p3; p6; },{ inc; } -> { p6; } };
{ { dec; p3; p7; },{ inc; } -> { p7; } };
{ { dec; p3; p8; },{ inc; } -> { p8; } };
{ { dec; p3; p9; },{ inc; } -> { p9; } };
{ { dec; p3; p10; },{ inc; } -> { p10; } };
{ { dec; p3; p11; },{ inc; } -> { p11; } };
{ { dec; p3; p12; },{ inc; } -> { p12; } };
{ { dec; p3; p13; },{ inc; } -> { p13; } };
{ { dec; p3; p14; },{ inc; } -> { p14; } };
{ { dec; p3; p15; },{ inc; } -> { p15; } };
{ { dec; p3; p16; },{ inc; } -> { p16; } };
{ { dec; p3; p17; },{ inc; } -> { p17; } };
{ { dec; p3; p18; },{ inc; } -> { p18; } };
{ { dec; p3; p19; },{ inc; } -> { p19; } };
{ { dec; p3; p20; },{ inc; } -> { p20; } };
{ { dec; p3; p21; },{ inc; } -> { p21; } };
{ { dec; p3; p22; },{ inc; } -> { p22; } };
{ { dec; p3; p23; },{ inc; } -> { p23; } };
{ { dec; p3; p24; },{ inc; } -> { p24; } };
{ { dec; p3; p25; },{ inc; } -> { p25; } };
{ { dec; p3; p26; },{ inc; } -> { p26; } };
{ { dec; p3; p27; },{ inc; } -> { p27; } };
{ { dec; p3; p28; },{ inc; } -> { p28; } };
{ { dec; p3; p29; },{ inc; } -> { p29; } };
{ { dec; p3; p30; },{ inc; } -> { p30; } };
{ { dec; p3; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; } -> { p4; } };
# the more significant bits remain (dec)
{ { dec; p4; p4; },{ inc; } -> { p4; } };
{ { dec; p4; p5; },{ inc; } -> { p5; } };
{ { dec; p4; p6; },{ inc; } -> { p6; } };
{ { dec; p4; p7; },{ inc; } -> { p7; } };
{ { dec; p4; p8; },{ inc; } -> { p8; } };
{ { dec; p4; p9; },{ inc; } -> { p9; } };
{ { dec; p4; p10; },{ inc; } -> { p10; } };
{ { dec; p4; p11; },{ inc; } -> { p11; } };
{ { dec; p4; p12; },{ inc; } -> { p12; } };
{ { dec; p4; p13; },{ inc; } -> { p13; } };
{ { dec; p4; p14; },{ inc; } -> { p14; } };
{ { dec; p4; p15; },{ inc; } -> { p15; } };
{ { dec; p4; p16; },{ inc; } -> { p16; } };
{ { dec; p4; p17; },{ inc; } -> { p17; } };
{ { dec; p4; p18; },{ inc; } -> { p18; } };
{ { dec; p4; p19; },{ inc; } -> { p19; } };
{ { dec; p4; p20; },{ inc; } -> { p20; } };
{ { dec; p4; p21; },{ inc; } -> { p21; } };
{ { dec; p4; p22; },{ inc; } -> { p22; } };
{ { dec; p4; p23; },{ inc; } -> { p23; } };
{ { dec; p4; p24; },{ inc; } -> { p24; } };
{ { dec; p4; p25; },{ inc; } -> { p25; } };
{ { dec; p4; p26; },{ inc; } -> { p26; } };
{ { dec; p4; p27; },{ inc; } -> { p27; } };
{ { dec; p4; p28; },{ inc; } -> { p28; } };
{ { dec; p4; p29; },{ inc; } -> { p29; } };
{ { dec; p4; p30; },{ inc; } -> { p30; } };
{ { dec; p4; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; } -> { p5; } };
# the more significant bits remain (dec)
{ { dec; p5; p5; },{ inc; } -> { p5; } };
{ { dec; p5; p6; },{ inc; } -> { p6; } };
{ { dec; p5; p7; },{ inc; } -> { p7; } };
{ { dec; p5; p8; },{ inc; } -> { p8; } };
{ { dec; p5; p9; },{ inc; } -> { p9; } };
{ { dec; p5; p10; },{ inc; } -> { p10; } };
{ { dec; p5; p11; },{ inc; } -> { p11; } };
{ { dec; p5; p12; },{ inc; } -> { p12; } };
{ { dec; p5; p13; },{ inc; } -> { p13; } };
{ { dec; p5; p14; },{ inc; } -> { p14; } };
{ { dec; p5; p15; },{ inc; } -> { p15; } };
{ { dec; p5; p16; },{ inc; } -> { p16; } };
{ { dec; p5; p17; },{ inc; } -> { p17; } };
{ { dec; p5; p18; },{ inc; } -> { p18; } };
{ { dec; p5; p19; },{ inc; } -> { p19; } };
{ { dec; p5; p20; },{ inc; } -> { p20; } };
{ { dec; p5; p21; },{ inc; } -> { p21; } };
{ { dec; p5; p22; },{ inc; } -> { p22; } };
{ { dec; p5; p23; },{ inc; } -> { p23; } };
{ { dec; p5; p24; },{ inc; } -> { p24; } };
{ { dec; p5; p25; },{ inc; } -> { p25; } };
{ { dec; p5; p26; },{ inc; } -> { p26; } };
{ { dec; p5; p27; },{ inc; } -> { p27; } };
{ { dec; p5; p28; },{ inc; } -> { p28; } };
{ { dec; p5; p29; },{ inc; } -> { p29; } };
{ { dec; p5; p30; },{ inc; } -> { p30; } };
{ { dec; p5; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; } -> { p6; } };
# the more significant bits remain (dec)
{ { dec; p6; p6; },{ inc; } -> { p6; } };
{ { dec; p6; p7; },{ inc; } -> { p7; } };
{ { dec; p6; p8; },{ inc; } -> { p8; } };
{ { dec; p6; p9; },{ inc; } -> { p9; } };
{ { dec; p6; p10; },{ inc; } -> { p10; } };
{ { dec; p6; p11; },{ inc; } -> { p11; } };
{ { dec; p6; p12; },{ inc; } -> { p12; } };
{ { dec; p6; p13; },{ inc; } -> { p13; } };
{ { dec; p6; p14; },{ inc; } -> { p14; } };
{ { dec; p6; p15; },{ inc; } -> { p15; } };
{ { dec; p6; p16; },{ inc; } -> { p16; } };
{ { dec; p6; p17; },{ inc; } -> { p17; } };
{ { dec; p6; p18; },{ inc; } -> { p18; } };
{ { dec; p6; p19; },{ inc; } -> { p19; } };
{ { dec; p6; p20; },{ inc; } -> { p20; } };
{ { dec; p6; p21; },{ inc; } -> { p21; } };
{ { dec; p6; p22; },{ inc; } -> { p22; } };
{ { dec; p6; p23; },{ inc; } -> { p23; } };
{ { dec; p6; p24; },{ inc; } -> { p24; } };
{ { dec; p6; p25; },{ inc; } -> { p25; } };
{ { dec; p6; p26; },{ inc; } -> { p26; } };
{ { dec; p6; p27; },{ inc; } -> { p27; } };
{ { dec; p6; p28; },{ inc; } -> { p28; } };
{ { dec; p6; p29; },{ inc; } -> { p29; } };
{ { dec; p6; p30; },{ inc; } -> { p30; } };
{ { dec; p6; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; } -> { p7; } };
# the more significant bits remain (dec)
{ { dec; p7; p7; },{ inc; } -> { p7; } };
{ { dec; p7; p8; },{ inc; } -> { p8; } };
{ { dec; p7; p9; },{ inc; } -> { p9; } };
{ { dec; p7; p10; },{ inc; } -> { p10; } };
{ { dec; p7; p11; },{ inc; } -> { p11; } };
{ { dec; p7; p12; },{ inc; } -> { p12; } };
{ { dec; p7; p13; },{ inc; } -> { p13; } };
{ { dec; p7; p14; },{ inc; } -> { p14; } };
{ { dec; p7; p15; },{ inc; } -> { p15; } };
{ { dec; p7; p16; },{ inc; } -> { p16; } };
{ { dec; p7; p17; },{ inc; } -> { p17; } };
{ { dec; p7; p18; },{ inc; } -> { p18; } };
{ { dec; p7; p19; },{ inc; } -> { p19; } };
{ { dec; p7; p20; },{ inc; } -> { p20; } };
{ { dec; p7; p21; },{ inc; } -> { p21; } };
{ { dec; p7; p22; },{ inc; } -> { p22; } };
{ { dec; p7; p23; },{ inc; } -> { p23; } };
{ { dec; p7; p24; },{ inc; } -> { p24; } };
{ { dec; p7; p25; },{ inc; } -> { p25; } };
{ { dec; p7; p26; },{ inc; } -> { p26; } };
{ { dec; p7; p27; },{ inc; } -> { p27; } };
{ { dec; p7; p28; },{ inc; } -> { p28; } };
{ { dec; p7; p29; },{ inc; } -> { p29; } };
{ { dec; p7; p30; },{ inc; } -> { p30; } };
{ { dec; p7; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; } -> { p8; } };
# the more significant bits remain (dec)
{ { dec; p8; p8; },{ inc; } -> { p8; } };
{ { dec; p8; p9; },{ inc; } -> { p9; } };
{ { dec; p8; p10; },{ inc; } -> { p10; } };
{ { dec; p8; p11; },{ inc; } -> { p11; } };
{ { dec; p8; p12; },{ inc; } -> { p12; } };
{ { dec; p8; p13; },{ inc; } -> { p13; } };
{ { dec; p8; p14; },{ inc; } -> { p14; } };
{ { dec; p8; p15; },{ inc; } -> { p15; } };
{ { dec; p8; p16; },{ inc; } -> { p16; } };
{ { dec; p8; p17; },{ inc; } -> { p17; } };
{ { dec; p8; p18; },{ inc; } -> { p18; } };
{ { dec; p8; p19; },{ inc; } -> { p19; } };
{ { dec; p8; p20; },{ inc; } -> { p20; } };
{ { dec; p8; p21; },{ inc; } -> { p21; } };
{ { dec; p8; p22; },{ inc; } -> { p22; } };
{ { dec; p8; p23; },{ inc; } -> { p23; } };
{ { dec; p8; p24; },{ inc; } -> { p24; } };
{ { dec; p8; p25; },{ inc; } -> { p25; } };
{ { dec; p8; p26; },{ inc; } -> { p26; } };
{ { dec; p8; p27; },{ inc; } -> { p27; } };
{ { dec; p8; p28; },{ inc; } -> { p28; } };
{ { dec; p8; p29; },{ inc; } -> { p29; } };
{ { dec; p8; p30; },{ inc; } -> { p30; } };
{ { dec; p8; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; } -> { p9; } };
# the more significant bits remain (dec)
{ { dec; p9; p9; },{ inc; } -> { p9; } };
{ { dec; p9; p10; },{ inc; } -> { p10; } };
{ { dec; p9; p11; },{ inc; } -> { p11; } };
{ { dec; p9; p12; },{ inc; } -> { p12; } };
{ { dec; p9; p13; },{ inc; } -> { p13; } };
{ { dec; p9; p14; },{ inc; } -> { p14; } };
{ { dec; p9; p15; },{ inc; } -> { p15; } };
{ { dec; p9; p16; },{ inc; } -> { p16; } };
{ { dec; p9; p17; },{ inc; } -> { p17; } };
{ { dec; p9; p18; },{ inc; } -> { p18; } };
{ { dec; p9; p19; },{ inc; } -> { p19; } };
{ { dec; p9; p20; },{ inc; } -> { p20; } };
{ { dec; p9; p21; },{ inc; } -> { p21; } };
{ { dec; p9; p22; },{ inc; } -> { p22; } };
{ { dec; p9; p23; },{ inc; } -> { p23; } };
{ { dec; p9; p24; },{ inc; } -> { p24; } };
{ { dec; p9; p25; },{ inc; } -> { p25; } };
{ { dec; p9; p26; },{ inc; } -> { p26; } };
{ { dec; p9; p27; },{ inc; } -> { p27; } };
{ { dec; p9; p28; },{ inc; } -> { p28; } };
{ { dec; p9; p29; },{ inc; } -> { p29; } };
{ { dec; p9; p30; },{ inc; } -> { p30; } };
{ { dec; p9; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; } -> { p10; } };
# the more significant bits remain (dec)
{ { dec; p10; p10; },{ inc; } -> { p10; } };
{ { dec; p10; p11; },{ inc; } -> { p11; } };
{ { dec; p10; p12; },{ inc; } -> { p12; } };
{ { dec; p10; p13; },{ inc; } -> { p13; } };
{ { dec; p10; p14; },{ inc; } -> { p14; } };
{ { dec; p10; p15; },{ inc; } -> { p15; } };
{ { dec; p10; p16; },{ inc; } -> { p16; } };
{ { dec; p10; p17; },{ inc; } -> { p17; } };
{ { dec; p10; p18; },{ inc; } -> { p18; } };
{ { dec; p10; p19; },{ inc; } -> { p19; } };
{ { dec; p10; p20; },{ inc; } -> { p20; } };
{ { dec; p10; p21; },{ inc; } -> { p21; } };
{ { dec; p10; p22; },{ inc; } -> { p22; } };
{ { dec; p10; p23; },{ inc; } -> { p23; } };
{ { dec; p10; p24; },{ inc; } -> { p24; } };
{ { dec; p10; p25; },{ inc; } -> { p25; } };
{ { dec; p10; p26; },{ inc; } -> { p26; } };
{ { dec; p10; p27; },{ inc; } -> { p27; } };
{ { dec; p10; p28; },{ inc; } -> { p28; } };
{ { dec; p10; p29; },{ inc; } -> { p29; } };
{ { dec; p10; p30; },{ inc; } -> { p30; } };
{ { dec; p10; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; } -> { p11; } };
# the more significant bits remain (dec)
{ { dec; p11; p11; },{ inc; } -> { p11; } };
{ { dec; p11; p12; },{ inc; } -> { p12; } };
{ { dec; p11; p13; },{ inc; } -> { p13; } };
{ { dec; p11; p14; },{ inc; } -> { p14; } };
{ { dec; p11; p15; },{ inc; } -> { p15; } };
{ { dec; p11; p16; },{ inc; } -> { p16; } };
{ { dec; p11; p17; },{ inc; } -> { p17; } };
{ { dec; p11; p18; },{ inc; } -> { p18; } };
{ { dec; p11; p19; },{ inc; } -> { p19; } };
{ { dec; p11; p20; },{ inc; } -> { p20; } };
{ { dec; p11; p21; },{ inc; } -> { p21; } };
{ { dec; p11; p22; },{ inc; } -> { p22; } };
{ { dec; p11; p23; },{ inc; } -> { p23; } };
{ { dec; p11; p24; },{ inc; } -> { p24; } };
{ { dec; p11; p25; },{ inc; } -> { p25; } };
{ { dec; p11; p26; },{ inc; } -> { p26; } };
{ { dec; p11; p27; },{ inc; } -> { p27; } };
{ { dec; p11; p28; },{ inc; } -> { p28; } };
{ { dec; p11; p29; },{ inc; } -> { p29; } };
{ { dec; p11; p30; },{ inc; } -> { p30; } };
{ { dec; p11; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; } -> { p12; } };
# the more significant bits remain (dec)
{ { dec; p12; p12; },{ inc; } -> { p12; } };
{ { dec; p12; p13; },{ inc; } -> { p13; } };
{ { dec; p12; p14; },{ inc; } -> { p14; } };
{ { dec; p12; p15; },{ inc; } -> { p15; } };
{ { dec; p12; p16; },{ inc; } -> { p16; } };
{ { dec; p12; p17; },{ inc; } -> { p17; } };
{ { dec; p12; p18; },{ inc; } -> { p18; } };
{ { dec; p12; p19; },{ inc; } -> { p19; } };
{ { dec; p12; p20; },{ inc; } -> { p20; } };
{ { dec; p12; p21; },{ inc; } -> { p21; } };
{ { dec; p12; p22; },{ inc; } -> { p22; } };
{ { dec; p12; p23; },{ inc; } -> { p23; } };
{ { dec; p12; p24; },{ inc; } -> { p24; } };
{ { dec; p12; p25; },{ inc; } -> { p25; } };
{ { dec; p12; p26; },{ inc; } -> { p26; } };
{ { dec; p12; p27; },{ inc; } -> { p27; } };
{ { dec; p12; p28; },{ inc; } -> { p28; } };
{ { dec; p12; p29; },{ inc; } -> { p29; } };
{ { dec; p12; p30; },{ inc; } -> { p30; } };
{ { dec; p12; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; } -> { p13; } };
# the more significant bits remain (dec)
{ { dec; p13; p13; },{ inc; } -> { p13; } };
{ { dec; p13; p14; },{ inc; } -> { p14; } };
{ { dec; p13; p15; },{ inc; } -> { p15; } };
{ { dec; p13; p16; },{ inc; } -> { p16; } };
{ { dec; p13; p17; },{ inc; } -> { p17; } };
{ { dec; p13; p18; },{ inc; } -> { p18; } };
{ { dec; p13; p19; },{ inc; } -> { p19; } };
{ { dec; p13; p20; },{ inc; } -> { p20; } };
{ { dec; p13; p21; },{ inc; } -> { p21; } };
{ { dec; p13; p22; },{ inc; } -> { p22; } };
{ { dec; p13; p23; },{ inc; } -> { p23; } };
{ { dec; p13; p24; },{ inc; } -> { p24; } };
{ { dec; p13; p25; },{ inc; } -> { p25; } };
{ { dec; p13; p26; },{ inc; } -> { p26; } };
{ { dec; p13; p27; },{ inc; } -> { p27; } };
{ { dec; p13; p28; },{ inc; } -> { p28; } };
{ { dec; p13; p29; },{ inc; } -> { p29; } };
{ { dec; p13; p30; },{ inc; } -> { p30; } };
{ { dec; p13; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; } -> { p14; } };
# the more significant bits remain (dec)
{ { dec; p14; p14; },{ inc; } -> { p14; } };
{ { dec; p14; p15; },{ inc; } -> { p15; } };
{ { dec; p14; p16; },{ inc; } -> { p16; } };
{ { dec; p14; p17; },{ inc; } -> { p17; } };
{ { dec; p14; p18; },{ inc; } -> { p18; } };
{ { dec; p14; p19; },{ inc; } -> { p19; } };
{ { dec; p14; p20; },{ inc; } -> { p20; } };
{ { dec; p14; p21; },{ inc; } -> { p21; } };
{ { dec; p14; p22; },{ inc; } -> { p22; } };
{ { dec; p14; p23; },{ inc; } -> { p23; } };
{ { dec; p14; p24; },{ inc; } -> { p24; } };
{ { dec; p14; p25; },{ inc; } -> { p25; } };
{ { dec; p14; p26; },{ inc; } -> { p26; } };
{ { dec; p14; p27; },{ inc; } -> { p27; } };
{ { dec; p14; p28; },{ inc; } -> { p28; } };
{ { dec; p14; p29; },{ inc; } -> { p29; } };
{ { dec; p14; p30; },{ inc; } -> { p30; } };
{ { dec; p14; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; } -> { p15; } };
# the more significant bits remain (dec)
{ { dec; p15; p15; },{ inc; } -> { p15; } };
{ { dec; p15; p16; },{ inc; } -> { p16; } };
{ { dec; p15; p17; },{ inc; } -> { p17; } };
{ { dec; p15; p18; },{ inc; } -> { p18; } };
{ { dec; p15; p19; },{ inc; } -> { p19; } };
{ { dec; p15; p20; },{ inc; } -> { p20; } };
{ { dec; p15; p21; },{ inc; } -> { p21; } };
{ { dec; p15; p22; },{ inc; } -> { p22; } };
{ { dec; p15; p23; },{ inc; } -> { p23; } };
{ { dec; p15; p24; },{ inc; } -> { p24; } };
{ { dec; p15; p25; },{ inc; } -> { p25; } };
{ { dec; p15; p26; },{ inc; } -> { p26; } };
{ { dec; p15; p27; },{ inc; } -> { p27; } };
{ { dec; p15; p28; },{ inc; } -> { p28; } };
{ { dec; p15; p29; },{ inc; } -> { p29; } };
{ { dec; p15; p30; },{ inc; } -> { p30; } };
{ { dec; p15; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; } -> { p16; } };
# the more significant bits remain (dec)
{ { dec; p16; p16; },{ inc; } -> { p16; } };
{ { dec; p16; p17; },{ inc; } -> { p17; } };
{ { dec; p16; p18; },{ inc; } -> { p18; } };
{ { dec; p16; p19; },{ inc; } -> { p19; } };
{ { dec; p16; p20; },{ inc; } -> { p20; } };
{ { dec; p16; p21; },{ inc; } -> { p21; } };
{ { dec; p16; p22; },{ inc; } -> { p22; } };
{ { dec; p16; p23; },{ inc; } -> { p23; } };
{ { dec; p16; p24; },{ inc; } -> { p24; } };
{ { dec; p16; p25; },{ inc; } -> { p25; } };
{ { dec; p16; p26; },{ inc; } -> { p26; } };
{ { dec; p16; p27; },{ inc; } -> { p27; } };
{ { dec; p16; p28; },{ inc; } -> { p28; } };
{ { dec; p16; p29; },{ inc; } -> { p29; } };
{ { dec; p16; p30; },{ inc; } -> { p30; } };
{ { dec; p16; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; } -> { p17; } };
# the more significant bits remain (dec)
{ { dec; p17; p17; },{ inc; } -> { p17; } };
{ { dec; p17; p18; },{ inc; } -> { p18; } };
{ { dec; p17; p19; },{ inc; } -> { p19; } };
{ { dec; p17; p20; },{ inc; } -> { p20; } };
{ { dec; p17; p21; },{ inc; } -> { p21; } };
{ { dec; p17; p22; },{ inc; } -> { p22; } };
{ { dec; p17; p23; },{ inc; } -> { p23; } };
{ { dec; p17; p24; },{ inc; } -> { p24; } };
{ { dec; p17; p25; },{ inc; } -> { p25; } };
{ { dec; p17; p26; },{ inc; } -> { p26; } };
{ { dec; p17; p27; },{ inc; } -> { p27; } };
{ { dec; p17; p28; },{ inc; } -> { p28; } };
{ { dec; p17; p29; },{ inc; } -> { p29; } };
{ { dec; p17; p30; },{ inc; } -> { p30; } };
{ { dec; p17; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; } -> { p18; } };
# the more significant bits remain (dec)
{ { dec; p18; p18; },{ inc; } -> { p18; } };
{ { dec; p18; p19; },{ inc; } -> { p19; } };
{ { dec; p18; p20; },{ inc; } -> { p20; } };
{ { dec; p18; p21; },{ inc; } -> { p21; } };
{ { dec; p18; p22; },{ inc; } -> { p22; } };
{ { dec; p18; p23; },{ inc; } -> { p23; } };
{ { dec; p18; p24; },{ inc; } -> { p24; } };
{ { dec; p18; p25; },{ inc; } -> { p25; } };
{ { dec; p18; p26; },{ inc; } -> { p26; } };
{ { dec; p18; p27; },{ inc; } -> { p27; } };
{ { dec; p18; p28; },{ inc; } -> { p28; } };
{ { dec; p18; p29; },{ inc; } -> { p29; } };
{ { dec; p18; p30; },{ inc; } -> { p30; } };
{ { dec; p18; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; } -> { p19; } };
# the more significant bits remain (dec)
{ { dec; p19; p19; },{ inc; } -> { p19; } };
{ { dec; p19; p20; },{ inc; } -> { p20; } };
{ { dec; p19; p21; },{ inc; } -> { p21; } };
{ { dec; p19; p22; },{ inc; } -> { p22; } };
{ { dec; p19; p23; },{ inc; } -> { p23; } };
{ { dec; p19; p24; },{ inc; } -> { p24; } };
{ { dec; p19; p25; },{ inc; } -> { p25; } };
{ { dec; p19; p26; },{ inc; } -> { p26; } };
{ { dec; p19; p27; },{ inc; } -> { p27; } };
{ { dec; p19; p28; },{ inc; } -> { p28; } };
{ { dec; p19; p29; },{ inc; } -> { p29; } };
{ { dec; p19; p30; },{ inc; } -> { p30; } };
{ { dec; p19; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; } -> { p20; } };
# the more significant bits remain (dec)
{ { dec; p20; p20; },{ inc; } -> { p20; } };
{ { dec; p20; p21; },{ inc; } -> { p21; } };
{ { dec; p20; p22; },{ inc; } -> { p22; } };
{ { dec; p20; p23; },{ inc; } -> { p23; } };
{ { dec; p20; p24; },{ inc; } -> { p24; } };
{ { dec; p20; p25; },{ inc; } -> { p25; } };
{ { dec; p20; p26; },{ inc; } -> { p26; } };
{ { dec; p20; p27; },{ inc; } -> { p27; } };
{ { dec; p20; p28; },{ inc; } -> { p28; } };
{ { dec; p20; p29; },{ inc; } -> { p29; } };
{ { dec; p20; p30; },{ inc; } -> { p30; } };
{ { dec; p20; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; } -> { p21; } };
# the more significant bits remain (dec)
{ { dec; p21; p21; },{ inc; } -> { p21; } };
{ { dec; p21; p22; },{ inc; } -> { p22; } };
{ { dec; p21; p23; },{ inc; } -> { p23; } };
{ { dec; p21; p24; },{ inc; } -> { p24; } };
{ { dec; p21; p25; },{ inc; } -> { p25; } };
{ { dec; p21; p26; },{ inc; } -> { p26; } };
{ { dec; p21; p27; },{ inc; } -> { p27; } };
{ { dec; p21; p28; },{ inc; } -> { p28; } };
{ { dec; p21; p29; },{ inc; } -> { p29; } };
{ { dec; p21; p30; },{ inc; } -> { p30; } };
{ { dec; p21; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; } -> { p22; } };
# the more significant bits remain (dec)
{ { dec; p22; p22; },{ inc; } -> { p22; } };
{ { dec; p22; p23; },{ inc; } -> { p23; } };
{ { dec; p22; p24; },{ inc; } -> { p24; } };
{ { dec; p22; p25; },{ inc; } -> { p25; } };
{ { dec; p22; p26; },{ inc; } -> { p26; } };
{ { dec; p22; p27; },{ inc; } -> { p27; } };
{ { dec; p22; p28; },{ inc; } -> { p28; } };
{ { dec; p22; p29; },{ inc; } -> { p29; } };
{ { dec; p22; p30; },{ inc; } -> { p30; } };
{ { dec; p22; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; } -> { p23; } };
# the more significant bits remain (dec)
{ { dec; p23; p23; },{ inc; } -> { p23; } };
{ { dec; p23; p24; },{ inc; } -> { p24; } };
{ { dec; p23; p25; },{ inc; } -> { p25; } };
{ { dec; p23; p26; },{ inc; } -> { p26; } };
{ { dec; p23; p27; },{ inc; } -> { p27; } };
{ { dec; p23; p28; },{ inc; } -> { p28; } };
{ { dec; p23; p29; },{ inc; } -> { p29; } };
{ { dec; p23; p30; },{ inc; } -> { p30; } };
{ { dec; p23; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; } -> { p24; } };
# the more significant bits remain (dec)
{ { dec; p24; p24; },{ inc; } -> { p24; } };
{ { dec; p24; p25; },{ inc; } -> { p25; } };
{ { dec; p24; p26; },{ inc; } -> { p26; } };
{ { dec; p24; p27; },{ inc; } -> { p27; } };
{ { dec; p24; p28; },{ inc; } -> { p28; } };
{ { dec; p24; p29; },{ inc; } -> { p29; } };
{ { dec; p24; p30; },{ inc; } -> { p30; } };
{ { dec; p24; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; } -> { p25; } };
# the more significant bits remain (dec)
{ { dec; p25; p25; },{ inc; } -> { p25; } };
{ { dec; p25; p26; },{ inc; } -> { p26; } };
{ { dec; p25; p27; },{ inc; } -> { p27; } };
{ { dec; p25; p28; },{ inc; } -> { p28; } };
{ { dec; p25; p29; },{ inc; } -> { p29; } };
{ { dec; p25; p30; },{ inc; } -> { p30; } };
{ { dec; p25; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; } -> { p26; } };
# the more significant bits remain (dec)
{ { dec; p26; p26; },{ inc; } -> { p26; } };
{ { dec; p26; p27; },{ inc; } -> { p27; } };
{ { dec; p26; p28; },{ inc; } -> { p28; } };
{ { dec; p26; p29; },{ inc; } -> { p29; } };
{ { dec; p26; p30; },{ inc; } -> { p30; } };
{ { dec; p26; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; } -> { p27; } };
# the more significant bits remain (dec)
{ { dec; p27; p27; },{ inc; } -> { p27; } };
{ { dec; p27; p28; },{ inc; } -> { p28; } };
{ { dec; p27; p29; },{ inc; } -> { p29; } };
{ { dec; p27; p30; },{ inc; } -> { p30; } };
{ { dec; p27; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; } -> { p28; } };
# the more significant bits remain (dec)
{ { dec; p28; p28; },{ inc; } -> { p28; } };
{ { dec; p28; p29; },{ inc; } -> { p29; } };
{ { dec; p28; p30; },{ inc; } -> { p30; } };
{ { dec; p28; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; } -> { p29; } };
# the more significant bits remain (dec)
{ { dec; p29; p29; },{ inc; } -> { p29; } };
{ { dec; p29; p30; },{ inc; } -> { p30; } };
{ { dec; p29; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; p29; } -> { p30; } };
# the more significant bits remain (dec)
{ { dec; p30; p30; },{ inc; } -> { p30; } };
{ { dec; p30; p31; },{ inc; } -> { p31; } };
{ { dec; },{ inc; p0; p1; p2; p3; p4; p5; p6; p7; p8; p9; p10; p11; p12; p13; p14; p15; p16; p17; p18; p19; p20; p21; p22; p23; p24; p25; p26; p27; p28; p29; p30; } -> { p31; } };
# the more significant bits remain (dec)
{ { dec; p31; p31; },{ inc; } -> { p31; } };
}
# The two atomic actions the transition rules above are guarded on.
action-atoms { inc; dec; }
# Empty initial configuration: no place/atom is marked at the start.
initial-state { }
# CTL property to verify: no reachable state both satisfies p31 and has a
# path along which p31 holds globally (~EF(p31 AND EG p31)).
ctl-property { ~EF( p31 AND EG p31 ) }
| 47.95273 | 185 | 0.350607 |
8af1f2e182181d4363697cf87bcf0dd8622cb36a
| 603 |
use std::f64::consts::TAU;
use crate::complex::Complex;
/// Numerically controlled oscillator that emits unit-magnitude complex
/// samples, one per call to `next` (see the `Iterator` impl).
pub struct Oscillator {
    /// Sample rate; only the ratio `frequency / sample_rate` matters for
    /// the per-sample phase step. Presumably in Hz — TODO confirm units.
    sample_rate: f64,
    /// Oscillation frequency, in the same units as `sample_rate`.
    frequency: f64,
    /// Current phase in radians, kept wrapped modulo TAU.
    phase: f64,
}
impl Oscillator {
pub fn new(sample_rate: f64, frequency: f64) -> Self {
Self {
sample_rate,
frequency,
phase: 0.0,
}
}
}
impl Iterator for Oscillator {
    type Item = Complex;

    /// Emits the unit-magnitude sample at the current phase, then advances
    /// the phase by one sample period, wrapping modulo TAU. Never returns
    /// `None` — the oscillator is an infinite stream.
    fn next(&mut self) -> Option<Self::Item> {
        let out = Complex::polar(1.0, self.phase);
        let step = TAU * self.frequency / self.sample_rate;
        self.phase = (self.phase + step) % TAU;
        Some(out)
    }
}
| 19.451613 | 62 | 0.558872 |
3a8bfdd95c66d0a999e58152ef5241aa94bca37b
| 8,233 |
#[doc = r"Register block"]
#[repr(C)]
// Memory-mapped register layout for a tasks/events peripheral (svd2rust
// generated). `#[repr(C)]` together with the `_reservedN` padding arrays
// pins each register to the byte offset stated in its doc comment — do not
// reorder or resize any field.
pub struct RegisterBlock {
    #[doc = "0x00 - Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is configured in CONFIG\\[n\\].POLARITY."]
    pub tasks_out: [TASKS_OUT; 8],
    // Padding 0x20..0x30.
    _reserved1: [u8; 16usize],
    #[doc = "0x30 - Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it high."]
    pub tasks_set: [TASKS_SET; 8],
    // Padding 0x50..0x60.
    _reserved2: [u8; 16usize],
    #[doc = "0x60 - Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it low."]
    pub tasks_clr: [TASKS_CLR; 8],
    // Padding 0x80..0x100.
    _reserved3: [u8; 128usize],
    #[doc = "0x100 - Description collection: Event generated from pin specified in CONFIG\\[n\\].PSEL"]
    pub events_in: [EVENTS_IN; 8],
    // Padding 0x120..0x17c.
    _reserved4: [u8; 92usize],
    #[doc = "0x17c - Event generated from multiple input GPIO pins with SENSE mechanism enabled"]
    pub events_port: EVENTS_PORT,
    // Padding 0x180..0x304.
    _reserved5: [u8; 388usize],
    #[doc = "0x304 - Enable interrupt"]
    pub intenset: INTENSET,
    #[doc = "0x308 - Disable interrupt"]
    pub intenclr: INTENCLR,
    // Padding 0x30c..0x510.
    _reserved7: [u8; 516usize],
    #[doc = "0x510 - Description collection: Configuration for OUT\\[n\\], SET\\[n\\] and CLR\\[n\\] tasks and IN\\[n\\] event"]
    pub config: [CONFIG; 8],
}
// TASKS_OUT[0..8]: write-only task registers (only `Writable` is
// implemented). Writing fires the action selected by CONFIG[n].POLARITY on
// the pin in CONFIG[n].PSEL. The hidden `_TASKS_OUT` struct is a zero-sized
// tag that distinguishes this `Reg<u32, _>` instantiation at the type level.
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is configured in CONFIG\\[n\\].POLARITY.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [tasks_out](tasks_out) module"]
pub type TASKS_OUT = crate::Reg<u32, _TASKS_OUT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TASKS_OUT;
#[doc = "`write(|w| ..)` method takes [tasks_out::W](tasks_out::W) writer structure"]
impl crate::Writable for TASKS_OUT {}
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is configured in CONFIG\\[n\\].POLARITY."]
pub mod tasks_out;
// TASKS_SET[0..8]: write-only task registers that drive the selected pin high.
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it high.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [tasks_set](tasks_set) module"]
pub type TASKS_SET = crate::Reg<u32, _TASKS_SET>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TASKS_SET;
#[doc = "`write(|w| ..)` method takes [tasks_set::W](tasks_set::W) writer structure"]
impl crate::Writable for TASKS_SET {}
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it high."]
pub mod tasks_set;
// TASKS_CLR[0..8]: write-only task registers that drive the selected pin low.
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it low.\n\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [tasks_clr](tasks_clr) module"]
pub type TASKS_CLR = crate::Reg<u32, _TASKS_CLR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _TASKS_CLR;
#[doc = "`write(|w| ..)` method takes [tasks_clr::W](tasks_clr::W) writer structure"]
impl crate::Writable for TASKS_CLR {}
#[doc = "Description collection: Task for writing to pin specified in CONFIG\\[n\\].PSEL. Action on pin is to set it low."]
pub mod tasks_clr;
// EVENTS_IN[0..8]: read/write event registers (both `Readable` and
// `Writable` are implemented); one event per configured pin.
#[doc = "Description collection: Event generated from pin specified in CONFIG\\[n\\].PSEL\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [events_in](events_in) module"]
pub type EVENTS_IN = crate::Reg<u32, _EVENTS_IN>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _EVENTS_IN;
#[doc = "`read()` method returns [events_in::R](events_in::R) reader structure"]
impl crate::Readable for EVENTS_IN {}
#[doc = "`write(|w| ..)` method takes [events_in::W](events_in::W) writer structure"]
impl crate::Writable for EVENTS_IN {}
#[doc = "Description collection: Event generated from pin specified in CONFIG\\[n\\].PSEL"]
pub mod events_in;
// EVENTS_PORT: single read/write event register fed by the pins' SENSE
// mechanism.
#[doc = "Event generated from multiple input GPIO pins with SENSE mechanism enabled\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [events_port](events_port) module"]
pub type EVENTS_PORT = crate::Reg<u32, _EVENTS_PORT>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _EVENTS_PORT;
#[doc = "`read()` method returns [events_port::R](events_port::R) reader structure"]
impl crate::Readable for EVENTS_PORT {}
#[doc = "`write(|w| ..)` method takes [events_port::W](events_port::W) writer structure"]
impl crate::Writable for EVENTS_PORT {}
#[doc = "Event generated from multiple input GPIO pins with SENSE mechanism enabled"]
pub mod events_port;
// INTENSET: read/write interrupt-enable register (set bits to enable).
#[doc = "Enable interrupt\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intenset](intenset) module"]
pub type INTENSET = crate::Reg<u32, _INTENSET>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTENSET;
#[doc = "`read()` method returns [intenset::R](intenset::R) reader structure"]
impl crate::Readable for INTENSET {}
#[doc = "`write(|w| ..)` method takes [intenset::W](intenset::W) writer structure"]
impl crate::Writable for INTENSET {}
#[doc = "Enable interrupt"]
pub mod intenset;
// INTENCLR: read/write interrupt-disable register (set bits to disable).
#[doc = "Disable interrupt\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [intenclr](intenclr) module"]
pub type INTENCLR = crate::Reg<u32, _INTENCLR>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _INTENCLR;
#[doc = "`read()` method returns [intenclr::R](intenclr::R) reader structure"]
impl crate::Readable for INTENCLR {}
#[doc = "`write(|w| ..)` method takes [intenclr::W](intenclr::W) writer structure"]
impl crate::Writable for INTENCLR {}
#[doc = "Disable interrupt"]
pub mod intenclr;
// CONFIG[0..8]: read/write per-channel configuration registers tying the
// OUT/SET/CLR tasks and the IN event to a pin.
#[doc = "Description collection: Configuration for OUT\\[n\\], SET\\[n\\] and CLR\\[n\\] tasks and IN\\[n\\] event\n\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about avaliable fields see [config](config) module"]
pub type CONFIG = crate::Reg<u32, _CONFIG>;
#[allow(missing_docs)]
#[doc(hidden)]
pub struct _CONFIG;
#[doc = "`read()` method returns [config::R](config::R) reader structure"]
impl crate::Readable for CONFIG {}
#[doc = "`write(|w| ..)` method takes [config::W](config::W) writer structure"]
impl crate::Writable for CONFIG {}
#[doc = "Description collection: Configuration for OUT\\[n\\], SET\\[n\\] and CLR\\[n\\] tasks and IN\\[n\\] event"]
pub mod config;
| 75.53211 | 488 | 0.702903 |
76a8bac127a453e9fb16a9845c5a257c99f64cae
| 1,766 |
use futures::executor::block_on;
use futures::future::Future;
use futures::stream::{self, StreamExt, TryStreamExt};
use futures::io::AsyncBufReadExt;
use futures::task::Poll;
use futures_test::io::AsyncReadTestExt;
use futures_test::task::noop_context;
use std::io::Cursor;
use std::pin::Pin;
#[test]
fn read_line() {
    // Input without a trailing newline: the whole buffer is one line.
    let mut reader = Cursor::new(b"12");
    let mut line = String::new();
    assert_eq!(block_on(reader.read_line(&mut line)).unwrap(), 2);
    assert_eq!(line, "12");

    // Input ending in two newlines: each read stops after a newline, and a
    // final read at EOF returns 0 bytes.
    let mut reader = Cursor::new(b"12\n\n");
    let mut line = String::new();
    assert_eq!(block_on(reader.read_line(&mut line)).unwrap(), 3);
    assert_eq!(line, "12\n");
    line.clear();
    assert_eq!(block_on(reader.read_line(&mut line)).unwrap(), 1);
    assert_eq!(line, "\n");
    line.clear();
    assert_eq!(block_on(reader.read_line(&mut line)).unwrap(), 0);
    assert_eq!(line, "");
}
/// Drives `f` to completion by busy-polling it with a no-op waker context,
/// returning its output. Test-only executor for futures that make progress
/// on every poll.
fn run<F: Future + Unpin>(mut f: F) -> F::Output {
    let mut cx = noop_context();
    loop {
        match Pin::new(&mut f).poll(&mut cx) {
            Poll::Ready(output) => return output,
            Poll::Pending => {}
        }
    }
}
#[test]
fn maybe_pending() {
    // Reader that returns Pending between every read; `run` must poll
    // through the pendings.
    let mut reader = b"12".interleave_pending();
    let mut line = String::new();
    assert_eq!(run(reader.read_line(&mut line)).unwrap(), 2);
    assert_eq!(line, "12");

    // Chunked input ("12" then "\n\n") exposed as an AsyncRead, again with
    // interleaved pending polls; line boundaries must survive the chunking.
    let mut reader = stream::iter(vec![&b"12"[..], &b"\n\n"[..]])
        .map(Ok)
        .into_async_read()
        .interleave_pending();
    let mut line = String::new();
    assert_eq!(run(reader.read_line(&mut line)).unwrap(), 3);
    assert_eq!(line, "12\n");
    line.clear();
    assert_eq!(run(reader.read_line(&mut line)).unwrap(), 1);
    assert_eq!(line, "\n");
    line.clear();
    assert_eq!(run(reader.read_line(&mut line)).unwrap(), 0);
    assert_eq!(line, "");
    line.clear();
    assert_eq!(run(reader.read_line(&mut line)).unwrap(), 0);
    assert_eq!(line, "");
}
| 28.031746 | 64 | 0.579275 |
1a76afcff140150b85eea911c5078e3fc9285d1d
| 86,709 |
use std::collections::HashSet;
use std::fmt::Debug;
use std::marker::PhantomData;
use std::ops::Deref;
use std::sync::Arc;
use chrono::{DateTime, Utc};
use oauth2::helpers::variant_name;
use oauth2::{ClientId, ClientSecret};
use ring::constant_time::verify_slices_are_equal;
use serde::de::DeserializeOwned;
use serde::Serialize;
use thiserror::Error;
use crate::jwt::{JsonWebToken, JsonWebTokenJsonPayloadSerde};
use crate::user_info::UserInfoClaimsImpl;
use crate::{
AdditionalClaims, Audience, AuthenticationContextClass, GenderClaim, IdTokenClaims, IssuerUrl,
JsonWebKey, JsonWebKeySet, JsonWebKeyType, JsonWebKeyUse, JsonWebTokenAccess,
JsonWebTokenAlgorithm, JsonWebTokenHeader, JweContentEncryptionAlgorithm, JwsSigningAlgorithm,
Nonce, SubjectIdentifier,
};
/// Accessor trait for claim types that may contain an `aud` (audience)
/// claim.
pub(crate) trait AudiencesClaim {
    /// Returns the audiences asserted by the claims, or `None` if the `aud`
    /// claim is absent.
    fn audiences(&self) -> Option<&Vec<Audience>>;
}
/// Accessor trait for claim types that may contain an `iss` (issuer) claim.
pub(crate) trait IssuerClaim {
    /// Returns the issuer asserted by the claims, or `None` if the `iss`
    /// claim is absent.
    fn issuer(&self) -> Option<&IssuerUrl>;
}
///
/// Error verifying claims.
///
#[derive(Clone, Debug, Error, PartialEq)]
#[non_exhaustive]
pub enum ClaimsVerificationError {
/// Claims have expired.
#[error("Expired: {0}")]
Expired(String),
/// Audience claim is invalid.
#[error("Invalid audiences: {0}")]
InvalidAudience(String),
/// Authorization context class reference (`acr`) claim is invalid.
#[error("Invalid authorization context class reference: {0}")]
InvalidAuthContext(String),
/// User authenticated too long ago.
#[error("Invalid authentication time: {0}")]
InvalidAuthTime(String),
/// Issuer claim is invalid.
#[error("Invalid issuer: {0}")]
InvalidIssuer(String),
/// Nonce is invalid.
#[error("Invalid nonce: {0}")]
InvalidNonce(String),
/// Subject claim is invalid.
#[error("Invalid subject: {0}")]
InvalidSubject(String),
/// No signature present but claims must be signed.
#[error("Claims must be signed")]
NoSignature,
/// An unexpected error occurred.
#[error("{0}")]
Other(String),
/// Failed to verify the claims signature.
#[error("Signature verification failed")]
SignatureVerification(#[source] SignatureVerificationError),
/// Unsupported argument or value.
#[error("Unsupported: {0}")]
Unsupported(String),
}
///
/// Error verifying claims signature.
///
/// Marked `#[non_exhaustive]`: downstream `match` expressions need a
/// wildcard arm, since variants may be added without a breaking release.
///
#[derive(Clone, Debug, Error, PartialEq)]
#[non_exhaustive]
pub enum SignatureVerificationError {
    /// More than one key matches the supplied key constraints (e.g., key ID).
    #[error("Ambiguous key identification: {0}")]
    AmbiguousKeyId(String),
    /// Invalid signature for the supplied claims and signing key.
    #[error("Crypto error: {0}")]
    CryptoError(String),
    /// The supplied signature algorithm is disallowed by the verifier.
    #[error("Disallowed signature algorithm: {0}")]
    DisallowedAlg(String),
    /// The supplied key cannot be used in this context. This may occur if the key type does not
    /// match the signature type (e.g., an RSA key used to validate an HMAC) or the JWK usage
    /// disallows signatures.
    #[error("Invalid cryptographic key: {0}")]
    InvalidKey(String),
    /// The signing key needed for verifying the
    /// [JSON Web Token](https://tools.ietf.org/html/rfc7519)'s signature/MAC could not be found.
    /// This error can occur if the key ID (`kid`) specified in the JWT's
    /// [JOSE header](https://tools.ietf.org/html/rfc7519#section-5) does not match the ID of any
    /// key in the OpenID Connect provider's JSON Web Key Set (JWKS), typically retrieved from
    /// the provider's [JWKS document](
    /// http://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata). To support
    /// [rotation of asymmetric signing keys](
    /// http://openid.net/specs/openid-connect-core-1_0.html#RotateSigKeys), client applications
    /// should consider refreshing the JWKS document (via
    /// [`JsonWebKeySet::fetch`][crate::JsonWebKeySet::fetch]).
    ///
    /// This error can also occur if the identified
    /// [JSON Web Key](https://tools.ietf.org/html/rfc7517) is of the wrong type (e.g., an RSA key
    /// when the JOSE header specifies an ECDSA algorithm) or does not support signing.
    #[error("No matching key found")]
    NoMatchingKey,
    /// Unsupported signature algorithm.
    #[error("Unsupported signature algorithm: {0}")]
    UnsupportedAlg(String),
    /// An unexpected error occurred.
    #[error("Other error: {0}")]
    Other(String),
}
// This struct is intentionally private.
//
// Shared JWT validation engine backing both `IdTokenVerifier` and `UserInfoVerifier`:
// it checks the JOSE header, issuer, audience(s), and signature of a JWT.
#[derive(Clone)]
struct JwtClaimsVerifier<'a, JS, JT, JU, K>
where
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    /// Signature algorithms accepted during verification; `None` means any algorithm is allowed.
    allowed_algs: Option<HashSet<JS>>,
    /// Whether the `aud` claim must include this client's ID.
    aud_match_required: bool,
    /// This relying party's client ID, matched against the `aud` claim.
    client_id: ClientId,
    /// Shared secret used to validate MAC-based (e.g., HMAC) signatures; only set for
    /// confidential clients.
    client_secret: Option<ClientSecret>,
    /// Whether the `iss` claim must match `issuer`.
    iss_required: bool,
    /// The expected issuer, matched against the `iss` claim.
    issuer: IssuerUrl,
    /// When `false`, signature verification is skipped entirely (insecure; opt-in only).
    is_signature_check_enabled: bool,
    /// Callback deciding whether an audience other than `client_id` is trusted.
    other_aud_verifier_fn: Arc<dyn Fn(&Audience) -> bool + 'a>,
    /// JWKS containing candidate public keys for signature verification.
    signature_keys: JsonWebKeySet<JS, JT, JU, K>,
}
impl<'a, JS, JT, JU, K> JwtClaimsVerifier<'a, JS, JT, JU, K>
where
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    /// Instantiates a verifier with secure defaults: only RS256 signatures allowed, issuer and
    /// audience matching required, signature checks enabled, and all audiences other than
    /// `client_id` rejected.
    pub fn new(
        client_id: ClientId,
        issuer: IssuerUrl,
        signature_keys: JsonWebKeySet<JS, JT, JU, K>,
    ) -> Self {
        JwtClaimsVerifier {
            allowed_algs: Some([JS::rsa_sha_256()].iter().cloned().collect()),
            aud_match_required: true,
            client_id,
            client_secret: None,
            iss_required: true,
            issuer,
            is_signature_check_enabled: true,
            // Secure default: reject all other audiences as untrusted, since any other audience
            // can potentially impersonate the user by sending its copy of these claims
            // to this relying party.
            other_aud_verifier_fn: Arc::new(|_| false),
            signature_keys,
        }
    }
    /// Sets whether the `aud` claim must include this client's ID.
    pub fn require_audience_match(mut self, aud_required: bool) -> Self {
        self.aud_match_required = aud_required;
        self
    }
    /// Sets whether the `iss` claim must match the expected issuer.
    pub fn require_issuer_match(mut self, iss_required: bool) -> Self {
        self.iss_required = iss_required;
        self
    }
    /// Sets whether the JWT signature must be verified.
    pub fn require_signature_check(mut self, sig_required: bool) -> Self {
        self.is_signature_check_enabled = sig_required;
        self
    }
    /// Restricts the accepted signature algorithms to `algs`.
    pub fn set_allowed_algs<I>(mut self, algs: I) -> Self
    where
        I: IntoIterator<Item = JS>,
    {
        self.allowed_algs = Some(algs.into_iter().collect());
        self
    }
    /// Accepts any signature algorithm.
    pub fn allow_any_alg(mut self) -> Self {
        self.allowed_algs = None;
        self
    }
    /// Supplies the client secret used to validate MAC-based signatures (e.g., HS256).
    pub fn set_client_secret(mut self, client_secret: ClientSecret) -> Self {
        self.client_secret = Some(client_secret);
        self
    }
    /// Supplies a callback deciding whether audiences other than `client_id` are trusted.
    pub fn set_other_audience_verifier_fn<T>(mut self, other_aud_verifier_fn: T) -> Self
    where
        T: Fn(&Audience) -> bool + 'a,
    {
        self.other_aud_verifier_fn = Arc::new(other_aud_verifier_fn);
        self
    }
    /// Validates the JOSE header: rejects non-JWT `typ` values, any `cty` (including nested
    /// JWTs), and any `crit` fields (none are understood by this implementation).
    fn validate_jose_header<JE>(
        jose_header: &JsonWebTokenHeader<JE, JS, JT>,
    ) -> Result<(), ClaimsVerificationError>
    where
        JE: JweContentEncryptionAlgorithm<JT>,
    {
        // The 'typ' header field must either be omitted or have the canonicalized value JWT.
        if let Some(ref jwt_type) = jose_header.typ {
            if jwt_type.to_uppercase() != "JWT" {
                return Err(ClaimsVerificationError::Unsupported(format!(
                    "unexpected or unsupported JWT type `{}`",
                    **jwt_type
                )));
            }
        }
        // The 'cty' header field must be omitted, since it's only used for JWTs that contain
        // content types other than JSON-encoded claims. This may include nested JWTs, such as if
        // JWE encryption is used. This is currently unsupported.
        if let Some(ref content_type) = jose_header.cty {
            if content_type.to_uppercase() == "JWT" {
                return Err(ClaimsVerificationError::Unsupported(
                    "nested JWTs are not currently supported".to_string(),
                ));
            } else {
                return Err(ClaimsVerificationError::Unsupported(format!(
                    "unexpected or unsupported JWT content type `{}`",
                    **content_type
                )));
            }
        }
        // If 'crit' fields are specified, we must reject any we do not understand. Since this
        // implementation doesn't understand any of them, unconditionally reject the JWT. Note that
        // the spec prohibits this field from containing any of the standard headers or being empty.
        if jose_header.crit.is_some() {
            // https://tools.ietf.org/html/rfc7515#appendix-E
            return Err(ClaimsVerificationError::Unsupported(
                "critical JWT header fields are unsupported".to_string(),
            ));
        }
        Ok(())
    }
    /// Verifies the JWT's JOSE header, issuer, audience(s), and signature, returning the
    /// (now verified) payload on success.
    ///
    /// Verification roughly follows steps 1--3 and 6--8 of
    /// [OpenID Connect Core 1.0, Section 3.1.3.7](
    /// https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation); the remaining
    /// steps are ID-token-specific and handled by the caller.
    pub fn verified_claims<A, C, JE, T>(&self, jwt: A) -> Result<T, ClaimsVerificationError>
    where
        A: JsonWebTokenAccess<JE, JS, JT, C, ReturnType = T>,
        C: AudiencesClaim + Debug + DeserializeOwned + IssuerClaim + Serialize,
        JE: JweContentEncryptionAlgorithm<JT>,
        T: AudiencesClaim + IssuerClaim,
    {
        {
            let jose_header = jwt.unverified_header();
            Self::validate_jose_header(jose_header)?;
            // The code below roughly follows the validation steps described in
            // https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
            // 1. If the ID Token is encrypted, decrypt it using the keys and algorithms that the Client
            //    specified during Registration that the OP was to use to encrypt the ID Token. If
            //    encryption was negotiated with the OP at Registration time and the ID Token is not
            //    encrypted, the RP SHOULD reject it.
            if let JsonWebTokenAlgorithm::Encryption(ref encryption_alg) = jose_header.alg {
                return Err(ClaimsVerificationError::Unsupported(format!(
                    "JWE encryption is not currently supported (found algorithm `{}`)",
                    variant_name(encryption_alg),
                )));
            }
        }
        // TODO: Add encryption (JWE) support
        {
            // 2. The Issuer Identifier for the OpenID Provider (which is typically obtained during
            //    Discovery) MUST exactly match the value of the iss (issuer) Claim.
            let unverified_claims = jwt.unverified_payload_ref();
            if self.iss_required {
                if let Some(issuer) = unverified_claims.issuer() {
                    if *issuer != self.issuer {
                        return Err(ClaimsVerificationError::InvalidIssuer(format!(
                            "expected `{}` (found `{}`)",
                            *self.issuer, **issuer
                        )));
                    }
                } else {
                    return Err(ClaimsVerificationError::InvalidIssuer(
                        "missing issuer claim".to_string(),
                    ));
                }
            }
            // 3. The Client MUST validate that the aud (audience) Claim contains its client_id value
            //    registered at the Issuer identified by the iss (issuer) Claim as an audience. The aud
            //    (audience) Claim MAY contain an array with more than one element. The ID Token MUST be
            //    rejected if the ID Token does not list the Client as a valid audience, or if it
            //    contains additional audiences not trusted by the Client.
            if self.aud_match_required {
                if let Some(audiences) = unverified_claims.audiences() {
                    if !audiences
                        .iter()
                        .any(|aud| (**aud).deref() == self.client_id.deref())
                    {
                        return Err(ClaimsVerificationError::InvalidAudience(format!(
                            "must contain `{}` (found audiences: {})",
                            *self.client_id,
                            audiences
                                .iter()
                                .map(|aud| format!("`{}`", Deref::deref(aud)))
                                .collect::<Vec<_>>()
                                .join(", ")
                        )));
                    } else if audiences.len() > 1 {
                        // Every audience other than this client must be explicitly trusted by
                        // the configured callback.
                        audiences
                            .iter()
                            .filter(|aud| (**aud).deref() != self.client_id.deref())
                            .find(|aud| !(self.other_aud_verifier_fn)(aud))
                            .map(|aud| {
                                Err(ClaimsVerificationError::InvalidAudience(format!(
                                    "`{}` is not a trusted audience",
                                    **aud,
                                )))
                            })
                            .unwrap_or(Ok(()))?;
                    }
                } else {
                    return Err(ClaimsVerificationError::InvalidAudience(
                        "missing audiences claim".to_string(),
                    ));
                }
            }
        }
        // Steps 4--5 (azp claim validation) are specific to the ID token.
        // 6. If the ID Token is received via direct communication between the Client and the Token
        //    Endpoint (which it is in this flow), the TLS server validation MAY be used to validate
        //    the issuer in place of checking the token signature. The Client MUST validate the
        //    signature of all other ID Tokens according to JWS [JWS] using the algorithm specified
        //    in the JWT alg Header Parameter. The Client MUST use the keys provided by the Issuer.
        if !self.is_signature_check_enabled {
            return Ok(jwt.unverified_payload());
        }
        // Borrow the header again. We had to drop the reference above to allow for the
        // early exit calling jwt.unverified_claims(), which takes ownership of the JWT.
        let signature_alg = match jwt.unverified_header().alg {
            // Encryption is handled above.
            JsonWebTokenAlgorithm::Encryption(_) => unreachable!(),
            JsonWebTokenAlgorithm::Signature(ref signature_alg, _) => signature_alg,
            // Section 2 of OpenID Connect Core 1.0 specifies that "ID Tokens MUST NOT use
            // none as the alg value unless the Response Type used returns no ID Token from
            // the Authorization Endpoint (such as when using the Authorization Code Flow)
            // and the Client explicitly requested the use of none at Registration time."
            //
            // While there's technically a use case where this is ok, we choose not to
            // support it for now to protect against accidental misuse. If demand arises,
            // we can figure out an API that mitigates the risk.
            JsonWebTokenAlgorithm::None => return Err(ClaimsVerificationError::NoSignature),
        }
        .clone();
        // 7. The alg value SHOULD be the default of RS256 or the algorithm sent by the Client
        //    in the id_token_signed_response_alg parameter during Registration.
        if let Some(ref allowed_algs) = self.allowed_algs {
            if !allowed_algs.contains(&signature_alg) {
                return Err(ClaimsVerificationError::SignatureVerification(
                    SignatureVerificationError::DisallowedAlg(format!(
                        "algorithm `{}` is not one of: {}",
                        variant_name(&signature_alg),
                        allowed_algs
                            .iter()
                            .map(variant_name)
                            .collect::<Vec<_>>()
                            .join(", "),
                    )),
                ));
            }
        }
        // NB: We must *not* trust the 'kid' (key ID) or 'alg' (algorithm) fields present in the
        // JOSE header, as an attacker could manipulate these while forging the JWT. The code
        // below must be secure regardless of how these fields are manipulated.
        if signature_alg.uses_shared_secret() {
            // 8. If the JWT alg Header Parameter uses a MAC based algorithm such as HS256,
            //    HS384, or HS512, the octets of the UTF-8 representation of the client_secret
            //    corresponding to the client_id contained in the aud (audience) Claim are used
            //    as the key to validate the signature. For MAC based algorithms, the behavior
            //    is unspecified if the aud is multi-valued or if an azp value is present that
            //    is different than the aud value.
            if let Some(ref client_secret) = self.client_secret {
                let key = K::new_symmetric(client_secret.secret().clone().into_bytes());
                return jwt
                    .payload(&signature_alg, &key)
                    .map_err(ClaimsVerificationError::SignatureVerification);
            } else {
                // The client secret isn't confidential for public clients, so anyone can forge a
                // JWT with a valid signature.
                return Err(ClaimsVerificationError::SignatureVerification(
                    SignatureVerificationError::DisallowedAlg(
                        "symmetric signatures are disallowed for public clients".to_string(),
                    ),
                ));
            }
        }
        // Section 10.1 of OpenID Connect Core 1.0 states that the JWT must include a key ID
        // if the JWK set contains more than one public key.
        // See if any key has a matching key ID (if supplied) and compatible type.
        let public_keys = {
            let jose_header = jwt.unverified_header();
            self.signature_keys
                .keys()
                .iter()
                .filter(|key|
                    // The key must be of the type expected for this signature algorithm.
                    Some(key.key_type()) == signature_alg.key_type().as_ref() &&
                        // Either the key hasn't specified its allowed usage (in which case
                        // any usage is acceptable), or the key supports signing.
                        (key.key_use().is_none() ||
                            key.key_use().iter().any(
                                |key_use| key_use.allows_signature()
                            )) &&
                        // Either the JWT doesn't include a 'kid' (in which case any 'kid'
                        // is acceptable), or the 'kid' matches the key's ID.
                        (jose_header.kid.is_none() ||
                            jose_header.kid.as_ref() == key.key_id()))
                .collect::<Vec<&K>>()
        };
        if public_keys.is_empty() {
            return Err(ClaimsVerificationError::SignatureVerification(
                SignatureVerificationError::NoMatchingKey,
            ));
        } else if public_keys.len() != 1 {
            // Reject rather than guess: trying multiple keys would weaken the verification.
            return Err(ClaimsVerificationError::SignatureVerification(
                SignatureVerificationError::AmbiguousKeyId(format!(
                    "JWK set must only contain one eligible public key \
                     ({} eligible keys: {})",
                    public_keys.len(),
                    public_keys
                        .iter()
                        .map(|key| format!(
                            "{} ({})",
                            key.key_id()
                                .map(|kid| format!("`{}`", **kid))
                                .unwrap_or_else(|| "null ID".to_string()),
                            variant_name(key.key_type())
                        ))
                        .collect::<Vec<_>>()
                        .join(", ")
                )),
            ));
        }
        jwt.payload(
            &signature_alg.clone(),
            *public_keys.first().expect("unreachable"),
        )
        .map_err(ClaimsVerificationError::SignatureVerification)
        // Steps 9--13 are specific to the ID token.
    }
}
///
/// Trait for verifying ID token nonces.
///
/// Implemented for `&Nonce` (constant-time comparison against an expected nonce) and for
/// closures of type `FnOnce(Option<&Nonce>) -> Result<(), String>` (custom verification logic).
///
pub trait NonceVerifier {
    ///
    /// Verifies the nonce.
    ///
    /// Returns `Ok(())` if the nonce is valid, or a string describing the error otherwise.
    ///
    fn verify(self, nonce: Option<&Nonce>) -> Result<(), String>;
}
impl NonceVerifier for &Nonce {
    /// Requires the `nonce` claim to be present and equal to the expected nonce, using a
    /// constant-time comparison to avoid leaking information through timing.
    fn verify(self, nonce: Option<&Nonce>) -> Result<(), String> {
        match nonce {
            None => Err("missing nonce claim".to_string()),
            Some(claims_nonce) => {
                let actual = claims_nonce.secret().as_bytes();
                let expected = self.secret().as_bytes();
                verify_slices_are_equal(actual, expected)
                    .map_err(|_| "nonce mismatch".to_string())
            }
        }
    }
}
impl<F> NonceVerifier for F
where
    F: FnOnce(Option<&Nonce>) -> Result<(), String>,
{
    /// Delegates verification to the wrapped closure.
    fn verify(self, nonce: Option<&Nonce>) -> Result<(), String> {
        self(nonce)
    }
}
///
/// ID token verifier.
///
/// Validates an ID token's header, issuer, audience(s), and signature (via the embedded
/// [`JwtClaimsVerifier`]) as well as the ID-token-specific claims (`exp`, `iat`, `nonce`,
/// `acr`, `auth_time`).
///
#[derive(Clone)]
pub struct IdTokenVerifier<'a, JS, JT, JU, K>
where
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    /// Callback validating the `acr` (authentication context class reference) claim.
    acr_verifier_fn: Arc<dyn Fn(Option<&AuthenticationContextClass>) -> Result<(), String> + 'a>,
    /// Callback validating the `auth_time` claim.
    #[allow(clippy::type_complexity)]
    auth_time_verifier_fn: Arc<dyn Fn(Option<DateTime<Utc>>) -> Result<(), String> + 'a>,
    /// Callback validating the `iat` (issued-at) claim.
    iat_verifier_fn: Arc<dyn Fn(DateTime<Utc>) -> Result<(), String> + 'a>,
    /// Generic JWT verifier handling header/issuer/audience/signature checks.
    jwt_verifier: JwtClaimsVerifier<'a, JS, JT, JU, K>,
    /// Clock source used for expiration checks (overridable for testing).
    time_fn: Arc<dyn Fn() -> DateTime<Utc> + 'a>,
}
impl<'a, JS, JT, JU, K> IdTokenVerifier<'a, JS, JT, JU, K>
where
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    /// Wraps a `JwtClaimsVerifier` with permissive defaults for the ID-token-specific
    /// claims: accept any `acr`, `auth_time`, and `iat`, and use the system clock.
    fn new(jwt_verifier: JwtClaimsVerifier<'a, JS, JT, JU, K>) -> Self {
        IdTokenVerifier {
            // By default, accept authorization context reference (acr claim).
            acr_verifier_fn: Arc::new(|_| Ok(())),
            auth_time_verifier_fn: Arc::new(|_| Ok(())),
            // By default, accept any issued time (iat claim).
            iat_verifier_fn: Arc::new(|_| Ok(())),
            jwt_verifier,
            // By default, use the current system time.
            time_fn: Arc::new(Utc::now),
        }
    }
    ///
    /// Initializes a new verifier for a public client (i.e., one without a client secret).
    ///
    pub fn new_public_client(
        client_id: ClientId,
        issuer: IssuerUrl,
        signature_keys: JsonWebKeySet<JS, JT, JU, K>,
    ) -> Self {
        Self::new(JwtClaimsVerifier::new(client_id, issuer, signature_keys))
    }
    ///
    /// Initializes a new verifier for a confidential client (i.e., one with a client secret).
    ///
    /// A confidential client verifier is required in order to verify ID tokens signed using a
    /// shared secret algorithm such as `HS256`, `HS384`, or `HS512`. For these algorithms, the
    /// client secret is the shared secret.
    ///
    pub fn new_confidential_client(
        client_id: ClientId,
        client_secret: ClientSecret,
        issuer: IssuerUrl,
        signature_keys: JsonWebKeySet<JS, JT, JU, K>,
    ) -> Self {
        Self::new(
            JwtClaimsVerifier::new(client_id, issuer, signature_keys)
                .set_client_secret(client_secret),
        )
    }
    ///
    /// Specifies which JSON Web Signature algorithms are supported.
    ///
    pub fn set_allowed_algs<I>(mut self, algs: I) -> Self
    where
        I: IntoIterator<Item = JS>,
    {
        self.jwt_verifier = self.jwt_verifier.set_allowed_algs(algs);
        self
    }
    ///
    /// Specifies that any signature algorithm is supported.
    ///
    pub fn allow_any_alg(mut self) -> Self {
        self.jwt_verifier = self.jwt_verifier.allow_any_alg();
        self
    }
    ///
    /// Specifies a function for verifying the `acr` claim.
    ///
    /// The function should return `Ok(())` if the claim is valid, or a string describing the error
    /// otherwise.
    ///
    pub fn set_auth_context_verifier_fn<T>(mut self, acr_verifier_fn: T) -> Self
    where
        T: Fn(Option<&AuthenticationContextClass>) -> Result<(), String> + 'a,
    {
        self.acr_verifier_fn = Arc::new(acr_verifier_fn);
        self
    }
    ///
    /// Specifies a function for verifying the `auth_time` claim.
    ///
    /// The function should return `Ok(())` if the claim is valid, or a string describing the error
    /// otherwise.
    ///
    pub fn set_auth_time_verifier_fn<T>(mut self, auth_time_verifier_fn: T) -> Self
    where
        T: Fn(Option<DateTime<Utc>>) -> Result<(), String> + 'a,
    {
        self.auth_time_verifier_fn = Arc::new(auth_time_verifier_fn);
        self
    }
    ///
    /// Enables signature verification.
    ///
    /// Signature verification is enabled by default, so this function is only useful if
    /// [`IdTokenVerifier::insecure_disable_signature_check`] was previously invoked.
    ///
    pub fn enable_signature_check(mut self) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_signature_check(true);
        self
    }
    ///
    /// Disables signature verification.
    ///
    /// # Security Warning
    ///
    /// Unverified ID tokens may be subject to forgery. See [Section 16.3](
    /// https://openid.net/specs/openid-connect-core-1_0.html#TokenManufacture) for more
    /// information.
    ///
    pub fn insecure_disable_signature_check(mut self) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_signature_check(false);
        self
    }
    ///
    /// Specifies whether the issuer claim must match the expected issuer URL for the provider.
    ///
    pub fn require_issuer_match(mut self, iss_required: bool) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_issuer_match(iss_required);
        self
    }
    ///
    /// Specifies whether the audience claim must match this client's client ID.
    ///
    pub fn require_audience_match(mut self, aud_required: bool) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_audience_match(aud_required);
        self
    }
    ///
    /// Specifies a function for returning the current time.
    ///
    /// This function is used for verifying the ID token expiration time.
    ///
    pub fn set_time_fn<T>(mut self, time_fn: T) -> Self
    where
        T: Fn() -> DateTime<Utc> + 'a,
    {
        self.time_fn = Arc::new(time_fn);
        self
    }
    ///
    /// Specifies a function for verifying the ID token issue time.
    ///
    /// The function should return `Ok(())` if the claim is valid, or a string describing the error
    /// otherwise.
    ///
    pub fn set_issue_time_verifier_fn<T>(mut self, iat_verifier_fn: T) -> Self
    where
        T: Fn(DateTime<Utc>) -> Result<(), String> + 'a,
    {
        self.iat_verifier_fn = Arc::new(iat_verifier_fn);
        self
    }
    ///
    /// Specifies a function for verifying audiences included in the `aud` claim that differ from
    /// this client's client ID.
    ///
    /// The function should return `true` if the audience is trusted, or `false` otherwise.
    ///
    /// [Section 3.1.3.7](https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation)
    /// states that *"The ID Token MUST be rejected if the ID Token does not list the Client as a
    /// valid audience, or if it contains additional audiences not trusted by the Client."*
    ///
    pub fn set_other_audience_verifier_fn<T>(mut self, other_aud_verifier_fn: T) -> Self
    where
        T: Fn(&Audience) -> bool + 'a,
    {
        self.jwt_verifier = self
            .jwt_verifier
            .set_other_audience_verifier_fn(other_aud_verifier_fn);
        self
    }
    /// Fully verifies an ID token (borrowing variant): generic JWT checks first, then the
    /// ID-token-specific claims. Returns a reference into the JWT's claims on success.
    pub(super) fn verified_claims<'b, AC, GC, JE, N>(
        &self,
        jwt: &'b JsonWebToken<JE, JS, JT, IdTokenClaims<AC, GC>, JsonWebTokenJsonPayloadSerde>,
        nonce_verifier: N,
    ) -> Result<&'b IdTokenClaims<AC, GC>, ClaimsVerificationError>
    where
        AC: AdditionalClaims,
        GC: GenderClaim,
        JE: JweContentEncryptionAlgorithm<JT>,
        N: NonceVerifier,
    {
        // The code below roughly follows the validation steps described in
        // https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
        // Steps 1--3 are handled by the generic JwtClaimsVerifier.
        let partially_verified_claims = self.jwt_verifier.verified_claims(jwt)?;
        self.verify_claims(partially_verified_claims, nonce_verifier)?;
        Ok(partially_verified_claims)
    }
    /// Fully verifies an ID token (owning variant): same as [`Self::verified_claims`] but
    /// consumes the JWT and returns the claims by value.
    pub(super) fn verified_claims_owned<AC, GC, JE, N>(
        &self,
        jwt: JsonWebToken<JE, JS, JT, IdTokenClaims<AC, GC>, JsonWebTokenJsonPayloadSerde>,
        nonce_verifier: N,
    ) -> Result<IdTokenClaims<AC, GC>, ClaimsVerificationError>
    where
        AC: AdditionalClaims,
        GC: GenderClaim,
        JE: JweContentEncryptionAlgorithm<JT>,
        N: NonceVerifier,
    {
        // The code below roughly follows the validation steps described in
        // https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation
        // Steps 1--3 are handled by the generic JwtClaimsVerifier.
        let partially_verified_claims = self.jwt_verifier.verified_claims(jwt)?;
        self.verify_claims(&partially_verified_claims, nonce_verifier)?;
        Ok(partially_verified_claims)
    }
    /// Validates the ID-token-specific claims (steps 9--13 of the spec) on claims whose
    /// header/issuer/audience/signature have already been checked: expiration (`exp`),
    /// issue time (`iat`), nonce, `acr`, and `auth_time`.
    fn verify_claims<'b, AC, GC, N>(
        &self,
        partially_verified_claims: &'b IdTokenClaims<AC, GC>,
        nonce_verifier: N,
    ) -> Result<(), ClaimsVerificationError>
    where
        AC: AdditionalClaims,
        GC: GenderClaim,
        N: NonceVerifier,
    {
        // 4. If the ID Token contains multiple audiences, the Client SHOULD verify that an azp
        //    Claim is present.
        // There is significant confusion and contradiction in the OpenID Connect Core spec around
        // the azp claim. See https://bitbucket.org/openid/connect/issues/973/ for a detailed
        // discussion. Given the lack of clarity around how this claim should be used, we defer
        // any verification of it here until a use case becomes apparent. If such a use case does
        // arise, we most likely want to allow clients to pass in a function for validating the
        // azp claim rather than introducing logic that affects all clients of this library.
        // This naive implementation of the spec would almost certainly not be useful in practice:
        /*
        let azp_required = partially_verified_claims.audiences().len() > 1;
        // 5. If an azp (authorized party) Claim is present, the Client SHOULD verify that its
        //    client_id is the Claim Value.
        if let Some(authorized_party) = partially_verified_claims.authorized_party() {
            if *authorized_party != self.client_id {
                return Err(ClaimsVerificationError::InvalidAudience(format!(
                    "authorized party must match client ID `{}` (found `{}`",
                    *self.client_id, **authorized_party
                )));
            }
        } else if azp_required {
            return Err(ClaimsVerificationError::InvalidAudience(format!(
                "missing authorized party claim but multiple audiences found"
            )));
        }
        */
        // Steps 6--8 are handled by the generic JwtClaimsVerifier.
        // 9. The current time MUST be before the time represented by the exp Claim.
        let cur_time = (*self.time_fn)();
        if cur_time >= partially_verified_claims.expiration() {
            return Err(ClaimsVerificationError::Expired(format!(
                "ID token expired at {} (current time is {})",
                partially_verified_claims.expiration(),
                cur_time
            )));
        }
        // 10. The iat Claim can be used to reject tokens that were issued too far away from the
        //     current time, limiting the amount of time that nonces need to be stored to prevent
        //     attacks. The acceptable range is Client specific.
        (*self.iat_verifier_fn)(partially_verified_claims.issue_time())
            .map_err(ClaimsVerificationError::Expired)?;
        // 11. If a nonce value was sent in the Authentication Request, a nonce Claim MUST be
        //     present and its value checked to verify that it is the same value as the one that was
        //     sent in the Authentication Request. The Client SHOULD check the nonce value for
        //     replay attacks. The precise method for detecting replay attacks is Client specific.
        nonce_verifier
            .verify(partially_verified_claims.nonce())
            .map_err(ClaimsVerificationError::InvalidNonce)?;
        // 12. If the acr Claim was requested, the Client SHOULD check that the asserted Claim Value
        //     is appropriate. The meaning and processing of acr Claim Values is out of scope for
        //     this specification.
        (*self.acr_verifier_fn)(partially_verified_claims.auth_context_ref())
            .map_err(ClaimsVerificationError::InvalidAuthContext)?;
        // 13. If the auth_time Claim was requested, either through a specific request for this
        //     Claim or by using the max_age parameter, the Client SHOULD check the auth_time Claim
        //     value and request re-authentication if it determines too much time has elapsed since
        //     the last End-User authentication.
        (*self.auth_time_verifier_fn)(partially_verified_claims.auth_time())
            .map_err(ClaimsVerificationError::InvalidAuthTime)?;
        Ok(())
    }
}
///
/// User info verifier.
///
/// Verifies signed JWT responses from the UserInfo endpoint, including an optional check that
/// the `sub` claim matches the subject from the ID token.
///
#[derive(Clone)]
pub struct UserInfoVerifier<'a, JE, JS, JT, JU, K>
where
    JE: JweContentEncryptionAlgorithm<JT>,
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    /// Generic JWT verifier handling header/issuer/audience/signature checks.
    jwt_verifier: JwtClaimsVerifier<'a, JS, JT, JU, K>,
    /// If set, the `sub` claim must equal this subject; if `None`, any subject is accepted.
    expected_subject: Option<SubjectIdentifier>,
    /// Carries the `JE` type parameter, which is only used in method signatures.
    _phantom: PhantomData<JE>,
}
impl<'a, JE, JS, JT, JU, K> UserInfoVerifier<'a, JE, JS, JT, JU, K>
where
    JE: JweContentEncryptionAlgorithm<JT>,
    JS: JwsSigningAlgorithm<JT>,
    JT: JsonWebKeyType,
    JU: JsonWebKeyUse,
    K: JsonWebKey<JS, JT, JU>,
{
    ///
    /// Instantiates a user info verifier.
    ///
    /// If `expected_subject` is `Some`, verified claims must carry a matching `sub` claim.
    ///
    pub fn new(
        client_id: ClientId,
        issuer: IssuerUrl,
        signature_keys: JsonWebKeySet<JS, JT, JU, K>,
        expected_subject: Option<SubjectIdentifier>,
    ) -> Self {
        UserInfoVerifier {
            jwt_verifier: JwtClaimsVerifier::new(client_id, issuer, signature_keys),
            expected_subject,
            _phantom: PhantomData,
        }
    }
    /// Returns the subject the verifier expects, if any.
    pub(crate) fn expected_subject(&self) -> Option<&SubjectIdentifier> {
        self.expected_subject.as_ref()
    }
    ///
    /// Specifies whether the issuer claim must match the expected issuer URL for the provider.
    ///
    pub fn require_issuer_match(mut self, iss_required: bool) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_issuer_match(iss_required);
        self
    }
    ///
    /// Specifies whether the audience claim must match this client's client ID.
    ///
    pub fn require_audience_match(mut self, aud_required: bool) -> Self {
        self.jwt_verifier = self.jwt_verifier.require_audience_match(aud_required);
        self
    }
    /// Verifies a signed UserInfo JWT: generic JWT checks first, then (if configured) that the
    /// `sub` claim matches the expected subject.
    pub(crate) fn verified_claims<AC, GC>(
        &self,
        user_info_jwt: JsonWebToken<
            JE,
            JS,
            JT,
            UserInfoClaimsImpl<AC, GC>,
            JsonWebTokenJsonPayloadSerde,
        >,
    ) -> Result<UserInfoClaimsImpl<AC, GC>, ClaimsVerificationError>
    where
        AC: AdditionalClaims,
        GC: GenderClaim,
    {
        let user_info = self.jwt_verifier.verified_claims(user_info_jwt)?;
        // `Option::iter().all(...)` is vacuously true when expected_subject is None, so the
        // subject check is skipped in that case.
        if self
            .expected_subject
            .iter()
            .all(|expected_subject| user_info.standard_claims.sub == *expected_subject)
        {
            Ok(user_info)
        } else {
            Err(ClaimsVerificationError::InvalidSubject(format!(
                "expected `{}` (found `{}`)",
                // This can only happen when self.expected_subject is not None.
                self.expected_subject.as_ref().unwrap().as_str(),
                user_info.standard_claims.sub.as_str()
            )))
        }
    }
}
#[cfg(test)]
mod tests {
use std::cell::Cell;
use chrono::{TimeZone, Utc};
use oauth2::{ClientId, ClientSecret};
use super::{
AudiencesClaim, ClaimsVerificationError, IssuerClaim, JsonWebTokenHeader,
JwtClaimsVerifier, SignatureVerificationError, SubjectIdentifier,
};
use crate::core::{
CoreIdToken, CoreIdTokenClaims, CoreIdTokenVerifier, CoreJsonWebKey, CoreJsonWebKeySet,
CoreJsonWebKeyType, CoreJsonWebKeyUse, CoreJweContentEncryptionAlgorithm,
CoreJwsSigningAlgorithm, CoreRsaPrivateSigningKey, CoreUserInfoClaims,
CoreUserInfoJsonWebToken, CoreUserInfoVerifier,
};
use crate::jwt::tests::{TEST_RSA_PRIV_KEY, TEST_RSA_PUB_KEY};
use crate::jwt::{JsonWebToken, JsonWebTokenJsonPayloadSerde};
use crate::types::helpers::seconds_to_utc;
use crate::types::Base64UrlEncodedBytes;
use crate::types::Seconds;
use crate::{
AccessToken, Audience, AuthenticationContextClass, AuthorizationCode, EndUserName,
IssuerUrl, JsonWebKeyId, Nonce, StandardClaims, UserInfoError,
};
type CoreJsonWebTokenHeader = JsonWebTokenHeader<
CoreJweContentEncryptionAlgorithm,
CoreJwsSigningAlgorithm,
CoreJsonWebKeyType,
>;
type CoreJwtClaimsVerifier<'a> = JwtClaimsVerifier<
'a,
CoreJwsSigningAlgorithm,
CoreJsonWebKeyType,
CoreJsonWebKeyUse,
CoreJsonWebKey,
>;
fn assert_unsupported<T>(result: Result<T, ClaimsVerificationError>, expected_substr: &str) {
match result {
Err(ClaimsVerificationError::Unsupported(msg)) => {
assert!(msg.contains(expected_substr))
}
Err(err) => panic!("unexpected error: {:?}", err),
Ok(_) => panic!("validation should fail"),
}
}
#[test]
fn test_jose_header() {
// Unexpected JWT type.
assert_unsupported(
CoreJwtClaimsVerifier::validate_jose_header(
&serde_json::from_str::<CoreJsonWebTokenHeader>(
"{\"alg\":\"RS256\",\"typ\":\"NOT_A_JWT\"}",
)
.expect("failed to deserialize"),
),
"unsupported JWT type",
);
// Nested JWTs.
assert_unsupported(
CoreJwtClaimsVerifier::validate_jose_header(
&serde_json::from_str::<CoreJsonWebTokenHeader>(
"{\"alg\":\"RS256\",\"cty\":\"JWT\"}",
)
.expect("failed to deserialize"),
),
"nested JWT",
);
assert_unsupported(
CoreJwtClaimsVerifier::validate_jose_header(
&serde_json::from_str::<CoreJsonWebTokenHeader>(
"{\"alg\":\"RS256\",\"cty\":\"NOT_A_JWT\"}",
)
.expect("failed to deserialize"),
),
"unsupported JWT content type",
);
// Critical fields. Adapted from https://tools.ietf.org/html/rfc7515#appendix-E
assert_unsupported(
CoreJwtClaimsVerifier::validate_jose_header(
&serde_json::from_str::<CoreJsonWebTokenHeader>(
"{\
\"alg\":\"RS256\",\
\"crit\":[\"http://example.invalid/UNDEFINED\"],\
\"http://example.invalid/UNDEFINED\":true\
}",
)
.expect("failed to deserialize"),
),
"critical JWT header fields are unsupported",
);
}
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
struct TestClaims {
aud: Option<Vec<Audience>>,
iss: Option<IssuerUrl>,
payload: String,
}
impl AudiencesClaim for TestClaims {
fn audiences(&self) -> Option<&Vec<Audience>> {
self.aud.as_ref()
}
}
impl IssuerClaim for TestClaims {
fn issuer(&self) -> Option<&IssuerUrl> {
self.iss.as_ref()
}
}
type TestClaimsJsonWebToken = JsonWebToken<
CoreJweContentEncryptionAlgorithm,
CoreJwsSigningAlgorithm,
CoreJsonWebKeyType,
TestClaims,
JsonWebTokenJsonPayloadSerde,
>;
#[test]
fn test_jwt_verified_claims() {
let rsa_key = serde_json::from_str::<CoreJsonWebKey>(TEST_RSA_PUB_KEY)
.expect("deserialization failed");
let client_id = ClientId::new("my_client".to_string());
let issuer = IssuerUrl::new("https://example.com".to_string()).unwrap();
let verifier = CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
);
// Invalid JOSE header.
assert_unsupported(
verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJBMjU2R0NNIiwiY3R5IjoiSldUIn0.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Im\
h0dHBzOi8vZXhhbXBsZS5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
),
"nested JWT",
);
// JWE-encrypted JWT.
assert_unsupported(
verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJBMjU2R0NNIn0.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbX\
BsZS5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
),
"JWE encryption",
);
// Wrong issuer.
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vYXR0YWNrZXIuY\
29tIiwicGF5bG9hZCI6ImhlbGxvIHdvcmxkIn0.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidIssuer(_)) => {},
other => panic!("unexpected result: {:?}", other),
}
// Missing issuer.
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sInBheWxvYWQiOiJoZWxsbyB3b3JsZCJ9.\
YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidIssuer(_)) => {},
other => panic!("unexpected result: {:?}", other),
}
// Ignore missing issuer.
verifier
.clone()
.require_issuer_match(false)
.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sInBheWxvYWQiOiJoZWxsbyB3b3JsZCJ9.\
nv09al63NNDfb8cF3IozegXKbPaUC08zknRPKmQ5qKgXv80hjVxknkpRz7BxocB3JYTBjhYd0gyN9wAuJj\
byZ1QaUC14HOB83awAGbehy5yFLkLadTfPT7-siBCvE2V7AF73a_21YvwdkKmJ-RaKWHzFnG8CDmioma3X\
cWyrsdRLgvUkrWllajLRo8DCIXQ8OuZo1_o4n17PSlPxSkhKIrgaWCvG6tan40Y_1DZOFv47bx4hQUGd-J\
h2aEjiwn65WV3M_Xb2vQMP7VgYNVaNlfxzpL4yDASItbPMWaXBt3ZUa_IOGoSx2GMnPkrQ4xp56qUth6U7\
esWPqRSqqolnHg"
.to_string(),
)).expect("failed to deserialize"),
).expect("verification should succeed");
// Wrong audience.
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsib3RoZXJfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidAudience(_)) => {},
other => panic!("unexpected result: {:?}", other),
}
// Missing audience.
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwicGF5bG9hZCI6ImhlbGxvI\
HdvcmxkIn0.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidAudience(_)) => {},
other => panic!("unexpected result: {:?}", other),
}
// Ignore missing audience.
verifier
.clone()
.require_audience_match(false)
.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwicGF5bG9hZCI6Imhlb\
GxvIHdvcmxkIn0.lP-Z_zGPNoKIbLQsnrZc2LAc5qJrKyb7t07ZtJUKVhcwHiCUou4bBhq5RHlElCh\
0ElRRP6I25lp6UszkRvIC46UV3GVze0x73kVkHSvCVI7MO75LbL9BRqrm5b4CN2zCiFBY8-EwTXnJd\
Ri0d_U8K29TV24L2I-Z5ZILebwUue1N59AGDjx2yYLFx5NOw3TUsPyscG62aZAT321pL_jcYwTWTWw\
2FYm07zguwx-PUTZwGXlJiOgXQqRIbY_1bS3I_D8UWsmEB3DmV0f9z-iklgIPFawa4wHaE-hpzBAEx\
pSieyOavA5pl0Se3XRYA-CkdDVgzG0Pt4IdnxFanfUXTw"
.to_string(),
)).expect("failed to deserialize"),
).expect("verification should succeed");
// Multiple audiences, where one is a match (default = reject)
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsiYXVkMSIsIm15X2NsaWVudCIsImF1ZDIiXSwiaXNzIjoia\
HR0cHM6Ly9leGFtcGxlLmNvbSIsInBheWxvYWQiOiJoZWxsbyB3b3JsZCJ9.N9ibisEe0kKLe1GDWM\
ON3PmYqbL73dag-loM8pjKJNinF9SB7n4JuSu4FrNkeW4F1Cz8MIbLuWfKvDa_4v_3FstMA3GODZWH\
BVIiuNFay2ovCfGFyykwe47dF_47g_OM5AkJc_teE5MN8lPh9V5zYCy3ON3zZ3acFPJMOPTdbU56xD\
eFe7lil6DmV4JU9A52t5ZkJILFaIuxxXJUIDmqpPTvHkggh_QOj9C2US9bgg5b543JwT4j-HbDp51L\
dDB4k3azOssT1ddtoAuuDOctnraMKUtqffJXexxfwA1uM6EIofSrK5v11xwgTciL9xDXAvav_G2buP\
ol1bjGLa2t0Q"
.to_string(),
))
.expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidAudience(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Multiple audiences, where one is a match (allowed)
verifier
.clone()
.set_other_audience_verifier_fn(|aud| **aud == "aud1" || **aud == "aud2")
.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsiYXVkMSIsIm15X2NsaWVudCIsImF1ZDIiXSwiaXNzIjoia\
HR0cHM6Ly9leGFtcGxlLmNvbSIsInBheWxvYWQiOiJoZWxsbyB3b3JsZCJ9.N9ibisEe0kKLe1GDWM\
ON3PmYqbL73dag-loM8pjKJNinF9SB7n4JuSu4FrNkeW4F1Cz8MIbLuWfKvDa_4v_3FstMA3GODZWH\
BVIiuNFay2ovCfGFyykwe47dF_47g_OM5AkJc_teE5MN8lPh9V5zYCy3ON3zZ3acFPJMOPTdbU56xD\
eFe7lil6DmV4JU9A52t5ZkJILFaIuxxXJUIDmqpPTvHkggh_QOj9C2US9bgg5b543JwT4j-HbDp51L\
dDB4k3azOssT1ddtoAuuDOctnraMKUtqffJXexxfwA1uM6EIofSrK5v11xwgTciL9xDXAvav_G2buP\
ol1bjGLa2t0Q"
.to_string(),
)).expect("failed to deserialize"),
).expect("verification should succeed");
// Multiple audiences, where none is a match
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsiYXVkMSIsImF1ZDIiXSwiaXNzIjoiaHR0cHM6Ly9leGFtcGxlL\
mNvbSIsInBheWxvYWQiOiJoZWxsbyB3b3JsZCJ9.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::InvalidAudience(_)) => {},
other => panic!("unexpected result: {:?}", other),
}
// Disable signature check.
verifier
.clone()
.require_signature_check(false)
.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
).expect("verification should succeed");
// "none" algorithm (unsigned JWT).
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJub25lIn0.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ."
.to_string(),
))
.expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::NoSignature) => {}
other => panic!("unexpected result: {:?}", other),
}
let valid_rs256_jwt =
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.UZ7vmAsDmOBzeB6e2_0POUfyhMRZKM6WSKz3\
jB2QdmO-eZ9605EzhkJufJQ8515ryWnHv-gUHtZHQi3zilrzhBwvE2cVP83Gv2XIL1EKaMMmfISeEB\
ShWez_FvqxN_bamh5yTROhWmoZTmof-MweBCHgINcsEd7K4e_BHHgq3aaRBpvSFlL_z4l_1NwNcTBo\
kqjNScKZITk42AbsSuGR39L94BWLhz6WXQZ_Sn6R1Ro6roOm1b7E82jJiQEtlseQiCCvPR2JJ6LgW6\
XTMzQ0vCqSh1A7U_IBDsjY_yag8_X3xxFh2URCtHJ47ZSjqfv6hq7OAq8tmVecOVgfIvABOg"
.to_string(),
))
.expect("failed to deserialize");
// Default algs + RS256 -> allowed
verifier
.verified_claims(valid_rs256_jwt.clone())
.expect("verification should succeed");
let verifier_with_client_secret = CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![]),
)
.set_client_secret(ClientSecret::new("my_secret".to_string()));
let valid_hs256_jwt =
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.dTXvSWen74_rC4oiWw0ziLZNe4KZk8Jw2VZe\
N6vLCDo"
.to_string(),
))
.expect("failed to deserialize");
// Default algs + HS256 -> disallowed
match verifier_with_client_secret.verified_claims(valid_hs256_jwt.clone()) {
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::DisallowedAlg(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// none algs + RS256 -> allowed
verifier
.clone()
.allow_any_alg()
.verified_claims(valid_rs256_jwt.clone())
.expect("verification should succeed");
// none algs + HS256 -> allowed
verifier_with_client_secret
.clone()
.allow_any_alg()
.verified_claims(valid_hs256_jwt.clone())
.expect("verification should succeed");
// none algs + none -> disallowed
match verifier.clone().allow_any_alg().verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJub25lIn0.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ."
.to_string(),
))
.expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::NoSignature) => {}
other => panic!("unexpected result: {:?}", other),
}
// HS256 + no client secret -> disallowed
match verifier
.clone()
.allow_any_alg()
.verified_claims(valid_hs256_jwt.clone())
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::DisallowedAlg(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// HS256 + valid signature
verifier_with_client_secret
.clone()
.set_allowed_algs(vec![CoreJwsSigningAlgorithm::HmacSha256])
.verified_claims(valid_hs256_jwt)
.expect("verification should succeed");
// HS256 + invalid signature
match verifier_with_client_secret
.clone()
.set_allowed_algs(vec![CoreJwsSigningAlgorithm::HmacSha256])
.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.dTXvSWen74_rC4oiWw0ziLZNe4KZk8Jw2VZe\
N6vLCEo"
.to_string(),
)).expect("failed to deserialize")
)
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::CryptoError(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// No public keys
match CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![]),
)
.verified_claims(valid_rs256_jwt.clone())
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::NoMatchingKey,
)) => {}
other => panic!("unexpected result: {:?}", other),
}
let kid = JsonWebKeyId::new("[email protected]".to_string());
let n = Base64UrlEncodedBytes::new(vec![
159, 129, 15, 180, 3, 130, 115, 208, 37, 145, 228, 7, 63, 49, 210, 182, 0, 27, 130,
206, 219, 77, 146, 240, 80, 22, 93, 71, 207, 202, 184, 163, 196, 28, 183, 120, 172,
117, 83, 121, 63, 142, 249, 117, 118, 141, 26, 35, 116, 216, 113, 37, 100, 195, 188,
215, 123, 158, 164, 52, 84, 72, 153, 64, 124, 255, 0, 153, 146, 10, 147, 26, 36, 196,
65, 72, 82, 171, 41, 189, 176, 169, 92, 6, 83, 243, 108, 96, 230, 11, 249, 11, 98, 88,
221, 165, 111, 55, 4, 123, 165, 194, 209, 208, 41, 175, 156, 157, 64, 186, 199, 170,
65, 199, 138, 13, 209, 6, 138, 221, 105, 158, 128, 143, 234, 1, 30, 161, 68, 29, 138,
79, 123, 180, 233, 123, 227, 159, 85, 241, 221, 212, 78, 156, 75, 163, 53, 21, 151, 3,
212, 211, 75, 96, 62, 101, 20, 122, 79, 35, 214, 211, 192, 153, 108, 117, 237, 238,
132, 106, 130, 209, 144, 174, 16, 120, 60, 150, 28, 240, 56, 122, 237, 33, 6, 210, 208,
85, 91, 111, 217, 55, 250, 213, 83, 83, 135, 224, 255, 114, 255, 190, 120, 148, 20, 2,
176, 184, 34, 234, 42, 116, 182, 5, 140, 29, 171, 249, 179, 74, 118, 203, 99, 184, 127,
170, 44, 104, 71, 184, 226, 131, 127, 255, 145, 24, 110, 107, 28, 20, 145, 28, 249,
137, 168, 144, 146, 168, 28, 230, 1, 221, 172, 211, 249, 207,
]);
let e = Base64UrlEncodedBytes::new(vec![1, 0, 1]);
// Wrong key type (symmetric key)
match CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![CoreJsonWebKey {
kty: CoreJsonWebKeyType::Symmetric,
use_: Some(CoreJsonWebKeyUse::Signature),
kid: Some(kid.clone()),
n: None,
e: None,
k: Some(Base64UrlEncodedBytes::new(vec![1, 2, 3, 4])),
}]),
)
.verified_claims(valid_rs256_jwt.clone())
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::NoMatchingKey,
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Correct public key, but with signing disallowed
match CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![CoreJsonWebKey {
kty: CoreJsonWebKeyType::RSA,
use_: Some(CoreJsonWebKeyUse::Encryption),
kid: Some(kid),
n: Some(n),
e: Some(e),
k: None,
}]),
)
.verified_claims(valid_rs256_jwt.clone())
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::NoMatchingKey,
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Wrong key ID
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiIsImtpZCI6Indyb25nX2tleSJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6I\
mh0dHBzOi8vZXhhbXBsZS5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.lVLomyIyO8WmyS1VZWPu\
cGhRTUyK9RCw90fJC5CfDWUCgt1CBn-aP_ieWWBGfjb4ccR4dl57OYxdLl0Day8QN5pTCBud9QKpQ0rKQX\
K8eBlOW8uSosx8q5pwU_bRyy-XuKJiPlDCOwTEHOp_hOgZFGjoN27MH3Xm8kc0iT3PgyqQ46-wsqHY9S02\
hdJORX7vqYwQLZF8_k_L8K0IG_dC-1Co0g5oAf37oVSdl8hE-ScQ9K-AiSpS-cGYyldbMhyKNDL3ry2cuI\
EUgYSIznkVFuM7RrEdNK222z5PF11ijYx-TM7BIDggbcIyJm-UqpmvVaJImmj5FNkMzuHYznLtdg"
.to_string(),
)).expect("failed to deserialize")
) {
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::NoMatchingKey,
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Client secret + public key
verifier
.clone()
.set_client_secret(ClientSecret::new("my_secret".to_string()))
.verified_claims(valid_rs256_jwt.clone())
.expect("verification should succeed");
// Multiple matching public keys: no KID specified
match CoreJwtClaimsVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone(), rsa_key.clone()]),
)
.verified_claims(valid_rs256_jwt.clone())
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::AmbiguousKeyId(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Multiple matching public keys: KID specified
match CoreJwtClaimsVerifier::new(
client_id,
issuer,
CoreJsonWebKeySet::new(vec![rsa_key.clone(), rsa_key]),
).verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiIsImtpZCI6ImJpbGJvLmJhZ2dpbnNAaG9iYml0b24uZXhhbXBsZSJ9.eyJhdWQiO\
lsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZS5jb20iLCJwYXlsb2FkIjoiaGVsbG8gd29\
ybGQifQ.jH0v2fQGvH2MD0jn5pQP6W6AF5rJlizyofdyRUIt7E3GraGA1LYDiLAVIfhST3uwJopP-TgtBk\
zc-zyJSvgTR63S8iI1YlHypItpx7r4I9ydzo8GSN5RrZudcU2esY4uEnLbVl17ZVNu4IyTExeKJ0sPM0Hj\
qkOA4XaP2cJwsK-bookNHSA8NRE6adRMrHAKJbor5jrGjpkZAKHbnQFK-wu-nEV_OjS9jpN_FboRZVcDTZ\
GFzeFbqFqHdRn6UWPFnVpVnUhih16UjNH1om6gwc0uFoPWTDxJlXQCFbHMhZtgCbUkXQBH7twPMc4YUziw\
S8GIRKCcXjdrP5oyxmcitQ"
.to_string(),
)).expect("failed to deserialize")
) {
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::AmbiguousKeyId(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// RS256 + valid signature
verifier
.verified_claims(valid_rs256_jwt)
.expect("verification should succeed");
// RS256 + invalid signature
match verifier.verified_claims(
serde_json::from_value::<TestClaimsJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZS5jb\
20iLCJwYXlsb2FkIjoiaGVsbG8gd29ybGQifQ.YmFkX2hhc2g"
.to_string(),
)).expect("failed to deserialize"),
) {
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::CryptoError(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
}
    /// JWT wrapper around the standard core ID token claims, used by the ID token
    /// verifier tests below.
    type CoreIdTokenJwt = JsonWebToken<
        CoreJweContentEncryptionAlgorithm,
        CoreJwsSigningAlgorithm,
        CoreJsonWebKeyType,
        CoreIdTokenClaims,
        JsonWebTokenJsonPayloadSerde,
    >;
#[test]
fn test_id_token_verified_claims() {
let rsa_key = serde_json::from_str::<CoreJsonWebKey>(TEST_RSA_PUB_KEY)
.expect("deserialization failed");
let client_id = ClientId::new("my_client".to_string());
let issuer = IssuerUrl::new("https://example.com".to_string()).unwrap();
let mock_current_time = Cell::new(1544932149);
let mock_is_valid_issue_time = Cell::new(true);
// Extra scope needed to ensure closures are destroyed before the values they borrow.
{
let public_client_verifier = CoreIdTokenVerifier::new_public_client(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
)
.set_time_fn(|| seconds_to_utc(&Seconds::new(mock_current_time.get().into())).unwrap())
.set_issue_time_verifier_fn(|_| {
if mock_is_valid_issue_time.get() {
Ok(())
} else {
Err("Invalid iat claim".to_string())
}
});
// This JWTs below have an issue time of 1544928549 and an expiration time of 1544932149.
let test_jwt_without_nonce =
serde_json::from_value::<CoreIdTokenJwt>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJzdWIiOiJzdWJqZWN0IiwiZXhwIjoxNTQ0OTMyMTQ5LCJpYXQiOjE1NDQ5Mjg1NDl9.nN\
aTxNwclnTHd1Q9POkddm5wB1w3wJ-gwQWHomhimttk3SWQTLhxI0SSjWrHahGxlfkjufJlSyt-t_VO\
SdcROvIYZTDznDfFZz3oSOev-p9XiZ-EZTS-U6N11Y923sDQjbTMeukz1F3ZFEfn5Mv2xjdEoJccCe\
7SaGuDmVqMqTLXMtsw9NCE_KDd0oKSwDzbJIBBPEfG3JjbKg0Dln7ENHg9wzoNFQzPXrkKzjneBgD3\
vuwFCV5y-e8xUBdLaLZF1kdkDZJIA48uRROLlWjsM8pEptosA5QK07luQCZNqcaZWEczoGXeQs8PyA\
zkNV7JEmti3bJnWSN-ud4cFU0LiQ"
.to_string(),
))
.expect("failed to deserialize");
// Invalid JWT claims
match public_client_verifier.verified_claims(
&serde_json::from_value::<CoreIdTokenJwt>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vYXR0YWNrZ\
XIuY29tIiwic3ViIjoic3ViamVjdCIsImV4cCI6MTU0NDkzMjE0OSwiaWF0IjoxNTQ0OTI4NTQ5LCJ\
ub25jZSI6InRoZV9ub25jZSIsImFjciI6InRoZV9hY3IifQ.Pkicxk0dTU5BkSxgqTON6lE7A7ir3l\
aADRyoeRoCNDX3AOx7BXCbfzbda6HJiPskN2nu56w0q-0OdkDSIHls-2xTUlLEJv2Bv0BLYwV5ZVJ8\
hoc-rTd0_oLUb5NzyD80RyVByjVMK8bh6cwysTnr8QDxsEiFZbFo3mVJob2yjPZnNOdcNJWPcVVueP\
8vqMJnx5kHih1gKZpWj_dMN9b2AW6zVLOInW3Ox__gx6fsFFz7rjxItG-PTY_OQMzthqeHUyq4o9y7\
Jv8mB_jFkTZGVKHTPpObHV-qptJ_rnlwvF_mP5GARBLng-4Yd7nmSr31onYL48QDjGOrwPqQ-IyaCQ"
.to_string(),
))
.expect("failed to deserialize"), |_: Option<&Nonce>| Ok(())) {
Err(ClaimsVerificationError::InvalidIssuer(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// TODO: disallowed algs
// Expired token
mock_current_time.set(1544928549 + 3600);
match public_client_verifier
.verified_claims(&test_jwt_without_nonce, |_: Option<&Nonce>| Ok(()))
{
Err(ClaimsVerificationError::Expired(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
mock_current_time.set(1544928549 + 1);
// Invalid issue time
mock_is_valid_issue_time.set(false);
match public_client_verifier
.verified_claims(&test_jwt_without_nonce, |_: Option<&Nonce>| Ok(()))
{
Err(ClaimsVerificationError::Expired(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
mock_is_valid_issue_time.set(true);
let valid_nonce = Nonce::new("the_nonce".to_string());
// Successful verification w/o checking nonce
public_client_verifier
.verified_claims(&test_jwt_without_nonce, |_: Option<&Nonce>| Ok(()))
.expect("verification should succeed");
// Missing nonce
match public_client_verifier.verified_claims(&test_jwt_without_nonce, &valid_nonce) {
Err(ClaimsVerificationError::InvalidNonce(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Missing nonce w/ closure
match public_client_verifier.verified_claims(
&test_jwt_without_nonce,
|nonce: Option<&Nonce>| {
if nonce.iter().any(|n| n.secret() == valid_nonce.secret()) {
Ok(())
} else {
Err("invalid nonce".to_string())
}
},
) {
Err(ClaimsVerificationError::InvalidNonce(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
let test_jwt_with_nonce =
serde_json::from_value::<CoreIdTokenJwt>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJzdWIiOiJzdWJqZWN0IiwiZXhwIjoxNTQ0OTMyMTQ5LCJpYXQiOjE1NDQ5Mjg1NDksIm5\
vbmNlIjoidGhlX25vbmNlIiwiYWNyIjoidGhlX2FjciIsImF1dGhfdGltZSI6MTU0NDkyODU0OH0.W\
XA7SS9aMh_6rvBEgQce5D2J84OqphmmnCLGgEKRTN5G-UuQTNOBp8VS5_4f3xgzMEEMvGJJauJoALk\
muUeHB-N_ESrkmB3tgDzBSYBa7kuYPHUPYpdjZM2UVolqI9RYyHaWwKjL_Io5YyAazB5lH5ibPaiBl\
UNKGs3cmVsEB22UGMFKM6cek7GinrHQe_aJQsMU839-c2zzlEyFSeI8QBphQtG6AN82IPkNRv8QWmw\
ZjUiB5a-W73Z3gURYMNs7f32BjAUNoJzW0Qj34vzD2djoSHhltE0wHKBzPqGhUM1Y3A-a3q-LS2g1h\
6qgXb_KQ_Mmok8v8ld0cW_aYRLfNg"
.to_string(),
))
.expect("failed to deserialize");
// Invalid nonce
match public_client_verifier.verified_claims(
&test_jwt_with_nonce,
&Nonce::new("different_nonce".to_string()),
) {
Err(ClaimsVerificationError::InvalidNonce(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Invalid AuthenticationContextClass reference
match public_client_verifier
.clone()
.set_auth_context_verifier_fn(|acr| {
assert_eq!(**acr.unwrap(), "the_acr");
Err("Invalid acr claim".to_string())
})
.verified_claims(&test_jwt_with_nonce, &valid_nonce)
{
Err(ClaimsVerificationError::InvalidAuthContext(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
let test_jwt_without_auth_time =
serde_json::from_value::<CoreIdTokenJwt>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJzdWIiOiJzdWJqZWN0IiwiZXhwIjoxNTQ0OTMyMTQ5LCJpYXQiOjE1NDQ5Mjg1NDksIm5\
vbmNlIjoidGhlX25vbmNlIiwiYWNyIjoidGhlX2FjciJ9.c_lU1VRasTg0mB4lwdOzbzvFS_XShMLN\
lAPUpHBaMtCSPtI71L2x3hIByfkqIrAED-Qc_am2gNJ20bifidlkTOO6nyaBrJuaSjwT8aqajEbXon\
5JFswwPvqCIWjd0eV5dXC1MZunpd7ANXSC7Qw16v3m_crc9wcI_fLFCzuAKrWYokGvNy0gr1CxcgVg\
aE9qR0eqaatetzCuaOJhYOq4njrRlGZWtbj5Q56q3zhxJ_yS8K8gv1QcB4sHjUyXIj21jzjUD87zVG\
dJsn8E-nFJSltBdQhEaLksTBH6ZZhkeGicQ8cEPnNeS4L1vfVyAd_cjl64JHLmzw8RUp8XuoF9nA"
.to_string(),
))
.expect("failed to deserialize");
// Missing auth_time (ok)
public_client_verifier
.verified_claims(&test_jwt_without_auth_time, |_: Option<&Nonce>| Ok(()))
.expect("verification should succeed");
// Missing auth_time (error)
match public_client_verifier
.clone()
.set_auth_time_verifier_fn(|auth_time| {
assert!(auth_time.is_none());
Err("Invalid auth_time claim".to_string())
})
.verified_claims(&test_jwt_without_auth_time, |_: Option<&Nonce>| Ok(()))
{
Err(ClaimsVerificationError::InvalidAuthTime(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Invalid auth_time
match public_client_verifier
.clone()
.set_auth_time_verifier_fn(|auth_time| {
assert_eq!(
auth_time.unwrap(),
seconds_to_utc(&Seconds::new(1544928548.into())).unwrap(),
);
Err("Invalid auth_time claim".to_string())
})
.verified_claims(&test_jwt_with_nonce, &valid_nonce)
{
Err(ClaimsVerificationError::InvalidAuthTime(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Successful verification with nonce, acr, and auth_time specified (no expected Nonce)
public_client_verifier
.verified_claims(&test_jwt_with_nonce, |_: Option<&Nonce>| Ok(()))
.expect("verification should succeed");
// Successful verification with nonce, acr, and auth_time specified (w/ expected Nonce)
public_client_verifier
.verified_claims(&test_jwt_with_nonce, &valid_nonce)
.expect("verification should succeed");
// Successful verification with nonce, acr, and auth_time specified (w/ closure)
public_client_verifier
.verified_claims(&test_jwt_with_nonce, |nonce: Option<&Nonce>| {
if nonce.iter().any(|n| n.secret() == valid_nonce.secret()) {
Ok(())
} else {
Err("invalid nonce".to_string())
}
})
.expect("verification should succeed");
// HS256 w/ default algs
let test_jwt_hs256 =
serde_json::from_value::<CoreIdTokenJwt>(serde_json::Value::String(
"eyJhbGciOiJIUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhbXBsZ\
S5jb20iLCJzdWIiOiJzdWJqZWN0IiwiZXhwIjoxNTQ0OTMyMTQ5LCJpYXQiOjE1NDQ5Mjg1NDksIm5\
vbmNlIjoidGhlX25vbmNlIn0.xUnSwSbcHsHWyJxwKGg69BIo_CktcyN5BVulGDb_QzE"
.to_string(),
))
.expect("failed to deserialize");
let private_client_verifier = CoreIdTokenVerifier::new_confidential_client(
client_id.clone(),
ClientSecret::new("my_secret".to_string()),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
)
.set_time_fn(|| seconds_to_utc(&Seconds::new(mock_current_time.get().into())).unwrap());
match private_client_verifier.verified_claims(&test_jwt_hs256, &valid_nonce) {
Err(ClaimsVerificationError::SignatureVerification(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// HS256 w/ set_allowed_algs
private_client_verifier
.clone()
.set_allowed_algs(vec![CoreJwsSigningAlgorithm::HmacSha256])
.verified_claims(&test_jwt_hs256, &valid_nonce)
.expect("verification should succeed");
// HS256 w/ allow_any_alg
private_client_verifier
.clone()
.allow_any_alg()
.verified_claims(&test_jwt_hs256, &valid_nonce)
.expect("verification should succeed");
// Invalid signature
let private_client_verifier_with_other_secret =
CoreIdTokenVerifier::new_confidential_client(
client_id,
ClientSecret::new("other_secret".to_string()),
issuer,
CoreJsonWebKeySet::new(vec![rsa_key]),
)
.allow_any_alg()
.set_time_fn(|| {
seconds_to_utc(&Seconds::new(mock_current_time.get().into())).unwrap()
});
match private_client_verifier_with_other_secret
.verified_claims(&test_jwt_hs256, &valid_nonce)
{
Err(ClaimsVerificationError::SignatureVerification(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// Invalid signature w/ signature check disabled
private_client_verifier_with_other_secret
.clone()
.insecure_disable_signature_check()
.verified_claims(&test_jwt_hs256, &valid_nonce)
.expect("verification should succeed");
};
}
#[test]
fn test_new_id_token() {
let client_id = ClientId::new("my_client".to_string());
let issuer = IssuerUrl::new("https://example.com".to_string()).unwrap();
let nonce = Nonce::new("the_nonce".to_string());
let rsa_priv_key = CoreRsaPrivateSigningKey::from_pem(TEST_RSA_PRIV_KEY, None).unwrap();
let id_token = CoreIdToken::new(
CoreIdTokenClaims::new(
issuer.clone(),
vec![Audience::new((*client_id).clone())],
Utc.timestamp(1544932149, 0),
Utc.timestamp(1544928549, 0),
StandardClaims::new(SubjectIdentifier::new("subject".to_string())),
Default::default(),
)
.set_nonce(Some(nonce.clone()))
.set_auth_context_ref(Some(AuthenticationContextClass::new("the_acr".to_string())))
.set_auth_time(Some(Utc.timestamp(1544928548, 0))),
&rsa_priv_key,
CoreJwsSigningAlgorithm::RsaSsaPkcs1V15Sha256,
Some(&AccessToken::new("the_access_token".to_string())),
Some(&AuthorizationCode::new(
"the_authorization_code".to_string(),
)),
)
.unwrap();
let serialized_jwt: serde_json::Value = serde_json::to_value(&id_token).unwrap();
let expected_serialized_jwt =
"eyJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwiYXVkIjpbIm15X2NsaWVudCJdL\
CJleHAiOjE1NDQ5MzIxNDksImlhdCI6MTU0NDkyODU0OSwiYXV0aF90aW1lIjoxNTQ0OTI4NTQ4LCJub25jZSI\
6InRoZV9ub25jZSIsImFjciI6InRoZV9hY3IiLCJhdF9oYXNoIjoiWjNJQUNVR00tbXhIV3lZUXZpSzhFUSIsI\
mNfaGFzaCI6Imo2OW1CZmFIbmRMM1Y1RmNoak9LVXciLCJzdWIiOiJzdWJqZWN0In0.CHCWFcIqbCZhZwZH4oY\
_mlcRy5aUQQtlNI0VHNYxiILn9ppRHLL4Bn_LMn9VP8tGXkfZWxCgP25ZTyBXXKfk0fQvnukVdyM0bCOpQbiBg\
5gB9c46l_f-ZznDoHWonpnKky2Gmzk3ocb3TCUQ9GSeRXAzRdRNWTT0ElWNBsLWU4j2IIdnghM78gkXwOC76Rk\
pshgB73ubtuHGdIf5L9Ec3hifHlVjzKuvedAM4SIOjdBOelgtBlF3463ufX_Ut91CjP5TzLMsuK3Lh_vyo8ttn\
S41rBDuetR2ENvR0yj5RjkX_SPY3V0yCW8_NPPu1CHu_1oL0Nma0ohCbF3vnUJcwg";
assert_eq!(expected_serialized_jwt, serialized_jwt.as_str().unwrap());
let rsa_pub_key = serde_json::from_str::<CoreJsonWebKey>(TEST_RSA_PUB_KEY)
.expect("deserialization failed");
let mock_current_time = Cell::new(1544932148);
let verifier = CoreIdTokenVerifier::new_public_client(
client_id,
issuer,
CoreJsonWebKeySet::new(vec![rsa_pub_key]),
)
.set_time_fn(|| seconds_to_utc(&Seconds::new(mock_current_time.get().into())).unwrap());
id_token.claims(&verifier, &nonce).unwrap();
}
#[test]
fn test_user_info_verified_claims() {
let rsa_key = serde_json::from_str::<CoreJsonWebKey>(TEST_RSA_PUB_KEY)
.expect("deserialization failed");
let client_id = ClientId::new("my_client".to_string());
let issuer = IssuerUrl::new("https://example.com".to_string()).unwrap();
let sub = SubjectIdentifier::new("the_subject".to_string());
let verifier = CoreUserInfoVerifier::new(
client_id.clone(),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
Some(sub.clone()),
);
let json_claims = "{\
\"sub\": \"the_subject\",\
\"name\": \"Jane Doe\"\
}";
// JSON response (default args)
assert_eq!(
CoreUserInfoClaims::from_json::<crate::reqwest::HttpClientError>(
json_claims.as_bytes(),
Some(&sub)
)
.expect("verification should succeed")
.name()
.unwrap()
.iter()
.collect::<Vec<_>>(),
vec![(None, &EndUserName::new("Jane Doe".to_string()))],
);
// Invalid subject
match CoreUserInfoClaims::from_json::<crate::reqwest::HttpClientError>(
json_claims.as_bytes(),
Some(&SubjectIdentifier::new("wrong_subject".to_string())),
) {
Err(UserInfoError::ClaimsVerification(ClaimsVerificationError::InvalidSubject(_))) => {}
other => panic!("unexpected result: {:?}", other),
}
let jwt_claims =
serde_json::from_value::<CoreUserInfoJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhb\
XBsZS5jb20iLCJzdWIiOiJ0aGVfc3ViamVjdCIsIm5hbWUiOiJKYW5lIERvZSJ9.aX7VpexLAd\
43HtC1cFTot3jmqsr105rB50mzTcS1TXzWcxLbqYf1K7Kf-S1oP-ZCL_dnL9-nu3iDK_vRa6xT\
nGGt3I1JwhoIv6znSS3JOPT1wtekyD-sLcUwqsJHWBBiTSBwlmGG_kVRuGkBtXgVZ9aGlqg9u1\
FlxvyGUJ5q1o9gdb8mKql5ojgsThTNo9qdW3lPIVsiDO-n4mMp4HuOp1re4ZDDkHxiExjtLQAV\
kR4q3SlhJC2mkr4mw3_0a2AW52ocWDiwY_lPcdmohmwFaB8aHlivYLFnmKGQIatEW-KDaW5fFo\
JYreNkplo4FvzXYyxgxAsqHjHMI8MZVEa1IA"
.to_string(),
))
.expect("failed to deserialize");
// Valid JWT response (default args)
jwt_claims
.clone()
.claims(&verifier)
.expect("verification should succeed");
// JWT response with invalid signature
match serde_json::from_value::<CoreUserInfoJsonWebToken>(serde_json::Value::String(
"eyJhbGciOiJSUzI1NiJ9.eyJhdWQiOlsibXlfY2xpZW50Il0sImlzcyI6Imh0dHBzOi8vZXhhb\
XBsZS5jb20iLCJzdWIiOiJ0aGVfc3ViamVjdCIsIm5hbWUiOiJKYW5lIERvZSJ9.bX7VpexLAd\
43HtC1cFTot3jmqsr105rB50mzTcS1TXzWcxLbqYf1K7Kf-S1oP-ZCL_dnL9-nu3iDK_vRa6xT\
nGGt3I1JwhoIv6znSS3JOPT1wtekyD-sLcUwqsJHWBBiTSBwlmGG_kVRuGkBtXgVZ9aGlqg9u1\
FlxvyGUJ5q1o9gdb8mKql5ojgsThTNo9qdW3lPIVsiDO-n4mMp4HuOp1re4ZDDkHxiExjtLQAV\
kR4q3SlhJC2mkr4mw3_0a2AW52ocWDiwY_lPcdmohmwFaB8aHlivYLFnmKGQIatEW-KDaW5fFo\
JYreNkplo4FvzXYyxgxAsqHjHMI8MZVEa1IA"
.to_string(),
))
.expect("failed to deserialize")
.claims(&verifier)
{
Err(ClaimsVerificationError::SignatureVerification(
SignatureVerificationError::CryptoError(_),
)) => {}
other => panic!("unexpected result: {:?}", other),
}
// JWT response with invalid issuer claim (error)
match jwt_claims.clone().claims(&CoreUserInfoVerifier::new(
client_id.clone(),
IssuerUrl::new("https://attacker.com".to_string()).unwrap(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
Some(sub.clone()),
)) {
Err(ClaimsVerificationError::InvalidIssuer(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// JWT response with invalid issuer claim (allowed)
jwt_claims
.clone()
.claims(
&CoreUserInfoVerifier::new(
client_id,
IssuerUrl::new("https://attacker.com".to_string()).unwrap(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
Some(sub.clone()),
)
.require_issuer_match(false),
)
.expect("verification should succeed");
// JWT response with invalid audience claim (error)
match jwt_claims.clone().claims(&CoreUserInfoVerifier::new(
ClientId::new("wrong_client".to_string()),
issuer.clone(),
CoreJsonWebKeySet::new(vec![rsa_key.clone()]),
Some(sub.clone()),
)) {
Err(ClaimsVerificationError::InvalidAudience(_)) => {}
other => panic!("unexpected result: {:?}", other),
}
// JWT response with invalid audience claim (allowed)
jwt_claims
.claims(
&CoreUserInfoVerifier::new(
ClientId::new("wrong_client".to_string()),
issuer,
CoreJsonWebKeySet::new(vec![rsa_key]),
Some(sub),
)
.require_audience_match(false),
)
.expect("verification should succeed");
}
#[test]
fn test_new_user_info_claims() {
let claims = CoreUserInfoClaims::new(
StandardClaims {
sub: SubjectIdentifier::new("the_subject".to_string()),
name: Some(EndUserName::new("John Doe".to_string()).into()),
given_name: None,
family_name: None,
middle_name: None,
nickname: None,
preferred_username: None,
profile: None,
picture: None,
website: None,
email: None,
email_verified: None,
gender: None,
birthday: None,
zoneinfo: None,
locale: None,
phone_number: None,
phone_number_verified: None,
address: None,
updated_at: Some(Utc.timestamp(1544928548, 0)),
},
Default::default(),
);
assert_eq!(
"{\"sub\":\"the_subject\",\"name\":\"John Doe\",\"updated_at\":1544928548}",
serde_json::to_string(&claims).unwrap()
);
let rsa_priv_key = CoreRsaPrivateSigningKey::from_pem(TEST_RSA_PRIV_KEY, None).unwrap();
let claims_jwt = CoreUserInfoJsonWebToken::new(
claims,
&rsa_priv_key,
CoreJwsSigningAlgorithm::RsaSsaPkcs1V15Sha256,
)
.unwrap();
assert_eq!(
"eyJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJ0aGVfc3ViamVjdCIsIm5hbWUiOiJKb2huIERvZSIsInVwZGF0ZWRfY\
XQiOjE1NDQ5Mjg1NDh9.nJ7Buckt_p_ACXkyVRCQLqyaW8KhDsk5H9Nu7PdNf4daEcEWm-lGjoSTAfAbDPgHAZ\
78knomgLgDxiGWrj1qdFTIEFep32I3q18VBP_DcMdyuQafipK6T98RgZFWP8YnxlxLPHeJQlRsdMpemHK4vxas\
ZD4A4aIn0K7z5J9RvrR3L7DWnc3fJQ0VU2v5QLePyqNWnFxks5eyl8Ios8JrZhwr4Q8GES8Q4Iw8Sz6W9vYpHK\
2r1YdaACMM4g_TTtV91lpjn-Li2-HxW9NERdLvYvF6HwGIwbss26trp2yjNTARlxBUT6LR7y82oPIJKXIKL1GD\
YeSLeErhb6oTQ0a5gQ",
serde_json::to_value(&claims_jwt).unwrap().as_str().unwrap()
);
}
}
| 42.989093 | 104 | 0.596155 |
8f49242d925e6fa71688a852723a93ed26db9045
| 1,769 |
mod diff;
mod snippet;
use crate::snippet::SnippetDef;
use annotate_snippets::display_list::DisplayList;
use annotate_snippets::formatter::DisplayListFormatter;
use annotate_snippets::snippet::Snippet;
use glob::glob;
use serde::Deserialize;
use std::error::Error;
use std::fs::File;
use std::io;
use std::io::prelude::*;
use std::path::Path;
/// Reads the file at `path` and returns its contents with trailing
/// whitespace stripped (fixture `.txt` files may end with a newline).
fn read_file(path: &str) -> Result<String, io::Error> {
    // `fs::read_to_string` replaces the manual open + read-into-buffer dance
    // (the original also had redundant parentheses around the read call).
    let s = std::fs::read_to_string(path)?;
    Ok(s.trim_end().to_string())
}
/// Deserializes a YAML fixture file into a `Snippet`.
fn read_fixture<P: AsRef<Path>>(path: P) -> Result<Snippet, Box<dyn Error>> {
    // Local newtype so serde can apply the remote `SnippetDef` definition.
    #[derive(Deserialize)]
    struct Wrapper(#[serde(with = "SnippetDef")] Snippet);
    let reader = File::open(path)?;
    let Wrapper(snippet) = serde_yaml::from_reader(reader)?;
    Ok(snippet)
}
#[test]
fn test_fixtures() {
    // Every YAML fixture has a sibling `.txt` file with the expected render.
    for entry in glob("./tests/fixtures/no-color/**/*.yaml").expect("Failed to read glob pattern") {
        let yaml_path_buf = entry.expect("Error while getting an entry");
        let yaml_path = yaml_path_buf.to_str().expect("Can't print path");
        let txt_path = yaml_path.replace(".yaml", ".txt");

        let snippet = read_fixture(yaml_path).expect("Failed to read file");
        let expected = read_file(&txt_path).expect("Failed to read file");

        let list = DisplayList::from(snippet);
        let formatter = DisplayListFormatter::new(true, false);
        let actual = formatter.format(&list);

        // Print both renderings so failures are easy to inspect in CI logs.
        println!("{}", expected);
        println!("{}", actual.trim_end());
        assert_eq!(
            expected,
            actual.trim_end(),
            "\n\n\nWhile parsing: {}\nThe diff is:\n\n\n{}\n\n\n",
            yaml_path,
            diff::get_diff(expected.as_str(), actual.as_str())
        );
    }
}
| 30.5 | 100 | 0.62182 |
50abc47d03ea39e9143589fefb02b8d3eba35b72
| 2,650 |
#![allow(clippy::module_inception)]
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::large_enum_variant)]
#![allow(clippy::wrong_self_convention)]
#![allow(clippy::should_implement_trait)]
#![allow(clippy::blacklisted_name)]
#![allow(clippy::vec_init_then_push)]
#![allow(clippy::type_complexity)]
#![allow(rustdoc::bare_urls)]
#![warn(missing_docs)]
//! <p>
//! AWS Mobile Service provides mobile app and website developers with capabilities
//! required to configure AWS resources and bootstrap their developer desktop projects
//! with the necessary SDKs, constants, tools and samples to make use of those resources.
//! </p>
//!
//! # Crate Organization
//!
//! The entry point for most customers will be [`Client`]. [`Client`] exposes one method for each API offered
//! by the service.
//!
//! Some APIs require complex or nested arguments. These exist in [`model`](crate::model).
//!
//! Lastly, errors that can be returned by the service are contained within [`error`]. [`Error`] defines a meta
//! error encompassing all possible errors that can be returned by the service.
//!
//! The other modules within this crate are not required for normal usage.
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
pub use error_meta::Error;
#[doc(inline)]
pub use config::Config;
mod aws_endpoint;
/// Client and fluent builders for calling the service.
pub mod client;
/// Configuration for the service.
pub mod config;
/// Errors that can occur when calling the service.
pub mod error;
mod error_meta;
mod http_serde;
/// Input structures for operations.
pub mod input;
mod json_deser;
mod json_errors;
/// Generated accessors for nested fields
pub mod lens;
pub mod middleware;
/// Data structures used by operation inputs/outputs.
pub mod model;
mod no_credentials;
/// All operations that this crate can perform.
pub mod operation;
mod operation_deser;
mod operation_ser;
/// Output structures for operations.
pub mod output;
/// Paginators for the service
pub mod paginator;
/// Crate version number.
pub static PKG_VERSION: &str = env!("CARGO_PKG_VERSION");
/// Re-exported types from supporting crates.
pub mod types {
    // Error wrapper returned by all generated client operations.
    pub use aws_smithy_http::result::SdkError;
    // Primitive value types used by the generated models.
    pub use aws_smithy_types::Blob;
    pub use aws_smithy_types::DateTime;
}
/// Service identifier ("mobile") and crate version pair; presumably fed into
/// the request user agent by the aws_http middleware — confirm at call sites.
static API_METADATA: aws_http::user_agent::ApiMetadata =
    aws_http::user_agent::ApiMetadata::new("mobile", PKG_VERSION);
pub use aws_smithy_http::endpoint::Endpoint;
pub use aws_smithy_types::retry::RetryConfig;
pub use aws_types::app_name::AppName;
pub use aws_types::region::Region;
pub use aws_types::Credentials;
#[doc(inline)]
pub use client::Client;
| 33.544304 | 111 | 0.751698 |
64024436d130babf0457f5801722aa438ff35a21
| 4,001 |
use std::convert::TryInto;
use std::collections::HashMap;
use std::process::Stdio;
use async_trait::async_trait;
use tokio::process::Command;
use super::{CopyDirection, CopyOptions, Host, key_uploader};
use crate::nix::{StorePath, Profile, Goal, NixResult, NixCommand, Key, SYSTEM_PROFILE};
use crate::util::CommandExecution;
use crate::progress::TaskProgress;
/// The local machine running Colmena.
///
/// It may not be capable of realizing some derivations
/// (e.g., building Linux derivations on macOS).
#[derive(Debug)]
pub struct Local {
    /// Progress reporter used by command executions on this host.
    progress_bar: TaskProgress,
    /// Accumulated stdout/stderr of executed commands, served by `dump_logs`.
    logs: String,
    /// Extra options forwarded verbatim to Nix invocations.
    nix_options: Vec<String>,
}
impl Local {
    /// Creates a local host that passes `nix_options` to every Nix call.
    pub fn new(nix_options: Vec<String>) -> Self {
        Self {
            nix_options,
            logs: String::new(),
            progress_bar: TaskProgress::default(),
        }
    }
}
#[async_trait]
impl Host for Local {
    /// No-op: the closure already lives in the local Nix store.
    async fn copy_closure(&mut self, _closure: &StorePath, _direction: CopyDirection, _options: CopyOptions) -> NixResult<()> {
        Ok(())
    }
    /// Builds `derivation` locally via `nix-store --realise` and returns the
    /// resulting store paths parsed from the command's stdout.
    async fn realize_remote(&mut self, derivation: &StorePath) -> NixResult<Vec<StorePath>> {
        let mut command = Command::new("nix-store");
        command.args(self.nix_options.clone());
        command
            .arg("--no-gc-warning")
            .arg("--realise")
            .arg(derivation.as_path());
        let mut execution = CommandExecution::new(command);
        execution.set_progress_bar(self.progress_bar.clone());
        let result = execution.run().await;
        // Stderr is accumulated so `dump_logs` can surface it later.
        let (stdout, stderr) = execution.get_logs();
        self.logs += stderr.unwrap();
        match result {
            Ok(()) => {
                // One realized store path per stdout line.
                stdout.unwrap().lines().map(|p| p.to_string().try_into()).collect()
            }
            Err(e) => Err(e),
        }
    }
    /// Deploys each key sequentially; stops at the first failure.
    async fn upload_keys(&mut self, keys: &HashMap<String, Key>, require_ownership: bool) -> NixResult<()> {
        for (name, key) in keys {
            self.upload_key(&name, &key, require_ownership).await?;
        }
        Ok(())
    }
    /// Optionally points the system profile at `profile`, then runs the
    /// profile's activation command for the given goal.
    async fn activate(&mut self, profile: &Profile, goal: Goal) -> NixResult<()> {
        if goal.should_switch_profile() {
            let path = profile.as_path().to_str().unwrap();
            Command::new("nix-env")
                .args(&["--profile", SYSTEM_PROFILE])
                .args(&["--set", path])
                .passthrough()
                .await?;
        }
        let activation_command = profile.activation_command(goal).unwrap();
        let mut command = Command::new(&activation_command[0]);
        command
            .args(&activation_command[1..]);
        let mut execution = CommandExecution::new(command);
        execution.set_progress_bar(self.progress_bar.clone());
        let result = execution.run().await;
        // FIXME: Bad - Order of lines is messed up
        // (stdout is appended wholesale before stderr, losing interleaving).
        let (stdout, stderr) = execution.get_logs();
        self.logs += stdout.unwrap();
        self.logs += stderr.unwrap();
        result
    }
    /// Always true locally: the active derivation can be queried in-process.
    async fn active_derivation_known(&mut self) -> NixResult<bool> {
        Ok(true)
    }
    fn set_progress_bar(&mut self, bar: TaskProgress) {
        self.progress_bar = bar;
    }
    async fn dump_logs(&self) -> Option<&str> {
        Some(&self.logs)
    }
}
impl Local {
    /// "Uploads" a single key.
    async fn upload_key(&mut self, name: &str, key: &Key, require_ownership: bool) -> NixResult<()> {
        self.progress_bar.log(&format!("Deploying key {}", name));

        let dest_path = key.dest_dir().join(name);
        let script = format!("'{}'", key_uploader::generate_script(key, &dest_path, require_ownership));

        // Run the generated script under `sh -c`, with all three standard
        // streams piped so the uploader can feed stdin and capture output.
        let mut cmd = Command::new("sh");
        cmd.args(&["-c", &script])
            .stdin(Stdio::piped())
            .stderr(Stdio::piped())
            .stdout(Stdio::piped());

        let child = cmd.spawn()?;
        key_uploader::feed_uploader(child, key, self.progress_bar.clone(), &mut self.logs).await
    }
}
| 31.015504 | 127 | 0.593102 |
fce301cd20b8b7aa5bbd06b46e427bf7d5b98652
| 2,741 |
extern crate arduplayer;
extern crate rand;
use std::collections::HashMap;
use std::{env, thread };
use std::hash::Hash;
use std::time::Duration;
use arduplayer::{Player, PlayerOptions, Song};
use rand::Rng;
fn main() {
    let songs = songs();
    if let Some(requested) = env::args().nth(1) {
        // "random" picks any known song; otherwise the argument is the name.
        let song_name: &str = if requested == "random" {
            random_key(&songs)
        } else {
            &*requested
        };
        let mut player = Player::new(6).expect("Could not initialize serial port");
        // Delay to get serial connection set up
        thread::sleep(Duration::from_millis(3000));
        let song = Song::from_midi(format!("music/{}.mid", song_name));
        let options = &songs[song_name];
        player.play_song(song, options.borrow());
    } else {
        println!("Please specify a song:");
        for name in songs.keys() {
            println!("* {}", name);
        }
    }
}
/// Picks a uniformly random key from `map`.
///
/// Panics if `map` is empty (rand's `choose` yields `None` for an empty slice).
fn random_key<K: Eq + Hash, V>(map: &HashMap<K, V>) -> &K {
    let keys: Vec<&K> = map.keys().collect();
    let picked = rand::thread_rng().choose(&keys).unwrap();
    *picked
}
/// Returns the catalogue of playable songs: MIDI base name mapped to the
/// tracks/transpositions to play and the tempo multiplier.
fn songs() -> HashMap<&'static str, PlayerOptions<'static>> {
    let entries = vec![
        ("PkmRS-Center", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (3, 0), (4, 0), (5, 0)],
            delay_mul: 5.0,
        }),
        ("SSBKirbyStage", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (4, 0), (5, 0), (6, 0), (8, 0), (9, 0), (10, 0), (11, 2), (13, 0)],
            delay_mul: 3.0,
        }),
        ("cliffs", PlayerOptions {
            tracks: &[(2, 0)],
            delay_mul: 0.5,
        }),
        ("pacman", PlayerOptions {
            tracks: &[(1, 0), (2, 0)],
            delay_mul: 5.0,
        }),
        ("smwintro", PlayerOptions {
            tracks: &[(1, -1), (2, -1), (3, -1), (4, -1)],
            delay_mul: 3.0,
        }),
        ("OoTBoF", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (4, 0), (6, 0)],
            delay_mul: 2.0,
        }),
        ("SSB_hammer", PlayerOptions {
            tracks: &[(1, 0), (2, 0)],
            delay_mul: 1.0,
        }),
        ("Fox_Wins", PlayerOptions {
            tracks: &[(0, 0)],
            delay_mul: 5.0,
        }),
        ("HappyBirthday", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (3, 0)],
            delay_mul: 3.0,
        }),
        ("OoTSoT", PlayerOptions {
            tracks: &[(1, 1), (2, 1), (4, 0)],
            delay_mul: 2.0,
        }),
        ("symph40", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (3, 0), (4, 0)],
            delay_mul: 0.5,
        }),
        ("Z64gerud", PlayerOptions {
            tracks: &[(1, 0), (2, 0), (3, 0), (4, 0), (5, 0), (6, 0), (7, 0)],
            delay_mul: 4.0,
        }),
    ];
    entries.into_iter().collect()
}
| 24.918182 | 101 | 0.503466 |
395e424c79e3b512c5783da5be414a8ded0207f9
| 1,673 |
// https://leetcode-cn.com/problems/ambiguous-coordinates/
// Runtime: 4 ms
// Memory Usage: 2.1 MB
/// LeetCode 816: given a digit string wrapped in parentheses, returns every
/// coordinate pair "(x, y)" it could represent.
pub fn ambiguous_coordinates(s: String) -> Vec<String> {
    let chars: Vec<char> = s.chars().collect();
    let len = chars.len();
    let mut results = Vec::new();
    // Try every comma position between the two coordinates, skipping the
    // surrounding parentheses at indices 0 and len - 1.
    for split in 2..len - 1 {
        let rights = nums(&chars[split..len - 1]);
        for left in nums(&chars[1..split]) {
            for right in &rights {
                results.push(format!("({}, {})", left, right));
            }
        }
    }
    results
}
/// Returns every valid decimal rendering of the digit slice `ch`: the digits
/// as-is, or with one decimal point inserted. Renderings with a leading zero
/// (other than a lone "0") or a trailing zero after the point are excluded.
fn nums(ch: &[char]) -> Vec<String> {
    let n = ch.len();
    let mut out = Vec::new();
    for split in 1..=n {
        let int_part: String = ch[..split].iter().collect();
        let frac_part: String = ch[split..].iter().collect();
        // "01" is not a valid integer part, but "0" on its own is.
        if int_part.starts_with('0') && int_part != "0" {
            continue;
        }
        // "1.10" is not canonical — a fraction may not end in zero.
        if frac_part.ends_with('0') {
            continue;
        }
        let sep = if split == n { "" } else { "." };
        out.push(format!("{}{}{}", int_part, sep, frac_part));
    }
    out
}
// string
#[test]
fn test1_816() {
    use leetcode_prelude::vec_string;
    // Expected outputs (and their order) follow the split-then-insert-dot
    // enumeration order of `ambiguous_coordinates`.
    assert_eq!(
        ambiguous_coordinates("(123)".to_string()),
        vec_string!["(1, 2.3)", "(1, 23)", "(1.2, 3)", "(12, 3)"]
    );
    // Leading/trailing zeros prune most candidates.
    assert_eq!(
        ambiguous_coordinates("(00011)".to_string()),
        vec_string!["(0, 0.011)", "(0.001, 1)"]
    );
    assert_eq!(
        ambiguous_coordinates("(0123)".to_string()),
        vec_string![
            "(0, 1.23)",
            "(0, 12.3)",
            "(0, 123)",
            "(0.1, 2.3)",
            "(0.1, 23)",
            "(0.12, 3)"
        ]
    );
    // "100" only splits as 10|0: "1.00", "0.0" etc. are all invalid.
    assert_eq!(
        ambiguous_coordinates("(100)".to_string()),
        vec_string!["(10, 0)"]
    );
}
| 24.970149 | 65 | 0.429169 |
5075a63f9faa69d285cfcc14bdc709c7d018eff8
| 16,316 |
#[macro_use]
extern crate timeit;
extern crate env_logger;
extern crate collenchyma as co;
extern crate leaf;
use co::prelude::*;
use std::sync::{Arc, RwLock};
use leaf::layers::*;
use leaf::layer::*;
use std::rc::Rc;
use std::env;
/// Entry point: picks a benchmark model from the first CLI argument
/// ("alexnet", "overfeat" or "vgg"); defaults to AlexNet.
fn main() {
    env_logger::init().unwrap();
    let nets: Vec<String> = vec!["alexnet".to_string(), "overfeat".to_string(), "vgg".to_string()];
    if let Some(net) = env::args().nth(1) {
        if nets.contains(&net) {
            println!("Executing Model: {:?}", net);
            // Compare as &str: the original allocated a fresh String for
            // every `net == "...".to_string()` comparison.
            match net.as_str() {
                "alexnet" => bench_alexnet(),
                "overfeat" => bench_overfeat(),
                "vgg" => bench_vgg_a(),
                _ => unreachable!("membership in `nets` was checked above"),
            }
        } else {
            println!("Sorry, no model found with name '{:?}'. Valid options: {:?}", net, nets);
        }
    } else {
        println!("No `net` argument specified. Default: `alexnet`. Valid options: {:?}", nets);
        bench_alexnet();
    }
}
#[cfg(feature = "native")]
#[allow(dead_code)]
/// Builds a Collenchyma backend running on the host CPU (all native hardware).
fn native_backend() -> Rc<Backend<Native>> {
    let framework = Native::new();
    let hardwares = &framework.hardwares().to_vec();
    let backend_config = BackendConfig::new(framework, hardwares);
    Rc::new(Backend::new(backend_config).unwrap())
}
#[cfg(feature = "cuda")]
#[allow(dead_code)]
/// Builds a Collenchyma backend on the first CUDA device and prints which
/// device was selected.
fn cuda_backend() -> Rc<Backend<Cuda>> {
    let framework = Cuda::new();
    // Only the first CUDA device is used.
    let hardwares = &framework.hardwares()[0..1].to_vec();
    println!("Device: {:?}/{}", hardwares[0].hardware_type().unwrap(), hardwares[0].name().unwrap());
    let backend_config = BackendConfig::new(framework, hardwares);
    Rc::new(Backend::new(backend_config).unwrap())
}
#[cfg(feature = "opencl")]
#[allow(dead_code)]
fn opencl_backend() -> Rc<Backend<OpenCL>> {
let framework = OpenCL::new();
let hardwares = &framework.hardwares()[1..2].to_vec();
let backend_config = BackendConfig::new(framework, hardwares);
Rc::new(Backend::new(backend_config).unwrap())
}
/// Runs `bench_func` `times` times after a fixed warm-up and prints the
/// average wall-clock duration per iteration.
///
/// `name` is only used to label the printed report.
#[inline(never)]
fn bench_profile<F: FnMut()>(name: &str, mut bench_func: F, times: usize) {
    // Iterations executed before timing starts, to warm caches/allocator state.
    const WARMUP_RUNS: usize = 10;
    println!("Running benchmark {}", name);
    println!("----------");
    for _ in 0..WARMUP_RUNS {
        bench_func();
    }
    let average_time = timeit_loops!(times, {
        bench_func();
    });
    println!("----------");
    println!("Average time {}", autoscale_time(average_time));
    println!();
}
/// Formats `sec` with the largest time unit that keeps the value readable.
fn autoscale_time(sec: f64) -> String {
    let (divisor, unit) = get_time_scale(sec);
    format!("{:.5} {}", sec / divisor, unit)
}
/// Formats `sec` in the explicitly requested `unit` ("s", "ms", "µs" or "ns").
///
/// # Panics
/// Panics if `unit` is not one of the recognized unit strings.
fn scale_time(sec: f64, unit: &str) -> String {
    let div = match unit {
        "s" => 1.0,
        "ms" => 0.001,
        "µs" => 0.000_001,
        "ns" => 0.000_000_001,
        // The original panicked with no message; name the offending input.
        _ => panic!("unsupported time unit: {:?}", unit),
    };
    format!("{:.5} {}", sec / div, unit)
}
/// Picks a (divisor, unit suffix) pair fitting the order of magnitude of a
/// time measurement, so that `sec / divisor` lands in a readable range.
fn get_time_scale(sec: f64) -> (f64, &'static str) {
    // The original declared a generic lifetime `<'a>` that was never tied to
    // any input; the returned strings are literals, so `'static` is exact.
    if sec > 1.0 {
        (1.0, "s")
    } else if sec > 0.001 {
        (0.001, "ms")
    } else if sec > 0.000_001 {
        (0.000_001, "µs")
    } else {
        (0.000_000_001, "ns")
    }
}
#[cfg(feature="native")]
/// Native stub: convolution is not implemented in the native NN plugin,
/// so this build can only point the user at the CUDA feature.
fn bench_alexnet() {
    println!("Examples run only with CUDA support at the moment, because of missing native convolution implementation for the Collenchyma NN Plugin.");
    println!("Try running with `cargo run --release --no-default-features --features cuda --example benchmarks alexnet`.");
}
#[cfg(all(feature="cuda", not(feature="native")))]
/// Benchmarks forward / backward-input / backward-parameters passes of an
/// AlexNet-style network (batch 128, 3x224x224 input) on the CUDA backend.
fn bench_alexnet() {
    // Build the network definition layer by layer.
    let mut cfg = SequentialConfig::default();
    cfg.add_input("data", &vec![128, 3, 224, 224]);
    let conv1_layer_cfg = ConvolutionConfig { num_output: 64, filter_shape: vec![11], padding: vec![2], stride: vec![4] };
    cfg.add_layer(LayerConfig::new("conv1", conv1_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv1/relu", LayerType::ReLU));
    let pool1_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![3], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool1", pool1_layer_cfg));
    let conv2_layer_cfg = ConvolutionConfig { num_output: 192, filter_shape: vec![5], padding: vec![2], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv2", conv2_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv2/relu", LayerType::ReLU));
    let pool2_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![3], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool2", pool2_layer_cfg));
    let conv3_layer_cfg = ConvolutionConfig { num_output: 384, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv3", conv3_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv3/relu", LayerType::ReLU));
    let conv4_layer_cfg = ConvolutionConfig { num_output: 256, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv4", conv4_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv4/relu", LayerType::ReLU));
    let conv5_layer_cfg = ConvolutionConfig { num_output: 256, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv5", conv5_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv5/relu", LayerType::ReLU));
    let pool3_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![3], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool3", pool3_layer_cfg));
    cfg.add_layer(LayerConfig::new("fc1", LinearConfig { output_size: 4096 }));
    cfg.add_layer(LayerConfig::new("fc2", LinearConfig { output_size: 4096 }));
    cfg.add_layer(LayerConfig::new("fc3", LinearConfig { output_size: 1000 }));
    let backend = cuda_backend();
    // let native_backend = native_backend();
    let mut network = Layer::from_config(backend.clone(), &LayerConfig::new("alexnet", LayerType::Sequential(cfg)));
    // Time one forward pass per bench iteration.
    {
        let func = || {
            let forward_time = timeit_loops!(1, {
                {
                    let inp = SharedTensor::<f32>::new(backend.device(), &vec![128, 3, 224, 224]).unwrap();
                    let inp_lock = Arc::new(RwLock::new(inp));
                    network.forward(&[inp_lock.clone()]);
                }
            });
            println!("Forward step: {}", scale_time(forward_time, "ms"));
        };
        { bench_profile("alexnet_forward", func, 10); }
    }
    // Time the backward pass w.r.t. the inputs.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_input(&[]);
                }
            });
            println!("backward input step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("alexnet_backward_input", func, 10); }
    }
    // Time the backward pass w.r.t. the parameters.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_parameters();
                }
            });
            println!("backward parameters step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("alexnet_backward_parameters", func, 10); }
    }
}
#[cfg(feature="native")]
/// Native stub: see `bench_alexnet` — convolution requires the CUDA plugin.
fn bench_overfeat() {
    println!("Examples run only with CUDA support at the moment, because of missing native convolution implementation for the Collenchyma NN Plugin.");
    println!("Try running with `cargo run --release --no-default-features --features cuda --example benchmarks overfeat`.");
}
#[cfg(all(feature="cuda", not(feature="native")))]
/// Benchmarks forward / backward-input / backward-parameters passes of an
/// OverFeat-style network (batch 128, 3x231x231 input) on the CUDA backend.
fn bench_overfeat() {
    // Build the network definition layer by layer.
    let mut cfg = SequentialConfig::default();
    cfg.add_input("data", &vec![128, 3, 231, 231]);
    let conv1_layer_cfg = ConvolutionConfig { num_output: 96, filter_shape: vec![11], padding: vec![0], stride: vec![4] };
    cfg.add_layer(LayerConfig::new("conv1", conv1_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv1/relu", LayerType::ReLU));
    let pool1_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool1", pool1_layer_cfg));
    let conv2_layer_cfg = ConvolutionConfig { num_output: 256, filter_shape: vec![5], padding: vec![0], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv2", conv2_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv2/relu", LayerType::ReLU));
    let pool2_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool2", pool2_layer_cfg));
    let conv3_layer_cfg = ConvolutionConfig { num_output: 512, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv3", conv3_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv3/relu", LayerType::ReLU));
    let conv4_layer_cfg = ConvolutionConfig { num_output: 1024, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv4", conv4_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv4/relu", LayerType::ReLU));
    let conv5_layer_cfg = ConvolutionConfig { num_output: 1024, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv5", conv5_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv5/relu", LayerType::ReLU));
    let pool5_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool5", pool5_layer_cfg));
    cfg.add_layer(LayerConfig::new("fc1", LinearConfig { output_size: 3072 }));
    cfg.add_layer(LayerConfig::new("fc2", LinearConfig { output_size: 4096 }));
    cfg.add_layer(LayerConfig::new("fc3", LinearConfig { output_size: 1000 }));
    let backend = cuda_backend();
    // let native_backend = native_backend();
    let mut network = Layer::from_config(backend.clone(), &LayerConfig::new("overfeat", LayerType::Sequential(cfg)));
    // Time one forward pass per bench iteration.
    {
        let func = || {
            let forward_time = timeit_loops!(1, {
                {
                    let inp = SharedTensor::<f32>::new(backend.device(), &vec![128, 3, 231, 231]).unwrap();
                    let inp_lock = Arc::new(RwLock::new(inp));
                    network.forward(&[inp_lock.clone()]);
                }
            });
            println!("Forward step: {}", scale_time(forward_time, "ms"));
        };
        { bench_profile("overfeat_forward", func, 10); }
    }
    // Time the backward pass w.r.t. the inputs.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_input(&[]);
                }
            });
            println!("backward input step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("overfeat_backward_input", func, 10); }
    }
    // Time the backward pass w.r.t. the parameters.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_parameters();
                }
            });
            println!("backward parameters step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("overfeat_backward_parameters", func, 10); }
    }
}
#[cfg(feature="native")]
/// Native stub: see `bench_alexnet` — convolution requires the CUDA plugin.
fn bench_vgg_a() {
    println!("Examples run only with CUDA support at the moment, because of missing native convolution implementation for the Collenchyma NN Plugin.");
    println!("Try running with `cargo run --release --no-default-features --features cuda --example benchmarks vgg`.");
}
#[cfg(all(feature="cuda", not(feature="native")))]
/// Benchmarks forward / backward-input / backward-parameters passes of a
/// VGG-A network (batch 64, 3x224x224 input) on the CUDA backend.
fn bench_vgg_a() {
    // Build the network definition layer by layer.
    let mut cfg = SequentialConfig::default();
    cfg.add_input("data", &vec![64, 3, 224, 224]);
    let conv1_layer_cfg = ConvolutionConfig { num_output: 64, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv1", conv1_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv1/relu", LayerType::ReLU));
    let pool1_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool1", pool1_layer_cfg));
    let conv2_layer_cfg = ConvolutionConfig { num_output: 128, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv2", conv2_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv2/relu", LayerType::ReLU));
    let pool2_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool2", pool2_layer_cfg));
    let conv3_layer_cfg = ConvolutionConfig { num_output: 256, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv3", conv3_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv3/relu", LayerType::ReLU));
    let conv4_layer_cfg = ConvolutionConfig { num_output: 256, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv4", conv4_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv4/relu", LayerType::ReLU));
    let pool3_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool3", pool3_layer_cfg));
    let conv5_layer_cfg = ConvolutionConfig { num_output: 512, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv5", conv5_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv5/relu", LayerType::ReLU));
    let conv6_layer_cfg = ConvolutionConfig { num_output: 512, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv6", conv6_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv6/relu", LayerType::ReLU));
    let pool4_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool4", pool4_layer_cfg));
    let conv7_layer_cfg = ConvolutionConfig { num_output: 512, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv7", conv7_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv7/relu", LayerType::ReLU));
    let conv8_layer_cfg = ConvolutionConfig { num_output: 512, filter_shape: vec![3], padding: vec![1], stride: vec![1] };
    cfg.add_layer(LayerConfig::new("conv8", conv8_layer_cfg));
    cfg.add_layer(LayerConfig::new("conv8/relu", LayerType::ReLU));
    let pool5_layer_cfg = PoolingConfig { mode: PoolingMode::Max, filter_shape: vec![2], stride: vec![2], padding: vec![0] };
    cfg.add_layer(LayerConfig::new("pool5", pool5_layer_cfg));
    cfg.add_layer(LayerConfig::new("fc1", LinearConfig { output_size: 4096 }));
    cfg.add_layer(LayerConfig::new("fc2", LinearConfig { output_size: 4096 }));
    cfg.add_layer(LayerConfig::new("fc3", LinearConfig { output_size: 1000 }));
    let backend = cuda_backend();
    let mut network = Layer::from_config(backend.clone(), &LayerConfig::new("vgg_a", LayerType::Sequential(cfg)));
    // Time one forward pass per bench iteration.
    // (Fixed copy-paste bug: the benchmark labels below said "overfeat_*".)
    {
        let func = || {
            let forward_time = timeit_loops!(1, {
                {
                    let inp = SharedTensor::<f32>::new(backend.device(), &vec![64, 3, 224, 224]).unwrap();
                    let inp_lock = Arc::new(RwLock::new(inp));
                    network.forward(&[inp_lock.clone()]);
                }
            });
            println!("Forward step: {}", scale_time(forward_time, "ms"));
        };
        { bench_profile("vgg_a_forward", func, 10); }
    }
    // Time the backward pass w.r.t. the inputs.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_input(&[]);
                }
            });
            println!("backward input step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("vgg_a_backward_input", func, 10); }
    }
    // Time the backward pass w.r.t. the parameters.
    {
        let func = || {
            let backward_time = timeit_loops!(1, {
                {
                    network.backward_parameters();
                }
            });
            println!("backward parameters step: {}", scale_time(backward_time, "ms"));
        };
        { bench_profile("vgg_a_backward_parameters", func, 10); }
    }
}
| 43.509333 | 151 | 0.614918 |
ff56668b78b8f0ff24a260bcc54b9059cab5bf6a
| 3,396 |
use testcrate::*;
/// For each `(type, add-fn, sub-fn)` triple, fuzz-checks that the intrinsic
/// wrapping add/sub implementations agree with the standard library's
/// `wrapping_add` / `wrapping_sub`.
macro_rules! sum {
    ($($i:ty, $fn_add:ident, $fn_sub:ident);*;) => {
        $(
            fuzz_2(N, |x: $i, y: $i| {
                // Reference results from the standard library.
                let add0 = x.wrapping_add(y);
                let sub0 = x.wrapping_sub(y);
                // Results from the builtins under test.
                let add1: $i = $fn_add(x, y);
                let sub1: $i = $fn_sub(x, y);
                if add0 != add1 {
                    panic!(
                        "{}({}, {}): std: {}, builtins: {}",
                        stringify!($fn_add), x, y, add0, add1
                    );
                }
                if sub0 != sub1 {
                    panic!(
                        "{}({}, {}): std: {}, builtins: {}",
                        stringify!($fn_sub), x, y, sub0, sub1
                    );
                }
            });
        )*
    };
}
/// Like `sum!`, but for the overflowing variants: checks both the wrapped
/// value and the overflow flag against `overflowing_add` / `overflowing_sub`.
macro_rules! overflowing_sum {
    ($($i:ty, $fn_add:ident, $fn_sub:ident);*;) => {
        $(
            fuzz_2(N, |x: $i, y: $i| {
                let add0 = x.overflowing_add(y);
                let sub0 = x.overflowing_sub(y);
                let add1: ($i, bool) = $fn_add(x, y);
                let sub1: ($i, bool) = $fn_sub(x, y);
                if add0.0 != add1.0 || add0.1 != add1.1 {
                    panic!(
                        "{}({}, {}): std: {:?}, builtins: {:?}",
                        stringify!($fn_add), x, y, add0, add1
                    );
                }
                if sub0.0 != sub1.0 || sub0.1 != sub1.1 {
                    panic!(
                        "{}({}, {}): std: {:?}, builtins: {:?}",
                        stringify!($fn_sub), x, y, sub0, sub1
                    );
                }
            });
        )*
    };
}
#[test]
fn addsub() {
    use compiler_builtins::int::addsub::{
        __rust_i128_add, __rust_i128_addo, __rust_i128_sub, __rust_i128_subo, __rust_u128_add,
        __rust_u128_addo, __rust_u128_sub, __rust_u128_subo,
    };
    // Integer addition and subtraction is very simple, so the default number
    // of fuzzing passes (`N`, from testcrate) should be plenty.
    sum!(
        u128, __rust_u128_add, __rust_u128_sub;
        i128, __rust_i128_add, __rust_i128_sub;
    );
    overflowing_sum!(
        u128, __rust_u128_addo, __rust_u128_subo;
        i128, __rust_i128_addo, __rust_i128_subo;
    );
}
/// For each `(float type, add-fn, sub-fn)` triple, fuzz-checks the builtin
/// soft-float add/sub against the native `+` / `-` operators, comparing bit
/// representations (`eq_repr`) so that NaN payloads are compared exactly.
macro_rules! float_sum {
    ($($f:ty, $fn_add:ident, $fn_sub:ident);*;) => {
        $(
            fuzz_float_2(N, |x: $f, y: $f| {
                let add0 = x + y;
                let sub0 = x - y;
                let add1: $f = $fn_add(x, y);
                let sub1: $f = $fn_sub(x, y);
                if !Float::eq_repr(add0, add1) {
                    panic!(
                        "{}({}, {}): std: {}, builtins: {}",
                        stringify!($fn_add), x, y, add0, add1
                    );
                }
                if !Float::eq_repr(sub0, sub1) {
                    panic!(
                        "{}({}, {}): std: {}, builtins: {}",
                        stringify!($fn_sub), x, y, sub0, sub1
                    );
                }
            });
        )*
    };
}
// Skipped on x86 without SSE — presumably because x87 excess precision makes
// bit-exact comparison against the soft-float results fail; confirm upstream.
#[cfg(not(all(target_arch = "x86", not(target_feature = "sse"))))]
#[test]
fn float_addsub() {
    use compiler_builtins::float::{
        add::{__adddf3, __addsf3},
        sub::{__subdf3, __subsf3},
        Float,
    };
    float_sum!(
        f32, __addsf3, __subsf3;
        f64, __adddf3, __subdf3;
    );
}
| 30.872727 | 95 | 0.38987 |
bb2c2734583eb9d19e91e8d362963e7a241624f9
| 302 |
// Compile-fail (borrow-check) UI test: `to_fn_once` pins the closure to the
// `FnOnce` trait, so invoking it a second time must be rejected as a use of a
// moved value. Do not change the code — the `//~^ ERROR` annotation below is
// matched by the test harness.
#![feature(box_syntax, unboxed_closures)]
// Identity helper whose bound forces `F` to be treated as `FnOnce`.
fn to_fn_once<A,F:FnOnce<A>>(f: F) -> F { f }
// Only borrows its argument; the move error must come from calling `f`.
fn do_it(x: &isize) { }
fn main() {
    let x: Box<_> = box 22;
    let f = to_fn_once(move|| do_it(&*x));
    to_fn_once(move|| {
        f();
        f();
        //~^ ERROR: use of moved value: `f`
    })()
}
| 20.133333 | 45 | 0.5 |
f917df7da381738759558bd90c50d8d86e5e4cd1
| 7,597 |
/*******************************************************************************
* Copyright 2021 Stefan Majewsky <[email protected]>
* SPDX-License-Identifier: Apache-2.0
* Refer to the file "LICENSE" for details.
*******************************************************************************/
//! This file contains the type definitions for the database payload. Because we want the payload
//! format to be an implementation detail, the entire module is private and hence these types are
//! not part of the public API.
use crate::*;
use std::convert::TryInto;
use std::marker::PhantomData;
////////////////////////////////////////////////////////////////////////////////
// generic machinery for iterating over ALL_DATA
/// Types that can be unmarshaled from exactly `N` consecutive `u32` words of
/// the payload in `ALL_DATA`.
pub(crate) trait FromPayload<const N: usize> {
    /// Unmarshals a value of `Self` from `data`, which is
    /// `&ALL_DATA[offset..offset + N]` for some offset. Exactly `N` words are
    /// consumed; the count is the const parameter, not a return value.
    fn get(data: &[u32; N]) -> Self;
}
/// Lazy iterator over the `T` values stored in `ALL_DATA[start..end]`,
/// decoding one `T` per `N` u32 words.
#[derive(Clone, Copy, Debug)]
pub(crate) struct Range<T: FromPayload<N>, const N: usize> {
    // Offsets into ALL_DATA, measured in u32 words (not bytes).
    pub start: usize,
    pub end: usize,
    // Records the element type without storing a value.
    pub phantom: PhantomData<T>,
}
impl<T: FromPayload<N>, const N: usize> Range<T, N> {
    /// Builds a `Range` from u32 word offsets into the payload.
    pub(crate) fn new(start: u32, end: u32) -> Self {
        let start = start.try_into().unwrap();
        let end = end.try_into().unwrap();
        Self {
            start,
            end,
            phantom: PhantomData,
        }
    }
}
impl<T: FromPayload<N>, const N: usize> std::iter::Iterator for Range<T, N> {
    type Item = T;
    fn next(&mut self) -> Option<Self::Item> {
        if self.start < self.end {
            // Decode the next N u32 words into a T, then advance past them.
            let data = &as_u32_slice(ALL_DATA)[self.start..(self.start + N)];
            let item = T::get(data.try_into().unwrap());
            self.start += N;
            Some(item)
        } else {
            None
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact hint: each remaining item occupies exactly N words.
        let count = (self.end - self.start) / N;
        (count, Some(count))
    }
}
impl<T: FromPayload<N>, const N: usize> std::iter::ExactSizeIterator for Range<T, N> {
    /// Remaining item count: each item occupies exactly `N` u32 words.
    fn len(&self) -> usize {
        let remaining_words = self.end - self.start;
        remaining_words / N
    }
}
////////////////////////////////////////////////////////////////////////////////
// concrete types
/// Total number of entries in the database payload (one offset per entry).
pub(crate) fn entry_count() -> usize {
    as_u32_slice(ALL_ENTRY_OFFSETS).len()
}
/// Decodes the `Entry` stored at index `idx` of the entry-offset table.
pub(crate) fn get_entry(idx: usize) -> Entry {
    let offset: usize = as_u32_slice(ALL_ENTRY_OFFSETS)[idx].try_into().unwrap();
    // Entry header layout (4 u32 words):
    // [0] start and [1] end of the entry's element data in ALL_DATA,
    // [2] two packed 16-bit offsets relative to start (low half / high half),
    // [3] the entry's number.
    let data = &as_u32_slice(ALL_DATA)[offset..(offset + 4)];
    let (start, end) = (data[0], data[1]);
    let mid1 = start + (data[2] & 0x0000FFFF);
    let mid2 = start + ((data[2] & 0xFFFF0000) >> 16);
    Entry {
        number: data[3],
        // [start, mid1): kanji elements, [mid1, mid2): reading elements,
        // [mid2, end): senses.
        kanji_elements_iter: Range::new(start, mid1).into(),
        reading_elements_iter: Range::new(mid1, mid2).into(),
        senses_iter: Range::new(mid2, end).into(),
    }
}
impl FromPayload<5> for KanjiElement {
    /// Layout: [0] priority enum payload, [1]/[2] string reference handed to
    /// `get_str` (presumably start/end or start/len — confirm with `get_str`),
    /// [3]/[4] start/end of the info list.
    fn get(data: &[u32; 5]) -> Self {
        Self {
            priority: jmdict_enums::EnumPayload::from_u32(data[0]),
            text: get_str(data[1], data[2]),
            info_iter: Range::new(data[3], data[4]).into(),
        }
    }
}
impl FromPayload<1> for KanjiInfo {
    /// A single word holding the enum payload.
    fn get(data: &[u32; 1]) -> Self {
        jmdict_enums::EnumPayload::from_u32(data[0])
    }
}
impl FromPayload<5> for ReadingElement {
fn get(data: &[u32; 5]) -> Self {
Self {
priority: jmdict_enums::EnumPayload::from_u32(data[0]),
text: get_str(data[1], data[2]),
info_iter: Range::new(data[3], data[4]).into(),
}
}
}
impl FromPayload<1> for ReadingInfo {
fn get(data: &[u32; 1]) -> Self {
jmdict_enums::EnumPayload::from_u32(data[0])
}
}
impl FromPayload<5> for Sense {
fn get(data: &[u32; 5]) -> Self {
let (start, end) = (data[0], data[1]);
let mid1 = start + (data[2] & 0x000000FF);
let mid2 = start + ((data[2] & 0x0000FF00) >> 8);
let mid3 = start + ((data[2] & 0x00FF0000) >> 16);
let mid4 = start + ((data[2] & 0xFF000000) >> 24);
let mid5 = start + (data[3] & 0x000000FF);
let mid6 = start + ((data[3] & 0x0000FF00) >> 8);
let mid7 = start + ((data[3] & 0x00FF0000) >> 16);
let mid8 = start + ((data[3] & 0xFF000000) >> 24);
let mid9 = start + (data[4] & 0x000000FF);
let mid10 = start + ((data[4] & 0x0000FF00) >> 8);
Self {
stagk_iter: Range::new(start, mid1).into(),
stagr_iter: Range::new(mid1, mid2).into(),
pos_iter: Range::new(mid2, mid3).into(),
cross_refs_iter: Range::new(mid3, mid4).into(),
antonyms_iter: Range::new(mid4, mid5).into(),
topics_iter: Range::new(mid5, mid6).into(),
info_iter: Range::new(mid6, mid7).into(),
freetext_info_iter: Range::new(mid7, mid8).into(),
loanword_sources_iter: Range::new(mid8, mid9).into(),
dialects_iter: Range::new(mid9, mid10).into(),
glosses_iter: Range::new(mid10, end).into(),
}
}
}
impl FromPayload<1> for PartOfSpeech {
fn get(data: &[u32; 1]) -> Self {
jmdict_enums::EnumPayload::from_u32(data[0])
}
}
impl FromPayload<1> for SenseTopic {
fn get(data: &[u32; 1]) -> Self {
jmdict_enums::EnumPayload::from_u32(data[0])
}
}
impl FromPayload<1> for SenseInfo {
fn get(data: &[u32; 1]) -> Self {
jmdict_enums::EnumPayload::from_u32(data[0])
}
}
impl FromPayload<4> for LoanwordSource {
fn get(data: &[u32; 4]) -> Self {
Self {
text: get_str(data[0] & 0x0FFFFFFF, data[1]),
language: get_str(data[2], data[3]),
is_partial: (data[0] & 0x10000000) == 0x10000000,
is_wasei: (data[0] & 0x20000000) == 0x20000000,
}
}
}
impl FromPayload<1> for Dialect {
fn get(data: &[u32; 1]) -> Self {
jmdict_enums::EnumPayload::from_u32(data[0])
}
}
impl FromPayload<2> for Gloss {
fn get(data: &[u32; 2]) -> Self {
let lang_code = (data[0] & 0xF0000000) >> 28;
let type_code = (data[1] & 0xF0000000) >> 28;
Gloss {
text: get_str(data[0] & 0x0FFFFFFF, data[1] & 0x0FFFFFFF),
language: jmdict_enums::EnumPayload::from_u32(lang_code),
gloss_type: jmdict_enums::EnumPayload::from_u32(type_code),
}
}
}
impl FromPayload<2> for &'static str {
fn get(data: &[u32; 2]) -> Self {
get_str(data[0], data[1])
}
}
fn get_str(start: u32, end: u32) -> &'static str {
let start = start.try_into().unwrap();
let end = end.try_into().unwrap();
&ALL_TEXTS[start..end]
}
////////////////////////////////////////////////////////////////////////////////
// embedded data
//NOTE: We would only need 4-byte alignment, but 16-byte is the smallest alignment interval that
//the align_data crate offers.
//
//NOTE 2: as_u32_slice() cannot be made const because from_raw_parts() is not const, so we have to
//use it on every read access to the respective arrays.
use align_data::{include_aligned, Align16};
fn as_u32_slice(input: &'static [u8]) -> &'static [u32] {
unsafe {
let ptr = input.as_ptr() as *const u32;
std::slice::from_raw_parts(ptr, input.len() / 4)
}
}
static ALL_ENTRY_OFFSETS: &[u8] =
include_aligned!(Align16, concat!(env!("OUT_DIR"), "/entry_offsets.dat"));
static ALL_DATA: &[u8] = include_aligned!(Align16, concat!(env!("OUT_DIR"), "/payload.dat"));
static ALL_TEXTS: &str = include_str!(concat!(env!("OUT_DIR"), "/strings.txt"));
| 32.60515 | 98 | 0.555877 |
720dba8d6bf50c837a785e5ac0fdd22afc42639d
| 6,089 |
//! Infrastructure for `ruby` CLI.
//!
//! Exported as `ruby` and `artichoke` binaries.
use artichoke_backend::convert::Convert;
use artichoke_backend::eval::Context;
use artichoke_backend::fs;
use artichoke_backend::sys;
use artichoke_core::eval::Eval;
use artichoke_core::ArtichokeError;
use bstr::BStr;
use std::ffi::OsString;
use std::io::{self, Read};
use std::path::{Path, PathBuf};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(name = "artichoke", about = "Artichoke is a Ruby made with Rust.")]
struct Opt {
#[structopt(long)]
/// print the copyright
copyright: bool,
#[structopt(short = "e", parse(from_os_str))]
/// one line of script. Several -e's allowed. Omit [programfile]
commands: Vec<OsString>,
#[structopt(long = "with-fixture", parse(from_os_str))]
fixture: Option<PathBuf>,
#[structopt(parse(from_os_str))]
programfile: Option<PathBuf>,
}
/// Error from Ruby CLI frontend
pub enum Error {
/// Error from Artichoke interpreter.
Artichoke(ArtichokeError),
/// Fatal error from CLI internals.
Fail(String),
}
impl From<ArtichokeError> for Error {
fn from(err: ArtichokeError) -> Self {
Self::Artichoke(err)
}
}
impl From<String> for Error {
fn from(err: String) -> Self {
Self::Fail(err)
}
}
impl From<&'static str> for Error {
fn from(err: &'static str) -> Self {
Self::Fail(err.to_owned())
}
}
/// Main entrypoint for Artichoke's version of the `ruby` CLI.
pub fn entrypoint() -> Result<(), Error> {
let opt = Opt::from_args();
if opt.copyright {
let interp = artichoke_backend::interpreter()?;
interp.eval(b"puts RUBY_COPYRIGHT")?;
Ok(())
} else if !opt.commands.is_empty() {
execute_inline_eval(opt.commands, opt.fixture.as_ref().map(Path::new))
} else if let Some(programfile) = opt.programfile {
execute_program_file(programfile.as_path(), opt.fixture.as_ref().map(Path::new))
} else {
let mut program = Vec::new();
let result = io::stdin().read_to_end(&mut program);
if result.is_ok() {
let interp = artichoke_backend::interpreter()?;
interp.eval(program.as_slice())?;
Ok(())
} else {
Err(Error::from("Could not read program from STDIN"))
}
}
}
fn execute_inline_eval(commands: Vec<OsString>, fixture: Option<&Path>) -> Result<(), Error> {
let interp = artichoke_backend::interpreter()?;
interp.push_context(Context::new(b"-e".as_ref()));
if let Some(ref fixture) = fixture {
let data = std::fs::read(fixture).map_err(|_| {
if let Ok(file) = fs::osstr_to_bytes(&interp, fixture.as_os_str()) {
let file = format!("{:?}", <&BStr>::from(file));
format!(
"No such file or directory -- {} (LoadError)",
&file[1..file.len() - 1]
)
} else {
format!("No such file or directory -- {:?} (LoadError)", fixture)
}
})?;
let sym = interp.0.borrow_mut().sym_intern(b"$fixture".as_ref());
let mrb = interp.0.borrow().mrb;
let value = interp.convert(data);
unsafe {
sys::mrb_gv_set(mrb, sym, value.inner());
}
}
for command in commands {
if let Ok(command) = fs::osstr_to_bytes(&interp, command.as_os_str()) {
interp.eval(command)?;
} else {
return Err(Error::from(
"Unable to parse non-UTF-8 command line arguments on this platform",
));
}
}
Ok(())
}
fn execute_program_file(programfile: &Path, fixture: Option<&Path>) -> Result<(), Error> {
let interp = artichoke_backend::interpreter()?;
if let Some(ref fixture) = fixture {
let data = std::fs::read(fixture).map_err(|_| {
if let Ok(file) = fs::osstr_to_bytes(&interp, fixture.as_os_str()) {
let file = format!("{:?}", <&BStr>::from(file));
format!(
"No such file or directory -- {} (LoadError)",
&file[1..file.len() - 1]
)
} else {
format!("No such file or directory -- {:?} (LoadError)", fixture)
}
})?;
let sym = interp.0.borrow_mut().sym_intern(b"$fixture".as_ref());
let mrb = interp.0.borrow().mrb;
let value = interp.convert(data);
unsafe {
sys::mrb_gv_set(mrb, sym, value.inner());
}
}
let program = std::fs::read(programfile).map_err(|err| match err.kind() {
io::ErrorKind::NotFound => {
if let Ok(file) = fs::osstr_to_bytes(&interp, programfile.as_os_str()) {
let file = format!("{:?}", <&BStr>::from(file));
format!(
"No such file or directory -- {} (LoadError)",
&file[1..file.len() - 1]
)
} else {
format!("No such file or directory -- {:?} (LoadError)", programfile)
}
}
io::ErrorKind::PermissionDenied => {
if let Ok(file) = fs::osstr_to_bytes(&interp, programfile.as_os_str()) {
let file = format!("{:?}", <&BStr>::from(file));
format!(
"Permission denied -- {} (LoadError)",
&file[1..file.len() - 1]
)
} else {
format!("Permission denied -- {:?} (LoadError)", programfile)
}
}
_ => {
if let Ok(file) = fs::osstr_to_bytes(&interp, programfile.as_os_str()) {
let file = format!("{:?}", <&BStr>::from(file));
format!(
"Could not read file -- {} (LoadError)",
&file[1..file.len() - 1]
)
} else {
format!("Could not read file -- {:?} (LoadError)", programfile)
}
}
})?;
interp.eval(program.as_slice())?;
Ok(())
}
| 34.207865 | 94 | 0.534571 |
677aa42976c1fe4e93777c863741248d39c49b30
| 627 |
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct User {
#[prost(int32, tag="1")]
pub id: i32,
#[prost(message, optional, tag="2")]
pub created_at: ::std::option::Option<::prost_types::Timestamp>,
#[prost(message, optional, tag="3")]
pub last_login: ::std::option::Option<::prost_types::Timestamp>,
/// e.g. "17327706758"
#[prost(string, tag="4")]
pub phone: std::string::String,
/// e.g. "[email protected]"
#[prost(string, tag="5")]
pub email: std::string::String,
/// e.g. "boozinbuffalo"
#[prost(string, tag="6")]
pub username: std::string::String,
}
| 33 | 68 | 0.602871 |
69644211b8a11c750ed94a70c46a338f4a62a34a
| 267 |
// ignore-tidy-linelength
fn main() {
println!(“hello world”);
//~^ ERROR unknown start of token: \u{201c}
//~^^ HELP Unicode characters '“' (Left Double Quotation Mark) and '”' (Right Double Quotation Mark) look like '"' (Quotation Mark), but are not
}
| 33.375 | 148 | 0.64794 |
e912ef7b20202f6595d993709757eeed01987a3e
| 29,969 |
use std::fmt::{self, Display};
use rustc_errors::DiagnosticBuilder;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_middle::ty::print::RegionHighlightMode;
use rustc_middle::ty::subst::{GenericArgKind, SubstsRef};
use rustc_middle::ty::{self, RegionVid, Ty};
use rustc_span::symbol::kw;
use rustc_span::{symbol::Symbol, Span, DUMMY_SP};
use crate::borrow_check::{nll::ToRegionVid, universal_regions::DefiningTy, MirBorrowckCtxt};
/// A name for a particular region used in emitting diagnostics. This name could be a generated
/// name like `'1`, a name used by the user like `'a`, or a name like `'static`.
#[derive(Debug, Clone)]
crate struct RegionName {
/// The name of the region (interned).
crate name: Symbol,
/// Where the region comes from.
crate source: RegionNameSource,
}
/// Denotes the source of a region that is named by a `RegionName`. For example, a free region that
/// was named by the user would get `NamedFreeRegion` and `'static` lifetime would get `Static`.
/// This helps to print the right kinds of diagnostics.
#[derive(Debug, Clone)]
crate enum RegionNameSource {
/// A bound (not free) region that was substituted at the def site (not an HRTB).
NamedEarlyBoundRegion(Span),
/// A free region that the user has a name (`'a`) for.
NamedFreeRegion(Span),
/// The `'static` region.
Static,
/// The free region corresponding to the environment of a closure.
SynthesizedFreeEnvRegion(Span, String),
/// The region name corresponds to a region where the type annotation is completely missing
/// from the code, e.g. in a closure arguments `|x| { ... }`, where `x` is a reference.
CannotMatchHirTy(Span, String),
/// The region name corresponds a reference that was found by traversing the type in the HIR.
MatchedHirTy(Span),
/// A region name from the generics list of a struct/enum/union.
MatchedAdtAndSegment(Span),
/// The region corresponding to a closure upvar.
AnonRegionFromUpvar(Span, String),
/// The region corresponding to the return type of a closure.
AnonRegionFromOutput(Span, String, String),
/// The region from a type yielded by a generator.
AnonRegionFromYieldTy(Span, String),
/// An anonymous region from an async fn.
AnonRegionFromAsyncFn(Span),
}
impl RegionName {
crate fn was_named(&self) -> bool {
match self.source {
RegionNameSource::NamedEarlyBoundRegion(..)
| RegionNameSource::NamedFreeRegion(..)
| RegionNameSource::Static => true,
RegionNameSource::SynthesizedFreeEnvRegion(..)
| RegionNameSource::CannotMatchHirTy(..)
| RegionNameSource::MatchedHirTy(..)
| RegionNameSource::MatchedAdtAndSegment(..)
| RegionNameSource::AnonRegionFromUpvar(..)
| RegionNameSource::AnonRegionFromOutput(..)
| RegionNameSource::AnonRegionFromYieldTy(..)
| RegionNameSource::AnonRegionFromAsyncFn(..) => false,
}
}
crate fn highlight_region_name(&self, diag: &mut DiagnosticBuilder<'_>) {
match &self.source {
RegionNameSource::NamedFreeRegion(span)
| RegionNameSource::NamedEarlyBoundRegion(span) => {
diag.span_label(*span, format!("lifetime `{}` defined here", self));
}
RegionNameSource::SynthesizedFreeEnvRegion(span, note) => {
diag.span_label(
*span,
format!("lifetime `{}` represents this closure's body", self),
);
diag.note(¬e);
}
RegionNameSource::CannotMatchHirTy(span, type_name) => {
diag.span_label(*span, format!("has type `{}`", type_name));
}
RegionNameSource::MatchedHirTy(span)
| RegionNameSource::AnonRegionFromAsyncFn(span) => {
diag.span_label(
*span,
format!("let's call the lifetime of this reference `{}`", self),
);
}
RegionNameSource::MatchedAdtAndSegment(span) => {
diag.span_label(*span, format!("let's call this `{}`", self));
}
RegionNameSource::AnonRegionFromUpvar(span, upvar_name) => {
diag.span_label(
*span,
format!("lifetime `{}` appears in the type of `{}`", self, upvar_name),
);
}
RegionNameSource::AnonRegionFromOutput(span, mir_description, type_name) => {
diag.span_label(*span, format!("return type{} is {}", mir_description, type_name));
}
RegionNameSource::AnonRegionFromYieldTy(span, type_name) => {
diag.span_label(*span, format!("yield type is {}", type_name));
}
RegionNameSource::Static => {}
}
}
}
impl Display for RegionName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.name)
}
}
impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
/// Generate a synthetic region named `'N`, where `N` is the next value of the counter. Then,
/// increment the counter.
///
/// This is _not_ idempotent. Call `give_region_a_name` when possible.
fn synthesize_region_name(&self) -> Symbol {
let c = self.next_region_name.replace_with(|counter| *counter + 1);
Symbol::intern(&format!("'{:?}", c))
}
/// Maps from an internal MIR region vid to something that we can
/// report to the user. In some cases, the region vids will map
/// directly to lifetimes that the user has a name for (e.g.,
/// `'static`). But frequently they will not, in which case we
/// have to find some way to identify the lifetime to the user. To
/// that end, this function takes a "diagnostic" so that it can
/// create auxiliary notes as needed.
///
/// The names are memoized, so this is both cheap to recompute and idempotent.
///
/// Example (function arguments):
///
/// Suppose we are trying to give a name to the lifetime of the
/// reference `x`:
///
/// ```
/// fn foo(x: &u32) { .. }
/// ```
///
/// This function would create a label like this:
///
/// ```text
/// | fn foo(x: &u32) { .. }
/// ------- fully elaborated type of `x` is `&'1 u32`
/// ```
///
/// and then return the name `'1` for us to use.
crate fn give_region_a_name(&self, fr: RegionVid) -> Option<RegionName> {
debug!(
"give_region_a_name(fr={:?}, counter={:?})",
fr,
self.next_region_name.try_borrow().unwrap()
);
assert!(self.regioncx.universal_regions().is_universal_region(fr));
if let Some(value) = self.region_names.try_borrow_mut().unwrap().get(&fr) {
return Some(value.clone());
}
let value = self
.give_name_from_error_region(fr)
.or_else(|| self.give_name_if_anonymous_region_appears_in_arguments(fr))
.or_else(|| self.give_name_if_anonymous_region_appears_in_upvars(fr))
.or_else(|| self.give_name_if_anonymous_region_appears_in_output(fr))
.or_else(|| self.give_name_if_anonymous_region_appears_in_yield_ty(fr));
if let Some(ref value) = value {
self.region_names.try_borrow_mut().unwrap().insert(fr, value.clone());
}
debug!("give_region_a_name: gave name {:?}", value);
value
}
/// Checks for the case where `fr` maps to something that the
/// *user* has a name for. In that case, we'll be able to map
/// `fr` to a `Region<'tcx>`, and that region will be one of
/// named variants.
fn give_name_from_error_region(&self, fr: RegionVid) -> Option<RegionName> {
let error_region = self.to_error_region(fr)?;
let tcx = self.infcx.tcx;
debug!("give_region_a_name: error_region = {:?}", error_region);
match error_region {
ty::ReEarlyBound(ebr) => {
if ebr.has_name() {
let span = tcx.hir().span_if_local(ebr.def_id).unwrap_or(DUMMY_SP);
Some(RegionName {
name: ebr.name,
source: RegionNameSource::NamedEarlyBoundRegion(span),
})
} else {
None
}
}
ty::ReStatic => {
Some(RegionName { name: kw::StaticLifetime, source: RegionNameSource::Static })
}
ty::ReFree(free_region) => match free_region.bound_region {
ty::BoundRegion::BrNamed(region_def_id, name) => {
// Get the span to point to, even if we don't use the name.
let span = tcx.hir().span_if_local(region_def_id).unwrap_or(DUMMY_SP);
debug!(
"bound region named: {:?}, is_named: {:?}",
name,
free_region.bound_region.is_named()
);
if free_region.bound_region.is_named() {
// A named region that is actually named.
Some(RegionName { name, source: RegionNameSource::NamedFreeRegion(span) })
} else {
// If we spuriously thought that the region is named, we should let the
// system generate a true name for error messages. Currently this can
// happen if we have an elided name in an async fn for example: the
// compiler will generate a region named `'_`, but reporting such a name is
// not actually useful, so we synthesize a name for it instead.
let name = self.synthesize_region_name();
Some(RegionName {
name,
source: RegionNameSource::AnonRegionFromAsyncFn(span),
})
}
}
ty::BoundRegion::BrEnv => {
let mir_hir_id =
self.infcx.tcx.hir().as_local_hir_id(self.mir_def_id.expect_local());
let def_ty = self.regioncx.universal_regions().defining_ty;
if let DefiningTy::Closure(_, substs) = def_ty {
let args_span = if let hir::ExprKind::Closure(_, _, _, span, _) =
tcx.hir().expect_expr(mir_hir_id).kind
{
span
} else {
bug!("Closure is not defined by a closure expr");
};
let region_name = self.synthesize_region_name();
let closure_kind_ty = substs.as_closure().kind_ty();
let note = match closure_kind_ty.to_opt_closure_kind() {
Some(ty::ClosureKind::Fn) => {
"closure implements `Fn`, so references to captured variables \
can't escape the closure"
}
Some(ty::ClosureKind::FnMut) => {
"closure implements `FnMut`, so references to captured variables \
can't escape the closure"
}
Some(ty::ClosureKind::FnOnce) => {
bug!("BrEnv in a `FnOnce` closure");
}
None => bug!("Closure kind not inferred in borrow check"),
};
Some(RegionName {
name: region_name,
source: RegionNameSource::SynthesizedFreeEnvRegion(
args_span,
note.to_string(),
),
})
} else {
// Can't have BrEnv in functions, constants or generators.
bug!("BrEnv outside of closure.");
}
}
ty::BoundRegion::BrAnon(_) => None,
},
ty::ReLateBound(..)
| ty::ReVar(..)
| ty::RePlaceholder(..)
| ty::ReEmpty(_)
| ty::ReErased => None,
}
}
/// Finds an argument that contains `fr` and label it with a fully
/// elaborated type, returning something like `'1`. Result looks
/// like:
///
/// ```text
/// | fn foo(x: &u32) { .. }
/// ------- fully elaborated type of `x` is `&'1 u32`
/// ```
fn give_name_if_anonymous_region_appears_in_arguments(
&self,
fr: RegionVid,
) -> Option<RegionName> {
let implicit_inputs = self.regioncx.universal_regions().defining_ty.implicit_inputs();
let argument_index = self.regioncx.get_argument_index_for_region(self.infcx.tcx, fr)?;
let arg_ty = self.regioncx.universal_regions().unnormalized_input_tys
[implicit_inputs + argument_index];
if let Some(region_name) =
self.give_name_if_we_can_match_hir_ty_from_argument(fr, arg_ty, argument_index)
{
return Some(region_name);
}
self.give_name_if_we_cannot_match_hir_ty(fr, arg_ty)
}
fn give_name_if_we_can_match_hir_ty_from_argument(
&self,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
argument_index: usize,
) -> Option<RegionName> {
let mir_hir_id = self.infcx.tcx.hir().as_local_hir_id(self.mir_def_id.as_local()?);
let fn_decl = self.infcx.tcx.hir().fn_decl_by_hir_id(mir_hir_id)?;
let argument_hir_ty: &hir::Ty<'_> = fn_decl.inputs.get(argument_index)?;
match argument_hir_ty.kind {
// This indicates a variable with no type annotation, like
// `|x|`... in that case, we can't highlight the type but
// must highlight the variable.
// NOTE(eddyb) this is handled in/by the sole caller
// (`give_name_if_anonymous_region_appears_in_arguments`).
hir::TyKind::Infer => None,
_ => self.give_name_if_we_can_match_hir_ty(needle_fr, argument_ty, argument_hir_ty),
}
}
/// Attempts to highlight the specific part of a type in an argument
/// that has no type annotation.
/// For example, we might produce an annotation like this:
///
/// ```text
/// | foo(|a, b| b)
/// | - -
/// | | |
/// | | has type `&'1 u32`
/// | has type `&'2 u32`
/// ```
fn give_name_if_we_cannot_match_hir_ty(
&self,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
) -> Option<RegionName> {
let counter = *self.next_region_name.try_borrow().unwrap();
let mut highlight = RegionHighlightMode::default();
highlight.highlighting_region_vid(needle_fr, counter);
let type_name = self.infcx.extract_type_name(&argument_ty, Some(highlight)).0;
debug!(
"give_name_if_we_cannot_match_hir_ty: type_name={:?} needle_fr={:?}",
type_name, needle_fr
);
let assigned_region_name = if type_name.find(&format!("'{}", counter)).is_some() {
// Only add a label if we can confirm that a region was labelled.
let argument_index =
self.regioncx.get_argument_index_for_region(self.infcx.tcx, needle_fr)?;
let (_, span) = self.regioncx.get_argument_name_and_span_for_region(
&self.body,
&self.local_names,
argument_index,
);
Some(RegionName {
// This counter value will already have been used, so this function will increment
// it so the next value will be used next and return the region name that would
// have been used.
name: self.synthesize_region_name(),
source: RegionNameSource::CannotMatchHirTy(span, type_name),
})
} else {
None
};
assigned_region_name
}
/// Attempts to highlight the specific part of a type annotation
/// that contains the anonymous reference we want to give a name
/// to. For example, we might produce an annotation like this:
///
/// ```text
/// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item = &T>> {
/// | - let's call the lifetime of this reference `'1`
/// ```
///
/// the way this works is that we match up `argument_ty`, which is
/// a `Ty<'tcx>` (the internal form of the type) with
/// `argument_hir_ty`, a `hir::Ty` (the syntax of the type
/// annotation). We are descending through the types stepwise,
/// looking in to find the region `needle_fr` in the internal
/// type. Once we find that, we can use the span of the `hir::Ty`
/// to add the highlight.
///
/// This is a somewhat imperfect process, so along the way we also
/// keep track of the **closest** type we've found. If we fail to
/// find the exact `&` or `'_` to highlight, then we may fall back
/// to highlighting that closest type instead.
fn give_name_if_we_can_match_hir_ty(
&self,
needle_fr: RegionVid,
argument_ty: Ty<'tcx>,
argument_hir_ty: &hir::Ty<'_>,
) -> Option<RegionName> {
let search_stack: &mut Vec<(Ty<'tcx>, &hir::Ty<'_>)> =
&mut vec![(argument_ty, argument_hir_ty)];
while let Some((ty, hir_ty)) = search_stack.pop() {
match (&ty.kind, &hir_ty.kind) {
// Check if the `argument_ty` is `&'X ..` where `'X`
// is the region we are looking for -- if so, and we have a `&T`
// on the RHS, then we want to highlight the `&` like so:
//
// &
// - let's call the lifetime of this reference `'1`
(
ty::Ref(region, referent_ty, _),
hir::TyKind::Rptr(_lifetime, referent_hir_ty),
) => {
if region.to_region_vid() == needle_fr {
let region_name = self.synthesize_region_name();
// Just grab the first character, the `&`.
let source_map = self.infcx.tcx.sess.source_map();
let ampersand_span = source_map.start_point(hir_ty.span);
return Some(RegionName {
name: region_name,
source: RegionNameSource::MatchedHirTy(ampersand_span),
});
}
// Otherwise, let's descend into the referent types.
search_stack.push((referent_ty, &referent_hir_ty.ty));
}
// Match up something like `Foo<'1>`
(
ty::Adt(_adt_def, substs),
hir::TyKind::Path(hir::QPath::Resolved(None, path)),
) => {
match path.res {
// Type parameters of the type alias have no reason to
// be the same as those of the ADT.
// FIXME: We should be able to do something similar to
// match_adt_and_segment in this case.
Res::Def(DefKind::TyAlias, _) => (),
_ => {
if let Some(last_segment) = path.segments.last() {
if let Some(name) = self.match_adt_and_segment(
substs,
needle_fr,
last_segment,
search_stack,
) {
return Some(name);
}
}
}
}
}
// The following cases don't have lifetimes, so we
// just worry about trying to match up the rustc type
// with the HIR types:
(ty::Tuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => {
search_stack.extend(elem_tys.iter().map(|k| k.expect_ty()).zip(*elem_hir_tys));
}
(ty::Slice(elem_ty), hir::TyKind::Slice(elem_hir_ty))
| (ty::Array(elem_ty, _), hir::TyKind::Array(elem_hir_ty, _)) => {
search_stack.push((elem_ty, elem_hir_ty));
}
(ty::RawPtr(mut_ty), hir::TyKind::Ptr(mut_hir_ty)) => {
search_stack.push((mut_ty.ty, &mut_hir_ty.ty));
}
_ => {
// FIXME there are other cases that we could trace
}
}
}
None
}
/// We've found an enum/struct/union type with the substitutions
/// `substs` and -- in the HIR -- a path type with the final
/// segment `last_segment`. Try to find a `'_` to highlight in
/// the generic args (or, if not, to produce new zipped pairs of
/// types+hir to search through).
fn match_adt_and_segment<'hir>(
&self,
substs: SubstsRef<'tcx>,
needle_fr: RegionVid,
last_segment: &'hir hir::PathSegment<'hir>,
search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty<'hir>)>,
) -> Option<RegionName> {
// Did the user give explicit arguments? (e.g., `Foo<..>`)
let args = last_segment.args.as_ref()?;
let lifetime =
self.try_match_adt_and_generic_args(substs, needle_fr, args, search_stack)?;
match lifetime.name {
hir::LifetimeName::Param(_)
| hir::LifetimeName::Error
| hir::LifetimeName::Static
| hir::LifetimeName::Underscore => {
let region_name = self.synthesize_region_name();
let ampersand_span = lifetime.span;
Some(RegionName {
name: region_name,
source: RegionNameSource::MatchedAdtAndSegment(ampersand_span),
})
}
hir::LifetimeName::ImplicitObjectLifetimeDefault | hir::LifetimeName::Implicit => {
// In this case, the user left off the lifetime; so
// they wrote something like:
//
// ```
// x: Foo<T>
// ```
//
// where the fully elaborated form is `Foo<'_, '1,
// T>`. We don't consider this a match; instead we let
// the "fully elaborated" type fallback above handle
// it.
None
}
}
}
/// We've found an enum/struct/union type with the substitutions
/// `substs` and -- in the HIR -- a path with the generic
/// arguments `args`. If `needle_fr` appears in the args, return
/// the `hir::Lifetime` that corresponds to it. If not, push onto
/// `search_stack` the types+hir to search through.
fn try_match_adt_and_generic_args<'hir>(
&self,
substs: SubstsRef<'tcx>,
needle_fr: RegionVid,
args: &'hir hir::GenericArgs<'hir>,
search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty<'hir>)>,
) -> Option<&'hir hir::Lifetime> {
for (kind, hir_arg) in substs.iter().zip(args.args) {
match (kind.unpack(), hir_arg) {
(GenericArgKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => {
if r.to_region_vid() == needle_fr {
return Some(lt);
}
}
(GenericArgKind::Type(ty), hir::GenericArg::Type(hir_ty)) => {
search_stack.push((ty, hir_ty));
}
(GenericArgKind::Const(_ct), hir::GenericArg::Const(_hir_ct)) => {
// Lifetimes cannot be found in consts, so we don't need
// to search anything here.
}
(
GenericArgKind::Lifetime(_)
| GenericArgKind::Type(_)
| GenericArgKind::Const(_),
_,
) => {
// I *think* that HIR lowering should ensure this
// doesn't happen, even in erroneous
// programs. Else we should use delay-span-bug.
span_bug!(
hir_arg.span(),
"unmatched subst and hir arg: found {:?} vs {:?}",
kind,
hir_arg,
);
}
}
}
None
}
/// Finds a closure upvar that contains `fr` and label it with a
/// fully elaborated type, returning something like `'1`. Result
/// looks like:
///
/// ```text
/// | let x = Some(&22);
/// - fully elaborated type of `x` is `Option<&'1 u32>`
/// ```
fn give_name_if_anonymous_region_appears_in_upvars(&self, fr: RegionVid) -> Option<RegionName> {
let upvar_index = self.regioncx.get_upvar_index_for_region(self.infcx.tcx, fr)?;
let (upvar_name, upvar_span) = self.regioncx.get_upvar_name_and_span_for_region(
self.infcx.tcx,
&self.upvars,
upvar_index,
);
let region_name = self.synthesize_region_name();
Some(RegionName {
name: region_name,
source: RegionNameSource::AnonRegionFromUpvar(upvar_span, upvar_name.to_string()),
})
}
/// Checks for arguments appearing in the (closure) return type. It
/// must be a closure since, in a free fn, such an argument would
/// have to either also appear in an argument (if using elision)
/// or be early bound (named, not in argument).
fn give_name_if_anonymous_region_appears_in_output(&self, fr: RegionVid) -> Option<RegionName> {
let tcx = self.infcx.tcx;
let return_ty = self.regioncx.universal_regions().unnormalized_output_ty;
debug!("give_name_if_anonymous_region_appears_in_output: return_ty = {:?}", return_ty);
if !tcx.any_free_region_meets(&return_ty, |r| r.to_region_vid() == fr) {
return None;
}
let mut highlight = RegionHighlightMode::default();
highlight.highlighting_region_vid(fr, *self.next_region_name.try_borrow().unwrap());
let type_name = self.infcx.extract_type_name(&return_ty, Some(highlight)).0;
let mir_hir_id = tcx.hir().as_local_hir_id(self.mir_def_id.expect_local());
let (return_span, mir_description) = match tcx.hir().get(mir_hir_id) {
hir::Node::Expr(hir::Expr {
kind: hir::ExprKind::Closure(_, return_ty, _, span, gen_move),
..
}) => (
match return_ty.output {
hir::FnRetTy::DefaultReturn(_) => tcx.sess.source_map().end_point(*span),
hir::FnRetTy::Return(_) => return_ty.output.span(),
},
if gen_move.is_some() { " of generator" } else { " of closure" },
),
hir::Node::ImplItem(hir::ImplItem {
kind: hir::ImplItemKind::Fn(method_sig, _),
..
}) => (method_sig.decl.output.span(), ""),
_ => (self.body.span, ""),
};
Some(RegionName {
// This counter value will already have been used, so this function will increment it
// so the next value will be used next and return the region name that would have been
// used.
name: self.synthesize_region_name(),
source: RegionNameSource::AnonRegionFromOutput(
return_span,
mir_description.to_string(),
type_name,
),
})
}
fn give_name_if_anonymous_region_appears_in_yield_ty(
&self,
fr: RegionVid,
) -> Option<RegionName> {
// Note: generators from `async fn` yield `()`, so we don't have to
// worry about them here.
let yield_ty = self.regioncx.universal_regions().yield_ty?;
debug!("give_name_if_anonymous_region_appears_in_yield_ty: yield_ty = {:?}", yield_ty,);
let tcx = self.infcx.tcx;
if !tcx.any_free_region_meets(&yield_ty, |r| r.to_region_vid() == fr) {
return None;
}
let mut highlight = RegionHighlightMode::default();
highlight.highlighting_region_vid(fr, *self.next_region_name.try_borrow().unwrap());
let type_name = self.infcx.extract_type_name(&yield_ty, Some(highlight)).0;
let mir_hir_id = tcx.hir().as_local_hir_id(self.mir_def_id.expect_local());
let yield_span = match tcx.hir().get(mir_hir_id) {
hir::Node::Expr(hir::Expr {
kind: hir::ExprKind::Closure(_, _, _, span, _), ..
}) => (tcx.sess.source_map().end_point(*span)),
_ => self.body.span,
};
debug!(
"give_name_if_anonymous_region_appears_in_yield_ty: \
type_name = {:?}, yield_span = {:?}",
yield_span, type_name,
);
Some(RegionName {
name: self.synthesize_region_name(),
source: RegionNameSource::AnonRegionFromYieldTy(yield_span, type_name),
})
}
}
| 42.209859 | 100 | 0.535754 |
4882bf0446dccdd18e3ac37925af9ba053d1d7dd
| 1,935 |
extern crate phf_codegen;
use std::env;
use std::fs::File;
use std::io::{BufWriter, Write, BufRead, BufReader};
use std::path::Path;
/// Build script: translates `src/data/hmm.model` into generated Rust source
/// (`$OUT_DIR/hmm_prob.rs`) containing the HMM's initial, transition and
/// emission probability tables as static items.
///
/// The model file is parsed as three sections separated by `#` comment lines:
/// one line of initial probabilities, four lines of transition probabilities,
/// then four lines of `word:prob` pairs (comma separated) for emissions.
fn main() {
    let path = Path::new(&env::var("OUT_DIR").unwrap()).join("hmm_prob.rs");
    let hmm_file = File::open("src/data/hmm.model").expect("cannot open hmm.model");
    let mut file = BufWriter::new(File::create(&path).unwrap());
    let reader = BufReader::new(hmm_file);
    // Lazily iterate lines, skipping the leading comment header.
    let mut lines = reader.lines().map(|x| x.unwrap()).skip_while(|x| x.starts_with("#"))
    // First data line: space-separated initial state probabilities.
    let prob_start = lines.next().unwrap();
    write!(&mut file, "static INITIAL_PROBS: StatusSet = [").unwrap();
    for prob in prob_start.split(' ') {
        write!(&mut file, "{}, ", prob).unwrap();
    }
    write!(&mut file, "];\n\n").unwrap();
    // Second section: the 4x4 transition matrix, one row per line, delimited
    // by comment lines on both sides. `by_ref` keeps ownership of `lines` so
    // the remaining emission section can still be consumed below.
    write!(&mut file, "static TRANS_PROBS: [StatusSet; 4] = [").unwrap();
    for line in lines.by_ref().skip_while(|x| x.starts_with("#")).take_while(|x| !x.starts_with("#")) {
        write!(&mut file, "[").unwrap();
        for prob in line.split(' ') {
            write!(&mut file, "{}, ", prob).unwrap();
        }
        write!(&mut file, "],\n").unwrap();
    }
    write!(&mut file, "];\n\n").unwrap();
    // Third section: one emission map per state. `i` counts only data lines
    // (comment lines are skipped), naming EMIT_PROB_0..EMIT_PROB_3.
    let mut i = 0;
    for line in lines {
        if line.starts_with("#") {
            continue;
        }
        write!(&mut file, "static EMIT_PROB_{}: phf::Map<&'static str, f64> = ", i).unwrap();
        let mut map = phf_codegen::Map::new();
        // Each entry is `word:prob`; the prob string is emitted verbatim as
        // the Rust expression for the map value.
        for word_prob in line.split(',') {
            let mut parts = word_prob.split(':');
            let word = parts.next().unwrap();
            let prob = parts.next().unwrap();
            map.entry(word.to_string(), prob);
        }
        map.build(&mut file).unwrap();
        write!(&mut file, ";\n").unwrap();
        i += 1;
    }
    // NOTE(review): assumes the model file contains exactly 4 emission lines;
    // fewer would make EMIT_PROB_3 an undefined symbol — confirm data format.
    write!(&mut file, "static EMIT_PROBS: [&'static phf::Map<&'static str, f64>; 4] = [&EMIT_PROB_0, &EMIT_PROB_1, &EMIT_PROB_2, &EMIT_PROB_3];\n").unwrap();
}
| 40.3125 | 157 | 0.552972 |
bbea705a03a29306d162626057a296288cc2b871
| 87 |
mod query;
pub use crate::query::{PlatformRegistryQueryMsg, AddressBaseInfoResponse};
| 21.75 | 74 | 0.816092 |
2222c4d2bd59585aecac1763a77da6e6c711f1d4
| 26,630 |
/*
* MailSlurp API
*
* MailSlurp is an API for sending and receiving emails from dynamically allocated email addresses. It's designed for developers and QA teams to test applications, process inbound emails, send templated notifications, attachments, and more. ## Resources - [Homepage](https://www.mailslurp.com) - Get an [API KEY](https://app.mailslurp.com/sign-up/) - Generated [SDK Clients](https://www.mailslurp.com/docs/) - [Examples](https://github.com/mailslurp/examples) repository
*
* The version of the OpenAPI document: 6.5.2
*
* Generated by: https://openapi-generator.tech
*/
use reqwest;
use crate::apis::ResponseContent;
use super::{Error, configuration};
/// Parameters for the [`wait_for`] method.
#[derive(Clone, Debug)]
pub struct WaitForParams {
    /// Conditions to apply to emails that you are waiting for
    pub wait_for_conditions: Option<crate::models::WaitForConditions>
}
/// Parameters for the [`wait_for_email_count`] method.
#[derive(Clone, Debug)]
pub struct WaitForEmailCountParams {
    /// Filter for emails that were received before the given timestamp
    pub before: Option<String>,
    /// Number of emails to wait for. Must be greater that 1
    pub count: Option<i32>,
    /// Max milliseconds delay between calls
    pub delay: Option<i64>,
    /// Id of the inbox we are fetching emails from
    pub inbox_id: Option<String>,
    /// Filter for emails that were received after the given timestamp
    pub since: Option<String>,
    /// Sort direction
    pub sort: Option<String>,
    /// Max milliseconds to wait
    pub timeout: Option<i64>,
    /// Optional filter for unread only
    pub unread_only: Option<bool>
}
/// Parameters for the [`wait_for_latest_email`] method.
#[derive(Clone, Debug)]
pub struct WaitForLatestEmailParams {
    /// Filter for emails that were before after the given timestamp
    pub before: Option<String>,
    /// Max milliseconds delay between calls
    pub delay: Option<i64>,
    /// Id of the inbox we are fetching emails from
    pub inbox_id: Option<String>,
    /// Filter for emails that were received after the given timestamp
    pub since: Option<String>,
    /// Sort direction
    pub sort: Option<String>,
    /// Max milliseconds to wait
    pub timeout: Option<i64>,
    /// Optional filter for unread only.
    pub unread_only: Option<bool>
}
/// Parameters for the [`wait_for_matching_emails`] method.
#[derive(Clone, Debug)]
pub struct WaitForMatchingEmailsParams {
    /// matchOptions — patterns the matched emails must satisfy (sent as the POST body)
    pub match_options: crate::models::MatchOptions,
    /// Filter for emails that were received before the given timestamp
    pub before: Option<String>,
    /// Number of emails to wait for. Must be greater or equal to 1
    pub count: Option<i32>,
    /// Max milliseconds delay between calls
    pub delay: Option<i64>,
    /// Id of the inbox we are fetching emails from
    pub inbox_id: Option<String>,
    /// Filter for emails that were received after the given timestamp
    pub since: Option<String>,
    /// Sort direction
    pub sort: Option<String>,
    /// Max milliseconds to wait
    pub timeout: Option<i64>,
    /// Optional filter for unread only
    pub unread_only: Option<bool>
}
/// Parameters for the [`wait_for_matching_first_email`] method.
#[derive(Clone, Debug)]
pub struct WaitForMatchingFirstEmailParams {
    /// matchOptions — patterns the matched email must satisfy (sent as the POST body)
    pub match_options: crate::models::MatchOptions,
    /// Filter for emails that were received before the given timestamp
    pub before: Option<String>,
    /// Max milliseconds delay between calls
    pub delay: Option<i64>,
    /// Id of the inbox we are matching an email for
    pub inbox_id: Option<String>,
    /// Filter for emails that were received after the given timestamp
    pub since: Option<String>,
    /// Sort direction
    pub sort: Option<String>,
    /// Max milliseconds to wait
    pub timeout: Option<i64>,
    /// Optional filter for unread only
    pub unread_only: Option<bool>
}
/// Parameters for the [`wait_for_nth_email`] method.
#[derive(Clone, Debug)]
pub struct WaitForNthEmailParams {
    /// Filter for emails that were received before the given timestamp
    pub before: Option<String>,
    /// Max milliseconds delay between calls
    pub delay: Option<i64>,
    /// Id of the inbox you are fetching emails from
    pub inbox_id: Option<String>,
    /// Zero based index of the email to wait for. If an inbox has 1 email already and you want to wait for the 2nd email pass index=1
    pub index: Option<i32>,
    /// Filter for emails that were received after the given timestamp
    pub since: Option<String>,
    /// Sort direction
    pub sort: Option<String>,
    /// Max milliseconds to wait for the nth email if not already present
    pub timeout: Option<i64>,
    /// Optional filter for unread only
    pub unread_only: Option<bool>
}
/// Typed error variants returned by [`wait_for`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Typed error variants returned by [`wait_for_email_count`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForEmailCountError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Typed error variants returned by [`wait_for_latest_email`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForLatestEmailError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Typed error variants returned by [`wait_for_matching_emails`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForMatchingEmailsError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Typed error variants returned by [`wait_for_matching_first_email`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForMatchingFirstEmailError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Typed error variants returned by [`wait_for_nth_email`].
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum WaitForNthEmailError {
    /// HTTP 401 Unauthorized
    Status401(),
    /// HTTP 403 Forbidden
    Status403(),
    /// HTTP 404 Not Found
    Status404(),
    /// Any response body that does not match one of the known status payloads.
    UnknownValue(serde_json::Value),
}
/// Generic waitFor method that will wait until an inbox meets given conditions or return immediately if already met
///
/// Issues `POST {base_path}/waitFor` with `wait_for_conditions` as the JSON
/// body and deserializes the response into a list of email previews.
pub async fn wait_for(configuration: &configuration::Configuration, params: WaitForParams) -> Result<Vec<crate::models::EmailPreview>, Error<WaitForError>> {
    // unbox the parameters
    let wait_for_conditions = params.wait_for_conditions;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitFor", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    local_var_req_builder = local_var_req_builder.json(&wait_for_conditions);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error
    // carrying the raw body alongside any recognized error entity.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// If inbox contains count or more emails at time of request then return count worth of emails. If not wait until the count is reached and return those or return an error if timeout is exceeded.
///
/// Issues `GET {base_path}/waitForEmailCount`; every `Some` field in `params`
/// becomes a query-string parameter.
pub async fn wait_for_email_count(configuration: &configuration::Configuration, params: WaitForEmailCountParams) -> Result<Vec<crate::models::EmailPreview>, Error<WaitForEmailCountError>> {
    // unbox the parameters
    let before = params.before;
    let count = params.count;
    let delay = params.delay;
    let inbox_id = params.inbox_id;
    let since = params.since;
    let sort = params.sort;
    let timeout = params.timeout;
    let unread_only = params.unread_only;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitForEmailCount", configuration.base_path);
    let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
    // Append each optional parameter to the query string when present.
    if let Some(ref local_var_str) = before {
        local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = count {
        local_var_req_builder = local_var_req_builder.query(&[("count", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = delay {
        local_var_req_builder = local_var_req_builder.query(&[("delay", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = inbox_id {
        local_var_req_builder = local_var_req_builder.query(&[("inboxId", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = since {
        local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = sort {
        local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = timeout {
        local_var_req_builder = local_var_req_builder.query(&[("timeout", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = unread_only {
        local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
    }
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForEmailCountError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Will return either the last received email or wait for an email to arrive and return that. If you need to wait for an email for a non-empty inbox set `unreadOnly=true` or see the other receive methods such as `waitForNthEmail` or `waitForEmailCount`.
///
/// Issues `GET {base_path}/waitForLatestEmail`; every `Some` field in
/// `params` becomes a query-string parameter.
pub async fn wait_for_latest_email(configuration: &configuration::Configuration, params: WaitForLatestEmailParams) -> Result<crate::models::Email, Error<WaitForLatestEmailError>> {
    // unbox the parameters
    let before = params.before;
    let delay = params.delay;
    let inbox_id = params.inbox_id;
    let since = params.since;
    let sort = params.sort;
    let timeout = params.timeout;
    let unread_only = params.unread_only;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitForLatestEmail", configuration.base_path);
    let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
    // Append each optional parameter to the query string when present.
    if let Some(ref local_var_str) = before {
        local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = delay {
        local_var_req_builder = local_var_req_builder.query(&[("delay", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = inbox_id {
        local_var_req_builder = local_var_req_builder.query(&[("inboxId", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = since {
        local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = sort {
        local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = timeout {
        local_var_req_builder = local_var_req_builder.query(&[("timeout", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = unread_only {
        local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
    }
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForLatestEmailError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Perform a search of emails in an inbox with the given patterns. If results match expected count then return or else retry the search until results are found or timeout is reached. Match options allow simple CONTAINS or EQUALS filtering on SUBJECT, TO, BCC, CC, and FROM. See the `MatchOptions` object for options. An example payload is `{ matches: [{field: 'SUBJECT',should:'CONTAIN',value:'needle'}] }`. You can use an array of matches and they will be applied sequentially to filter out emails. If you want to perform matches and extractions of content using Regex patterns see the EmailController `getEmailContentMatch` method.
///
/// Issues `POST {base_path}/waitForMatchingEmails` with `match_options` as
/// the JSON body; every `Some` field in `params` becomes a query parameter.
pub async fn wait_for_matching_emails(configuration: &configuration::Configuration, params: WaitForMatchingEmailsParams) -> Result<Vec<crate::models::EmailPreview>, Error<WaitForMatchingEmailsError>> {
    // unbox the parameters
    let match_options = params.match_options;
    let before = params.before;
    let count = params.count;
    let delay = params.delay;
    let inbox_id = params.inbox_id;
    let since = params.since;
    let sort = params.sort;
    let timeout = params.timeout;
    let unread_only = params.unread_only;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitForMatchingEmails", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    // Append each optional parameter to the query string when present.
    if let Some(ref local_var_str) = before {
        local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = count {
        local_var_req_builder = local_var_req_builder.query(&[("count", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = delay {
        local_var_req_builder = local_var_req_builder.query(&[("delay", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = inbox_id {
        local_var_req_builder = local_var_req_builder.query(&[("inboxId", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = since {
        local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = sort {
        local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = timeout {
        local_var_req_builder = local_var_req_builder.query(&[("timeout", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = unread_only {
        local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
    }
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    local_var_req_builder = local_var_req_builder.json(&match_options);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForMatchingEmailsError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// Perform a search of emails in an inbox with the given patterns. If a result if found then return or else retry the search until a result is found or timeout is reached. Match options allow simple CONTAINS or EQUALS filtering on SUBJECT, TO, BCC, CC, and FROM. See the `MatchOptions` object for options. An example payload is `{ matches: [{field: 'SUBJECT',should:'CONTAIN',value:'needle'}] }`. You can use an array of matches and they will be applied sequentially to filter out emails. If you want to perform matches and extractions of content using Regex patterns see the EmailController `getEmailContentMatch` method.
///
/// Issues `POST {base_path}/waitForMatchingFirstEmail` with `match_options`
/// as the JSON body; every `Some` field in `params` becomes a query parameter.
pub async fn wait_for_matching_first_email(configuration: &configuration::Configuration, params: WaitForMatchingFirstEmailParams) -> Result<crate::models::Email, Error<WaitForMatchingFirstEmailError>> {
    // unbox the parameters
    let match_options = params.match_options;
    let before = params.before;
    let delay = params.delay;
    let inbox_id = params.inbox_id;
    let since = params.since;
    let sort = params.sort;
    let timeout = params.timeout;
    let unread_only = params.unread_only;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitForMatchingFirstEmail", configuration.base_path);
    let mut local_var_req_builder = local_var_client.post(local_var_uri_str.as_str());
    // Append each optional parameter to the query string when present.
    if let Some(ref local_var_str) = before {
        local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = delay {
        local_var_req_builder = local_var_req_builder.query(&[("delay", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = inbox_id {
        local_var_req_builder = local_var_req_builder.query(&[("inboxId", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = since {
        local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = sort {
        local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = timeout {
        local_var_req_builder = local_var_req_builder.query(&[("timeout", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = unread_only {
        local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
    }
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    local_var_req_builder = local_var_req_builder.json(&match_options);
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForMatchingFirstEmailError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
/// If nth email is already present in inbox then return it. If not hold the connection open until timeout expires or the nth email is received and returned.
///
/// Issues `GET {base_path}/waitForNthEmail`; every `Some` field in `params`
/// becomes a query-string parameter.
pub async fn wait_for_nth_email(configuration: &configuration::Configuration, params: WaitForNthEmailParams) -> Result<crate::models::Email, Error<WaitForNthEmailError>> {
    // unbox the parameters
    let before = params.before;
    let delay = params.delay;
    let inbox_id = params.inbox_id;
    let index = params.index;
    let since = params.since;
    let sort = params.sort;
    let timeout = params.timeout;
    let unread_only = params.unread_only;
    let local_var_client = &configuration.client;
    let local_var_uri_str = format!("{}/waitForNthEmail", configuration.base_path);
    let mut local_var_req_builder = local_var_client.get(local_var_uri_str.as_str());
    // Append each optional parameter to the query string when present.
    if let Some(ref local_var_str) = before {
        local_var_req_builder = local_var_req_builder.query(&[("before", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = delay {
        local_var_req_builder = local_var_req_builder.query(&[("delay", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = inbox_id {
        local_var_req_builder = local_var_req_builder.query(&[("inboxId", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = index {
        local_var_req_builder = local_var_req_builder.query(&[("index", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = since {
        local_var_req_builder = local_var_req_builder.query(&[("since", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = sort {
        local_var_req_builder = local_var_req_builder.query(&[("sort", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = timeout {
        local_var_req_builder = local_var_req_builder.query(&[("timeout", &local_var_str.to_string())]);
    }
    if let Some(ref local_var_str) = unread_only {
        local_var_req_builder = local_var_req_builder.query(&[("unreadOnly", &local_var_str.to_string())]);
    }
    // Optional User-Agent header from the configuration.
    if let Some(ref local_var_user_agent) = configuration.user_agent {
        local_var_req_builder = local_var_req_builder.header(reqwest::header::USER_AGENT, local_var_user_agent.clone());
    }
    // API-key authentication: "x-api-key" header, optionally prefixed.
    if let Some(ref local_var_apikey) = configuration.api_key {
        let local_var_key = local_var_apikey.key.clone();
        let local_var_value = match local_var_apikey.prefix {
            Some(ref local_var_prefix) => format!("{} {}", local_var_prefix, local_var_key),
            None => local_var_key,
        };
        local_var_req_builder = local_var_req_builder.header("x-api-key", local_var_value);
    };
    let local_var_req = local_var_req_builder.build()?;
    let local_var_resp = local_var_client.execute(local_var_req).await?;
    let local_var_status = local_var_resp.status();
    let local_var_content = local_var_resp.text().await?;
    // Non-4xx/5xx: parse the success payload; otherwise build a typed error.
    if !local_var_status.is_client_error() && !local_var_status.is_server_error() {
        serde_json::from_str(&local_var_content).map_err(Error::from)
    } else {
        let local_var_entity: Option<WaitForNthEmailError> = serde_json::from_str(&local_var_content).ok();
        let local_var_error = ResponseContent { status: local_var_status, content: local_var_content, entity: local_var_entity };
        Err(Error::ResponseError(local_var_error))
    }
}
| 46.637478 | 634 | 0.716748 |
625af14311d0005500c15cf8654a2ce9b858c2e3
| 1,269 |
// Copyright 2017, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use std::convert::TryFrom;
use std::fmt;
use std::ops::Deref;
use enums::{Format, SurfaceType};
use error::Error;
use ffi;
#[cfg(feature = "use_glib")]
use glib::translate::*;
use surface::Surface;
use ffi::CGContextRef;
// Expands to the `QuartzSurface` wrapper type plus its `Surface` conversions
// (macro presumably defined elsewhere in this crate — see `declare_surface!`).
declare_surface!(QuartzSurface, SurfaceType::Quartz);
impl QuartzSurface {
    /// Creates a standalone Quartz surface of the given pixel `format` and
    /// dimensions via `cairo_quartz_surface_create`.
    ///
    /// Returns `Err` if cairo reports the new surface is in an error state
    /// (checked by `from_raw_full`).
    pub fn create(format: Format, width: u32, height: u32) -> Result<QuartzSurface, Error> {
        unsafe {
            Self::from_raw_full(ffi::cairo_quartz_surface_create(
                format.into(),
                width,
                height,
            ))
        }
    }
    /// Wraps an existing CoreGraphics context in a cairo surface of the given
    /// size via `cairo_quartz_surface_create_for_cg_context`.
    // NOTE(review): ownership/retain semantics of `cg_context` are dictated by
    // the C API — confirm the caller must keep the context alive.
    pub fn create_for_cg_context(
        cg_context: CGContextRef,
        width: u32,
        height: u32,
    ) -> Result<QuartzSurface, Error> {
        unsafe {
            Self::from_raw_full(ffi::cairo_quartz_surface_create_for_cg_context(
                cg_context, width, height,
            ))
        }
    }
    /// Returns the CoreGraphics context backing this surface.
    pub fn get_cg_context(&self) -> CGContextRef {
        unsafe { ffi::cairo_quartz_surface_get_cg_context(self.to_raw_none()) }
    }
}
| 27 | 95 | 0.628842 |
28422c60164bd499b386a91174f92413eb0ccaef
| 1,036 |
use crate::interpreter::Value;
use crate::Identifier;
use std::collections::HashMap;
use std::rc::Rc;
/// The `Environment` contains all variable and function bindings.
#[derive(Default)]
pub struct Environment {
    // Maps identifiers to reference-counted values; `Rc` lets lookups hand
    // out cheap clones without copying the underlying `Value`.
    env: HashMap<Identifier, Rc<Value>>,
}
impl Environment {
    /// Create a new, empty `Environment`.
    pub fn new() -> Environment {
        Environment::default()
    }
    /// Get a reference to a variable from the `Environment`.
    ///
    /// Returns a clone of the stored `Rc` (a refcount bump, not a deep copy),
    /// or `None` if `key` is unbound.
    pub fn get(&self, key: &Identifier) -> Option<Rc<Value>> {
        // `.cloned()` is the idiomatic form of `.map(Clone::clone)`.
        self.env.get(key).cloned()
    }
    /// Take a value from the `Environment`, getting ownership of the value and
    /// removing the variable from the `Environment`.
    pub fn take(&mut self, key: &Identifier) -> Option<Rc<Value>> {
        self.env.remove(key)
    }
    /// Set a variable in the `Environment` and returns the previous value of
    /// that variable, if any.
    pub fn set(&mut self, key: Identifier, value: Rc<Value>) -> Option<Rc<Value>> {
        self.env.insert(key, value)
    }
}
| 29.6 | 95 | 0.642857 |
331feba202b357e725998f4b6e7ba66300226d9d
| 13,871 |
// Copyright 2018-2020 the Deno authors. All rights reserved. MIT license.
pub use anyhow::anyhow;
pub use anyhow::bail;
pub use anyhow::Context;
use rusty_v8 as v8;
use std::borrow::Cow;
use std::convert::TryFrom;
use std::convert::TryInto;
use std::error::Error;
use std::fmt;
use std::fmt::Debug;
use std::fmt::Display;
use std::fmt::Formatter;
use std::io;
/// A generic wrapper that can encapsulate any concrete error type.
///
/// Alias for [`anyhow::Error`]; constructed via the helper functions in this
/// module (e.g. `custom_error`, `type_error`).
pub type AnyError = anyhow::Error;
/// Creates a new error with a caller-specified error class name and message.
pub fn custom_error(
class: &'static str,
message: impl Into<Cow<'static, str>>,
) -> AnyError {
CustomError {
class,
message: message.into(),
}
.into()
}
/// Creates an error with JS class `Error` and the given message.
pub fn generic_error(message: impl Into<Cow<'static, str>>) -> AnyError {
  custom_error("Error", message)
}
/// Creates an error with JS class `TypeError` and the given message.
pub fn type_error(message: impl Into<Cow<'static, str>>) -> AnyError {
  custom_error("TypeError", message)
}
/// Creates an error with JS class `URIError` and the given message.
pub fn uri_error(message: impl Into<Cow<'static, str>>) -> AnyError {
  custom_error("URIError", message)
}
/// Wraps the most recent OS error (`io::Error::last_os_error`) in an `AnyError`.
pub fn last_os_error() -> AnyError {
  io::Error::last_os_error().into()
}
/// Creates an error with class `BadResource` and a caller-supplied message.
pub fn bad_resource(message: impl Into<Cow<'static, str>>) -> AnyError {
  custom_error("BadResource", message)
}
/// Creates a `BadResource` error with the fixed message "Bad resource ID".
pub fn bad_resource_id() -> AnyError {
  custom_error("BadResource", "Bad resource ID")
}
/// Creates a `NotSupported` error for unimplemented/unsupported operations.
pub fn not_supported() -> AnyError {
  custom_error("NotSupported", "The operation is not supported")
}
/// Creates a `Busy` error indicating a resource is held by a pending promise.
pub fn resource_unavailable() -> AnyError {
  custom_error(
    "Busy",
    "Resource is unavailable because it is in use by a promise",
  )
}
/// A simple error type that lets the creator specify both the error message and
/// the error class name. This type is private; externally it only ever appears
/// wrapped in an `AnyError`. To retrieve the error class name from a wrapped
/// `CustomError`, use the function `get_custom_error_class()`.
#[derive(Debug)]
struct CustomError {
  // JS error class name, e.g. "TypeError"; 'static because classes are
  // compile-time string literals.
  class: &'static str,
  // Human-readable message; `Cow` avoids allocating for literal messages.
  message: Cow<'static, str>,
}
/// Displays only the message; the class name is exposed separately via
/// `get_custom_error_class()`.
impl Display for CustomError {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    write!(f, "{}", self.message)
  }
}
// No source/backtrace to expose; the default `Error` methods suffice.
impl Error for CustomError {}
/// If this error was crated with `custom_error()`, return the specified error
/// class name. In all other cases this function returns `None`.
pub fn get_custom_error_class(error: &AnyError) -> Option<&'static str> {
error.downcast_ref::<CustomError>().map(|e| e.class)
}
/// A `JsError` represents an exception coming from V8, with stack frames and
/// line numbers. The deno_cli crate defines another `JsError` type, which wraps
/// the one defined here, that adds source map support and colorful formatting.
#[derive(Debug, PartialEq, Clone)]
pub struct JsError {
  pub message: String,
  // Source text of the offending line, when V8 provides it.
  pub source_line: Option<String>,
  // Resource (file/module) name of the script that threw.
  pub script_resource_name: Option<String>,
  pub line_number: Option<i64>,
  pub start_column: Option<i64>, // 0-based
  pub end_column: Option<i64>, // 0-based
  // Parsed stack frames, outermost call last.
  // NOTE(review): ordering is determined by V8's stack capture — confirm.
  pub frames: Vec<JsStackFrame>,
  // Raw `error.stack` string as produced by V8, if available.
  pub stack: Option<String>,
}
/// A single frame of a JavaScript stack trace. Field names mirror the V8
/// CallSite accessors (presumably populated from `error.__callSiteEvals` —
/// confirm against the construction site).
#[derive(Debug, PartialEq, Clone)]
pub struct JsStackFrame {
  pub type_name: Option<String>,
  pub function_name: Option<String>,
  pub method_name: Option<String>,
  pub file_name: Option<String>,
  pub line_number: Option<i64>,
  pub column_number: Option<i64>,
  pub eval_origin: Option<String>,
  pub is_top_level: Option<bool>,
  pub is_eval: bool,
  pub is_native: bool,
  pub is_constructor: bool,
  pub is_async: bool,
  pub is_promise_all: bool,
  pub promise_index: Option<i64>,
}
impl JsStackFrame {
pub fn from_location(
file_name: Option<String>,
line_number: Option<i64>,
column_number: Option<i64>,
) -> Self {
Self {
type_name: None,
function_name: None,
method_name: None,
file_name,
line_number,
column_number,
eval_origin: None,
is_top_level: None,
is_eval: false,
is_native: false,
is_constructor: false,
is_async: false,
is_promise_all: false,
promise_index: None,
}
}
}
/// Looks up `key` on a V8 object, returning the property value (or `None` if
/// the lookup itself fails).
fn get_property<'a>(
  scope: &mut v8::HandleScope<'a>,
  object: v8::Local<v8::Object>,
  key: &str,
) -> Option<v8::Local<'a, v8::Value>> {
  let v8_key = v8::String::new(scope, key).unwrap();
  object.get(scope, v8_key.into())
}
impl JsError {
  /// Wraps an already-built `JsError` into an `AnyError`.
  pub(crate) fn create(js_error: Self) -> AnyError {
    js_error.into()
  }

  /// Converts a raw V8 exception value into a structured `JsError`.
  ///
  /// For native JS `Error` objects this formats `error.name`/`error.message`,
  /// captures the `stack` string, and reads the structured frames stored on
  /// `error.__callSiteEvals` (populated by `prepareStackTrace()`). Any other
  /// thrown value falls back to V8's formatted exception message with no
  /// frames.
  pub fn from_v8_exception(
    scope: &mut v8::HandleScope,
    exception: v8::Local<v8::Value>,
  ) -> Self {
    // Create a new HandleScope because we're creating a lot of new local
    // handles below.
    let scope = &mut v8::HandleScope::new(scope);
    let msg = v8::Exception::create_message(scope, exception);
    let (message, frames, stack) = if exception.is_native_error() {
      // The exception is a JS Error object.
      let exception: v8::Local<v8::Object> =
        exception.clone().try_into().unwrap();
      // Get the message by formatting error.name and error.message.
      let name = get_property(scope, exception, "name")
        .filter(|v| !v.is_undefined())
        .and_then(|m| m.to_string(scope))
        .map(|s| s.to_rust_string_lossy(scope))
        .unwrap_or_else(|| "Error".to_string());
      let message_prop = get_property(scope, exception, "message")
        .filter(|v| !v.is_undefined())
        .and_then(|m| m.to_string(scope))
        .map(|s| s.to_rust_string_lossy(scope))
        .unwrap_or_else(|| "".to_string());
      let message = if !name.is_empty() && !message_prop.is_empty() {
        format!("Uncaught {}: {}", name, message_prop)
      } else if !name.is_empty() {
        format!("Uncaught {}", name)
      } else if !message_prop.is_empty() {
        format!("Uncaught {}", message_prop)
      } else {
        "Uncaught".to_string()
      };
      // Access error.stack to ensure that prepareStackTrace() has been called.
      // This should populate error.__callSiteEvals.
      let stack: Option<v8::Local<v8::String>> =
        get_property(scope, exception, "stack")
          .unwrap()
          .try_into()
          .ok();
      let stack = stack.map(|s| s.to_rust_string_lossy(scope));
      // FIXME(bartlomieju): the rest of this function is CLI only
      // Read an array of structured frames from error.__callSiteEvals.
      let frames_v8 = get_property(scope, exception, "__callSiteEvals");
      let frames_v8: Option<v8::Local<v8::Array>> =
        frames_v8.and_then(|a| a.try_into().ok());
      // Convert them into a Vec<JsStackFrame>.
      let mut frames: Vec<JsStackFrame> = vec![];
      if let Some(frames_v8) = frames_v8 {
        for i in 0..frames_v8.length() {
          let call_site: v8::Local<v8::Object> =
            frames_v8.get_index(scope, i).unwrap().try_into().unwrap();
          // Optional string-valued CallSite properties.
          let type_name: Option<v8::Local<v8::String>> =
            get_property(scope, call_site, "typeName")
              .unwrap()
              .try_into()
              .ok();
          let type_name = type_name.map(|s| s.to_rust_string_lossy(scope));
          let function_name: Option<v8::Local<v8::String>> =
            get_property(scope, call_site, "functionName")
              .unwrap()
              .try_into()
              .ok();
          let function_name =
            function_name.map(|s| s.to_rust_string_lossy(scope));
          let method_name: Option<v8::Local<v8::String>> =
            get_property(scope, call_site, "methodName")
              .unwrap()
              .try_into()
              .ok();
          let method_name = method_name.map(|s| s.to_rust_string_lossy(scope));
          let file_name: Option<v8::Local<v8::String>> =
            get_property(scope, call_site, "fileName")
              .unwrap()
              .try_into()
              .ok();
          let file_name = file_name.map(|s| s.to_rust_string_lossy(scope));
          // Optional integer-valued CallSite properties.
          let line_number: Option<v8::Local<v8::Integer>> =
            get_property(scope, call_site, "lineNumber")
              .unwrap()
              .try_into()
              .ok();
          let line_number = line_number.map(|n| n.value());
          let column_number: Option<v8::Local<v8::Integer>> =
            get_property(scope, call_site, "columnNumber")
              .unwrap()
              .try_into()
              .ok();
          let column_number = column_number.map(|n| n.value());
          let eval_origin: Option<v8::Local<v8::String>> =
            get_property(scope, call_site, "evalOrigin")
              .unwrap()
              .try_into()
              .ok();
          let eval_origin = eval_origin.map(|s| s.to_rust_string_lossy(scope));
          // Boolean CallSite properties; `isToplevel` may be absent, the
          // rest are always present.
          let is_top_level: Option<v8::Local<v8::Boolean>> =
            get_property(scope, call_site, "isToplevel")
              .unwrap()
              .try_into()
              .ok();
          let is_top_level = is_top_level.map(|b| b.is_true());
          let is_eval: v8::Local<v8::Boolean> =
            get_property(scope, call_site, "isEval")
              .unwrap()
              .try_into()
              .unwrap();
          let is_eval = is_eval.is_true();
          let is_native: v8::Local<v8::Boolean> =
            get_property(scope, call_site, "isNative")
              .unwrap()
              .try_into()
              .unwrap();
          let is_native = is_native.is_true();
          let is_constructor: v8::Local<v8::Boolean> =
            get_property(scope, call_site, "isConstructor")
              .unwrap()
              .try_into()
              .unwrap();
          let is_constructor = is_constructor.is_true();
          let is_async: v8::Local<v8::Boolean> =
            get_property(scope, call_site, "isAsync")
              .unwrap()
              .try_into()
              .unwrap();
          let is_async = is_async.is_true();
          let is_promise_all: v8::Local<v8::Boolean> =
            get_property(scope, call_site, "isPromiseAll")
              .unwrap()
              .try_into()
              .unwrap();
          let is_promise_all = is_promise_all.is_true();
          // BUGFIX: this previously read "columnNumber" (copy-paste from the
          // column_number block above), so promise_index always mirrored the
          // column number instead of CallSite.getPromiseIndex().
          let promise_index: Option<v8::Local<v8::Integer>> =
            get_property(scope, call_site, "promiseIndex")
              .unwrap()
              .try_into()
              .ok();
          let promise_index = promise_index.map(|n| n.value());
          frames.push(JsStackFrame {
            type_name,
            function_name,
            method_name,
            file_name,
            line_number,
            column_number,
            eval_origin,
            is_top_level,
            is_eval,
            is_native,
            is_constructor,
            is_async,
            is_promise_all,
            promise_index,
          });
        }
      }
      (message, frames, stack)
    } else {
      // The exception is not a JS Error object.
      // Get the message given by V8::Exception::create_message(), and provide
      // empty frames.
      (msg.get(scope).to_rust_string_lossy(scope), vec![], None)
    };
    Self {
      message,
      script_resource_name: msg
        .get_script_resource_name(scope)
        .and_then(|v| v8::Local::<v8::String>::try_from(v).ok())
        .map(|v| v.to_rust_string_lossy(scope)),
      source_line: msg
        .get_source_line(scope)
        .map(|v| v.to_rust_string_lossy(scope)),
      line_number: msg.get_line_number(scope).and_then(|v| v.try_into().ok()),
      start_column: msg.get_start_column().try_into().ok(),
      end_column: msg.get_end_column().try_into().ok(),
      frames,
      stack,
    }
  }
}
impl Error for JsError {}
/// Renders a `file:line:column` source-location string, matching the format
/// V8 uses in stack traces.
fn format_source_loc(
  file_name: &str,
  line_number: i64,
  column_number: i64,
) -> String {
  // The previous version rebound both numbers to themselves (`let x = x;`),
  // which was dead code left over from an earlier 0/1-based adjustment.
  format!("{}:{}:{}", file_name, line_number, column_number)
}
impl Display for JsError {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    // A multi-line stack trace already contains the message plus frame
    // locations, so print it verbatim and stop.
    if let Some(stack) = &self.stack {
      if stack.lines().nth(1).is_some() {
        return write!(f, "{}", stack);
      }
    }
    // Otherwise fall back to the bare message, optionally followed by the
    // script location when both line and column are known.
    write!(f, "{}", self.message)?;
    if let Some(script_resource_name) = &self.script_resource_name {
      if let (Some(line), Some(col)) = (self.line_number, self.start_column) {
        let source_loc = format_source_loc(script_resource_name, line, col);
        write!(f, "\n at {}", source_loc)?;
      }
    }
    Ok(())
  }
}
/// Attaches a V8 value to an error so the original JS exception object can be
/// recovered later via `ErrWithV8Handle::get_handle()`.
pub(crate) fn attach_handle_to_error(
  scope: &mut v8::Isolate,
  err: AnyError,
  handle: v8::Local<v8::Value>,
) -> AnyError {
  // TODO(bartlomieju): this is a special case...
  let wrapped = ErrWithV8Handle::new(scope, err, handle);
  wrapped.into()
}
// TODO(piscisaureus): rusty_v8 should implement the Error trait on
// values of type v8::Global<T>.
// An `AnyError` bundled with a global handle to the V8 value it originated
// from, so the JS exception object outlives the current handle scope.
pub(crate) struct ErrWithV8Handle {
  err: AnyError,
  handle: v8::Global<v8::Value>,
}
impl ErrWithV8Handle {
pub fn new(
scope: &mut v8::Isolate,
err: AnyError,
handle: v8::Local<v8::Value>,
) -> Self {
let handle = v8::Global::new(scope, handle);
Self { err, handle }
}
pub fn get_handle<'s>(
&self,
scope: &mut v8::HandleScope<'s>,
) -> v8::Local<'s, v8::Value> {
v8::Local::new(scope, &self.handle)
}
}
// SAFETY(review): `v8::Global` is not automatically Send/Sync; these impls
// appear to assume the handle is only ever touched on the isolate's own
// thread — TODO confirm this invariant holds at all call sites.
unsafe impl Send for ErrWithV8Handle {}
unsafe impl Sync for ErrWithV8Handle {}
impl Error for ErrWithV8Handle {}
impl Display for ErrWithV8Handle {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
<AnyError as Display>::fmt(&self.err, f)
}
}
impl Debug for ErrWithV8Handle {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
<Self as Display>::fmt(self, f)
}
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_bad_resource() {
    let msg = bad_resource("Resource has been closed").to_string();
    assert_eq!(msg, "Resource has been closed");
  }

  #[test]
  fn test_bad_resource_id() {
    assert_eq!(bad_resource_id().to_string(), "Bad resource ID");
  }
}
| 30.620309 | 80 | 0.606157 |
f7658f9b7ad79a16e9427ff8e3a1ad9a898df60c
| 18,079 |
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value to be written back.
    bits: u32,
}
impl super::OTG_HS_HCINTMSK11 {
    #[doc = r" Modifies the contents of the register"]
    #[inline]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        // Read-modify-write: seed both proxies from the current hardware value.
        let current = self.register.get();
        let reader = R { bits: current };
        let mut writer = W { bits: current };
        f(&reader, &mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    #[inline]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        // Plain write: start from the reset value, not the current contents.
        let mut writer = W::reset_value();
        f(&mut writer);
        self.register.set(writer.bits);
    }
    #[doc = r" Writes the reset value to the register"]
    #[inline]
    pub fn reset(&self) {
        self.write(|w| w)
    }
}
// Read-side field values (svd2rust-generated): one wrapper per HCINTMSK bit,
// each holding the sampled bit and exposing bit()/bit_is_clear()/bit_is_set().
#[doc = r" Value of the field"]
pub struct XFRCMR {
    bits: bool,
}
impl XFRCMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct CHHMR {
    bits: bool,
}
impl CHHMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct AHBERRR {
    bits: bool,
}
impl AHBERRR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct STALLMR {
    bits: bool,
}
impl STALLMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct NAKMR {
    bits: bool,
}
impl NAKMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct ACKMR {
    bits: bool,
}
impl ACKMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct NYETR {
    bits: bool,
}
impl NYETR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct TXERRMR {
    bits: bool,
}
impl TXERRMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct BBERRMR {
    bits: bool,
}
impl BBERRMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct FRMORMR {
    bits: bool,
}
impl FRMORMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
#[doc = r" Value of the field"]
pub struct DTERRMR {
    bits: bool,
}
impl DTERRMR {
    #[doc = r" Value of the field as raw bits"]
    #[inline]
    pub fn bit(&self) -> bool {
        self.bits
    }
    #[doc = r" Returns `true` if the bit is clear (0)"]
    #[inline]
    pub fn bit_is_clear(&self) -> bool {
        !self.bit()
    }
    #[doc = r" Returns `true` if the bit is set (1)"]
    #[inline]
    pub fn bit_is_set(&self) -> bool {
        self.bit()
    }
}
// Write-side proxies (svd2rust-generated): each borrows `W` and sets or
// clears exactly one bit at its fixed OFFSET (bit 0 through bit 10).
#[doc = r" Proxy"]
pub struct _XFRCMW<'a> {
    w: &'a mut W,
}
impl<'a> _XFRCMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 0;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _CHHMW<'a> {
    w: &'a mut W,
}
impl<'a> _CHHMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 1;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _AHBERRW<'a> {
    w: &'a mut W,
}
impl<'a> _AHBERRW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 2;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _STALLMW<'a> {
    w: &'a mut W,
}
impl<'a> _STALLMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 3;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _NAKMW<'a> {
    w: &'a mut W,
}
impl<'a> _NAKMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 4;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _ACKMW<'a> {
    w: &'a mut W,
}
impl<'a> _ACKMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 5;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _NYETW<'a> {
    w: &'a mut W,
}
impl<'a> _NYETW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 6;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _TXERRMW<'a> {
    w: &'a mut W,
}
impl<'a> _TXERRMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 7;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _BBERRMW<'a> {
    w: &'a mut W,
}
impl<'a> _BBERRMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 8;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _FRMORMW<'a> {
    w: &'a mut W,
}
impl<'a> _FRMORMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 9;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _DTERRMW<'a> {
    w: &'a mut W,
}
impl<'a> _DTERRMW<'a> {
    #[doc = r" Sets the field bit"]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r" Clears the field bit"]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r" Writes raw bits to the field"]
    #[inline]
    pub fn bit(self, value: bool) -> &'a mut W {
        const MASK: bool = true;
        const OFFSET: u8 = 10;
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((value & MASK) as u32) << OFFSET;
        self.w
    }
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    #[doc = "Bit 0 - Transfer completed mask"]
    #[inline]
    pub fn xfrcm(&self) -> XFRCMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 0;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        XFRCMR { bits }
    }
    #[doc = "Bit 1 - Channel halted mask"]
    #[inline]
    pub fn chhm(&self) -> CHHMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 1;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        CHHMR { bits }
    }
    #[doc = "Bit 2 - AHB error"]
    #[inline]
    pub fn ahberr(&self) -> AHBERRR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 2;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        AHBERRR { bits }
    }
    #[doc = "Bit 3 - STALL response received interrupt mask"]
    #[inline]
    pub fn stallm(&self) -> STALLMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 3;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        STALLMR { bits }
    }
    #[doc = "Bit 4 - NAK response received interrupt mask"]
    #[inline]
    pub fn nakm(&self) -> NAKMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 4;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        NAKMR { bits }
    }
    #[doc = "Bit 5 - ACK response received/transmitted interrupt mask"]
    #[inline]
    pub fn ackm(&self) -> ACKMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 5;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        ACKMR { bits }
    }
    #[doc = "Bit 6 - NYET response received interrupt mask"]
    #[inline]
    pub fn nyet(&self) -> NYETR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 6;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        NYETR { bits }
    }
    #[doc = "Bit 7 - Transaction error mask"]
    #[inline]
    pub fn txerrm(&self) -> TXERRMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 7;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        TXERRMR { bits }
    }
    #[doc = "Bit 8 - Babble error mask"]
    #[inline]
    pub fn bberrm(&self) -> BBERRMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 8;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        BBERRMR { bits }
    }
    #[doc = "Bit 9 - Frame overrun mask"]
    #[inline]
    pub fn frmorm(&self) -> FRMORMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 9;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        FRMORMR { bits }
    }
    #[doc = "Bit 10 - Data toggle error mask"]
    #[inline]
    pub fn dterrm(&self) -> DTERRMR {
        let bits = {
            const MASK: bool = true;
            const OFFSET: u8 = 10;
            ((self.bits >> OFFSET) & MASK as u32) != 0
        };
        DTERRMR { bits }
    }
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    #[doc = "Bit 0 - Transfer completed mask"]
    #[inline]
    pub fn xfrcm(&mut self) -> _XFRCMW {
        _XFRCMW { w: self }
    }
    #[doc = "Bit 1 - Channel halted mask"]
    #[inline]
    pub fn chhm(&mut self) -> _CHHMW {
        _CHHMW { w: self }
    }
    #[doc = "Bit 2 - AHB error"]
    #[inline]
    pub fn ahberr(&mut self) -> _AHBERRW {
        _AHBERRW { w: self }
    }
    #[doc = "Bit 3 - STALL response received interrupt mask"]
    #[inline]
    pub fn stallm(&mut self) -> _STALLMW {
        _STALLMW { w: self }
    }
    #[doc = "Bit 4 - NAK response received interrupt mask"]
    #[inline]
    pub fn nakm(&mut self) -> _NAKMW {
        _NAKMW { w: self }
    }
    #[doc = "Bit 5 - ACK response received/transmitted interrupt mask"]
    #[inline]
    pub fn ackm(&mut self) -> _ACKMW {
        _ACKMW { w: self }
    }
    #[doc = "Bit 6 - NYET response received interrupt mask"]
    #[inline]
    pub fn nyet(&mut self) -> _NYETW {
        _NYETW { w: self }
    }
    #[doc = "Bit 7 - Transaction error mask"]
    #[inline]
    pub fn txerrm(&mut self) -> _TXERRMW {
        _TXERRMW { w: self }
    }
    #[doc = "Bit 8 - Babble error mask"]
    #[inline]
    pub fn bberrm(&mut self) -> _BBERRMW {
        _BBERRMW { w: self }
    }
    #[doc = "Bit 9 - Frame overrun mask"]
    #[inline]
    pub fn frmorm(&mut self) -> _FRMORMW {
        _FRMORMW { w: self }
    }
    #[doc = "Bit 10 - Data toggle error mask"]
    #[inline]
    pub fn dterrm(&mut self) -> _DTERRMW {
        _DTERRMW { w: self }
    }
}
| 25.320728 | 71 | 0.495381 |
8f7a384e93f68b726e89162d957ef3afba375574
| 243,461 |
#![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
use super::models;
/// Cloneable handle to the service; every operation-group client
/// (`attestations()`, `remediations()`, ...) shares this state.
#[derive(Clone)]
pub struct Client {
    // Base URL requests are sent to.
    endpoint: String,
    // Credential used to obtain bearer tokens.
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // OAuth scopes requested when fetching tokens.
    scopes: Vec<String>,
    // HTTP pipeline all requests pass through.
    pipeline: azure_core::Pipeline,
}
/// Builder for [`Client`]; `endpoint` and `scopes` are optional and fall
/// back to defaults in `build()`.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    endpoint: Option<String>,
    scopes: Option<Vec<String>>,
}
// Default endpoint: the Azure public-cloud resource manager.
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
Self {
credential,
endpoint: None,
scopes: None,
}
}
pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
self.endpoint = Some(endpoint.into());
self
}
pub fn scopes(mut self, scopes: &[&str]) -> Self {
self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
self
}
pub fn build(self) -> Client {
let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
Client::new(endpoint, self.credential, scopes)
}
}
impl Client {
    // Base URL this client sends requests to.
    pub(crate) fn endpoint(&self) -> &str {
        self.endpoint.as_str()
    }
    // Credential used to obtain bearer tokens.
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    // OAuth scopes as borrowed strings, in declaration order.
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(String::as_str).collect()
    }
    // Sends a request through the pipeline with a fresh default context.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> {
        let mut context = azure_core::Context::default();
        let mut request = request.into();
        self.pipeline.send(&mut context, &mut request).await
    }
    /// Constructs a client from explicit endpoint, credential and scopes.
    // NOTE(review): the pipeline is built with default options and empty
    // per-call/per-retry policy lists — presumably auth is applied
    // elsewhere; confirm against the AutoRust generator.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    // Operation-group accessors; each hands out a cheap clone of this client.
    pub fn attestations(&self) -> attestations::Client {
        attestations::Client(self.clone())
    }
    pub fn policy_events(&self) -> policy_events::Client {
        policy_events::Client(self.clone())
    }
    pub fn policy_metadata(&self) -> policy_metadata::Client {
        policy_metadata::Client(self.clone())
    }
    pub fn policy_restrictions(&self) -> policy_restrictions::Client {
        policy_restrictions::Client(self.clone())
    }
    pub fn policy_states(&self) -> policy_states::Client {
        policy_states::Client(self.clone())
    }
    pub fn policy_tracked_resources(&self) -> policy_tracked_resources::Client {
        policy_tracked_resources::Client(self.clone())
    }
    pub fn remediations(&self) -> remediations::Client {
        remediations::Client(self.clone())
    }
}
/// Union of every operation's error type for this generated client; one
/// variant per service operation, each delegating via `#[from]`.
#[non_exhaustive]
#[derive(Debug, thiserror :: Error)]
#[allow(non_camel_case_types)]
pub enum Error {
    #[error(transparent)]
    PolicyTrackedResources_ListQueryResultsForManagementGroup(
        #[from] policy_tracked_resources::list_query_results_for_management_group::Error,
    ),
    #[error(transparent)]
    PolicyTrackedResources_ListQueryResultsForSubscription(#[from] policy_tracked_resources::list_query_results_for_subscription::Error),
    #[error(transparent)]
    PolicyTrackedResources_ListQueryResultsForResourceGroup(#[from] policy_tracked_resources::list_query_results_for_resource_group::Error),
    #[error(transparent)]
    PolicyTrackedResources_ListQueryResultsForResource(#[from] policy_tracked_resources::list_query_results_for_resource::Error),
    #[error(transparent)]
    Remediations_ListDeploymentsAtManagementGroup(#[from] remediations::list_deployments_at_management_group::Error),
    #[error(transparent)]
    Remediations_CancelAtManagementGroup(#[from] remediations::cancel_at_management_group::Error),
    #[error(transparent)]
    Remediations_ListForManagementGroup(#[from] remediations::list_for_management_group::Error),
    #[error(transparent)]
    Remediations_GetAtManagementGroup(#[from] remediations::get_at_management_group::Error),
    #[error(transparent)]
    Remediations_CreateOrUpdateAtManagementGroup(#[from] remediations::create_or_update_at_management_group::Error),
    #[error(transparent)]
    Remediations_DeleteAtManagementGroup(#[from] remediations::delete_at_management_group::Error),
    #[error(transparent)]
    Remediations_ListDeploymentsAtSubscription(#[from] remediations::list_deployments_at_subscription::Error),
    #[error(transparent)]
    Remediations_CancelAtSubscription(#[from] remediations::cancel_at_subscription::Error),
    #[error(transparent)]
    Remediations_ListForSubscription(#[from] remediations::list_for_subscription::Error),
    #[error(transparent)]
    Remediations_GetAtSubscription(#[from] remediations::get_at_subscription::Error),
    #[error(transparent)]
    Remediations_CreateOrUpdateAtSubscription(#[from] remediations::create_or_update_at_subscription::Error),
    #[error(transparent)]
    Remediations_DeleteAtSubscription(#[from] remediations::delete_at_subscription::Error),
    #[error(transparent)]
    Remediations_ListDeploymentsAtResourceGroup(#[from] remediations::list_deployments_at_resource_group::Error),
    #[error(transparent)]
    Remediations_CancelAtResourceGroup(#[from] remediations::cancel_at_resource_group::Error),
    #[error(transparent)]
    Remediations_ListForResourceGroup(#[from] remediations::list_for_resource_group::Error),
    #[error(transparent)]
    Remediations_GetAtResourceGroup(#[from] remediations::get_at_resource_group::Error),
    #[error(transparent)]
    Remediations_CreateOrUpdateAtResourceGroup(#[from] remediations::create_or_update_at_resource_group::Error),
    #[error(transparent)]
    Remediations_DeleteAtResourceGroup(#[from] remediations::delete_at_resource_group::Error),
    #[error(transparent)]
    Remediations_ListDeploymentsAtResource(#[from] remediations::list_deployments_at_resource::Error),
    #[error(transparent)]
    Remediations_CancelAtResource(#[from] remediations::cancel_at_resource::Error),
    #[error(transparent)]
    Remediations_ListForResource(#[from] remediations::list_for_resource::Error),
    #[error(transparent)]
    Remediations_GetAtResource(#[from] remediations::get_at_resource::Error),
    #[error(transparent)]
    Remediations_CreateOrUpdateAtResource(#[from] remediations::create_or_update_at_resource::Error),
    #[error(transparent)]
    Remediations_DeleteAtResource(#[from] remediations::delete_at_resource::Error),
    #[error(transparent)]
    PolicyEvents_NextLink(#[from] policy_events::next_link::Error),
    #[error(transparent)]
    PolicyStates_NextLink(#[from] policy_states::next_link::Error),
    #[error(transparent)]
    PolicyMetadata_GetResource(#[from] policy_metadata::get_resource::Error),
    #[error(transparent)]
    PolicyMetadata_List(#[from] policy_metadata::list::Error),
    #[error(transparent)]
    PolicyRestrictions_CheckAtSubscriptionScope(#[from] policy_restrictions::check_at_subscription_scope::Error),
    #[error(transparent)]
    PolicyRestrictions_CheckAtResourceGroupScope(#[from] policy_restrictions::check_at_resource_group_scope::Error),
    #[error(transparent)]
    PolicyRestrictions_CheckAtManagementGroupScope(#[from] policy_restrictions::check_at_management_group_scope::Error),
    #[error(transparent)]
    Attestations_ListForSubscription(#[from] attestations::list_for_subscription::Error),
    #[error(transparent)]
    Attestations_GetAtSubscription(#[from] attestations::get_at_subscription::Error),
    #[error(transparent)]
    Attestations_CreateOrUpdateAtSubscription(#[from] attestations::create_or_update_at_subscription::Error),
    #[error(transparent)]
    Attestations_DeleteAtSubscription(#[from] attestations::delete_at_subscription::Error),
    #[error(transparent)]
    Attestations_ListForResourceGroup(#[from] attestations::list_for_resource_group::Error),
    #[error(transparent)]
    Attestations_GetAtResourceGroup(#[from] attestations::get_at_resource_group::Error),
    #[error(transparent)]
    Attestations_CreateOrUpdateAtResourceGroup(#[from] attestations::create_or_update_at_resource_group::Error),
    #[error(transparent)]
    Attestations_DeleteAtResourceGroup(#[from] attestations::delete_at_resource_group::Error),
    #[error(transparent)]
    Attestations_ListForResource(#[from] attestations::list_for_resource::Error),
    #[error(transparent)]
    Attestations_GetAtResource(#[from] attestations::get_at_resource::Error),
    #[error(transparent)]
    Attestations_CreateOrUpdateAtResource(#[from] attestations::create_or_update_at_resource::Error),
    #[error(transparent)]
    Attestations_DeleteAtResource(#[from] attestations::delete_at_resource::Error),
}
pub mod policy_tracked_resources {
use super::models;
pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Queries policy tracked resources under a management group.
        ///
        /// Returns a builder on which the optional `$top` and `$filter` query
        /// parameters can be set before sending the request via `into_future`.
        pub fn list_query_results_for_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_name: impl Into<String>,
            policy_tracked_resources_resource: impl Into<String>,
        ) -> list_query_results_for_management_group::Builder {
            list_query_results_for_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_name: management_group_name.into(),
                policy_tracked_resources_resource: policy_tracked_resources_resource.into(),
                top: None,
                filter: None,
            }
        }
        /// Queries policy tracked resources under a subscription.
        ///
        /// Returns a builder on which the optional `$top` and `$filter` query
        /// parameters can be set before sending the request via `into_future`.
        pub fn list_query_results_for_subscription(
            &self,
            policy_tracked_resources_resource: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list_query_results_for_subscription::Builder {
            list_query_results_for_subscription::Builder {
                client: self.0.clone(),
                policy_tracked_resources_resource: policy_tracked_resources_resource.into(),
                subscription_id: subscription_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Queries policy tracked resources under a resource group.
        ///
        /// Returns a builder on which the optional `$top` and `$filter` query
        /// parameters can be set before sending the request via `into_future`.
        pub fn list_query_results_for_resource_group(
            &self,
            resource_group_name: impl Into<String>,
            policy_tracked_resources_resource: impl Into<String>,
            subscription_id: impl Into<String>,
        ) -> list_query_results_for_resource_group::Builder {
            list_query_results_for_resource_group::Builder {
                client: self.0.clone(),
                resource_group_name: resource_group_name.into(),
                policy_tracked_resources_resource: policy_tracked_resources_resource.into(),
                subscription_id: subscription_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Queries policy tracked resources under a specific resource, addressed
        /// by its full resource ID.
        ///
        /// Returns a builder on which the optional `$top` and `$filter` query
        /// parameters can be set before sending the request via `into_future`.
        pub fn list_query_results_for_resource(
            &self,
            resource_id: impl Into<String>,
            policy_tracked_resources_resource: impl Into<String>,
        ) -> list_query_results_for_resource::Builder {
            list_query_results_for_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                policy_tracked_resources_resource: policy_tracked_resources_resource.into(),
                top: None,
                filter: None,
            }
        }
    }
pub mod list_query_results_for_management_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::QueryFailure,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) management_groups_namespace: String,
pub(crate) management_group_name: String,
pub(crate) policy_tracked_resources_resource: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyTrackedResourcesQueryResults, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/policyTrackedResources/{}/queryResults",
self.client.endpoint(),
&self.management_groups_namespace,
&self.management_group_name,
&self.policy_tracked_resources_resource
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2018-07-01-preview");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PolicyTrackedResourcesQueryResults =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::QueryFailure =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_query_results_for_subscription {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::QueryFailure,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) policy_tracked_resources_resource: String,
pub(crate) subscription_id: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyTrackedResourcesQueryResults, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.PolicyInsights/policyTrackedResources/{}/queryResults",
self.client.endpoint(),
&self.subscription_id,
&self.policy_tracked_resources_resource
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2018-07-01-preview");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PolicyTrackedResourcesQueryResults =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::QueryFailure =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_query_results_for_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::QueryFailure,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_group_name: String,
pub(crate) policy_tracked_resources_resource: String,
pub(crate) subscription_id: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyTrackedResourcesQueryResults, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/policyTrackedResources/{}/queryResults",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.policy_tracked_resources_resource
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2018-07-01-preview");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PolicyTrackedResourcesQueryResults =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::QueryFailure =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_query_results_for_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::QueryFailure,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) policy_tracked_resources_resource: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyTrackedResourcesQueryResults, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/policyTrackedResources/{}/queryResults",
self.client.endpoint(),
&self.resource_id,
&self.policy_tracked_resources_resource
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2018-07-01-preview");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::PolicyTrackedResourcesQueryResults =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::QueryFailure =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
pub mod remediations {
use super::models;
    /// Operation-group handle for remediation operations; a thin wrapper
    /// around the service-level [`Client`](super::Client).
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Lists deployments for a remediation at management group scope;
        /// `$top` can be set on the returned builder.
        pub fn list_deployments_at_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> list_deployments_at_management_group::Builder {
            list_deployments_at_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                remediation_name: remediation_name.into(),
                top: None,
            }
        }
        /// Cancels a remediation at management group scope.
        pub fn cancel_at_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> cancel_at_management_group::Builder {
            cancel_at_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists remediations at management group scope; `$top` and `$filter`
        /// can be set on the returned builder.
        pub fn list_for_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
        ) -> list_for_management_group::Builder {
            list_for_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Gets an existing remediation at management group scope.
        pub fn get_at_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> get_at_management_group::Builder {
            get_at_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Creates or updates a remediation at management group scope.
        pub fn create_or_update_at_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            remediation_name: impl Into<String>,
            parameters: impl Into<models::Remediation>,
        ) -> create_or_update_at_management_group::Builder {
            create_or_update_at_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                remediation_name: remediation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Deletes an existing remediation at management group scope.
        pub fn delete_at_management_group(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> delete_at_management_group::Builder {
            delete_at_management_group::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists deployments for a remediation at subscription scope; `$top`
        /// can be set on the returned builder.
        pub fn list_deployments_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> list_deployments_at_subscription::Builder {
            list_deployments_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                remediation_name: remediation_name.into(),
                top: None,
            }
        }
        /// Cancels a remediation at subscription scope.
        pub fn cancel_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> cancel_at_subscription::Builder {
            cancel_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists remediations at subscription scope; `$top` and `$filter` can
        /// be set on the returned builder.
        pub fn list_for_subscription(&self, subscription_id: impl Into<String>) -> list_for_subscription::Builder {
            list_for_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Gets an existing remediation at subscription scope.
        pub fn get_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> get_at_subscription::Builder {
            get_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Creates or updates a remediation at subscription scope.
        pub fn create_or_update_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            remediation_name: impl Into<String>,
            parameters: impl Into<models::Remediation>,
        ) -> create_or_update_at_subscription::Builder {
            create_or_update_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                remediation_name: remediation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Deletes an existing remediation at subscription scope.
        pub fn delete_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> delete_at_subscription::Builder {
            delete_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists deployments for a remediation at resource group scope; `$top`
        /// can be set on the returned builder.
        pub fn list_deployments_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> list_deployments_at_resource_group::Builder {
            list_deployments_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                remediation_name: remediation_name.into(),
                top: None,
            }
        }
        /// Cancels a remediation at resource group scope.
        pub fn cancel_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> cancel_at_resource_group::Builder {
            cancel_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists remediations at resource group scope; `$top` and `$filter`
        /// can be set on the returned builder.
        pub fn list_for_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_for_resource_group::Builder {
            list_for_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                top: None,
                filter: None,
            }
        }
        /// Gets an existing remediation at resource group scope.
        pub fn get_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> get_at_resource_group::Builder {
            get_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Creates or updates a remediation at resource group scope.
        pub fn create_or_update_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            remediation_name: impl Into<String>,
            parameters: impl Into<models::Remediation>,
        ) -> create_or_update_at_resource_group::Builder {
            create_or_update_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                remediation_name: remediation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Deletes an existing remediation at resource group scope.
        pub fn delete_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> delete_at_resource_group::Builder {
            delete_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists deployments for a remediation at individual resource scope;
        /// `$top` can be set on the returned builder.
        pub fn list_deployments_at_resource(
            &self,
            resource_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> list_deployments_at_resource::Builder {
            list_deployments_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                remediation_name: remediation_name.into(),
                top: None,
            }
        }
        /// Cancels a remediation at individual resource scope.
        pub fn cancel_at_resource(
            &self,
            resource_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> cancel_at_resource::Builder {
            cancel_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Lists remediations at individual resource scope; `$top` and
        /// `$filter` can be set on the returned builder.
        pub fn list_for_resource(&self, resource_id: impl Into<String>) -> list_for_resource::Builder {
            list_for_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Gets an existing remediation at individual resource scope.
        pub fn get_at_resource(&self, resource_id: impl Into<String>, remediation_name: impl Into<String>) -> get_at_resource::Builder {
            get_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
        /// Creates or updates a remediation at individual resource scope.
        pub fn create_or_update_at_resource(
            &self,
            resource_id: impl Into<String>,
            remediation_name: impl Into<String>,
            parameters: impl Into<models::Remediation>,
        ) -> create_or_update_at_resource::Builder {
            create_or_update_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                remediation_name: remediation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Deletes an existing remediation at individual resource scope.
        pub fn delete_at_resource(
            &self,
            resource_id: impl Into<String>,
            remediation_name: impl Into<String>,
        ) -> delete_at_resource::Builder {
            delete_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                remediation_name: remediation_name.into(),
            }
        }
    }
pub mod list_deployments_at_management_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) management_groups_namespace: String,
pub(crate) management_group_id: String,
pub(crate) remediation_name: String,
pub(crate) top: Option<i32>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationDeploymentsListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}/listDeployments",
self.client.endpoint(),
&self.management_groups_namespace,
&self.management_group_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RemediationDeploymentsListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod cancel_at_management_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) management_groups_namespace: String,
pub(crate) management_group_id: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}/cancel",
self.client.endpoint(),
&self.management_groups_namespace,
&self.management_group_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_for_management_group {
    use super::models;
    /// Errors produced by the `list_for_management_group` operation.
    /// The non-`DefaultResponse` variants each wrap a failure from one
    /// stage of the request pipeline; see their `#[error]` messages.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for listing remediations at management-group scope.
    ///
    /// Set the optional `$top` / `$filter` query parameters with the
    /// fluent methods, then call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) management_groups_namespace: String,
        pub(crate) management_group_id: String,
        pub(crate) top: Option<i32>,
        pub(crate) filter: Option<String>,
    }
    impl Builder {
        /// Maximum number of records to return (`$top` query parameter).
        pub fn top(mut self, top: i32) -> Self {
            self.top = Some(top);
            self
        }
        /// OData filter expression (`$filter` query parameter).
        pub fn filter(mut self, filter: impl Into<String>) -> Self {
            self.filter = Some(filter.into());
            self
        }
        /// Send the request and deserialize the response.
        ///
        /// Issues a GET against
        /// `{endpoint}/providers/{ns}/managementGroups/{id}/providers/Microsoft.PolicyInsights/remediations`
        /// with api-version `2021-10-01`. A 200 response is decoded as
        /// [`models::RemediationListResult`]; any other status is
        /// surfaced as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationListResult, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations",
                    self.client.endpoint(),
                    &self.management_groups_namespace,
                    &self.management_group_id
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::GET);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                // Optional paging / filtering query parameters.
                if let Some(top) = &self.top {
                    url.query_pairs_mut().append_pair("$top", &top.to_string());
                }
                if let Some(filter) = &self.filter {
                    url.query_pairs_mut().append_pair("$filter", filter);
                }
                let req_body = azure_core::EMPTY_BODY;
                // The URI is set only after all query pairs have been appended.
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::RemediationListResult =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod get_at_management_group {
    use super::models;
    /// Errors produced by the `get_at_management_group` operation; each
    /// variant other than `DefaultResponse` wraps one pipeline stage.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for fetching a single remediation at
    /// management-group scope; call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) management_groups_namespace: String,
        pub(crate) management_group_id: String,
        pub(crate) remediation_name: String,
    }
    impl Builder {
        /// Send the GET request (api-version `2021-10-01`) and decode a
        /// 200 response as [`models::Remediation`]; any other status is
        /// surfaced as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.management_groups_namespace,
                    &self.management_group_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::GET);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod create_or_update_at_management_group {
    use super::models;
    /// Success responses: 200 when an existing remediation was updated,
    /// 201 when a new one was created. Both carry the resulting resource.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Remediation),
        Created201(models::Remediation),
    }
    /// Errors produced by the `create_or_update_at_management_group`
    /// operation; variants other than `DefaultResponse` wrap one
    /// pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for creating or updating a remediation at
    /// management-group scope; `parameters` is the remediation
    /// definition sent as the request body.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) management_groups_namespace: String,
        pub(crate) management_group_id: String,
        pub(crate) remediation_name: String,
        pub(crate) parameters: models::Remediation,
    }
    impl Builder {
        /// Send the PUT request (api-version `2021-10-01`). 200/201 are
        /// decoded into [`Response`]; any other status is surfaced as
        /// [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.management_groups_namespace,
                    &self.management_group_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::PUT);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                // Serialize the remediation definition as the JSON request body.
                req_builder = req_builder.header("content-type", "application/json");
                let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Ok200(rsp_value))
                    }
                    http::StatusCode::CREATED => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Created201(rsp_value))
                    }
                    status_code => {
                        // Any other status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod delete_at_management_group {
    use super::models;
    /// Success responses: 200 returns the deleted remediation, 204
    /// indicates there was nothing to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Remediation),
        NoContent204,
    }
    /// Errors produced by the `delete_at_management_group` operation;
    /// variants other than `DefaultResponse` wrap one pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for deleting a remediation at management-group
    /// scope; call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) management_groups_namespace: String,
        pub(crate) management_group_id: String,
        pub(crate) remediation_name: String,
    }
    impl Builder {
        /// Send the DELETE request (api-version `2021-10-01`). 200/204
        /// are decoded into [`Response`]; any other status is surfaced
        /// as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.management_groups_namespace,
                    &self.management_group_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::DELETE);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Ok200(rsp_value))
                    }
                    // 204 carries no body, so nothing to deserialize.
                    http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
                    status_code => {
                        // Any other status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod list_deployments_at_subscription {
    use super::models;
    /// Errors produced by the `list_deployments_at_subscription`
    /// operation; variants other than `DefaultResponse` wrap one
    /// pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for listing the deployments of a remediation at
    /// subscription scope; optionally cap results with [`top`](Self::top).
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) remediation_name: String,
        pub(crate) top: Option<i32>,
    }
    impl Builder {
        /// Maximum number of records to return (`$top` query parameter).
        pub fn top(mut self, top: i32) -> Self {
            self.top = Some(top);
            self
        }
        /// Send a POST to the `listDeployments` action (api-version
        /// `2021-10-01`). A 200 response is decoded as
        /// [`models::RemediationDeploymentsListResult`]; any other
        /// status is surfaced as [`Error::DefaultResponse`].
        pub fn into_future(
            self,
        ) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationDeploymentsListResult, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations/{}/listDeployments",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::POST);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                if let Some(top) = &self.top {
                    url.query_pairs_mut().append_pair("$top", &top.to_string());
                }
                // POST with an empty body: send an explicit Content-Length of 0.
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::RemediationDeploymentsListResult =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod cancel_at_subscription {
    use super::models;
    /// Errors produced by the `cancel_at_subscription` operation;
    /// variants other than `DefaultResponse` wrap one pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for cancelling a remediation at subscription
    /// scope; call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) remediation_name: String,
    }
    impl Builder {
        /// Send a POST to the `cancel` action (api-version
        /// `2021-10-01`). A 200 response is decoded as
        /// [`models::Remediation`]; any other status is surfaced as
        /// [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations/{}/cancel",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::POST);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                // POST with an empty body: send an explicit Content-Length of 0.
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod list_for_subscription {
    use super::models;
    /// Errors produced by the `list_for_subscription` operation;
    /// variants other than `DefaultResponse` wrap one pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for listing remediations at subscription scope.
    ///
    /// Set the optional `$top` / `$filter` query parameters with the
    /// fluent methods, then call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) top: Option<i32>,
        pub(crate) filter: Option<String>,
    }
    impl Builder {
        /// Maximum number of records to return (`$top` query parameter).
        pub fn top(mut self, top: i32) -> Self {
            self.top = Some(top);
            self
        }
        /// OData filter expression (`$filter` query parameter).
        pub fn filter(mut self, filter: impl Into<String>) -> Self {
            self.filter = Some(filter.into());
            self
        }
        /// Send the GET request (api-version `2021-10-01`). A 200
        /// response is decoded as [`models::RemediationListResult`];
        /// any other status is surfaced as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationListResult, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations",
                    self.client.endpoint(),
                    &self.subscription_id
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::GET);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                // Optional paging / filtering query parameters.
                if let Some(top) = &self.top {
                    url.query_pairs_mut().append_pair("$top", &top.to_string());
                }
                if let Some(filter) = &self.filter {
                    url.query_pairs_mut().append_pair("$filter", filter);
                }
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::RemediationListResult =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod get_at_subscription {
    use super::models;
    /// Errors produced by the `get_at_subscription` operation; variants
    /// other than `DefaultResponse` wrap one pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for fetching a single remediation at
    /// subscription scope; call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) remediation_name: String,
    }
    impl Builder {
        /// Send the GET request (api-version `2021-10-01`) and decode a
        /// 200 response as [`models::Remediation`]; any other status is
        /// surfaced as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::GET);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod create_or_update_at_subscription {
    use super::models;
    /// Success responses: 200 when an existing remediation was updated,
    /// 201 when a new one was created. Both carry the resulting resource.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Remediation),
        Created201(models::Remediation),
    }
    /// Errors produced by the `create_or_update_at_subscription`
    /// operation; variants other than `DefaultResponse` wrap one
    /// pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for creating or updating a remediation at
    /// subscription scope; `parameters` is the remediation definition
    /// sent as the request body.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) remediation_name: String,
        pub(crate) parameters: models::Remediation,
    }
    impl Builder {
        /// Send the PUT request (api-version `2021-10-01`). 200/201 are
        /// decoded into [`Response`]; any other status is surfaced as
        /// [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::PUT);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                // Serialize the remediation definition as the JSON request body.
                req_builder = req_builder.header("content-type", "application/json");
                let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Ok200(rsp_value))
                    }
                    http::StatusCode::CREATED => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Created201(rsp_value))
                    }
                    status_code => {
                        // Any other status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod delete_at_subscription {
    use super::models;
    /// Success responses: 200 returns the deleted remediation, 204
    /// indicates there was nothing to delete.
    #[derive(Debug)]
    pub enum Response {
        Ok200(models::Remediation),
        NoContent204,
    }
    /// Errors produced by the `delete_at_subscription` operation;
    /// variants other than `DefaultResponse` wrap one pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for deleting a remediation at subscription
    /// scope; call `into_future` to send the request.
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) remediation_name: String,
    }
    impl Builder {
        /// Send the DELETE request (api-version `2021-10-01`). 200/204
        /// are decoded into [`Response`]; any other status is surfaced
        /// as [`Error::DefaultResponse`].
        pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::DELETE);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::Remediation =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(Response::Ok200(rsp_value))
                    }
                    // 204 carries no body, so nothing to deserialize.
                    http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
                    status_code => {
                        // Any other status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod list_deployments_at_resource_group {
    use super::models;
    /// Errors produced by the `list_deployments_at_resource_group`
    /// operation; variants other than `DefaultResponse` wrap one
    /// pipeline stage each.
    #[derive(Debug, thiserror :: Error)]
    pub enum Error {
        /// The service replied with an unexpected status code; the
        /// parsed error payload is carried in `value`.
        #[error("HTTP status code {}", status_code)]
        DefaultResponse {
            status_code: http::StatusCode,
            value: models::ErrorResponse,
        },
        #[error("Failed to parse request URL")]
        ParseUrl(#[source] url::ParseError),
        #[error("Failed to build request")]
        BuildRequest(#[source] http::Error),
        #[error("Failed to serialize request body")]
        Serialize(#[source] serde_json::Error),
        #[error("Failed to get access token")]
        GetToken(#[source] azure_core::Error),
        #[error("Failed to execute request")]
        SendRequest(#[source] azure_core::error::Error),
        #[error("Failed to get response bytes")]
        ResponseBytes(#[source] azure_core::error::Error),
        #[error("Failed to deserialize response, body: {1:?}")]
        Deserialize(#[source] serde_json::Error, bytes::Bytes),
    }
    /// Request builder for listing the deployments of a remediation at
    /// resource-group scope; optionally cap results with [`top`](Self::top).
    #[derive(Clone)]
    pub struct Builder {
        pub(crate) client: super::super::Client,
        pub(crate) subscription_id: String,
        pub(crate) resource_group_name: String,
        pub(crate) remediation_name: String,
        pub(crate) top: Option<i32>,
    }
    impl Builder {
        /// Maximum number of records to return (`$top` query parameter).
        pub fn top(mut self, top: i32) -> Self {
            self.top = Some(top);
            self
        }
        /// Send a POST to the `listDeployments` action (api-version
        /// `2021-10-01`). A 200 response is decoded as
        /// [`models::RemediationDeploymentsListResult`]; any other
        /// status is surfaced as [`Error::DefaultResponse`].
        pub fn into_future(
            self,
        ) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationDeploymentsListResult, Error>> {
            Box::pin(async move {
                // Build the request URL from the client endpoint and path parameters.
                let url_str = &format!(
                    "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}/listDeployments",
                    self.client.endpoint(),
                    &self.subscription_id,
                    &self.resource_group_name,
                    &self.remediation_name
                );
                let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                let mut req_builder = http::request::Builder::new();
                req_builder = req_builder.method(http::Method::POST);
                // Acquire a bearer token covering the client's configured scopes.
                let credential = self.client.token_credential();
                let token_response = credential
                    .get_token(&self.client.scopes().join(" "))
                    .await
                    .map_err(Error::GetToken)?;
                req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                if let Some(top) = &self.top {
                    url.query_pairs_mut().append_pair("$top", &top.to_string());
                }
                // POST with an empty body: send an explicit Content-Length of 0.
                let req_body = azure_core::EMPTY_BODY;
                req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                req_builder = req_builder.uri(url.as_str());
                let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                match rsp_status {
                    http::StatusCode::OK => {
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::RemediationDeploymentsListResult =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Ok(rsp_value)
                    }
                    status_code => {
                        // Any non-200 status: decode the error body and surface it.
                        let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                        let rsp_value: models::ErrorResponse =
                            serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                        Err(Error::DefaultResponse {
                            status_code,
                            value: rsp_value,
                        })
                    }
                }
            })
        }
    }
}
pub mod cancel_at_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}/cancel",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_for_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RemediationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_at_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_at_resource_group {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Remediation),
Created201(models::Remediation),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) remediation_name: String,
pub(crate) parameters: models::Remediation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_at_resource_group {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Remediation),
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/remediations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_deployments_at_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) remediation_name: String,
pub(crate) top: Option<i32>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn into_future(
self,
) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationDeploymentsListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/remediations/{}/listDeployments",
self.client.endpoint(),
&self.resource_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RemediationDeploymentsListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod cancel_at_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/remediations/{}/cancel",
self.client.endpoint(),
&self.resource_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::POST);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_for_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::RemediationListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/remediations",
self.client.endpoint(),
&self.resource_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::RemediationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_at_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) remediation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Remediation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/remediations/{}",
self.client.endpoint(),
&self.resource_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_at_resource {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Remediation),
Created201(models::Remediation),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) remediation_name: String,
pub(crate) parameters: models::Remediation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/remediations/{}",
self.client.endpoint(),
&self.resource_id,
&self.remediation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-10-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Remediation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
    /// Deletes an existing remediation at resource scope:
    /// DELETE `{resourceId}/providers/Microsoft.PolicyInsights/remediations/{remediationName}`
    /// (api-version 2021-10-01). Generated operation module: `Builder` holds the
    /// captured request parameters and `into_future` performs the call.
    pub mod delete_at_resource {
        use super::models;
        /// Successful outcomes: 200 returns the deleted remediation body,
        /// 204 indicates there was nothing to delete.
        #[derive(Debug)]
        pub enum Response {
            Ok200(models::Remediation),
            NoContent204,
        }
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any status other than 200/204 together
        /// with the service's deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by the enclosing module's
        /// `Client::delete_at_resource`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_id: String,
            pub(crate) remediation_name: String,
        }
        impl Builder {
            /// Sends the DELETE request and resolves to `Response` or `Error`.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
                Box::pin(async move {
                    // `resource_id` is interpolated verbatim (not percent-encoded);
                    // presumably a full ARM resource id containing '/' segments —
                    // NOTE(review): confirm callers pass it pre-encoded.
                    let url_str = &format!(
                        "{}/{}/providers/Microsoft.PolicyInsights/remediations/{}",
                        self.client.endpoint(),
                        &self.resource_id,
                        &self.remediation_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::DELETE);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    // api-version is the only query parameter for this operation.
                    url.query_pairs_mut().append_pair("api-version", "2021-10-01");
                    // DELETE carries no request body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    // 200 => deleted remediation returned; 204 => no content;
                    // anything else => service error payload wrapped in DefaultResponse.
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::Remediation =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(Response::Ok200(rsp_value))
                        }
                        http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
/// Paging continuation for policy events: follows a `nextLink` URL returned
/// by a previous policy-events query (api-version 2019-10-01).
pub mod policy_events {
    use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder that fetches the next page identified by `next_link`.
        pub fn next_link(&self, next_link: impl Into<String>) -> next_link::Builder {
            next_link::Builder {
                client: self.0.clone(),
                next_link: next_link.into(),
                skiptoken: None,
            }
        }
    }
    /// Generated operation module for the next-page call.
    pub mod next_link {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized `QueryFailure` payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::QueryFailure,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by `Client::next_link`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) next_link: String,
            pub(crate) skiptoken: Option<String>,
        }
        impl Builder {
            /// Sets the optional `$skiptoken` continuation query parameter.
            pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self {
                self.skiptoken = Some(skiptoken.into());
                self
            }
            /// Sends the POST request for the next page and resolves to the
            /// deserialized `PolicyEventsQueryResults`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyEventsQueryResults, Error>> {
                Box::pin(async move {
                    // BUGFIX: the generated code appended the literal string
                    // "?Next paging op for policy events" to the URL. That text is
                    // the disambiguation suffix of the OpenAPI path key (everything
                    // after '?' in a spec path key is not part of the runtime route)
                    // and must never be sent on the wire. `next_link` is already a
                    // complete path-and-query continuation, so it is concatenated
                    // to the endpoint as-is.
                    let url_str = &format!("{}{}", self.client.endpoint(), &self.next_link);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    // NOTE(review): a service-issued nextLink typically already
                    // carries api-version/$skiptoken; re-appending duplicates the
                    // pairs. Preserved as generated — confirm against the service
                    // before changing.
                    url.query_pairs_mut().append_pair("api-version", "2019-10-01");
                    if let Some(skiptoken) = &self.skiptoken {
                        url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
                    }
                    // POST with an empty body; Content-Length: 0 is set explicitly.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::PolicyEventsQueryResults =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::QueryFailure =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
/// Paging continuation for policy states: follows a `nextLink` URL returned
/// by a previous policy-states query (api-version 2019-10-01).
pub mod policy_states {
    use super::models;
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder that fetches the next page identified by `next_link`.
        pub fn next_link(&self, next_link: impl Into<String>) -> next_link::Builder {
            next_link::Builder {
                client: self.0.clone(),
                next_link: next_link.into(),
                skiptoken: None,
            }
        }
    }
    /// Generated operation module for the next-page call.
    pub mod next_link {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized `QueryFailure` payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::QueryFailure,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by `Client::next_link`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) next_link: String,
            pub(crate) skiptoken: Option<String>,
        }
        impl Builder {
            /// Sets the optional `$skiptoken` continuation query parameter.
            pub fn skiptoken(mut self, skiptoken: impl Into<String>) -> Self {
                self.skiptoken = Some(skiptoken.into());
                self
            }
            /// Sends the POST request for the next page and resolves to the
            /// deserialized `PolicyStatesQueryResults`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyStatesQueryResults, Error>> {
                Box::pin(async move {
                    // BUGFIX: the generated code appended the literal string
                    // "?Next paging op for policy states" to the URL. That text is
                    // the disambiguation suffix of the OpenAPI path key (everything
                    // after '?' in a spec path key is not part of the runtime route)
                    // and must never be sent on the wire. `next_link` is already a
                    // complete path-and-query continuation, so it is concatenated
                    // to the endpoint as-is.
                    let url_str = &format!("{}{}", self.client.endpoint(), &self.next_link);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    // NOTE(review): a service-issued nextLink typically already
                    // carries api-version/$skiptoken; re-appending duplicates the
                    // pairs. Preserved as generated — confirm against the service
                    // before changing.
                    url.query_pairs_mut().append_pair("api-version", "2019-10-01");
                    if let Some(skiptoken) = &self.skiptoken {
                        url.query_pairs_mut().append_pair("$skiptoken", skiptoken);
                    }
                    // POST with an empty body; Content-Length: 0 is set explicitly.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.header(http::header::CONTENT_LENGTH, 0);
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::PolicyStatesQueryResults =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::QueryFailure =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod policy_metadata {
use super::models;
    /// Thin wrapper around the generated service `Client`; exposes one builder
    /// constructor per policy-metadata operation.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder for fetching a single policy metadata resource by name.
        pub fn get_resource(&self, resource_name: impl Into<String>) -> get_resource::Builder {
            get_resource::Builder {
                client: self.0.clone(),
                resource_name: resource_name.into(),
            }
        }
        /// Creates a builder for listing policy metadata resources;
        /// `$top` is unset by default.
        pub fn list(&self) -> list::Builder {
            list::Builder {
                client: self.0.clone(),
                top: None,
            }
        }
    }
    /// Fetches a single policy metadata resource:
    /// GET `/providers/Microsoft.PolicyInsights/policyMetadata/{resourceName}`
    /// (api-version 2019-10-01).
    pub mod get_resource {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by `Client::get_resource`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) resource_name: String,
        }
        impl Builder {
            /// Sends the GET request and resolves to the deserialized
            /// `PolicyMetadata`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyMetadata, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/providers/Microsoft.PolicyInsights/policyMetadata/{}",
                        self.client.endpoint(),
                        &self.resource_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-10-01");
                    // GET carries no request body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::PolicyMetadata =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Lists policy metadata resources:
    /// GET `/providers/Microsoft.PolicyInsights/policyMetadata`
    /// (api-version 2019-10-01), optionally limited with `$top`.
    pub mod list {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by `Client::list`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) top: Option<i32>,
        }
        impl Builder {
            /// Sets the optional `$top` query parameter (maximum number of
            /// records to return).
            pub fn top(mut self, top: i32) -> Self {
                self.top = Some(top);
                self
            }
            /// Sends the GET request and resolves to the deserialized
            /// `PolicyMetadataCollection`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::PolicyMetadataCollection, Error>> {
                Box::pin(async move {
                    let url_str = &format!("{}/providers/Microsoft.PolicyInsights/policyMetadata", self.client.endpoint(),);
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2019-10-01");
                    // `$top` is appended only when explicitly set on the builder.
                    if let Some(top) = &self.top {
                        url.query_pairs_mut().append_pair("$top", &top.to_string());
                    }
                    // GET carries no request body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::PolicyMetadataCollection =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod policy_restrictions {
use super::models;
    /// Thin wrapper around the generated service `Client`; exposes one builder
    /// constructor per checkPolicyRestrictions scope (subscription, resource
    /// group, management group).
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder for `checkPolicyRestrictions` at subscription scope.
        pub fn check_at_subscription_scope(
            &self,
            subscription_id: impl Into<String>,
            parameters: impl Into<models::CheckRestrictionsRequest>,
        ) -> check_at_subscription_scope::Builder {
            check_at_subscription_scope::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                parameters: parameters.into(),
            }
        }
        /// Creates a builder for `checkPolicyRestrictions` at resource-group scope.
        pub fn check_at_resource_group_scope(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            parameters: impl Into<models::CheckRestrictionsRequest>,
        ) -> check_at_resource_group_scope::Builder {
            check_at_resource_group_scope::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Creates a builder for `checkPolicyRestrictions` at management-group
        /// scope; note this variant takes the management-group-specific request
        /// model.
        pub fn check_at_management_group_scope(
            &self,
            management_groups_namespace: impl Into<String>,
            management_group_id: impl Into<String>,
            parameters: impl Into<models::CheckManagementGroupRestrictionsRequest>,
        ) -> check_at_management_group_scope::Builder {
            check_at_management_group_scope::Builder {
                client: self.0.clone(),
                management_groups_namespace: management_groups_namespace.into(),
                management_group_id: management_group_id.into(),
                parameters: parameters.into(),
            }
        }
    }
    /// Checks policy restrictions at subscription scope:
    /// POST `/subscriptions/{id}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions`
    /// (api-version 2022-03-01) with a JSON-serialized `CheckRestrictionsRequest`.
    pub mod check_at_subscription_scope {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by
        /// `Client::check_at_subscription_scope`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) parameters: models::CheckRestrictionsRequest,
        }
        impl Builder {
            /// Sends the POST request and resolves to the deserialized
            /// `CheckRestrictionsResult`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckRestrictionsResult, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2022-03-01");
                    // JSON request body from the captured parameters.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CheckRestrictionsResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Checks policy restrictions at resource-group scope:
    /// POST `/subscriptions/{id}/resourceGroups/{rg}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions`
    /// (api-version 2022-03-01) with a JSON-serialized `CheckRestrictionsRequest`.
    pub mod check_at_resource_group_scope {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by
        /// `Client::check_at_resource_group_scope`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) parameters: models::CheckRestrictionsRequest,
        }
        impl Builder {
            /// Sends the POST request and resolves to the deserialized
            /// `CheckRestrictionsResult`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckRestrictionsResult, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.resource_group_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2022-03-01");
                    // JSON request body from the captured parameters.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CheckRestrictionsResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Checks policy restrictions at management-group scope:
    /// POST `/providers/{ns}/managementGroups/{id}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions`
    /// (api-version 2022-03-01) with a JSON-serialized
    /// `CheckManagementGroupRestrictionsRequest`.
    pub mod check_at_management_group_scope {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by
        /// `Client::check_at_management_group_scope`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) management_groups_namespace: String,
            pub(crate) management_group_id: String,
            pub(crate) parameters: models::CheckManagementGroupRestrictionsRequest,
        }
        impl Builder {
            /// Sends the POST request and resolves to the deserialized
            /// `CheckRestrictionsResult`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::CheckRestrictionsResult, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/providers/{}/managementGroups/{}/providers/Microsoft.PolicyInsights/checkPolicyRestrictions",
                        self.client.endpoint(),
                        &self.management_groups_namespace,
                        &self.management_group_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::POST);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2022-03-01");
                    // JSON request body from the captured parameters.
                    req_builder = req_builder.header("content-type", "application/json");
                    let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::CheckRestrictionsResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
}
pub mod attestations {
use super::models;
    /// Thin wrapper around the generated service `Client`; exposes one builder
    /// constructor per attestation operation at subscription, resource-group,
    /// and resource scope.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        /// Creates a builder for listing attestations at subscription scope;
        /// `$top` and `$filter` are unset by default.
        pub fn list_for_subscription(&self, subscription_id: impl Into<String>) -> list_for_subscription::Builder {
            list_for_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Creates a builder for fetching one attestation at subscription scope.
        pub fn get_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            attestation_name: impl Into<String>,
        ) -> get_at_subscription::Builder {
            get_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                attestation_name: attestation_name.into(),
            }
        }
        /// Creates a builder for the create-or-update operation at subscription
        /// scope; `parameters` is the attestation payload to send.
        pub fn create_or_update_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            attestation_name: impl Into<String>,
            parameters: impl Into<models::Attestation>,
        ) -> create_or_update_at_subscription::Builder {
            create_or_update_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                attestation_name: attestation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Creates a builder for the delete operation at subscription scope.
        pub fn delete_at_subscription(
            &self,
            subscription_id: impl Into<String>,
            attestation_name: impl Into<String>,
        ) -> delete_at_subscription::Builder {
            delete_at_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                attestation_name: attestation_name.into(),
            }
        }
        /// Creates a builder for listing attestations at resource-group scope;
        /// `$top` and `$filter` are unset by default.
        pub fn list_for_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_for_resource_group::Builder {
            list_for_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                top: None,
                filter: None,
            }
        }
        /// Creates a builder for fetching one attestation at resource-group scope.
        pub fn get_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            attestation_name: impl Into<String>,
        ) -> get_at_resource_group::Builder {
            get_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                attestation_name: attestation_name.into(),
            }
        }
        /// Creates a builder for the create-or-update operation at
        /// resource-group scope.
        pub fn create_or_update_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            attestation_name: impl Into<String>,
            parameters: impl Into<models::Attestation>,
        ) -> create_or_update_at_resource_group::Builder {
            create_or_update_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                attestation_name: attestation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Creates a builder for the delete operation at resource-group scope.
        pub fn delete_at_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            attestation_name: impl Into<String>,
        ) -> delete_at_resource_group::Builder {
            delete_at_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                attestation_name: attestation_name.into(),
            }
        }
        /// Creates a builder for listing attestations at individual-resource
        /// scope; `$top` and `$filter` are unset by default.
        pub fn list_for_resource(&self, resource_id: impl Into<String>) -> list_for_resource::Builder {
            list_for_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                top: None,
                filter: None,
            }
        }
        /// Creates a builder for fetching one attestation at resource scope.
        pub fn get_at_resource(&self, resource_id: impl Into<String>, attestation_name: impl Into<String>) -> get_at_resource::Builder {
            get_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                attestation_name: attestation_name.into(),
            }
        }
        /// Creates a builder for the create-or-update operation at resource scope.
        pub fn create_or_update_at_resource(
            &self,
            resource_id: impl Into<String>,
            attestation_name: impl Into<String>,
            parameters: impl Into<models::Attestation>,
        ) -> create_or_update_at_resource::Builder {
            create_or_update_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                attestation_name: attestation_name.into(),
                parameters: parameters.into(),
            }
        }
        /// Creates a builder for the delete operation at resource scope.
        pub fn delete_at_resource(
            &self,
            resource_id: impl Into<String>,
            attestation_name: impl Into<String>,
        ) -> delete_at_resource::Builder {
            delete_at_resource::Builder {
                client: self.0.clone(),
                resource_id: resource_id.into(),
                attestation_name: attestation_name.into(),
            }
        }
    }
    /// Lists attestations at subscription scope:
    /// GET `/subscriptions/{id}/providers/Microsoft.PolicyInsights/attestations`
    /// (api-version 2021-01-01), optionally limited with `$top` and `$filter`.
    pub mod list_for_subscription {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by
        /// `Client::list_for_subscription`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) top: Option<i32>,
            pub(crate) filter: Option<String>,
        }
        impl Builder {
            /// Sets the optional `$top` query parameter (maximum number of
            /// records to return).
            pub fn top(mut self, top: i32) -> Self {
                self.top = Some(top);
                self
            }
            /// Sets the optional `$filter` OData query parameter.
            pub fn filter(mut self, filter: impl Into<String>) -> Self {
                self.filter = Some(filter.into());
                self
            }
            /// Sends the GET request and resolves to the deserialized
            /// `AttestationListResult`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AttestationListResult, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/attestations",
                        self.client.endpoint(),
                        &self.subscription_id
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2021-01-01");
                    // Optional query parameters are appended only when set.
                    if let Some(top) = &self.top {
                        url.query_pairs_mut().append_pair("$top", &top.to_string());
                    }
                    if let Some(filter) = &self.filter {
                        url.query_pairs_mut().append_pair("$filter", filter);
                    }
                    // GET carries no request body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::AttestationListResult =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
    /// Fetches a single attestation at subscription scope:
    /// GET `/subscriptions/{id}/providers/Microsoft.PolicyInsights/attestations/{attestationName}`
    /// (api-version 2021-01-01).
    pub mod get_at_subscription {
        use super::models;
        /// Failure modes for building, sending, or decoding the request.
        /// `DefaultResponse` carries any non-200 status plus the service's
        /// deserialized error payload.
        #[derive(Debug, thiserror :: Error)]
        pub enum Error {
            #[error("HTTP status code {}", status_code)]
            DefaultResponse {
                status_code: http::StatusCode,
                value: models::ErrorResponse,
            },
            #[error("Failed to parse request URL")]
            ParseUrl(#[source] url::ParseError),
            #[error("Failed to build request")]
            BuildRequest(#[source] http::Error),
            #[error("Failed to serialize request body")]
            Serialize(#[source] serde_json::Error),
            #[error("Failed to get access token")]
            GetToken(#[source] azure_core::Error),
            #[error("Failed to execute request")]
            SendRequest(#[source] azure_core::error::Error),
            #[error("Failed to get response bytes")]
            ResponseBytes(#[source] azure_core::error::Error),
            #[error("Failed to deserialize response, body: {1:?}")]
            Deserialize(#[source] serde_json::Error, bytes::Bytes),
        }
        /// Captured request parameters; constructed by
        /// `Client::get_at_subscription`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) attestation_name: String,
        }
        impl Builder {
            /// Sends the GET request and resolves to the deserialized
            /// `Attestation`, or `Error` on failure.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Attestation, Error>> {
                Box::pin(async move {
                    let url_str = &format!(
                        "{}/subscriptions/{}/providers/Microsoft.PolicyInsights/attestations/{}",
                        self.client.endpoint(),
                        &self.subscription_id,
                        &self.attestation_name
                    );
                    let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
                    let mut req_builder = http::request::Builder::new();
                    req_builder = req_builder.method(http::Method::GET);
                    // Acquire a bearer token for the client's configured scopes.
                    let credential = self.client.token_credential();
                    let token_response = credential
                        .get_token(&self.client.scopes().join(" "))
                        .await
                        .map_err(Error::GetToken)?;
                    req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                    url.query_pairs_mut().append_pair("api-version", "2021-01-01");
                    // GET carries no request body.
                    let req_body = azure_core::EMPTY_BODY;
                    req_builder = req_builder.uri(url.as_str());
                    let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
                    let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
                    let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                    match rsp_status {
                        http::StatusCode::OK => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::Attestation =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Ok(rsp_value)
                        }
                        status_code => {
                            let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
                            let rsp_value: models::ErrorResponse =
                                serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
                            Err(Error::DefaultResponse {
                                status_code,
                                value: rsp_value,
                            })
                        }
                    }
                })
            }
        }
    }
pub mod create_or_update_at_subscription {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Attestation),
Created201(models::Attestation),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) attestation_name: String,
pub(crate) parameters: models::Attestation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_at_subscription {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) attestation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_for_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AttestationListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/attestations",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AttestationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_at_resource_group {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) attestation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Attestation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_at_resource_group {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Attestation),
Created201(models::Attestation),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) attestation_name: String,
pub(crate) parameters: models::Attestation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_at_resource_group {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) attestation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/subscriptions/{}/resourceGroups/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.subscription_id,
&self.resource_group_name,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod list_for_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) top: Option<i32>,
pub(crate) filter: Option<String>,
}
impl Builder {
pub fn top(mut self, top: i32) -> Self {
self.top = Some(top);
self
}
pub fn filter(mut self, filter: impl Into<String>) -> Self {
self.filter = Some(filter.into());
self
}
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::AttestationListResult, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/attestations",
self.client.endpoint(),
&self.resource_id
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
if let Some(top) = &self.top {
url.query_pairs_mut().append_pair("$top", &top.to_string());
}
if let Some(filter) = &self.filter {
url.query_pairs_mut().append_pair("$filter", filter);
}
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::AttestationListResult =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod get_at_resource {
use super::models;
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) attestation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<models::Attestation, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.resource_id,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(rsp_value)
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod create_or_update_at_resource {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200(models::Attestation),
Created201(models::Attestation),
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) attestation_name: String,
pub(crate) parameters: models::Attestation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.resource_id,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&self.parameters).map_err(Error::Serialize)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Ok200(rsp_value))
}
http::StatusCode::CREATED => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::Attestation =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Ok(Response::Created201(rsp_value))
}
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
pub mod delete_at_resource {
use super::models;
#[derive(Debug)]
pub enum Response {
Ok200,
NoContent204,
}
#[derive(Debug, thiserror :: Error)]
pub enum Error {
#[error("HTTP status code {}", status_code)]
DefaultResponse {
status_code: http::StatusCode,
value: models::ErrorResponse,
},
#[error("Failed to parse request URL")]
ParseUrl(#[source] url::ParseError),
#[error("Failed to build request")]
BuildRequest(#[source] http::Error),
#[error("Failed to serialize request body")]
Serialize(#[source] serde_json::Error),
#[error("Failed to get access token")]
GetToken(#[source] azure_core::Error),
#[error("Failed to execute request")]
SendRequest(#[source] azure_core::error::Error),
#[error("Failed to get response bytes")]
ResponseBytes(#[source] azure_core::error::Error),
#[error("Failed to deserialize response, body: {1:?}")]
Deserialize(#[source] serde_json::Error, bytes::Bytes),
}
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) resource_id: String,
pub(crate) attestation_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, std::result::Result<Response, Error>> {
Box::pin(async move {
let url_str = &format!(
"{}/{}/providers/Microsoft.PolicyInsights/attestations/{}",
self.client.endpoint(),
&self.resource_id,
&self.attestation_name
);
let mut url = url::Url::parse(url_str).map_err(Error::ParseUrl)?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = self.client.token_credential();
let token_response = credential
.get_token(&self.client.scopes().join(" "))
.await
.map_err(Error::GetToken)?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2021-01-01");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder.body(req_body).map_err(Error::BuildRequest)?;
let rsp = self.client.send(req).await.map_err(Error::SendRequest)?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(Response::Ok200),
http::StatusCode::NO_CONTENT => Ok(Response::NoContent204),
status_code => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await.map_err(Error::ResponseBytes)?;
let rsp_value: models::ErrorResponse =
serde_json::from_slice(&rsp_body).map_err(|source| Error::Deserialize(source, rsp_body.clone()))?;
Err(Error::DefaultResponse {
status_code,
value: rsp_value,
})
}
}
})
}
}
}
}
| 51.833298 | 140 | 0.519775 |
56390e32857cfad1e4d9654e2cbce06c69f52169
| 6,918 |
//! Contains functions related to printing information about system running Youki
use std::{collections::HashSet, fs, path::Path};
use anyhow::Result;
use clap::Clap;
use procfs::{CpuInfo, Meminfo};
use cgroups::{self, common::CgroupSetup, v2::controller_type::ControllerType};
/// Show information about the system
// Field-less struct: the `info` subcommand takes no arguments, but Clap
// still needs a type to derive the argument parser from.
#[derive(Clap, Debug)]
pub struct Info {}
impl Info {
    /// Entry point for the `info` subcommand: prints each section of system
    /// information in a fixed order (youki version, kernel, OS distribution,
    /// hardware, cgroups, namespaces). Always returns `Ok(())`; the individual
    /// print functions silently skip data they cannot read.
    pub fn exec(&self) -> Result<()> {
        print_youki();
        print_kernel();
        print_os();
        print_hardware();
        print_cgroups();
        print_namespaces();
        Ok(())
    }
}
/// Print the version of youki itself (taken from Cargo.toml at compile time).
pub fn print_youki() {
    let version = env!("CARGO_PKG_VERSION");
    println!("{:<18}{}", "Version", version);
}
/// Print the kernel release, kernel version, and machine architecture,
/// as reported by uname(2).
pub fn print_kernel() {
    let info = nix::sys::utsname::uname();
    let rows = [
        ("Kernel-Release", info.release()),
        ("Kernel-Version", info.version()),
        ("Architecture", info.machine()),
    ];
    for (label, value) in rows {
        println!("{:<18}{}", label, value);
    }
}
/// Prints OS distribution information, preferring /etc/os-release and
/// falling back to /usr/lib/os-release.
// see https://www.freedesktop.org/software/systemd/man/os-release.html
pub fn print_os() {
    // `or_else` is lazy, so the fallback file is only read when the first is missing.
    let os = try_read_os_from("/etc/os-release")
        .or_else(|| try_read_os_from("/usr/lib/os-release"));
    if let Some(os) = os {
        println!("{:<18}{}", "Operating System", os);
    }
}
/// Helper function to read the OS distribution info from an os-release file.
///
/// Returns `PRETTY_NAME` when present, otherwise `NAME` + `VERSION`,
/// otherwise `None`. A missing or unreadable file also yields `None`.
fn try_read_os_from<P: AsRef<Path>>(path: P) -> Option<String> {
    // Attempt the read directly instead of checking `exists()` first: this
    // removes the check-then-read race and treats an unreadable file the
    // same way as a missing one.
    let release_content = fs::read_to_string(path).ok()?;
    if let Some(pretty) = find_parameter(&release_content, "PRETTY_NAME") {
        return Some(pretty.trim_matches('"').to_owned());
    }
    // Fall back to combining NAME and VERSION when both are present.
    let name = find_parameter(&release_content, "NAME");
    let version = find_parameter(&release_content, "VERSION");
    if let Some((name, version)) = name.zip(version) {
        return Some(format!(
            "{} {}",
            name.trim_matches('"'),
            version.trim_matches('"')
        ));
    }
    None
}
/// Helper function to find the value of `param_name` in `KEY=value` content
/// such as os-release files or a kernel build config.
///
/// Matches the key exactly, so looking up `NAME` no longer also matches
/// `NAME_FOO=...` lines (the previous `starts_with` check did). The value is
/// everything after the first `=`, which keeps values that themselves
/// contain `=` intact instead of returning only their last segment.
fn find_parameter<'a>(content: &'a str, param_name: &str) -> Option<&'a str> {
    content.lines().find_map(|line| {
        line.strip_prefix(param_name)
            .and_then(|rest| rest.strip_prefix('='))
    })
}
/// Prints hardware information of the system: CPU core count and total
/// memory in MiB, both read from procfs.
pub fn print_hardware() {
    if let Ok(cpu_info) = CpuInfo::new() {
        println!("{:<18}{}", "Cores", cpu_info.num_cores());
    }
    if let Ok(mem_info) = Meminfo::new() {
        // mem_total is in bytes; convert to MiB.
        let total_mib = mem_info.mem_total / u64::pow(1024, 2);
        println!("{:<18}{}", "Total Memory", total_mib);
    }
}
/// Print cgroups info of system
pub fn print_cgroups() {
    // Detected setup: legacy (v1), hybrid, or unified (v2).
    let cgroup_setup = cgroups::common::get_cgroup_setup();
    if let Ok(cgroup_setup) = &cgroup_setup {
        println!("{:<18}{}", "Cgroup setup", cgroup_setup);
    }
    println!("Cgroup mounts");
    if let Ok(v1_mounts) = cgroups::v1::util::list_supported_mount_points() {
        // Render and sort the v1 mounts so the output order is deterministic.
        let mut v1_mounts: Vec<String> = v1_mounts
            .iter()
            .map(|kv| format!("  {:<16}{}", kv.0.to_string(), kv.1.display()))
            .collect();
        v1_mounts.sort();
        for cgroup_mount in v1_mounts {
            println!("{}", cgroup_mount);
        }
    }
    let unified = cgroups::v2::util::get_unified_mount_point();
    if let Ok(mount_point) = &unified {
        println!("  {:<16}{}", "unified", mount_point.display());
    }
    // For hybrid/unified setups, additionally report which v2 controllers
    // are attached to the unified hierarchy.
    if let Ok(cgroup_setup) = cgroup_setup {
        if let Ok(unified) = &unified {
            if matches!(cgroup_setup, CgroupSetup::Hybrid | CgroupSetup::Unified) {
                if let Ok(controllers) = cgroups::v2::util::get_available_controllers(unified) {
                    println!("CGroup v2 controllers");
                    let active_controllers: HashSet<ControllerType> =
                        controllers.into_iter().collect();
                    for controller in cgroups::v2::controller_type::CONTROLLER_TYPES {
                        let status = if active_controllers.contains(controller) {
                            "attached"
                        } else {
                            "detached"
                        };
                        println!("  {:<16}{}", controller.to_string(), status);
                    }
                }
                // The "device" row is derived from CONFIG_CGROUP_BPF in the
                // kernel build config rather than from the controller list.
                if let Some(config) = read_kernel_config() {
                    let display = FeatureDisplay::with_status("device", "attached", "detached");
                    print_feature_status(&config, "CONFIG_CGROUP_BPF", display);
                }
            }
        }
    }
}
/// Reads the kernel build configuration from /boot/config-<release>, if it
/// exists; returns `None` when the file is absent or unreadable.
fn read_kernel_config() -> Option<String> {
    let uname = nix::sys::utsname::uname();
    let config_path = Path::new("/boot").join(format!("config-{}", uname.release()));
    if config_path.exists() {
        fs::read_to_string(config_path).ok()
    } else {
        None
    }
}
/// Prints which Linux namespace types are enabled in the running kernel,
/// based on the kernel build configuration (see `read_kernel_config`).
pub fn print_namespaces() {
    if let Some(content) = read_kernel_config() {
        if let Some(ns_enabled) = find_parameter(&content, "CONFIG_NAMESPACES") {
            if ns_enabled == "y" {
                println!("{:<18}enabled", "Namespaces");
            } else {
                // Namespaces are compiled out entirely; the per-type
                // breakdown below would be meaningless, so stop here.
                println!("{:<18}disabled", "Namespaces");
                return;
            }
        }
        // NOTE(review): when CONFIG_NAMESPACES is absent from the config the
        // code falls through and still prints the per-type list — confirm
        // this is intended rather than an oversight.
        // mount namespace is always enabled if namespaces are enabled
        println!("  {:<16}enabled", "mount");
        print_feature_status(&content, "CONFIG_UTS_NS", FeatureDisplay::new("uts"));
        print_feature_status(&content, "CONFIG_IPC_NS", FeatureDisplay::new("ipc"));
        print_feature_status(&content, "CONFIG_USER_NS", FeatureDisplay::new("user"));
        print_feature_status(&content, "CONFIG_PID_NS", FeatureDisplay::new("pid"));
        print_feature_status(&content, "CONFIG_NET_NS", FeatureDisplay::new("network"));
    }
}
/// Prints a single feature line, using `display` for the feature name and
/// the strings shown for its enabled/disabled state in the kernel `config`.
fn print_feature_status(config: &str, feature: &str, display: FeatureDisplay) {
    // A flag counts as enabled only when present and set to "y"; a missing
    // flag or any other value renders as the disabled status.
    let status = match find_parameter(config, feature) {
        Some("y") => display.enabled,
        _ => display.disabled,
    };
    println!("  {:<16}{}", display.name, status);
}
/// Describes how a feature line is rendered: the label plus the strings
/// used for the enabled and disabled states.
struct FeatureDisplay<'a> {
    name: &'a str,
    enabled: &'a str,
    disabled: &'a str,
}
impl<'a> FeatureDisplay<'a> {
    /// Display with the default "enabled"/"disabled" status strings.
    fn new(name: &'a str) -> Self {
        Self::with_status(name, "enabled", "disabled")
    }
    /// Display with caller-provided status strings.
    fn with_status(name: &'a str, enabled: &'a str, disabled: &'a str) -> Self {
        Self {
            name,
            enabled,
            disabled,
        }
    }
}
| 31.162162 | 96 | 0.559266 |
0819f08f525ee22037c23b5aa3a11ecf99ed7546
| 2,094 |
use futures_io::{self as io, AsyncBufRead, AsyncRead};
use pin_utils::{unsafe_pinned, unsafe_unpinned};
use std::{
marker::Unpin,
pin::Pin,
task::{Context, Poll},
};
/// Reader for the [`interleave_pending`](super::AsyncReadTestExt::interleave_pending) method.
#[derive(Debug)]
pub struct InterleavePending<R: AsyncRead> {
    // The wrapped reader that ultimately services reads.
    reader: R,
    // True when the previous poll returned an artificial `Pending`.
    pended: bool,
}
// `InterleavePending` is `Unpin` whenever the inner reader is.
impl<R: AsyncRead + Unpin> Unpin for InterleavePending<R> {}
impl<R: AsyncRead> InterleavePending<R> {
    // Pin-projection accessors generated by pin-utils: `reader` is
    // structurally pinned, `pended` is a plain bool and never pinned.
    unsafe_pinned!(reader: R);
    unsafe_unpinned!(pended: bool);
    /// Wraps `reader` so that polls alternate between an artificial
    /// `Pending` and a delegated poll, starting with `Pending`.
    pub(crate) fn new(reader: R) -> InterleavePending<R> {
        InterleavePending {
            reader,
            pended: false,
        }
    }
    /// Splits a pinned `&mut self` into a pinned inner reader plus a mutable
    /// reference to the `pended` flag, so both can be used at once.
    fn project<'a>(self: Pin<&'a mut Self>) -> (Pin<&'a mut R>, &'a mut bool) {
        // SAFETY: `reader` is structurally pinned (never moved out of
        // `self`), while `pended` is an ordinary unpinned bool field.
        unsafe {
            let this = self.get_unchecked_mut();
            (Pin::new_unchecked(&mut this.reader), &mut this.pended)
        }
    }
}
impl<R: AsyncRead> AsyncRead for InterleavePending<R> {
    fn poll_read(
        self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut [u8],
    ) -> Poll<io::Result<usize>> {
        let (reader, pended) = self.project();
        if *pended {
            // The previous poll returned Pending; this one is allowed to
            // reach the inner reader. Reset the flag once it yields a
            // ready result so the next poll pends again.
            let next = reader.poll_read(cx, buf);
            if next.is_ready() {
                *pended = false;
            }
            next
        } else {
            // Inject an artificial Pending, waking the task immediately so
            // it gets polled again right away.
            cx.waker().wake_by_ref();
            *pended = true;
            Poll::Pending
        }
    }
}
impl<R: AsyncBufRead> AsyncBufRead for InterleavePending<R> {
    fn poll_fill_buf<'a>(
        self: Pin<&'a mut Self>,
        cx: &mut Context<'_>,
    ) -> Poll<io::Result<&'a [u8]>> {
        let (reader, pended) = self.project();
        if *pended {
            // Flag is set: delegate to the inner reader and clear the flag
            // once it produces a ready result.
            let next = reader.poll_fill_buf(cx);
            if next.is_ready() {
                *pended = false;
            }
            next
        } else {
            // Inject an artificial Pending and wake immediately so the
            // task is polled again right away.
            cx.waker().wake_by_ref();
            *pended = true;
            Poll::Pending
        }
    }
    // Consuming buffered bytes is forwarded untouched; only polls interleave.
    fn consume(self: Pin<&mut Self>, amount: usize) {
        self.reader().consume(amount)
    }
}
| 25.851852 | 94 | 0.530086 |
0a060d007c7fd12e3cc1ae5f0a60a76a3c6be732
| 10,179 |
//! Generate SyntaxKind definitions as well as typed AST definitions for nodes and tokens.
//! This is derived from rust-analyzer/xtask/codegen
use std::vec;
use super::{
kinds_src::{AstSrc, Field},
to_lower_snake_case, Mode,
};
use crate::generate_syntax_factory::generate_syntax_factory;
use crate::kinds_src::{AstListSeparatorConfiguration, AstListSrc, TokenKind};
use crate::{
generate_macros::generate_macros,
generate_nodes::generate_nodes,
generate_syntax_kinds::generate_syntax_kinds,
kinds_src::{AstEnumSrc, AstNodeSrc, KINDS_SRC},
update,
};
use ungrammar::{Grammar, Rule, Token};
use xtask::{project_root, Result};
// Nodes of this type won't generate any code; `SyntaxElement*` marks an
// unknown node (see `classify_node_rule`).
pub const SYNTAX_ELEMENT_TYPE: &str = "SyntaxElement";
/// Regenerates all syntax artifacts (nodes, kinds, factory, macros) from the
/// grammar, writing or verifying each file depending on `mode`.
pub fn generate_ast(mode: Mode) -> Result<()> {
    let mut ast = load_ast();
    ast.sort();
    update(
        project_root().join(crate::AST_NODES).as_path(),
        &generate_nodes(&ast)?,
        mode,
    )?;
    update(
        project_root().join(crate::SYNTAX_KINDS).as_path(),
        &generate_syntax_kinds(KINDS_SRC)?,
        mode,
    )?;
    update(
        project_root().join(crate::SYNTAX_FACTORY).as_path(),
        &generate_syntax_factory(&ast)?,
        mode,
    )?;
    update(
        project_root().join(crate::AST_MACROS).as_path(),
        &generate_macros(&ast)?,
        mode,
    )?;
    Ok(())
}
/// Parses the bundled `js.ungram` grammar and lowers it into the AST model.
pub(crate) fn load_ast() -> AstSrc {
    let grammar: Grammar = include_str!("../js.ungram").parse().unwrap();
    make_ast(&grammar)
}
/// Builds the `AstSrc` model from the parsed ungrammar, classifying every
/// grammar node as a union, regular node, unknown node, or list.
fn make_ast(grammar: &Grammar) -> AstSrc {
    let mut ast = AstSrc::default();
    for node in grammar.iter() {
        let name = grammar[node].name.clone();
        // SyntaxElement is built in; it never generates code of its own.
        if name == SYNTAX_ELEMENT_TYPE {
            continue;
        }
        let rule = &grammar[node].rule;
        match classify_node_rule(grammar, rule) {
            NodeRuleClassification::Union(variants) => ast.unions.push(AstEnumSrc {
                documentation: vec![],
                name,
                variants,
            }),
            NodeRuleClassification::Node => {
                // Lower the rule tree into the node's field list.
                let mut fields = vec![];
                handle_rule(&mut fields, grammar, rule, None, false);
                ast.nodes.push(AstNodeSrc {
                    documentation: vec![],
                    name,
                    fields,
                })
            }
            NodeRuleClassification::Unknown => ast.unknowns.push(name),
            NodeRuleClassification::List {
                separator,
                element_name,
            } => {
                ast.push_list(
                    name.as_str(),
                    AstListSrc {
                        element_name,
                        separator,
                    },
                );
            }
        }
    }
    ast
}
/// Classification of a node rule.
/// Determined by matching the top level production of any node.
enum NodeRuleClassification {
    /// Union of the form `A = B | C`
    Union(Vec<String>),
    /// Regular node containing tokens or sub nodes of the form `A = B 'c'`
    Node,
    /// An Unknown node of the form `A = SyntaxElement*`
    Unknown,
    /// A list node of the form `A = B*` or `A = (B (',' B)*)` or `A = (B (',' B)* ','?)`
    List {
        /// Name of the nodes stored in this list (`B` in the example above)
        element_name: String,
        /// [None] if this is a node list or [Some] if this is a separated list
        separator: Option<AstListSeparatorConfiguration>,
    },
}
/// Determines how a grammar node's top-level production maps onto generated
/// code; see [`NodeRuleClassification`] for the possible outcomes.
fn classify_node_rule(grammar: &Grammar, rule: &Rule) -> NodeRuleClassification {
    match rule {
        // this is for enums
        Rule::Alt(alternatives) => {
            let mut all_alternatives = vec![];
            for alternative in alternatives {
                match alternative {
                    Rule::Node(it) => all_alternatives.push(grammar[*it].name.clone()),
                    // A stray ";" token among the variants is tolerated and skipped.
                    Rule::Token(it) if grammar[*it].name == ";" => (),
                    // Any other token makes this a regular node, not a union.
                    _ => return NodeRuleClassification::Node,
                }
            }
            NodeRuleClassification::Union(all_alternatives)
        }
        // A*
        Rule::Rep(rule) => {
            let element_type = match rule.as_ref() {
                Rule::Node(node) => &grammar[*node].name,
                _ => {
                    panic!("Lists should only be over node types");
                }
            };
            if element_type == SYNTAX_ELEMENT_TYPE {
                // `SyntaxElement*` is the marker shape for unknown nodes.
                NodeRuleClassification::Unknown
            } else {
                NodeRuleClassification::List {
                    separator: None,
                    element_name: element_type.to_string(),
                }
            }
        }
        Rule::Seq(rules) => {
            // (T (',' T)* ','?)
            // (T (',' T)*)
            if let Some(comma_list) = handle_comma_list(grammar, rules.as_slice()) {
                NodeRuleClassification::List {
                    separator: Some(AstListSeparatorConfiguration {
                        allow_trailing: comma_list.trailing_separator,
                        separator_token: comma_list.separator_name.to_string(),
                    }),
                    element_name: comma_list.node_name.to_string(),
                }
            } else {
                NodeRuleClassification::Node
            }
        }
        _ => NodeRuleClassification::Node,
    }
}
/// Returns the token's grammar name, quoting bracket/backtick tokens.
fn clean_token_name(grammar: &Grammar, token: &Token) -> String {
    let name = grammar[*token].name.clone();
    // These tokens, when parsed to proc_macro2::TokenStream, generate a
    // stream of bytes that [quote] can't recognize, so decorate them with
    // single quotes.
    if "[]{}()`".contains(&name) {
        format!("'{}'", name)
    } else {
        name
    }
}
/// Lowers a single grammar `rule` into `fields` of the node being built.
///
/// `label` is the explicit field name from the grammar, if any; `optional`
/// is true when the rule appeared under an `Opt` wrapper.
fn handle_rule(
    fields: &mut Vec<Field>,
    grammar: &Grammar,
    rule: &Rule,
    label: Option<&str>,
    optional: bool,
) {
    match rule {
        Rule::Labeled { label, rule } => {
            // Some methods need to be manually implemented because they need some custom logic;
            // we use the prefix "manual__" to exclude labelled nodes.
            if handle_tokens_in_unions(fields, grammar, rule, label, optional) {
                return;
            }
            handle_rule(fields, grammar, rule, Some(label), optional)
        }
        Rule::Node(node) => {
            let ty = grammar[*node].name.clone();
            // Field name defaults to the snake_cased node name when unlabeled.
            let name = label
                .map(String::from)
                .unwrap_or_else(|| to_lower_snake_case(&ty));
            let field = Field::Node { name, ty, optional };
            fields.push(field);
        }
        Rule::Token(token) => {
            let name = clean_token_name(grammar, token);
            if name == "''" {
                // array hole
                return;
            }
            let field = Field::Token {
                name: label.map(String::from).unwrap_or_else(|| name.clone()),
                kind: TokenKind::Single(name),
                optional,
            };
            fields.push(field);
        }
        Rule::Rep(_) => {
            // Repetitions must be modeled as dedicated list nodes upstream.
            panic!("Create a list node for *many* children {:?}", label);
        }
        Rule::Opt(rule) => {
            handle_rule(fields, grammar, rule, label, true);
        }
        Rule::Alt(rules) => {
            for rule in rules {
                handle_rule(fields, grammar, rule, label, false);
            }
        }
        Rule::Seq(rules) => {
            for rule in rules {
                handle_rule(fields, grammar, rule, label, false);
            }
        }
    };
}
/// Result of recognizing a separated-list rule; see `handle_comma_list`.
#[derive(Debug)]
struct CommaList<'a> {
    // Name of the repeated element node (`T` in `(T (',' T)*)`).
    node_name: &'a str,
    // Name of the separator token (`,` in the example).
    separator_name: &'a str,
    // True when an optional trailing separator is allowed.
    trailing_separator: bool,
}
// (T (',' T)* ','?)
// (T (',' T)*)
/// Tries to recognize `rules` as a separated list of a single node type in
/// one of the two shapes above. Returns `None` if the shape doesn't match.
fn handle_comma_list<'a>(grammar: &'a Grammar, rules: &[Rule]) -> Option<CommaList<'a>> {
    // Does it match (T * ',')?
    let (node, repeat, trailing_separator) = match rules {
        [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_separator)] => {
            (node, repeat, Some(trailing_separator))
        }
        [Rule::Node(node), Rule::Rep(repeat)] => (node, repeat, None),
        _ => return None,
    };
    // Is the repeat a ()*?
    let repeat = match &**repeat {
        Rule::Seq(it) => it,
        _ => return None,
    };
    // Does the repeat match (token node))
    let comma = match repeat.as_slice() {
        [comma, Rule::Node(n)] => {
            // The optional trailing separator must be the same token as the
            // separator inside the repeat, and the repeated node must be the
            // same node that heads the sequence.
            let separator_matches_trailing = if let Some(trailing) = trailing_separator {
                &**trailing == comma
            } else {
                true
            };
            if n != node || !separator_matches_trailing {
                return None;
            }
            comma
        }
        _ => return None,
    };
    let separator_name = match comma {
        Rule::Token(token) => &grammar[*token].name,
        _ => panic!("The separator in rule {:?} must be a token", rules),
    };
    Some(CommaList {
        node_name: &grammar[*node].name,
        trailing_separator: trailing_separator.is_some(),
        separator_name,
    })
}
// handle cases like: `op: ('-' | '+' | '*')`
/// Collapses a labeled union of plain tokens into a single `Field::Token`
/// with multiple kinds. Returns false — leaving `fields` untouched — when
/// the rule isn't a pure token union.
fn handle_tokens_in_unions(
    fields: &mut Vec<Field>,
    grammar: &Grammar,
    rule: &Rule,
    label: &str,
    optional: bool,
) -> bool {
    // Unwrap an optional wrapper first, e.g. `op: ('-' | '+')?`.
    let (rule, optional) = match rule {
        Rule::Opt(rule) => (&**rule, true),
        _ => (rule, optional),
    };
    let rule = match rule {
        Rule::Alt(rule) => rule,
        _ => return false,
    };
    // Every alternative must be a token, otherwise this is not a token union.
    let mut token_kinds = vec![];
    for rule in rule.iter() {
        match rule {
            Rule::Token(token) => token_kinds.push(clean_token_name(grammar, token)),
            _ => return false,
        }
    }
    let field = Field::Token {
        name: label.to_string(),
        kind: TokenKind::Many(token_kinds),
        optional,
    };
    fields.push(field);
    true
}
| 30.567568 | 96 | 0.533451 |
79f852d8a95e739e0ed1c10a4c11ff4c5b2cf4f0
| 2,765 |
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::infer::lexical_region_resolve::RegionResolutionError::*;
use crate::infer::InferCtxt;
use rustc_errors::{DiagnosticBuilder, ErrorGuaranteed};
use rustc_middle::ty::{self, TyCtxt};
use rustc_span::source_map::Span;
mod different_lifetimes;
pub mod find_anon_type;
mod mismatched_static_lifetime;
mod named_anon_conflict;
mod placeholder_error;
mod static_impl_trait;
mod trait_impl_difference;
mod util;
pub use static_impl_trait::suggest_new_region_bound;
pub use util::find_param_with_region;
impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
    /// Attempts to emit one of the specialized "nice" diagnostics for the
    /// given region resolution error; returns true if one was emitted.
    pub fn try_report_nice_region_error(&self, error: &RegionResolutionError<'tcx>) -> bool {
        NiceRegionError::new(self, error.clone()).try_report().is_some()
    }
}
/// Shared state for the family of specialized region-error reporters
/// implemented in this module's submodules.
pub struct NiceRegionError<'cx, 'tcx> {
    infcx: &'cx InferCtxt<'cx, 'tcx>,
    // Populated when constructed from a full resolution error.
    error: Option<RegionResolutionError<'tcx>>,
    // Populated when constructed directly from a span and two regions.
    regions: Option<(Span, ty::Region<'tcx>, ty::Region<'tcx>)>,
}
impl<'cx, 'tcx> NiceRegionError<'cx, 'tcx> {
    /// Builds a reporter from a full region resolution error.
    pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>, error: RegionResolutionError<'tcx>) -> Self {
        Self { infcx, error: Some(error), regions: None }
    }
    /// Builds a reporter directly from a span and a sub/sup region pair.
    pub fn new_from_span(
        infcx: &'cx InferCtxt<'cx, 'tcx>,
        span: Span,
        sub: ty::Region<'tcx>,
        sup: ty::Region<'tcx>,
    ) -> Self {
        Self { infcx, error: None, regions: Some((span, sub, sup)) }
    }
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.infcx.tcx
    }
    /// The subset of reporters still useful under the MIR borrow checker.
    pub fn try_report_from_nll(&self) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
        // Due to the improved diagnostics returned by the MIR borrow checker, only a subset of
        // the nice region errors are required when running under the MIR borrow checker.
        self.try_report_named_anon_conflict().or_else(|| self.try_report_placeholder_conflict())
    }
    /// Tries each specialized reporter in turn, emitting the first that applies.
    pub fn try_report(&self) -> Option<ErrorGuaranteed> {
        self.try_report_from_nll()
            .map(|mut diag| diag.emit())
            .or_else(|| self.try_report_impl_not_conforming_to_trait())
            .or_else(|| self.try_report_anon_anon_conflict())
            .or_else(|| self.try_report_static_impl_trait())
            .or_else(|| self.try_report_mismatched_static_lifetime())
    }
    /// Extracts the (span, sub-region, sup-region) triple from whichever
    /// source this reporter was constructed with, if available.
    pub(super) fn regions(&self) -> Option<(Span, ty::Region<'tcx>, ty::Region<'tcx>)> {
        match (&self.error, self.regions) {
            (Some(ConcreteFailure(origin, sub, sup)), None) => Some((origin.span(), *sub, *sup)),
            (Some(SubSupConflict(_, _, origin, sub, _, sup, _)), None) => {
                Some((origin.span(), *sub, *sup))
            }
            (None, Some((span, sub, sup))) => Some((span, sub, sup)),
            _ => None,
        }
    }
}
| 36.381579 | 97 | 0.644485 |
e4f1ef7424e03eb5a4be0f877eff59799a46aa38
| 2,788 |
use crate::cli_output::utils::print_warning;
use anyhow::{Context, Result};
use git2::{Commit, IndexAddOption, ObjectType, Repository};
use std::path::Path;
/// Thin wrapper around a discovered `git2::Repository`, used to stage and
/// commit bookmark changes.
pub struct Git {
    repo: Repository,
}
impl Git {
    /// Finds the git repository containing `csv` by walking up its parent
    /// directories; returns `None` (after printing a warning) when no
    /// repository exists anywhere above the file.
    pub fn new(csv: &str) -> Option<Self> {
        let mut dir = Path::new(csv);
        // Traverse the directory tree looking for the git repo
        let repo = loop {
            match dir.parent() {
                None => {
                    print_warning("It appears the CSV file is not in a git repo. Use --no-commit to suppress this message");
                    return None;
                }
                Some(new_dir) => {
                    dir = new_dir;
                    let repo_result = Repository::open(dir);
                    if let Ok(repo) = repo_result {
                        break repo;
                    }
                }
            }
        };
        Some(Self { repo })
    }
    /// Returns true when the work tree has no uncommitted changes.
    pub fn is_clean(&self) -> Result<bool> {
        let statuses = self
            .repo
            .statuses(None)
            .context("Could not get git status")?;
        // https://github.com/rust-lang/git2-rs/blob/master/examples/status.rs#L174
        let is_dirty = statuses.iter().any(|e| e.status() != git2::Status::CURRENT);
        Ok(!is_dirty)
    }
    /// Stages everything and commits with a bookmark-specific message.
    pub fn add_and_commit_bookmark(&self, url: &str, description: &str) -> Result<()> {
        self.add_and_commit(format!("Add bookmark for {url} - {description}").as_str())?;
        Ok(())
    }
    // https://zsiciarz.github.io/24daysofrust/book/vol2/day16.html
    // https://github.com/rust-lang/git2-rs/blob/master/examples/add.rs#L71
    /// Stages all changes and creates a commit on HEAD with message `msg`.
    /// Errors if HEAD cannot be resolved to a commit (see `find_last_commit`).
    pub fn add_and_commit(&self, msg: &str) -> Result<()> {
        // add
        let oid = {
            let mut index = self.repo.index()?;
            // Since we check to make sure that there are no previous uncommitted changes, it is safe to add all
            index.add_all(["*"].iter(), IndexAddOption::DEFAULT, None)?;
            // I don't know why we need to double write, but it is necessary in order for the commit to go through
            index.write()?;
            index.write_tree()?
        };
        // commit
        let tree = self.repo.find_tree(oid)?;
        let parent_commit = self.find_last_commit()?;
        let signature = self.repo.signature()?;
        self.repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[&parent_commit],
        )?;
        Ok(())
    }
    /// Resolves HEAD and peels it to its commit; errors when that is not
    /// possible (e.g. a freshly initialized repository with no commits).
    fn find_last_commit(&self) -> Result<Commit, git2::Error> {
        let obj = self.repo.head()?.resolve()?.peel(ObjectType::Commit)?;
        obj.into_commit()
            .map_err(|_| git2::Error::from_str("Couldn't find last commit"))
    }
}
| 31.681818 | 124 | 0.529053 |
cc89639db723e4dbfddd440014fdbd47cd3460a3
| 4,643 |
use std::fmt;
use std::io;
use bytes::Bytes;
use futures::{future, stream, Async, Future, Poll, Stream};
use tokio::io::AsyncRead;
/// Stream of bytes.
pub struct ByteStream {
    // Total byte count when known up front (set when built from a Vec<u8>).
    size_hint: Option<usize>,
    // The underlying chunked byte stream.
    inner: Box<dyn Stream<Item = Bytes, Error = io::Error> + Send>,
}
impl ByteStream {
    /// Create a new `ByteStream` by wrapping a `futures` stream.
    /// No size hint is available for an arbitrary stream.
    pub fn new<S>(stream: S) -> ByteStream
    where
        S: Stream<Item = Bytes, Error = io::Error> + Send + 'static,
    {
        ByteStream {
            size_hint: None,
            inner: Box::new(stream),
        }
    }
    /// Total number of bytes, if known in advance.
    pub(crate) fn size_hint(&self) -> Option<usize> {
        self.size_hint
    }
    /// Return an implementation of `AsyncRead` that uses async i/o to consume the stream.
    pub fn into_async_read(self) -> impl AsyncRead + Send {
        ImplAsyncRead::new(self.inner)
    }
    /// Return an implementation of `Read` that uses blocking i/o to consume the stream.
    pub fn into_blocking_read(self) -> impl io::Read + Send {
        ImplBlockingRead::new(self.inner)
    }
}
impl From<Vec<u8>> for ByteStream {
    /// Builds a single-chunk stream from an in-memory buffer; the buffer
    /// length doubles as the size hint.
    fn from(buf: Vec<u8>) -> ByteStream {
        let size_hint = Some(buf.len());
        let chunk = Bytes::from(buf);
        ByteStream {
            size_hint,
            inner: Box::new(stream::once(Ok(chunk))),
        }
    }
}
impl fmt::Debug for ByteStream {
    /// Formats as `<ByteStream size_hint=...>`; the inner stream is opaque.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_fmt(format_args!("<ByteStream size_hint={:?}>", self.size_hint))
    }
}
// Polling is forwarded directly to the wrapped inner stream.
impl Stream for ByteStream {
    type Item = Bytes;
    type Error = io::Error;
    fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
        self.inner.poll()
    }
}
/// Adapter exposing a chunk stream as a non-blocking reader.
struct ImplAsyncRead {
    // Unread remainder of the most recently received chunk.
    buffer: io::Cursor<Bytes>,
    // Fused so polling again after termination stays well-defined.
    stream: stream::Fuse<Box<dyn Stream<Item = Bytes, Error = io::Error> + Send>>,
}
impl ImplAsyncRead {
    fn new(stream: Box<dyn Stream<Item = Bytes, Error = io::Error> + Send>) -> Self {
        ImplAsyncRead {
            // Start with an empty buffer so the first read polls the stream.
            buffer: io::Cursor::new(Bytes::new()),
            stream: stream.fuse(),
        }
    }
}
impl io::Read for ImplAsyncRead {
    /// Drains the buffered chunk first; when it's empty, polls the stream
    /// for the next one. `Async::NotReady` surfaces as `WouldBlock`, and
    /// stream exhaustion as EOF (`Ok(0)`).
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }
        loop {
            // Serve from the current chunk if any bytes remain.
            let n = self.buffer.read(buf)?;
            if n > 0 {
                return Ok(n);
            }
            match self.stream.poll()? {
                Async::NotReady => {
                    return Err(io::ErrorKind::WouldBlock.into());
                }
                Async::Ready(Some(buffer)) => {
                    // Got a new chunk: loop around and read from it.
                    self.buffer = io::Cursor::new(buffer);
                    continue;
                }
                Async::Ready(None) => {
                    // Stream exhausted: report EOF.
                    return Ok(0);
                }
            }
        }
    }
}
impl AsyncRead for ImplAsyncRead {}
/// Adapter exposing a chunk stream as a blocking `Read`, implemented by
/// waiting on the async adapter.
struct ImplBlockingRead {
    inner: ImplAsyncRead,
}
impl ImplBlockingRead {
    fn new(stream: Box<dyn Stream<Item = Bytes, Error = io::Error> + Send>) -> Self {
        ImplBlockingRead {
            inner: ImplAsyncRead::new(stream),
        }
    }
}
impl io::Read for ImplBlockingRead {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        // Blocks the current thread until the async read produces a result.
        future::poll_fn(|| self.inner.poll_read(buf)).wait()
    }
}
#[test]
fn test_async_read() {
    use bytes::Bytes;
    use std::io::Read;
    // Two 4-byte chunks read through a 3-byte buffer: each chunk is consumed
    // as a 3-byte read followed by a 1-byte remainder read.
    let chunks = vec![Bytes::from_static(b"1234"), Bytes::from_static(b"5678")];
    let stream = ByteStream::new(stream::iter_ok(chunks));
    let mut async_read = stream.into_async_read();
    let mut buf = [0u8; 3];
    assert_eq!(async_read.read(&mut buf).unwrap(), 3);
    assert_eq!(&buf[..3], b"123");
    assert_eq!(async_read.read(&mut buf).unwrap(), 1);
    assert_eq!(&buf[..1], b"4");
    assert_eq!(async_read.read(&mut buf).unwrap(), 3);
    assert_eq!(&buf[..3], b"567");
    assert_eq!(async_read.read(&mut buf).unwrap(), 1);
    assert_eq!(&buf[..1], b"8");
    // Exhausted stream reads as EOF.
    assert_eq!(async_read.read(&mut buf).unwrap(), 0);
}
#[test]
fn test_blocking_read() {
    use bytes::Bytes;
    use std::io::Read;
    // Two 4-byte chunks read through a 3-byte buffer: each chunk is consumed
    // as a 3-byte read followed by a 1-byte remainder read.
    let chunks = vec![Bytes::from_static(b"1234"), Bytes::from_static(b"5678")];
    let stream = ByteStream::new(stream::iter_ok(chunks));
    let mut blocking_read = stream.into_blocking_read();
    let mut buf = [0u8; 3];
    for expected in [&b"123"[..], b"4", b"567", b"8"].iter() {
        let n = blocking_read.read(&mut buf).unwrap();
        assert_eq!(n, expected.len());
        assert_eq!(&buf[..n], *expected);
    }
    // Exhausted stream reads as EOF.
    assert_eq!(blocking_read.read(&mut buf).unwrap(), 0);
}
| 27.96988 | 90 | 0.56429 |
26a47a10dd5488be981270c7111b36a598b1c52e
| 25,354 |
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
mod error;
mod genesis;
mod layout;
mod secure_backend;
mod validator_config;
use crate::{error::Error, layout::SetLayout, secure_backend::SecureBackend};
use libra_crypto::ed25519::Ed25519PublicKey;
use libra_secure_storage::{Storage, Value};
use libra_types::{transaction::Transaction, waypoint::Waypoint};
use std::{convert::TryInto, fmt::Write, str::FromStr};
use structopt::StructOpt;
/// Well-known secure-storage key names and default transaction parameters
/// shared by the management commands.
pub mod constants {
    // Names of keys/data entries within secure storage.
    pub const ASSOCIATION_KEY: &str = "association";
    pub const COMMON_NS: &str = "common";
    pub const CONSENSUS_KEY: &str = "consensus";
    pub const EPOCH: &str = "epoch";
    pub const FULLNODE_NETWORK_KEY: &str = "fullnode_network";
    pub const LAST_VOTED_ROUND: &str = "last_voted_round";
    pub const LAYOUT: &str = "layout";
    pub const OWNER_KEY: &str = "owner";
    pub const OPERATOR_KEY: &str = "operator";
    pub const PREFERRED_ROUND: &str = "preferred_round";
    pub const VALIDATOR_CONFIG: &str = "validator_config";
    pub const VALIDATOR_NETWORK_KEY: &str = "validator_network";
    pub const WAYPOINT: &str = "waypoint";
    // Defaults applied to transactions built by these tools.
    pub const GAS_UNIT_PRICE: u64 = 0;
    pub const MAX_GAS_AMOUNT: u64 = 1_000_000;
    pub const TXN_EXPIRATION_SECS: u64 = 3600;
}
/// Top-level CLI command set for managing Libra validators; each variant's
/// payload carries the storage backend(s) or sub-command options it needs.
#[derive(Debug, StructOpt)]
#[structopt(about = "Tool used to manage Libra Validators")]
pub enum Command {
    #[structopt(about = "Submits an Ed25519PublicKey for the association")]
    AssociationKey(SecureBackends),
    #[structopt(about = "Retrieves data from a store to produce genesis")]
    Genesis(crate::genesis::Genesis),
    #[structopt(about = "Submits an Ed25519PublicKey for the operator")]
    OperatorKey(SecureBackends),
    #[structopt(about = "Submits an Ed25519PublicKey for the owner")]
    OwnerKey(SecureBackends),
    #[structopt(about = "Submits a Layout doc to a shared storage")]
    SetLayout(SetLayout),
    #[structopt(about = "Constructs and signs a ValidatorConfig")]
    ValidatorConfig(crate::validator_config::ValidatorConfig),
    #[structopt(about = "Verifies and prints the current configuration state")]
    Verify(SingleBackend),
}
/// Payload-free mirror of `Command`, used for display and for reporting
/// which variant a method was (wrongly) invoked on.
#[derive(Debug, PartialEq)]
pub enum CommandName {
    AssociationKey,
    Genesis,
    OperatorKey,
    OwnerKey,
    SetLayout,
    ValidatorConfig,
    Verify,
}
// Maps each Command variant to its payload-free CommandName counterpart.
impl From<&Command> for CommandName {
    fn from(command: &Command) -> Self {
        match command {
            Command::AssociationKey(_) => CommandName::AssociationKey,
            Command::Genesis(_) => CommandName::Genesis,
            Command::OperatorKey(_) => CommandName::OperatorKey,
            Command::OwnerKey(_) => CommandName::OwnerKey,
            Command::SetLayout(_) => CommandName::SetLayout,
            Command::ValidatorConfig(_) => CommandName::ValidatorConfig,
            Command::Verify(_) => CommandName::Verify,
        }
    }
}
impl std::fmt::Display for CommandName {
    /// Renders the kebab-case CLI name of the command.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.write_str(match self {
            CommandName::AssociationKey => "association-key",
            CommandName::Genesis => "genesis",
            CommandName::OperatorKey => "operator-key",
            CommandName::OwnerKey => "owner-key",
            CommandName::SetLayout => "set-layout",
            CommandName::ValidatorConfig => "validator-config",
            CommandName::Verify => "verify",
        })
    }
}
impl Command {
    /// Executes the command and renders the result as a display string.
    /// Note that the underlying result is `unwrap`ped, so failures panic.
    pub fn execute(self) -> String {
        match &self {
            Command::AssociationKey(_) => self.association_key().unwrap().to_string(),
            Command::Genesis(_) => format!("{:?}", self.genesis().unwrap()),
            Command::OperatorKey(_) => self.operator_key().unwrap().to_string(),
            Command::OwnerKey(_) => self.owner_key().unwrap().to_string(),
            Command::SetLayout(_) => self.set_layout().unwrap().to_string(),
            Command::ValidatorConfig(_) => format!("{:?}", self.validator_config().unwrap()),
            Command::Verify(_) => self.verify().unwrap(),
        }
    }
    /// Submits the association public key; errors on any other variant.
    pub fn association_key(self) -> Result<Ed25519PublicKey, Error> {
        if let Command::AssociationKey(secure_backends) = self {
            Self::submit_key(constants::ASSOCIATION_KEY, secure_backends)
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::AssociationKey,
                CommandName::from(&self),
            ))
        }
    }
    /// Delegates to the genesis sub-command; errors on any other variant.
    pub fn genesis(self) -> Result<Transaction, Error> {
        if let Command::Genesis(genesis) = self {
            genesis.execute()
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::Genesis,
                CommandName::from(&self),
            ))
        }
    }
    /// Submits the operator public key; errors on any other variant.
    pub fn operator_key(self) -> Result<Ed25519PublicKey, Error> {
        if let Command::OperatorKey(secure_backends) = self {
            Self::submit_key(constants::OPERATOR_KEY, secure_backends)
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::OperatorKey,
                CommandName::from(&self),
            ))
        }
    }
    /// Submits the owner public key; errors on any other variant.
    pub fn owner_key(self) -> Result<Ed25519PublicKey, Error> {
        if let Command::OwnerKey(secure_backends) = self {
            Self::submit_key(constants::OWNER_KEY, secure_backends)
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::OwnerKey,
                CommandName::from(&self),
            ))
        }
    }
    /// Delegates to the set-layout sub-command; errors on any other variant.
    pub fn set_layout(self) -> Result<crate::layout::Layout, Error> {
        if let Command::SetLayout(set_layout) = self {
            set_layout.execute()
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::SetLayout,
                CommandName::from(&self),
            ))
        }
    }
    /// Delegates to the validator-config sub-command; errors on any other variant.
    pub fn validator_config(self) -> Result<Transaction, Error> {
        if let Command::ValidatorConfig(config) = self {
            config.execute()
        } else {
            Err(Error::UnexpectedCommand(
                CommandName::ValidatorConfig,
                CommandName::from(&self),
            ))
        }
    }
    /// Builds a human-readable report of the keys and data entries held by
    /// the configured secure storage backend.
    // NOTE(review): unlike the accessors above, a wrong variant panics here
    // instead of returning Error::UnexpectedCommand — consider aligning.
    pub fn verify(self) -> Result<String, Error> {
        if let Command::Verify(backend) = self {
            let storage: Box<dyn Storage> = backend.backend.try_into()?;
            if !storage.available() {
                return Err(Error::LocalStorageUnavailable);
            }
            let mut buffer = String::new();
            writeln!(buffer, "Data stored in SecureStorage:").unwrap();
            writeln!(buffer, "=================================================").unwrap();
            writeln!(buffer, "Keys").unwrap();
            writeln!(buffer, "=================================================").unwrap();
            Self::write_key(storage.as_ref(), &mut buffer, constants::CONSENSUS_KEY);
            Self::write_key(
                storage.as_ref(),
                &mut buffer,
                constants::FULLNODE_NETWORK_KEY,
            );
            Self::write_key(storage.as_ref(), &mut buffer, constants::OWNER_KEY);
            Self::write_key(storage.as_ref(), &mut buffer, constants::OPERATOR_KEY);
            Self::write_key(
                storage.as_ref(),
                &mut buffer,
                constants::VALIDATOR_NETWORK_KEY,
            );
            writeln!(buffer, "=================================================").unwrap();
            writeln!(buffer, "Data").unwrap();
            writeln!(buffer, "=================================================").unwrap();
            Self::write_u64(storage.as_ref(), &mut buffer, constants::EPOCH);
            Self::write_u64(storage.as_ref(), &mut buffer, constants::LAST_VOTED_ROUND);
            Self::write_u64(storage.as_ref(), &mut buffer, constants::PREFERRED_ROUND);
            Self::write_waypoint(storage.as_ref(), &mut buffer, constants::WAYPOINT);
            writeln!(buffer, "=================================================").unwrap();
            Ok(buffer)
        } else {
            panic!("Expected Command::Verify");
        }
    }
    /// Appends "key - public key" (or the lookup error) to `buffer`.
    fn write_key(storage: &dyn Storage, buffer: &mut String, key: &str) {
        let value = storage
            .get_public_key(key)
            .map(|c| c.public_key.to_string())
            .unwrap_or_else(|e| format!("{:?}", e));
        writeln!(buffer, "{} - {}", key, value).unwrap();
    }
    /// Appends "key - u64 value" (or the lookup error) to `buffer`.
    fn write_u64(storage: &dyn Storage, buffer: &mut String, key: &str) {
        let value = storage
            .get(key)
            .and_then(|c| c.value.u64())
            .map(|c| c.to_string())
            .unwrap_or_else(|e| format!("{:?}", e));
        writeln!(buffer, "{} - {}", key, value).unwrap();
    }
    /// Appends the stored waypoint to `buffer`, rendering an empty value as
    /// "empty", an unparsable one as "Invalid waypoint", or the lookup error.
    fn write_waypoint(storage: &dyn Storage, buffer: &mut String, key: &str) {
        let value = storage
            .get(key)
            .and_then(|c| c.value.string())
            .map(|value| {
                if value.is_empty() {
                    "empty".into()
                } else {
                    Waypoint::from_str(&value)
                        .map(|c| c.to_string())
                        .unwrap_or_else(|_| "Invalid waypoint".into())
                }
            })
            .unwrap_or_else(|e| format!("{:?}", e));
        writeln!(buffer, "{} - {}", key, value).unwrap();
    }
    /// Reads `key_name`'s public key from the local backend and, when a
    /// remote backend is configured, mirrors it there. Returns the key.
    fn submit_key(
        key_name: &str,
        secure_backends: SecureBackends,
    ) -> Result<Ed25519PublicKey, Error> {
        let local: Box<dyn Storage> = secure_backends.local.try_into()?;
        if !local.available() {
            return Err(Error::LocalStorageUnavailable);
        }
        let key = local
            .get_public_key(key_name)
            .map_err(|e| Error::LocalStorageReadError(e.to_string()))?
            .public_key;
        // The remote upload only happens when a remote backend was supplied.
        if let Some(remote) = secure_backends.remote {
            let key = Value::Ed25519PublicKey(key.clone());
            let mut remote: Box<dyn Storage> = remote.try_into()?;
            if !remote.available() {
                return Err(Error::RemoteStorageUnavailable);
            }
            remote
                .create_with_default_policy(key_name, key)
                .map_err(|e| Error::RemoteStorageWriteError(e.to_string()))?;
        }
        Ok(key)
    }
}
/// CLI pair of a required local backend (source) and an optional remote
/// backend (destination) for the key-submission commands.
#[derive(Debug, StructOpt)]
pub struct SecureBackends {
    /// The local secure backend, this is the source of data. Secure
    /// backends are represented as a semi-colon delimited key value
    /// pair: "k0=v0;k1=v1;...". The current supported formats are:
    ///     Vault: "backend=vault;server=URL;token=TOKEN"
    ///         vault has an optional namespace: "namespace=NAMESPACE"
    ///     InMemory: "backend=memory"
    ///     OnDisk: "backend=disk;path=LOCAL_PATH"
    #[structopt(long, verbatim_doc_comment)]
    local: SecureBackend,
    /// The remote secure backend, this is where data is stored. See
    /// the comments for the local backend for usage.
    #[structopt(long)]
    remote: Option<SecureBackend>,
}
/// CLI wrapper for commands that operate on exactly one secure backend.
#[derive(Debug, StructOpt)]
pub struct SingleBackend {
    /// The secure backend. Secure backends are represented as a semi-colon
    /// delimited key value pair: "k0=v0;k1=v1;...".
    /// The current supported formats are:
    ///     Vault: "backend=vault;server=URL;token=TOKEN"
    ///         vault has an optional namespace: "namespace=NAMESPACE"
    ///     InMemory: "backend=memory"
    ///     OnDisk: "backend=disk;path=LOCAL_PATH"
    #[structopt(long, verbatim_doc_comment)]
    backend: SecureBackend,
}
/// These tests depend on running Vault, which can be done by using the provided docker run script
/// in `docker/vault/run.sh`.
/// Note: Some of these tests may fail if you run them too quickly one after another due to data
/// synchronization issues within Vault. It would seem the only way to fix it would be to restart
/// the Vault service between runs.
#[cfg(test)]
pub mod tests {
use super::*;
use libra_network_address::NetworkAddress;
use libra_secure_storage::{Policy, Value, VaultStorage};
use libra_types::account_address::AccountAddress;
use std::{fs::File, io::Write};
const VAULT_HOST: &str = "http://localhost:8200";
const VAULT_ROOT_TOKEN: &str = "root_token";
#[test]
#[ignore]
fn test_end_to_end() {
// Each identity works in their own namespace
// Alice, Bob, and Carol are operators, implicitly mapped 1:1 with owners.
// Dave is the association.
// Each user will upload their contents to *_ns + "shared"
// Common is used by the technical staff for coordination.
let alice_ns = "alice";
let bob_ns = "bob";
let carol_ns = "carol";
let dave_ns = "dave";
let shared = "_shared";
// Step 1) Define and upload the layout specifying which identities have which roles. This
// is uplaoded to the common namespace.
let mut common = default_storage(constants::COMMON_NS.into());
common.reset_and_clear().unwrap();
// Note: owners are irrelevant currently
let layout_text = "\
operators = [\"alice_shared\", \"bob_shared\", \"carol_shared\"]\n\
owners = []\n\
association = [\"dave_shared\"]\n\
";
let temppath = libra_temppath::TempPath::new();
temppath.create_as_file().unwrap();
let mut file = File::create(temppath.path()).unwrap();
file.write_all(&layout_text.to_string().into_bytes())
.unwrap();
file.sync_all().unwrap();
set_layout(temppath.path().to_str().unwrap(), constants::COMMON_NS).unwrap();
// Step 2) Upload the association key:
let mut association = default_storage(dave_ns.into());
initialize_storage(association.as_mut());
let mut association_shared = default_storage(dave_ns.to_string() + shared);
association_shared.reset_and_clear().unwrap();
association_key(dave_ns, &(dave_ns.to_string() + shared)).unwrap();
// Step 3) Upload each operators key and then a signed transaction:
for ns in [alice_ns, bob_ns, carol_ns].iter() {
let mut local = default_storage((*ns).to_string());
initialize_storage(local.as_mut());
let mut remote = default_storage((*ns).to_string() + shared);
remote.reset_and_clear().unwrap();
operator_key(ns, &((*ns).to_string() + shared)).unwrap();
validator_config(
AccountAddress::random(),
"/ip4/0.0.0.0/tcp/6180".parse().unwrap(),
"/ip4/0.0.0.0/tcp/6180".parse().unwrap(),
ns,
&((*ns).to_string() + shared),
)
.unwrap();
}
// Step 4) Produce genesis
genesis().unwrap();
}
#[test]
#[ignore]
fn test_set_layout() {
let namespace = "set_layout";
let mut storage = default_storage(namespace.into());
storage.reset_and_clear().unwrap();
let temppath = libra_temppath::TempPath::new();
set_layout(temppath.path().to_str().unwrap(), namespace).unwrap_err();
temppath.create_as_file().unwrap();
let mut file = File::create(temppath.path()).unwrap();
let layout_text = "\
operators = [\"alice\", \"bob\"]\n\
owners = [\"carol\"]\n\
association = [\"dave\"]\n\
";
file.write_all(&layout_text.to_string().into_bytes())
.unwrap();
file.sync_all().unwrap();
set_layout(temppath.path().to_str().unwrap(), namespace).unwrap();
let stored_layout = storage
.get(constants::LAYOUT)
.unwrap()
.value
.string()
.unwrap();
assert_eq!(layout_text, stored_layout);
}
#[test]
#[ignore]
fn test_validator_config() {
let local_ns = "local_validator_config";
let remote_ns = "remote_validator_config";
let mut local = default_storage(local_ns.into());
initialize_storage(local.as_mut());
let mut remote = default_storage(remote_ns.into());
remote.reset_and_clear().unwrap();
let local_txn = validator_config(
AccountAddress::random(),
"/ip4/0.0.0.0/tcp/6180".parse().unwrap(),
"/ip4/0.0.0.0/tcp/6180".parse().unwrap(),
local_ns,
remote_ns,
)
.unwrap();
let remote_txn = remote.get(constants::VALIDATOR_CONFIG).unwrap().value;
let remote_txn = remote_txn.transaction().unwrap();
assert_eq!(local_txn, remote_txn);
}
#[test]
#[ignore]
fn test_verify() {
let namespace = "verify";
let mut storage = default_storage(namespace.into());
storage.reset_and_clear().unwrap();
let output = verify(namespace).unwrap().split("KeyNotSet").count();
assert_eq!(output, 10); // 9 KeyNotSet results in 9 splits
initialize_storage(storage.as_mut());
let output = verify(namespace).unwrap().split("KeyNotSet").count();
assert_eq!(output, 1); // 0 KeyNotSet results in 1 split
}
#[test]
#[ignore]
fn test_owner_key() {
test_key(constants::OWNER_KEY, owner_key);
}
#[test]
#[ignore]
fn test_operator_key() {
test_key(constants::OPERATOR_KEY, operator_key);
}
    /// Shared driver for the owner/operator key tests: `op` must fail before
    /// the local backend holds the key, then return it and mirror it remotely.
    fn test_key(key_name: &str, op: fn(&str, &str) -> Result<Ed25519PublicKey, Error>) {
        let local_ns = format!("local_{}_key", key_name);
        let remote_ns = format!("remote_{}_key", key_name);
        let mut local = default_storage(local_ns.clone());
        local.reset_and_clear().unwrap();
        // Key does not exist locally yet, so the operation must error.
        op(&local_ns, &remote_ns).unwrap_err();
        initialize_storage(local.as_mut());
        let local_key = local.get_public_key(key_name).unwrap().public_key;
        let mut remote = default_storage(remote_ns.clone());
        remote.reset_and_clear().unwrap();
        let output_key = op(&local_ns, &remote_ns).unwrap();
        let remote_key = remote
            .get(key_name)
            .unwrap()
            .value
            .ed25519_public_key()
            .unwrap();
        // Returned key and remotely stored key must both equal the local key.
        assert_eq!(local_key, output_key);
        assert_eq!(local_key, remote_key);
    }
fn default_storage(namespace: String) -> Box<dyn Storage> {
Box::new(VaultStorage::new(
VAULT_HOST.into(),
VAULT_ROOT_TOKEN.into(),
Some(namespace),
))
}
fn initialize_storage(storage: &mut dyn Storage) {
let policy = Policy::public();
storage.reset_and_clear().unwrap();
storage
.create_key(constants::ASSOCIATION_KEY, &policy)
.unwrap();
storage
.create_key(constants::CONSENSUS_KEY, &policy)
.unwrap();
storage
.create_key(constants::FULLNODE_NETWORK_KEY, &policy)
.unwrap();
storage.create_key(constants::OWNER_KEY, &policy).unwrap();
storage
.create_key(constants::OPERATOR_KEY, &policy)
.unwrap();
storage
.create_key(constants::VALIDATOR_NETWORK_KEY, &policy)
.unwrap();
storage
.create(constants::EPOCH, Value::U64(0), &policy)
.unwrap();
storage
.create(constants::LAST_VOTED_ROUND, Value::U64(0), &policy)
.unwrap();
storage
.create(constants::PREFERRED_ROUND, Value::U64(0), &policy)
.unwrap();
storage
.create(constants::WAYPOINT, Value::String("".into()), &policy)
.unwrap();
}
    /// Invoke the `association-key` subcommand against Vault, reading from
    /// `local_ns` and publishing to `remote_ns`. Whitespace in the args string
    /// is irrelevant: it is tokenized with `split_whitespace`.
    fn association_key(local_ns: &str, remote_ns: &str) -> Result<Ed25519PublicKey, Error> {
        let args = format!(
            "
                management
                association-key
                --local backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={local_ns}
                --remote backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={remote_ns}\
            ",
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            local_ns = local_ns,
            remote_ns = remote_ns,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.association_key()
    }
    /// Invoke the `genesis` subcommand against the (namespace-less) Vault
    /// backend and return the produced genesis transaction.
    fn genesis() -> Result<Transaction, Error> {
        let args = format!(
            "
                management
                genesis
                --backend backend={backend};\
                    server={server};\
                    token={token}
            ",
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.genesis()
    }
    /// Invoke the `operator-key` subcommand: read the operator key from
    /// `local_ns` and publish it to `remote_ns`.
    fn operator_key(local_ns: &str, remote_ns: &str) -> Result<Ed25519PublicKey, Error> {
        let args = format!(
            "
                management
                operator-key
                --local backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={local_ns}
                --remote backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={remote_ns}\
            ",
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            local_ns = local_ns,
            remote_ns = remote_ns,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.operator_key()
    }
    /// Invoke the `owner-key` subcommand: read the owner key from `local_ns`
    /// and publish it to `remote_ns`.
    fn owner_key(local_ns: &str, remote_ns: &str) -> Result<Ed25519PublicKey, Error> {
        let args = format!(
            "
                management
                owner-key
                --local backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={local_ns}
                --remote backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={remote_ns}\
            ",
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            local_ns = local_ns,
            remote_ns = remote_ns,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.owner_key()
    }
    /// Invoke the `set-layout` subcommand, uploading the layout file at `path`
    /// into the given Vault namespace.
    // NOTE(review): this helper (and `verify` below) uses "validator_config"
    // as the leading binary token while the others use "management" — confirm
    // both program names are accepted by the argument parser.
    fn set_layout(path: &str, namespace: &str) -> Result<crate::layout::Layout, Error> {
        let args = format!(
            "
                validator_config
                set-layout
                --path {path}
                --backend backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={ns}
            ",
            path = path,
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            ns = namespace,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.set_layout()
    }
    /// Invoke the `validator-config` subcommand: sign and publish a validator
    /// config transaction for `owner_address` with the given network addresses,
    /// reading keys from `local_ns` and writing to `remote_ns`.
    fn validator_config(
        owner_address: AccountAddress,
        validator_address: NetworkAddress,
        fullnode_address: NetworkAddress,
        local_ns: &str,
        remote_ns: &str,
    ) -> Result<Transaction, Error> {
        let args = format!(
            "
                management
                validator-config
                --owner-address {owner_address}
                --validator-address {validator_address}
                --fullnode-address {fullnode_address}
                --local backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={local_ns}
                --remote backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={remote_ns}\
            ",
            owner_address = owner_address,
            validator_address = validator_address,
            fullnode_address = fullnode_address,
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            local_ns = local_ns,
            remote_ns = remote_ns,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.validator_config()
    }
    /// Invoke the `verify` subcommand against the given namespace and return
    /// its human-readable report.
    fn verify(namespace: &str) -> Result<String, Error> {
        let args = format!(
            "
                validator_config
                verify
                --backend backend={backend};\
                    server={server};\
                    token={token};\
                    namespace={ns}
            ",
            backend = crate::secure_backend::VAULT,
            server = VAULT_HOST,
            token = VAULT_ROOT_TOKEN,
            ns = namespace,
        );
        let command = Command::from_iter(args.split_whitespace());
        command.verify()
    }
}
| 34.636612 | 99 | 0.553009 |
f426bc2dc94628e4d9648688eccc9bbdc1aa3915
| 3,235 |
//! Measure voltages with an ADS1015 analog/digital
//! converter and print them to an SSD1306 OLED display.
//! Further explanations about this device and how this example works at
//! https://blog.eldruin.com/ads1x1x-analog-to-digital-converter-driver-in-rust/
//! An image is [here](https://github.com/eldruin/driver-examples/blob/master/media/ads1015-voltage-divider.jpg).
//!
//! The setup() functions make the application code common. They are in src/i2c_led_delay.rs.
//! The specific function used will depend on the HAL setting (see README.md).
//! See the section of setup() corresponding to the HAL setting for details on pin connections.
//!
//! On "BluePill" (stm32f1xx_hal) using I2C1.
//! ```
//! BP <-> ADS1015 <-> Display
//! GND <-> GND <-> GND
//! +5V <-> +5V <-> +5V
//! PB9 <-> SDA <-> SDA
//! PB8 <-> SCL <-> SCL
//! ```
#![deny(unsafe_code)]
#![no_std]
#![no_main]
use ads1x1x::{channel as AdcChannel, Ads1x1x, FullScaleRange, SlaveAddr};
use cortex_m_rt::entry;
use embedded_hal::adc::OneShot;
use nb::block;
use core::fmt::Write;
use rtt_target::{rprintln, rtt_init_print};
use embedded_graphics::{
mono_font::{ascii::FONT_6X10, MonoTextStyleBuilder},
pixelcolor::BinaryColor,
prelude::*,
text::Text,
};
use ssd1306::{prelude::*, I2CDisplayInterface, Ssd1306};
use rust_integration_testing_of_examples::i2c_led_delay::{setup, LED};
/// Read all four ADS1015 channels in a loop and render them on the SSD1306.
///
/// Bug fix: the fixed-capacity `heapless::String<32>` line buffers were never
/// cleared between loop iterations, so `write!` kept appending; after two
/// frames the 32-byte capacity was exceeded, `write!` returned `Err`, and the
/// `unwrap()` panicked. Each buffer is now cleared before being rewritten.
#[entry]
fn main() -> ! {
    rtt_init_print!();
    rprintln!("ADS1015 example");
    let (i2c, mut led, mut delay) = setup();
    // Share the single I2C bus between the display and the ADC.
    let manager = shared_bus::BusManager::<cortex_m::interrupt::Mutex<_>, _>::new(i2c);
    let interface = I2CDisplayInterface::new(manager.acquire());
    let mut display = Ssd1306::new(interface, DisplaySize128x64, DisplayRotation::Rotate0)
        .into_buffered_graphics_mode();
    display.init().unwrap();
    let text_style = MonoTextStyleBuilder::new()
        .font(&FONT_6X10)
        .text_color(BinaryColor::On)
        .build();
    let mut adc = Ads1x1x::new_ads1015(manager.acquire(), SlaveAddr::default());
    // need to be able to measure [0-5V]
    adc.set_full_scale_range(FullScaleRange::Within6_144V)
        .unwrap();
    // One reusable text buffer per channel; capacity 32 fits one formatted line.
    let mut lines: [heapless::String<32>; 4] = [
        heapless::String::new(),
        heapless::String::new(),
        heapless::String::new(),
        heapless::String::new(),
    ];
    loop {
        // Blink LED 0 to check that everything is actually running.
        // If the LED 0 is off, something went wrong.
        led.blink(50_u16, &mut delay);
        // Read voltage in all channels. 8091 is an out-of-range sentinel used
        // when a read fails (presumably chosen as such — TODO confirm).
        let values = [
            block!(adc.read(&mut AdcChannel::SingleA0)).unwrap_or(8091),
            block!(adc.read(&mut AdcChannel::SingleA1)).unwrap_or(8091),
            block!(adc.read(&mut AdcChannel::SingleA2)).unwrap_or(8091),
            block!(adc.read(&mut AdcChannel::SingleA3)).unwrap_or(8091),
        ];
        display.clear();
        for i in 0..values.len() {
            // Reset the buffer so write! starts from an empty string; without
            // this the buffer overflows its capacity and unwrap() panics.
            lines[i].clear();
            write!(lines[i], "Channel {}: {}", i, values[i]).unwrap();
            Text::new(&lines[i], Point::new(0, i as i32 * 16), text_style)
                .draw(&mut display)
                .unwrap();
        }
        display.flush().unwrap();
    }
}
| 33.697917 | 113 | 0.628748 |
7515c0db6eac85ef92a4f3ec662c3ae9f4c29b23
| 2,899 |
// Auto-generated (svd2rust-style) register access code for PR3INV.
// `R` is the read proxy: a newtype over the generic reader that forwards all
// field accessors via Deref.
#[doc = "Register `PR3INV` reader"]
pub struct R(crate::R<PR3INV_SPEC>);
impl core::ops::Deref for R {
    type Target = crate::R<PR3INV_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::convert::From<crate::R<PR3INV_SPEC>> for R {
    fn from(reader: crate::R<PR3INV_SPEC>) -> Self {
        R(reader)
    }
}
// `W` is the write proxy: a newtype over the generic writer. DerefMut is
// needed so field writers can mutate the underlying bits.
#[doc = "Register `PR3INV` writer"]
pub struct W(crate::W<PR3INV_SPEC>);
impl core::ops::Deref for W {
    type Target = crate::W<PR3INV_SPEC>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl core::ops::DerefMut for W {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
impl core::convert::From<crate::W<PR3INV_SPEC>> for W {
    fn from(writer: crate::W<PR3INV_SPEC>) -> Self {
        W(writer)
    }
}
// Reader/writer proxies for the single `PR3` field, which occupies the full
// 32-bit width of the register (bits 0:31).
#[doc = "Field `PR3` reader - "]
pub struct PR3_R(crate::FieldReader<u32, u32>);
impl PR3_R {
    pub(crate) fn new(bits: u32) -> Self {
        PR3_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for PR3_R {
    type Target = crate::FieldReader<u32, u32>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `PR3` writer - "]
pub struct PR3_W<'a> {
    w: &'a mut W,
}
impl<'a> PR3_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u32) -> &'a mut W {
        // The mask is the full word here (field spans all 32 bits), so the
        // clear-and-set below degenerates to a plain assignment.
        self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);
        self.w
    }
}
// Field accessors: `pr3()` on R yields the field reader; on W it yields the
// field writer. `bits` allows whole-register raw writes.
impl R {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn pr3(&self) -> PR3_R {
        PR3_R::new((self.bits & 0xffff_ffff) as u32)
    }
}
impl W {
    #[doc = "Bits 0:31"]
    #[inline(always)]
    pub fn pr3(&mut self) -> PR3_W {
        PR3_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.0.bits(bits);
        self
    }
}
// Marker type tying the generic read/write machinery to this register:
// 32-bit wide, readable, writable, resets to 0.
#[doc = "PR3INV register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [pr3inv](index.html) module"]
pub struct PR3INV_SPEC;
impl crate::RegisterSpec for PR3INV_SPEC {
    type Ux = u32;
}
#[doc = "`read()` method returns [pr3inv::R](R) reader structure"]
impl crate::Readable for PR3INV_SPEC {
    type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [pr3inv::W](W) writer structure"]
impl crate::Writable for PR3INV_SPEC {
    type Writer = W;
}
#[doc = "`reset()` method sets PR3INV to value 0"]
impl crate::Resettable for PR3INV_SPEC {
    #[inline(always)]
    fn reset_value() -> Self::Ux {
        0
    }
}
| 28.99 | 402 | 0.592618 |
72e67e3c4cd5967fe7bd907d273394e776a7b195
| 16,058 |
//! Patterns telling us certain facts about current syntax element, they are used in completion context
use hir::Semantics;
use ide_db::RootDatabase;
use syntax::{
algo::non_trivia_sibling,
ast::{self, ArgListOwner, LoopBodyOwner},
match_ast, AstNode, Direction, SyntaxElement,
SyntaxKind::*,
SyntaxNode, SyntaxToken, TextRange, TextSize, T,
};
#[cfg(test)]
use crate::test_utils::{check_pattern_is_applicable, check_pattern_is_not_applicable};
/// Immediate previous node to what we are completing.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum ImmediatePrevSibling {
    /// Previous sibling is an `if` expression statement without a semicolon.
    IfExpr,
    /// Previous sibling is a `trait` header whose assoc-item list is missing.
    TraitDefName,
    /// Previous sibling is an `impl` header whose assoc-item list is missing.
    ImplDefType,
}
/// Direct parent "thing" of what we are currently completing.
///
/// Variants annotated "Fake file ast node" / "Original file ast node" carry
/// nodes resolved against the speculative vs. the original syntax tree.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ImmediateLocation {
    Use,
    Impl,
    Trait,
    RecordField,
    RefExpr,
    IdentPat,
    BlockExpr,
    ItemList,
    // Fake file ast node
    Attribute(ast::Attr),
    // Fake file ast node
    ModDeclaration(ast::Module),
    // Original file ast node
    MethodCall {
        // Receiver expression, if it could be mapped back to the original file.
        receiver: Option<ast::Expr>,
        // True when the call's argument list already has a `(`.
        has_parens: bool,
    },
    // Original file ast node
    FieldAccess {
        receiver: Option<ast::Expr>,
        // True for e.g. `1.` where the float literal swallowed the dot.
        receiver_is_ambiguous_float_literal: bool,
    },
    // Original file ast node
    // Only set from a type arg
    GenericArgList(ast::GenericArgList),
    // Original file ast node
    /// The record expr of the field name we are completing
    RecordExpr(ast::RecordExpr),
    // Original file ast node
    /// The record pat of the field name we are completing
    RecordPat(ast::RecordPat),
}
/// Classify the non-trivia sibling immediately preceding the node being
/// completed, looking through half-parsed macro-call wrappers. Returns `None`
/// when the previous sibling is none of the interesting shapes.
pub(crate) fn determine_prev_sibling(name_like: &ast::NameLike) -> Option<ImmediatePrevSibling> {
    let node = match name_like {
        ast::NameLike::NameRef(name_ref) => maximize_name_ref(name_ref),
        ast::NameLike::Name(n) => n.syntax().clone(),
        ast::NameLike::Lifetime(lt) => lt.syntax().clone(),
    };
    let node = match node.parent().and_then(ast::MacroCall::cast) {
        // When a path is being typed after the name of a trait/type of an impl it is being
        // parsed as a macro, so when the trait/impl has a block following it an we are between the
        // name and block the macro will attach the block to itself so maximizing fails to take
        // that into account
        // FIXME path expr and statement have a similar problem with attrs
        Some(call)
            if call.excl_token().is_none()
                && call.token_tree().map_or(false, |t| t.l_curly_token().is_some())
                && call.semicolon_token().is_none() =>
        {
            call.syntax().clone()
        }
        _ => node,
    };
    // Skip whitespace/comments when stepping backwards.
    let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
    let res = match_ast! {
        match prev_sibling {
            ast::ExprStmt(it) => {
                // Only a semicolon-less `if` counts; `if … {};` ends the statement.
                let node = it.expr().filter(|_| it.semicolon_token().is_none())?.syntax().clone();
                match_ast! {
                    match node {
                        ast::IfExpr(_it) => ImmediatePrevSibling::IfExpr,
                        _ => return None,
                    }
                }
            },
            ast::Trait(it) => if it.assoc_item_list().is_none() {
                    ImmediatePrevSibling::TraitDefName
                } else {
                    return None
            },
            ast::Impl(it) => if it.assoc_item_list().is_none()
                && (it.for_token().is_none() || it.self_ty().is_some()) {
                    ImmediatePrevSibling::ImplDefType
                } else {
                    return None
            },
            _ => return None,
        }
    };
    Some(res)
}
/// Classify the direct parent of the thing being completed.
///
/// Record-field completions are resolved against `original_file` early and
/// short-circuit; otherwise the (maximized) node's parent is matched against
/// the known interesting locations.
pub(crate) fn determine_location(
    sema: &Semantics<RootDatabase>,
    original_file: &SyntaxNode,
    offset: TextSize,
    name_like: &ast::NameLike,
) -> Option<ImmediateLocation> {
    let node = match name_like {
        ast::NameLike::NameRef(name_ref) => {
            if ast::RecordExprField::for_field_name(name_ref).is_some() {
                return sema
                    .find_node_at_offset_with_macros(original_file, offset)
                    .map(ImmediateLocation::RecordExpr);
            }
            if ast::RecordPatField::for_field_name_ref(name_ref).is_some() {
                return sema
                    .find_node_at_offset_with_macros(original_file, offset)
                    .map(ImmediateLocation::RecordPat);
            }
            maximize_name_ref(name_ref)
        }
        ast::NameLike::Name(name) => {
            if ast::RecordPatField::for_field_name(name).is_some() {
                return sema
                    .find_node_at_offset_with_macros(original_file, offset)
                    .map(ImmediateLocation::RecordPat);
            }
            name.syntax().clone()
        }
        ast::NameLike::Lifetime(lt) => lt.syntax().clone(),
    };
    let parent = match node.parent() {
        Some(parent) => match ast::MacroCall::cast(parent.clone()) {
            // When a path is being typed in an (Assoc)ItemList the parser will always emit a macro_call.
            // This is usually fine as the node expansion code above already accounts for that with
            // the ancestors call, but there is one exception to this which is that when an attribute
            // precedes it the code above will not walk the Path to the parent MacroCall as their ranges differ.
            // FIXME path expr and statement have a similar problem
            Some(call)
                if call.excl_token().is_none()
                    && call.token_tree().is_none()
                    && call.semicolon_token().is_none() =>
            {
                call.syntax().parent()?
            }
            _ => parent,
        },
        // SourceFile
        None => {
            return match node.kind() {
                MACRO_ITEMS | SOURCE_FILE => Some(ImmediateLocation::ItemList),
                _ => None,
            }
        }
    };
    let res = match_ast! {
        match parent {
            ast::IdentPat(_it) => ImmediateLocation::IdentPat,
            ast::Use(_it) => ImmediateLocation::Use,
            ast::BlockExpr(_it) => ImmediateLocation::BlockExpr,
            ast::SourceFile(_it) => ImmediateLocation::ItemList,
            ast::ItemList(_it) => ImmediateLocation::ItemList,
            ast::RefExpr(_it) => ImmediateLocation::RefExpr,
            ast::RecordField(_it) => ImmediateLocation::RecordField,
            // An assoc item list's own parent distinguishes impl from trait.
            ast::AssocItemList(it) => match it.syntax().parent().map(|it| it.kind()) {
                Some(IMPL) => ImmediateLocation::Impl,
                Some(TRAIT) => ImmediateLocation::Trait,
                _ => return None,
            },
            ast::GenericArgList(_it) => sema
                .find_node_at_offset_with_macros(original_file, offset)
                .map(ImmediateLocation::GenericArgList)?,
            ast::Module(it) => {
                // `mod foo;`-style declaration (no body) vs. inline module.
                if it.item_list().is_none() {
                    ImmediateLocation::ModDeclaration(it)
                } else {
                    return None;
                }
            },
            ast::Attr(it) => ImmediateLocation::Attribute(it),
            ast::FieldExpr(it) => {
                // Map the receiver back into the original file, if possible.
                let receiver = it
                    .expr()
                    .map(|e| e.syntax().text_range())
                    .and_then(|r| find_node_with_range(original_file, r));
                // `1.` parses as a float literal ending in '.', swallowing the dot.
                let receiver_is_ambiguous_float_literal = if let Some(ast::Expr::Literal(l)) = &receiver {
                    match l.kind() {
                        ast::LiteralKind::FloatNumber { .. } => l.token().text().ends_with('.'),
                        _ => false,
                    }
                } else {
                    false
                };
                ImmediateLocation::FieldAccess {
                    receiver,
                    receiver_is_ambiguous_float_literal,
                }
            },
            ast::MethodCallExpr(it) => ImmediateLocation::MethodCall {
                receiver: it
                    .receiver()
                    .map(|e| e.syntax().text_range())
                    .and_then(|r| find_node_with_range(original_file, r)),
                has_parens: it.arg_list().map_or(false, |it| it.l_paren_token().is_some())
            },
            _ => return None,
        }
    };
    Some(res)
}
/// Widen a name ref to the largest enclosing node covering the same text:
/// when the ref is the last segment of a path, walk up through all ancestors
/// that share the path's exact range and return the outermost one.
fn maximize_name_ref(name_ref: &ast::NameRef) -> SyntaxNode {
    // Maximize a nameref to its enclosing path if its the last segment of said path
    if let Some(segment) = name_ref.syntax().parent().and_then(ast::PathSegment::cast) {
        let p = segment.parent_path();
        if p.parent_path().is_none() {
            if let Some(it) = p
                .syntax()
                .ancestors()
                .take_while(|it| it.text_range() == p.syntax().text_range())
                .last()
            {
                return it;
            }
        }
    }
    // Not a path tail: the ref itself is already maximal.
    name_ref.syntax().clone()
}
/// Find the innermost node of type `N` whose range covers `range`.
fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<N> {
    let element = syntax.covering_element(range);
    element.ancestors().find_map(N::cast)
}
/// True when `element` sits anywhere inside a trait implementation block.
pub(crate) fn inside_impl_trait_block(element: SyntaxElement) -> bool {
    // Here we search `impl` keyword up through the all ancestors, unlike in `has_impl_parent`,
    // where we only check the first parent with different text range.
    match element.ancestors().find(|it| it.kind() == IMPL) {
        Some(node) => {
            let imp = ast::Impl::cast(node).unwrap();
            // Only `impl Trait for Type` blocks count, not inherent impls.
            imp.trait_().is_some()
        }
        None => false,
    }
}
/// Trait impls match; inherent impls do not.
#[test]
fn test_inside_impl_trait_block() {
    check_pattern_is_applicable(r"impl Foo for Bar { f$0 }", inside_impl_trait_block);
    check_pattern_is_applicable(r"impl Foo for Bar { fn f$0 }", inside_impl_trait_block);
    check_pattern_is_not_applicable(r"impl A { f$0 }", inside_impl_trait_block);
    check_pattern_is_not_applicable(r"impl A { fn f$0 }", inside_impl_trait_block);
}
pub(crate) fn previous_token(element: SyntaxElement) -> Option<SyntaxToken> {
element.into_token().and_then(previous_non_trivia_token)
}
/// Check if the token previous to the previous one is `for`.
/// For example, `for _ i$0` => true.
pub(crate) fn for_is_prev2(element: SyntaxElement) -> bool {
    element
        .into_token()
        .and_then(previous_non_trivia_token)
        .and_then(previous_non_trivia_token)
        .filter(|it| it.kind() == T![for])
        .is_some()
}
/// `for i i$0` — two tokens back from the cursor is `for`.
#[test]
fn test_for_is_prev2() {
    check_pattern_is_applicable(r"for i i$0", for_is_prev2);
}
/// True when `node` lies within a `for`/`while`/`loop` body, without crossing
/// a function or closure boundary (a loop outside the enclosing fn/closure
/// does not count).
pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
    node.ancestors()
        // Stop at fn/closure boundaries so outer loops are not considered.
        .take_while(|it| it.kind() != FN && it.kind() != CLOSURE_EXPR)
        .find_map(|it| {
            let loop_body = match_ast! {
                match it {
                    ast::ForExpr(it) => it.loop_body(),
                    ast::WhileExpr(it) => it.loop_body(),
                    ast::LoopExpr(it) => it.loop_body(),
                    _ => None,
                }
            };
            // The node must be inside the body itself, not e.g. the condition.
            loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
        })
        .is_some()
}
/// Walk backwards from `token`, skipping trivia (whitespace/comments), and
/// return the first real token, if any.
fn previous_non_trivia_token(token: SyntaxToken) -> Option<SyntaxToken> {
    let mut current = token.prev_token();
    while let Some(tok) = current {
        if !tok.kind().is_trivia() {
            return Some(tok);
        }
        current = tok.prev_token();
    }
    None
}
#[cfg(test)]
mod tests {
use syntax::algo::find_node_at_offset;
use crate::test_utils::position;
use super::*;
    /// Parse `code` (with a `$0` cursor), locate the name-like node at the
    /// cursor, and assert `determine_location` yields `loc`.
    fn check_location(code: &str, loc: impl Into<Option<ImmediateLocation>>) {
        let (db, pos) = position(code);
        let sema = Semantics::new(&db);
        let original_file = sema.parse(pos.file_id);
        let name_like = find_node_at_offset(original_file.syntax(), pos.offset).unwrap();
        assert_eq!(
            determine_location(&sema, original_file.syntax(), pos.offset, &name_like),
            loc.into()
        );
    }
    /// Assert `determine_prev_sibling` yields `sibling` for the name-like node
    /// at the `$0` cursor in `code`.
    fn check_prev_sibling(code: &str, sibling: impl Into<Option<ImmediatePrevSibling>>) {
        check_pattern_is_applicable(code, |e| {
            let name = &e.parent().and_then(ast::NameLike::cast).expect("Expected a namelike");
            assert_eq!(determine_prev_sibling(name), sibling.into());
            true
        });
    }
#[test]
fn test_trait_loc() {
check_location(r"trait A { f$0 }", ImmediateLocation::Trait);
check_location(r"trait A { #[attr] f$0 }", ImmediateLocation::Trait);
check_location(r"trait A { f$0 fn f() {} }", ImmediateLocation::Trait);
check_location(r"trait A { fn f() {} f$0 }", ImmediateLocation::Trait);
check_location(r"trait A$0 {}", None);
check_location(r"trait A { fn f$0 }", None);
}
#[test]
fn test_impl_loc() {
check_location(r"impl A { f$0 }", ImmediateLocation::Impl);
check_location(r"impl A { #[attr] f$0 }", ImmediateLocation::Impl);
check_location(r"impl A { f$0 fn f() {} }", ImmediateLocation::Impl);
check_location(r"impl A { fn f() {} f$0 }", ImmediateLocation::Impl);
check_location(r"impl A$0 {}", None);
check_location(r"impl A { fn f$0 }", None);
}
#[test]
fn test_use_loc() {
check_location(r"use f$0", ImmediateLocation::Use);
check_location(r"use f$0;", ImmediateLocation::Use);
check_location(r"use f::{f$0}", None);
check_location(r"use {f$0}", None);
}
#[test]
fn test_record_field_loc() {
check_location(r"struct Foo { f$0 }", ImmediateLocation::RecordField);
check_location(r"struct Foo { f$0 pub f: i32}", ImmediateLocation::RecordField);
check_location(r"struct Foo { pub f: i32, f$0 }", ImmediateLocation::RecordField);
}
#[test]
fn test_block_expr_loc() {
check_location(r"fn my_fn() { let a = 2; f$0 }", ImmediateLocation::BlockExpr);
check_location(r"fn my_fn() { f$0 f }", ImmediateLocation::BlockExpr);
}
#[test]
fn test_ident_pat_loc() {
check_location(r"fn my_fn(m$0) {}", ImmediateLocation::IdentPat);
check_location(r"fn my_fn() { let m$0 }", ImmediateLocation::IdentPat);
check_location(r"fn my_fn(&m$0) {}", ImmediateLocation::IdentPat);
check_location(r"fn my_fn() { let &m$0 }", ImmediateLocation::IdentPat);
}
#[test]
fn test_ref_expr_loc() {
check_location(r"fn my_fn() { let x = &m$0 foo; }", ImmediateLocation::RefExpr);
}
#[test]
fn test_item_list_loc() {
check_location(r"i$0", ImmediateLocation::ItemList);
check_location(r"#[attr] i$0", ImmediateLocation::ItemList);
check_location(r"fn f() {} i$0", ImmediateLocation::ItemList);
check_location(r"mod foo { f$0 }", ImmediateLocation::ItemList);
check_location(r"mod foo { #[attr] f$0 }", ImmediateLocation::ItemList);
check_location(r"mod foo { fn f() {} f$0 }", ImmediateLocation::ItemList);
check_location(r"mod foo$0 {}", None);
}
#[test]
fn test_impl_prev_sibling() {
check_prev_sibling(r"impl A w$0 ", ImmediatePrevSibling::ImplDefType);
check_prev_sibling(r"impl A w$0 {}", ImmediatePrevSibling::ImplDefType);
check_prev_sibling(r"impl A for A w$0 ", ImmediatePrevSibling::ImplDefType);
check_prev_sibling(r"impl A for A w$0 {}", ImmediatePrevSibling::ImplDefType);
check_prev_sibling(r"impl A for w$0 {}", None);
check_prev_sibling(r"impl A for w$0", None);
}
#[test]
fn test_trait_prev_sibling() {
check_prev_sibling(r"trait A w$0 ", ImmediatePrevSibling::TraitDefName);
check_prev_sibling(r"trait A w$0 {}", ImmediatePrevSibling::TraitDefName);
}
#[test]
fn test_if_expr_prev_sibling() {
check_prev_sibling(r"fn foo() { if true {} w$0", ImmediatePrevSibling::IfExpr);
check_prev_sibling(r"fn foo() { if true {}; w$0", None);
}
}
| 37.783529 | 112 | 0.577158 |
2fb33944dda259287c57b958c3f694a7948678a7
| 6,845 |
use crate::modules::{get_modules, get_path};
use proc_macro::TokenStream;
use quote::{format_ident, quote};
use syn::{
parse::ParseStream, parse_macro_input, Data, DataStruct, DeriveInput, Field, Fields, Path,
};
/// Per-field options parsed from a `#[render_resources(...)]` attribute.
#[derive(Default)]
struct RenderResourceFieldAttributes {
    // Field is excluded from the generated render-resource list.
    pub ignore: bool,
    // Field should carry the BUFFER render-resource hint.
    pub buffer: bool,
}
/// Struct-level options parsed from a `#[render_resources(...)]` attribute.
#[derive(Default)]
struct RenderResourceAttributes {
    // The whole struct is exposed as a single render resource (itself).
    pub from_self: bool,
}
// Attribute name shared by the struct-level and field-level parsers.
static RENDER_RESOURCE_ATTRIBUTE_NAME: &str = "render_resources";
/// Derive-macro implementation for `RenderResources`.
///
/// With `#[render_resources(from_self)]` the type exposes itself as a single
/// resource; otherwise each named field (minus `ignore`d ones) becomes a
/// resource named `"<Struct>_<field>"`, with an optional BUFFER hint.
pub fn derive_render_resources(input: TokenStream) -> TokenStream {
    let ast = parse_macro_input!(input as DeriveInput);
    let modules = get_modules(&ast.attrs);
    let bevy_render_path: Path = get_path(&modules.bevy_render);
    // Parse the optional struct-level `#[render_resources(from_self)]`.
    let attributes = ast
        .attrs
        .iter()
        .find(|a| *a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME)
        .map_or_else(RenderResourceAttributes::default, |a| {
            syn::custom_keyword!(from_self);
            let mut attributes = RenderResourceAttributes::default();
            a.parse_args_with(|input: ParseStream| {
                if input.parse::<Option<from_self>>()?.is_some() {
                    attributes.from_self = true;
                }
                Ok(())
            })
            .expect("invalid 'render_resources' attribute format");
            attributes
        });
    let struct_name = &ast.ident;
    let struct_name_string = struct_name.to_string();
    if attributes.from_self {
        // Trivial impl: the value itself is the one and only resource.
        TokenStream::from(quote! {
            impl #bevy_render_path::renderer::RenderResources for #struct_name {
                fn render_resources_len(&self) -> usize {
                    1
                }
                fn get_render_resource(&self, index: usize) -> Option<&dyn #bevy_render_path::renderer::RenderResource> {
                    if index == 0 {
                        Some(self)
                    } else {
                        None
                    }
                }
                fn get_render_resource_name(&self, index: usize) -> Option<&str> {
                    if index == 0 {
                        Some(#struct_name_string)
                    } else {
                        None
                    }
                }
                fn iter(&self) -> #bevy_render_path::renderer::RenderResourceIterator {
                    #bevy_render_path::renderer::RenderResourceIterator::new(self)
                }
            }
        })
    } else {
        // Field-wise impl: requires a struct with named fields.
        let fields = match &ast.data {
            Data::Struct(DataStruct {
                fields: Fields::Named(fields),
                ..
            }) => &fields.named,
            _ => panic!("expected a struct with named fields"),
        };
        // Pair each field with its parsed `#[render_resources(...)]` options.
        let field_attributes = fields
            .iter()
            .map(|field| {
                (
                    field,
                    field
                        .attrs
                        .iter()
                        .find(|a| {
                            *a.path.get_ident().as_ref().unwrap() == RENDER_RESOURCE_ATTRIBUTE_NAME
                        })
                        .map_or_else(RenderResourceFieldAttributes::default, |a| {
                            syn::custom_keyword!(ignore);
                            syn::custom_keyword!(buffer);
                            let mut attributes = RenderResourceFieldAttributes::default();
                            a.parse_args_with(|input: ParseStream| {
                                if input.parse::<Option<ignore>>()?.is_some() {
                                    attributes.ignore = true;
                                } else if input.parse::<Option<buffer>>()?.is_some() {
                                    attributes.buffer = true;
                                }
                                Ok(())
                            })
                            .expect("invalid 'render_resources' attribute format");
                            attributes
                        }),
                )
            })
            .collect::<Vec<(&Field, RenderResourceFieldAttributes)>>();
        // Collect parallel lists of names, field idents, and hints.
        let mut render_resource_names = Vec::new();
        let mut render_resource_fields = Vec::new();
        let mut render_resource_hints = Vec::new();
        for (field, attrs) in field_attributes.iter() {
            if attrs.ignore {
                continue;
            }
            let field_ident = field.ident.as_ref().unwrap();
            let field_name = field_ident.to_string();
            render_resource_fields.push(field_ident);
            render_resource_names.push(format!("{}_{}", struct_name, field_name));
            if attrs.buffer {
                render_resource_hints
                    .push(quote! {Some(#bevy_render_path::renderer::RenderResourceHints::BUFFER)})
            } else {
                render_resource_hints.push(quote! {None})
            }
        }
        let render_resource_count = render_resource_names.len();
        let render_resource_indices = 0..render_resource_count;
        let struct_name_uppercase = struct_name_string.to_uppercase();
        // Names/hints are emitted as per-type statics so lookups are by index.
        let render_resource_names_ident =
            format_ident!("{}_RENDER_RESOURCE_NAMES", struct_name_uppercase);
        let render_resource_hints_ident =
            format_ident!("{}_RENDER_RESOURCE_HINTS", struct_name_uppercase);
        TokenStream::from(quote! {
            static #render_resource_names_ident: &[&str] = &[
                #(#render_resource_names,)*
            ];
            static #render_resource_hints_ident: &[Option<#bevy_render_path::renderer::RenderResourceHints>] = &[
                #(#render_resource_hints,)*
            ];
            impl #bevy_render_path::renderer::RenderResources for #struct_name {
                fn render_resources_len(&self) -> usize {
                    #render_resource_count
                }
                fn get_render_resource(&self, index: usize) -> Option<&dyn #bevy_render_path::renderer::RenderResource> {
                    match index {
                        #(#render_resource_indices => Some(&self.#render_resource_fields),)*
                        _ => None,
                    }
                }
                fn get_render_resource_name(&self, index: usize) -> Option<&str> {
                    #render_resource_names_ident.get(index).copied()
                }
                fn get_render_resource_hints(&self, index: usize) -> Option<#bevy_render_path::renderer::RenderResourceHints> {
                    #render_resource_hints_ident.get(index).and_then(|o| *o)
                }
                fn iter(&self) -> #bevy_render_path::renderer::RenderResourceIterator {
                    #bevy_render_path::renderer::RenderResourceIterator::new(self)
                }
            }
        })
    }
}
| 38.672316 | 127 | 0.514828 |
1a66ec6afba3ea89af3ae1e9d6e84b9587b05464
| 18,150 |
#![allow(unused_imports)]
use crate::error::GoogleResponse;
use crate::resources::common::ListResponse;
pub use crate::resources::common::{Entity, ProjectTeam, Role};
/// The ObjectAccessControls resources represent the Access Control Lists (ACLs) for objects within
/// Google Cloud Storage. ACLs let you specify who has access to your data and to what extent.
///
/// ```text,ignore
/// Important: The methods for this resource fail with a 400 Bad Request response for buckets with
/// uniform bucket-level access enabled. Use storage.buckets.getIamPolicy and
/// storage.buckets.setIamPolicy to control access instead.
/// ```
///
/// There are two roles that can be assigned to an entity:
///
/// READERs can get an object, though the acl property will not be revealed.
/// OWNERs are READERs, and they can get the acl property, update an object, and call all
/// objectAccessControls methods on the object. The owner of an object is always an OWNER.
///
/// For more information, see Access Control, with the caveat that this API uses READER and OWNER
/// instead of READ and FULL_CONTROL.
#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize)]
// Fields travel over the wire in camelCase, per the Cloud Storage JSON API.
#[serde(rename_all = "camelCase")]
pub struct ObjectAccessControl {
    /// The kind of item this is. For object access control entries, this is always
    /// `storage#objectAccessControl`.
    pub kind: String,
    /// The ID of the access-control entry.
    pub id: String,
    /// The link to this access-control entry.
    pub self_link: String,
    /// The name of the bucket.
    pub bucket: String,
    /// The name of the object, if applied to an object.
    pub object: String,
    /// The content generation of the object, if applied to an object.
    /// `None` when the entry is not generation-specific.
    pub generation: Option<String>,
    /// The entity holding the permission, in one of the following forms:
    ///
    /// user-userId
    /// user-email
    /// group-groupId
    /// group-email
    /// domain-domain
    /// project-team-projectId
    /// allUsers
    /// allAuthenticatedUsers
    ///
    /// Examples:
    ///
    /// The user [email protected] would be [email protected].
    /// The group [email protected] would be [email protected].
    /// To refer to all members of the G Suite for Business domain example.com, the entity would be
    /// domain-example.com.
    pub entity: Entity,
    /// The access permission for the entity.
    pub role: Role,
    /// The email address associated with the entity, if any.
    pub email: Option<String>,
    /// The ID for the entity, if any.
    pub entity_id: Option<String>,
    /// The domain associated with the entity, if any.
    pub domain: Option<String>,
    /// The project team associated with the entity, if any.
    pub project_team: Option<ProjectTeam>,
    /// HTTP 1.1 Entity tag for the access-control entry.
    pub etag: String,
}
/// Used to create a new `ObjectAccessControl` object.
///
/// This is the request body for `ObjectAccessControl::create`; it only derives
/// `Serialize` because it is sent to, and never received from, the API.
#[derive(Debug, PartialEq, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct NewObjectAccessControl {
    /// The entity holding the permission, in one of the following forms:
    ///
    /// user-userId
    /// user-email
    /// group-groupId
    /// group-email
    /// domain-domain
    /// project-team-projectId
    /// allUsers
    /// allAuthenticatedUsers
    ///
    /// Examples:
    ///
    /// The user [email protected] would be [email protected].
    /// The group [email protected] would be [email protected].
    /// To refer to all members of the G Suite for Business domain example.com, the entity would be
    /// domain-example.com.
    pub entity: Entity,
    /// The access permission for the entity.
    pub role: Role,
}
// Wire shape of an ACL list response. Internal only; the `#[allow(unused)]`
// suggests the fields are deserialized but not all are read afterwards.
#[allow(unused)]
#[derive(Debug, serde::Deserialize)]
#[serde(rename_all = "camelCase")]
struct ObjectAccessControlList {
    // Always `storage#objectAccessControls` according to the API naming scheme
    // used elsewhere in this file — TODO confirm against the API reference.
    kind: String,
    items: Vec<ObjectAccessControl>,
}
impl ObjectAccessControl {
/// Creates a new ACL entry on the specified `object`.
///
/// ### Important
/// This method fails with a 400 Bad Request response for buckets with uniform
/// bucket-level access enabled. Use `Bucket::get_iam_policy` and `Bucket::set_iam_policy` to
/// control access instead.
pub async fn create(
bucket: &str,
object: &str,
new_object_access_control: &NewObjectAccessControl,
) -> crate::Result<Self> {
let url = format!("{}/b/{}/o/{}/acl", crate::BASE_URL, bucket, object);
let result: GoogleResponse<Self> = crate::CLIENT
.post(&url)
.headers(crate::get_headers().await?)
.json(new_object_access_control)
.send()
.await?
.json()
.await?;
match result {
GoogleResponse::Success(s) => Ok(s),
GoogleResponse::Error(e) => Err(e.into()),
}
}
/// The sync equivalent of `ObjectAccessControl::create`.
///
/// ### Features
/// This function requires that the feature flag `sync` is enabled in `Cargo.toml`.
#[cfg(feature = "sync")]
#[tokio::main]
pub async fn create_sync(
bucket: &str,
object: &str,
new_object_access_control: &NewObjectAccessControl,
) -> crate::Result<Self> {
Self::create(bucket, object, new_object_access_control).await
}
/// Retrieves `ACL` entries on the specified object.
///
/// ### Important
/// Important: This method fails with a 400 Bad Request response for buckets with uniform
/// bucket-level access enabled. Use `Bucket::get_iam_policy` and `Bucket::set_iam_policy` to
/// control access instead.
pub async fn list(bucket: &str, object: &str) -> crate::Result<Vec<Self>> {
let url = format!("{}/b/{}/o/{}/acl", crate::BASE_URL, bucket, object);
let result: GoogleResponse<ListResponse<Self>> = crate::CLIENT
.get(&url)
.headers(crate::get_headers().await?)
.send()
.await?
.json()
.await?;
match result {
GoogleResponse::Success(s) => Ok(s.items),
GoogleResponse::Error(e) => Err(e.into()),
}
}
/// The sync equivalent of `ObjectAccessControl::list`.
///
/// ### Features
/// This function requires that the feature flag `sync` is enabled in `Cargo.toml`.
#[cfg(feature = "sync")]
#[tokio::main]
pub async fn list_sync(bucket: &str, object: &str) -> crate::Result<Vec<Self>> {
Self::list(bucket, object).await
}
/// Returns the `ACL` entry for the specified entity on the specified bucket.
///
/// ### Important
/// Important: This method fails with a 400 Bad Request response for buckets with uniform
/// bucket-level access enabled. Use `Bucket::get_iam_policy` and `Bucket::set_iam_policy` to
/// control access instead.
pub async fn read(bucket: &str, object: &str, entity: &Entity) -> crate::Result<Self> {
let url = format!(
"{}/b/{}/o/{}/acl/{}",
crate::BASE_URL,
bucket,
object,
entity
);
let result: GoogleResponse<Self> = crate::CLIENT
.get(&url)
.headers(crate::get_headers().await?)
.send()
.await?
.json()
.await?;
match result {
GoogleResponse::Success(s) => Ok(s),
GoogleResponse::Error(e) => Err(e.into()),
}
}
/// The sync equivalent of `ObjectAccessControl::read`.
///
/// ### Features
/// This function requires that the feature flag `sync` is enabled in `Cargo.toml`.
#[cfg(feature = "sync")]
#[tokio::main]
pub async fn read_sync(bucket: &str, object: &str, entity: &Entity) -> crate::Result<Self> {
Self::read(bucket, object, entity).await
}
/// Updates an ACL entry on the specified object.
///
/// ### Important
/// Important: This method fails with a 400 Bad Request response for buckets with uniform
/// bucket-level access enabled. Use `Bucket::get_iam_policy` and `Bucket::set_iam_policy` to
/// control access instead.
pub async fn update(&self) -> crate::Result<Self> {
let url = format!(
"{}/b/{}/o/{}/acl/{}",
crate::BASE_URL,
self.bucket,
self.object,
self.entity,
);
let result: GoogleResponse<Self> = crate::CLIENT
.put(&url)
.headers(crate::get_headers().await?)
.json(self)
.send()
.await?
.json()
.await?;
match result {
GoogleResponse::Success(s) => Ok(s),
GoogleResponse::Error(e) => Err(e.into()),
}
}
/// The sync equivalent of `ObjectAccessControl::update`.
///
/// ### Features
/// This function requires that the feature flag `sync` is enabled in `Cargo.toml`.
#[cfg(feature = "sync")]
#[tokio::main]
pub async fn update_sync(&self) -> crate::Result<Self> {
self.update().await
}
/// Permanently deletes the ACL entry for the specified entity on the specified object.
///
/// ### Important
/// Important: This method fails with a 400 Bad Request response for buckets with uniform
/// bucket-level access enabled. Use `Bucket::get_iam_policy` and `Bucket::set_iam_policy` to
/// control access instead.
pub async fn delete(self) -> crate::Result<()> {
let url = format!(
"{}/b/{}/o/{}/acl/{}",
crate::BASE_URL,
self.bucket,
self.object,
self.entity,
);
let response = crate::CLIENT
.delete(&url)
.headers(crate::get_headers().await?)
.send()
.await?;
if response.status().is_success() {
Ok(())
} else {
Err(crate::Error::Google(response.json().await?))
}
}
/// The sync equivalent of `ObjectAccessControl::delete`.
///
/// ### Features
/// This function requires that the feature flag `sync` is enabled in `Cargo.toml`.
#[cfg(feature = "sync")]
#[tokio::main]
pub async fn delete_sync(self) -> crate::Result<()> {
self.delete().await
}
}
#[cfg(test)]
mod tests {
    // NOTE(review): these are integration tests — they talk to a live Cloud
    // Storage backend through the crate's test-bucket helpers, not mocks.
    use super::*;
    use crate::Object;
    #[tokio::test]
    async fn create() {
        let bucket = crate::read_test_bucket().await;
        Object::create(
            &bucket.name,
            vec![0, 1],
            "test-object-access-controls-create",
            "text/plain",
        )
        .await
        .unwrap();
        let new_bucket_access_control = NewObjectAccessControl {
            entity: Entity::AllUsers,
            role: Role::Reader,
        };
        ObjectAccessControl::create(
            &bucket.name,
            "test-object-access-controls-create",
            &new_bucket_access_control,
        )
        .await
        .unwrap();
    }
    #[tokio::test]
    async fn list() {
        let bucket = crate::read_test_bucket().await;
        Object::create(
            &bucket.name,
            vec![0, 1],
            "test-object-access-controls-list",
            "text/plain",
        )
        .await
        .unwrap();
        ObjectAccessControl::list(&bucket.name, "test-object-access-controls-list")
            .await
            .unwrap();
    }
    #[tokio::test]
    async fn read() {
        let bucket = crate::read_test_bucket().await;
        Object::create(
            &bucket.name,
            vec![0, 1],
            "test-object-access-controls-read",
            "text/plain",
        )
        .await
        .unwrap();
        // An entry must be created first so there is something to read back.
        let new_bucket_access_control = NewObjectAccessControl {
            entity: Entity::AllUsers,
            role: Role::Reader,
        };
        ObjectAccessControl::create(
            &bucket.name,
            "test-object-access-controls-read",
            &new_bucket_access_control,
        )
        .await
        .unwrap();
        ObjectAccessControl::read(
            &bucket.name,
            "test-object-access-controls-read",
            &Entity::AllUsers,
        )
        .await
        .unwrap();
    }
    #[tokio::test]
    async fn update() {
        // use a separate bucket to prevent synchronization issues
        let bucket = crate::create_test_bucket("test-object-access-controls-update").await;
        let new_bucket_access_control = NewObjectAccessControl {
            entity: Entity::AllUsers,
            role: Role::Reader,
        };
        Object::create(&bucket.name, vec![0, 1], "test-update", "text/plain")
            .await
            .unwrap();
        ObjectAccessControl::create(&bucket.name, "test-update", &new_bucket_access_control)
            .await
            .unwrap();
        let mut acl = ObjectAccessControl::read(&bucket.name, "test-update", &Entity::AllUsers)
            .await
            .unwrap();
        acl.entity = Entity::AllAuthenticatedUsers;
        acl.update().await.unwrap();
        // Clean up so the temporary bucket can be deleted.
        Object::delete(&bucket.name, "test-update").await.unwrap();
        bucket.delete().await.unwrap();
    }
    #[tokio::test]
    async fn delete() {
        // use a separate bucket to prevent synchronization issues
        let bucket = crate::create_test_bucket("test-object-access-controls-delete").await;
        let new_bucket_access_control = NewObjectAccessControl {
            entity: Entity::AllUsers,
            role: Role::Reader,
        };
        Object::create(&bucket.name, vec![0, 1], "test-delete", "text/plain")
            .await
            .unwrap();
        ObjectAccessControl::create(&bucket.name, "test-delete", &new_bucket_access_control)
            .await
            .unwrap();
        let acl = ObjectAccessControl::read(&bucket.name, "test-delete", &Entity::AllUsers)
            .await
            .unwrap();
        acl.delete().await.unwrap();
        Object::delete(&bucket.name, "test-delete").await.unwrap();
        bucket.delete().await.unwrap();
    }
    // Mirrors of the async tests above, exercising the `*_sync` wrappers.
    #[cfg(feature = "sync")]
    mod sync {
        use super::*;
        #[test]
        fn create() {
            let bucket = crate::read_test_bucket_sync();
            Object::create_sync(
                &bucket.name,
                vec![0, 1],
                "test-object-access-controls-create",
                "text/plain",
            )
            .unwrap();
            let new_bucket_access_control = NewObjectAccessControl {
                entity: Entity::AllUsers,
                role: Role::Reader,
            };
            ObjectAccessControl::create_sync(
                &bucket.name,
                "test-object-access-controls-create",
                &new_bucket_access_control,
            )
            .unwrap();
        }
        #[test]
        fn list() {
            let bucket = crate::read_test_bucket_sync();
            Object::create_sync(
                &bucket.name,
                vec![0, 1],
                "test-object-access-controls-list",
                "text/plain",
            )
            .unwrap();
            ObjectAccessControl::list_sync(&bucket.name, "test-object-access-controls-list")
                .unwrap();
        }
        #[test]
        fn read() {
            let bucket = crate::read_test_bucket_sync();
            Object::create_sync(
                &bucket.name,
                vec![0, 1],
                "test-object-access-controls-read",
                "text/plain",
            )
            .unwrap();
            let new_bucket_access_control = NewObjectAccessControl {
                entity: Entity::AllUsers,
                role: Role::Reader,
            };
            ObjectAccessControl::create_sync(
                &bucket.name,
                "test-object-access-controls-read",
                &new_bucket_access_control,
            )
            .unwrap();
            ObjectAccessControl::read_sync(
                &bucket.name,
                "test-object-access-controls-read",
                &Entity::AllUsers,
            )
            .unwrap();
        }
        #[test]
        fn update() {
            // use a separate bucket to prevent synchronization issues
            let bucket = crate::create_test_bucket_sync("test-object-access-controls-update");
            let new_bucket_access_control = NewObjectAccessControl {
                entity: Entity::AllUsers,
                role: Role::Reader,
            };
            Object::create_sync(&bucket.name, vec![0, 1], "test-update", "text/plain").unwrap();
            ObjectAccessControl::create_sync(
                &bucket.name,
                "test-update",
                &new_bucket_access_control,
            )
            .unwrap();
            let mut acl =
                ObjectAccessControl::read_sync(&bucket.name, "test-update", &Entity::AllUsers)
                    .unwrap();
            acl.entity = Entity::AllAuthenticatedUsers;
            acl.update_sync().unwrap();
            Object::delete_sync(&bucket.name, "test-update").unwrap();
            bucket.delete_sync().unwrap();
        }
        #[test]
        fn delete() {
            // use a separate bucket to prevent synchronization issues
            let bucket = crate::create_test_bucket_sync("test-object-access-controls-delete");
            let new_bucket_access_control = NewObjectAccessControl {
                entity: Entity::AllUsers,
                role: Role::Reader,
            };
            Object::create_sync(&bucket.name, vec![0, 1], "test-delete", "text/plain").unwrap();
            ObjectAccessControl::create_sync(
                &bucket.name,
                "test-delete",
                &new_bucket_access_control,
            )
            .unwrap();
            let acl =
                ObjectAccessControl::read_sync(&bucket.name, "test-delete", &Entity::AllUsers)
                    .unwrap();
            acl.delete_sync().unwrap();
            Object::delete_sync(&bucket.name, "test-delete").unwrap();
            bucket.delete_sync().unwrap();
        }
    }
}
| 34.440228 | 99 | 0.563691 |
69968074863d8e3817f09a83ec8fdfe2a84689a6
| 4,903 |
use rustc_hash::FxHashSet;
use ra_syntax::TextUnit;
use crate::completion::{CompletionItem, CompletionItemKind, Completions, CompletionKind, CompletionContext};
/// Completes names visible in the current scope for a bare (single-segment)
/// path: local bindings first, then items from the enclosing module.
pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
    // Only bare paths like `foo` qualify; `bar::foo` is handled elsewhere.
    if !ctx.is_trivial_path {
        return;
    }
    let module = match &ctx.module {
        Some(it) => it,
        None => return,
    };
    // Inside a function body, suggest local bindings as well.
    if let Some(function) = &ctx.function {
        let scopes = function.scopes(ctx.db);
        complete_fn(acc, &scopes, ctx.offset);
    }
    let module_scope = module.scope(ctx.db);
    let (file_id, _) = module.definition_source(ctx.db);
    for (name, res) in module_scope.entries() {
        // Don't expose the very import we are currently typing inside of.
        // FIXME: this penetrates through all kinds of abstractions,
        // we need to figure out the way to do it less ugly.
        let inside_own_import = match res.import {
            None => false,
            Some(import) => {
                let range = import.range(ctx.db, file_id);
                range.is_subrange(&ctx.leaf.range())
            }
        };
        if inside_own_import {
            continue;
        }
        CompletionItem::new(CompletionKind::Reference, name.to_string())
            .from_resolution(ctx, res)
            .add_to(acc);
    }
}
/// Walks the scope chain at `offset`, emitting a binding completion for each
/// name the first time it is seen (inner scopes shadow outer ones).
fn complete_fn(acc: &mut Completions, scopes: &hir::ScopesWithSyntaxMapping, offset: TextUnit) {
    let mut seen_names = FxHashSet::default();
    for scope in scopes.scope_chain_for_offset(offset) {
        for entry in scopes.scopes.entries(scope).iter() {
            // `insert` returns false for names already suggested by an
            // inner scope, which implements shadowing.
            if seen_names.insert(entry.name()) {
                CompletionItem::new(CompletionKind::Reference, entry.name().to_string())
                    .kind(CompletionItemKind::Binding)
                    .add_to(acc);
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::completion::{CompletionKind, check_completion};
    // Helper: `code` contains a `<|>` caret marker; `expected_completions` is
    // a `;`-separated list (snippet insert text follows each name in quotes).
    fn check_reference_completion(code: &str, expected_completions: &str) {
        check_completion(code, expected_completions, CompletionKind::Reference);
    }
    #[test]
    fn completes_bindings_from_let() {
        check_reference_completion(
            r"
            fn quux(x: i32) {
                let y = 92;
                1 + <|>;
                let z = ();
            }
            ",
            // `z` is declared after the caret, so it must not be offered.
            r#"y;x;quux "quux($0)""#,
        );
    }
    #[test]
    fn completes_bindings_from_if_let() {
        check_reference_completion(
            r"
            fn quux() {
                if let Some(x) = foo() {
                    let y = 92;
                };
                if let Some(a) = bar() {
                    let b = 62;
                    1 + <|>
                }
            }
            ",
            // Only bindings from the enclosing `if let` arm are visible.
            r#"b;a;quux "quux()$0""#,
        );
    }
    #[test]
    fn completes_bindings_from_for() {
        check_reference_completion(
            r"
            fn quux() {
                for x in &[1, 2, 3] {
                    <|>
                }
            }
            ",
            r#"x;quux "quux()$0""#,
        );
    }
    #[test]
    fn completes_module_items() {
        check_reference_completion(
            r"
            struct Foo;
            enum Baz {}
            fn quux() {
                <|>
            }
            ",
            r#"quux "quux()$0";Foo;Baz"#,
        );
    }
    #[test]
    fn completes_module_items_in_nested_modules() {
        check_reference_completion(
            r"
            struct Foo;
            mod m {
                struct Bar;
                fn quux() { <|> }
            }
            ",
            // `Foo` from the outer module is not offered inside `m`.
            r#"quux "quux()$0";Bar"#,
        );
    }
    #[test]
    fn completes_return_type() {
        check_reference_completion(
            r"
            struct Foo;
            fn x() -> <|>
            ",
            r#"Foo;x "x()$0""#,
        )
    }
    #[test]
    fn dont_show_both_completions_for_shadowing() {
        check_reference_completion(
            r"
            fn foo() -> {
                let bar = 92;
                {
                    let bar = 62;
                    <|>
                }
            }
            ",
            // The inner `bar` shadows the outer one; only one entry appears.
            r#"bar;foo "foo()$0""#,
        )
    }
    #[test]
    fn completes_self_in_methods() {
        check_reference_completion(r"impl S { fn foo(&self) { <|> } }", "self")
    }
    #[test]
    fn inserts_parens_for_function_calls() {
        check_reference_completion(
            r"
            fn no_args() {}
            fn main() { no_<|> }
            ",
            r#"no_args "no_args()$0"
               main "main()$0""#,
        );
        check_reference_completion(
            r"
            fn with_args(x: i32, y: String) {}
            fn main() { with_<|> }
            ",
            // Functions with parameters get the cursor inside the parens.
            r#"main "main()$0"
               with_args "with_args($0)""#,
        );
    }
}
| 25.941799 | 108 | 0.450337 |
fc7d5afd78158dbc68d5fecdcd50bfde57cc3530
| 1,825 |
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
use super::{Statement, StatementParser};
use crate::lexer::lexer::{Lexer, LocToken, Token};
use crate::lexer::preprocessor::context::PreprocContext;
use crate::parser::attributes::Attributes;
use crate::parser::expression::{ExprNode, ExpressionParser};
/// AST node for a `while (condition) body` statement.
#[derive(Clone, Debug, PartialEq)]
pub struct While {
    /// Attributes attached to the statement, if any.
    pub attributes: Option<Attributes>,
    /// The parenthesized loop condition.
    pub condition: ExprNode,
    /// The loop body statement.
    pub body: Statement,
}
/// Parser for `while` statements; borrows the shared lexer for its lifetime.
pub struct WhileStmtParser<'a, 'b, PC: PreprocContext> {
    lexer: &'b mut Lexer<'a, PC>,
}
impl<'a, 'b, PC: PreprocContext> WhileStmtParser<'a, 'b, PC> {
    /// Creates a parser that borrows the shared lexer.
    pub(super) fn new(lexer: &'b mut Lexer<'a, PC>) -> Self {
        Self { lexer }
    }

    /// Parses a `while (condition) body` statement; the `while` keyword itself
    /// has already been consumed by the caller (the first token expected here
    /// is the opening parenthesis).
    ///
    /// Returns the lookahead token (if the sub-parsers read one token past the
    /// statement) together with the parsed node.
    pub(super) fn parse(self, attributes: Option<Attributes>) -> (Option<LocToken>, Option<While>) {
        let tok = self.lexer.next_useful();
        if tok.tok != Token::LeftParen {
            unreachable!("Invalid token in while statement: {:?}", tok);
        }
        // Parse the condition up to the matching right parenthesis.
        let mut ep = ExpressionParser::new(self.lexer, Token::RightParen);
        let (tok, condition) = ep.parse(None);
        let tok = tok.unwrap_or_else(|| self.lexer.next_useful());
        if tok.tok != Token::RightParen {
            // Fixed copy-paste bug: this message previously said "if statement".
            unreachable!("Invalid token in while statement: {:?}", tok);
        }
        let sp = StatementParser::new(self.lexer);
        let (tok, body) = sp.parse(None);
        (
            tok,
            Some(While {
                attributes,
                body: body.unwrap(),
                condition: condition.unwrap(),
            }),
        )
    }
}
| 32.589286 | 100 | 0.612603 |
28045bf0571c4e475407237eedc8a34730bd18ae
| 36,661 |
// Copyright 2016 The xi-editor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! The main container for core state.
//!
//! All events from the frontend or from plugins are handled here first.
//!
//! This file is called 'tabs' for historical reasons, and should probably
//! be renamed.
use std::cell::{Cell, RefCell};
use std::collections::{BTreeMap, HashSet};
use std::fmt;
use std::fs::File;
use std::io;
use std::mem;
use std::path::{Path, PathBuf};
use serde::de::{self, Deserialize, Deserializer, Unexpected};
use serde::ser::{Serialize, Serializer};
use serde_json::Value;
use xi_rope::Rope;
use xi_rpc::{self, ReadError, RemoteError, RpcCtx, RpcPeer};
use xi_trace::{self, trace_block};
use crate::client::Client;
use crate::config::{self, ConfigDomain, ConfigDomainExternal, ConfigManager, Table};
use crate::editor::Editor;
use crate::event_context::EventContext;
use crate::file::FileManager;
use crate::line_ending::LineEnding;
use crate::plugin_rpc::{PluginNotification, PluginRequest};
use crate::plugins::{start_plugin_process, Plugin, PluginCatalog, PluginPid};
use crate::recorder::Recorder;
use crate::rpc::{
CoreNotification, CoreRequest, EditNotification, EditRequest,
PluginNotification as CorePluginNotification,
};
use crate::styles::{ThemeStyleMap, DEFAULT_THEME};
use crate::syntax::LanguageId;
use crate::view::View;
use crate::whitespace::Indentation;
use crate::width_cache::WidthCache;
use crate::WeakXiCore;
#[cfg(feature = "notify")]
use crate::watcher::{FileWatcher, WatchToken};
#[cfg(feature = "notify")]
use notify::DebouncedEvent;
#[cfg(feature = "notify")]
use std::ffi::OsStr;
/// ViewIds are the primary means of routing messages between
/// xi-core and a client view.
///
/// A thin wrapper over a `usize`; crate-internal code may construct one
/// directly from the counter value.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ViewId(pub(crate) usize);
/// BufferIds uniquely identify open buffers.
///
/// Unlike `ViewId`, this derives `Serialize`/`Deserialize` so it can cross
/// the RPC boundary.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)]
pub struct BufferId(pub(crate) usize);
pub type PluginId = crate::plugins::PluginPid;
// old-style names; will be deprecated
pub type BufferIdentifier = BufferId;
/// Totally arbitrary; we reserve this space for `ViewId`s
pub(crate) const RENDER_VIEW_IDLE_MASK: usize = 1 << 25;
pub(crate) const REWRAP_VIEW_IDLE_MASK: usize = 1 << 26;
// Scheduled from `do_new_view` so pending views are finalized on the next
// idle pass of the runloop.
const NEW_VIEW_IDLE_TOKEN: usize = 1001;
/// xi_rpc idle Token for watcher related idle scheduling.
pub(crate) const WATCH_IDLE_TOKEN: usize = 1002;
// Watch tokens distinguish which watched directory a file-change event
// belongs to (config dir, open files, theme dir).
#[cfg(feature = "notify")]
const CONFIG_EVENT_TOKEN: WatchToken = WatchToken(1);
/// Token for file-change events in open files
#[cfg(feature = "notify")]
pub const OPEN_FILE_EVENT_TOKEN: WatchToken = WatchToken(2);
#[cfg(feature = "notify")]
const THEME_FILE_EVENT_TOKEN: WatchToken = WatchToken(3);
/// The main container for core state; all frontend and plugin events are
/// routed through this type.
#[allow(dead_code)]
pub struct CoreState {
    /// All open buffers, keyed by id. `RefCell` provides single-threaded
    /// interior mutability.
    editors: BTreeMap<BufferId, RefCell<Editor>>,
    /// All live views, keyed by id.
    views: BTreeMap<ViewId, RefCell<View>>,
    /// Bookkeeping for the files backing open buffers (and, with the
    /// `notify` feature, their file watcher).
    file_manager: FileManager,
    /// A local pasteboard.
    kill_ring: RefCell<Rope>,
    /// Theme and style state.
    style_map: RefCell<ThemeStyleMap>,
    // Cache of measured text widths — presumably populated via frontend
    // measurement; TODO confirm.
    width_cache: RefCell<WidthCache>,
    /// User and platform specific settings
    config_manager: ConfigManager,
    /// Recorded editor actions
    recorder: RefCell<Recorder>,
    /// A weak reference to the main state container, stashed so that
    /// it can be passed to plugins.
    self_ref: Option<WeakXiCore>,
    /// Views which need to have setup finished.
    pending_views: Vec<(ViewId, Table)>,
    /// RPC handle for talking to the frontend.
    peer: Client,
    /// Shared source of fresh ids for views, buffers, and plugins.
    id_counter: Counter,
    /// Plugins discovered from the configured plugin paths.
    plugins: PluginCatalog,
    // for the time being we auto-start all plugins we find on launch.
    running_plugins: Vec<Plugin>,
}
/// Initial setup and bookkeeping
impl CoreState {
    /// Builds the core state, initializing the config directory (creating it
    /// if missing) and — with the `notify` feature — registering file watches
    /// for config files (`*.xiconfig`) and theme files (`*.tmTheme`).
    pub(crate) fn new(
        peer: &RpcPeer,
        config_dir: Option<PathBuf>,
        extras_dir: Option<PathBuf>,
    ) -> Self {
        #[cfg(feature = "notify")]
        let mut watcher = FileWatcher::new(peer.clone());
        if let Some(p) = config_dir.as_ref() {
            if !p.exists() {
                if let Err(e) = config::init_config_dir(p) {
                    //TODO: report this error?
                    error!("error initing file based configs: {:?}", e);
                }
            }
            #[cfg(feature = "notify")]
            watcher.watch_filtered(p, true, CONFIG_EVENT_TOKEN, |p| {
                p.extension().and_then(OsStr::to_str).unwrap_or("") == "xiconfig"
            });
        }
        let config_manager = ConfigManager::new(config_dir, extras_dir);
        let themes_dir = config_manager.get_themes_dir();
        if let Some(p) = themes_dir.as_ref() {
            #[cfg(feature = "notify")]
            watcher.watch_filtered(p, true, THEME_FILE_EVENT_TOKEN, |p| {
                p.extension().and_then(OsStr::to_str).unwrap_or("") == "tmTheme"
            });
        }
        CoreState {
            views: BTreeMap::new(),
            editors: BTreeMap::new(),
            #[cfg(feature = "notify")]
            file_manager: FileManager::new(watcher),
            #[cfg(not(feature = "notify"))]
            file_manager: FileManager::new(),
            kill_ring: RefCell::new(Rope::from("")),
            style_map: RefCell::new(ThemeStyleMap::new(themes_dir)),
            width_cache: RefCell::new(WidthCache::new()),
            config_manager,
            recorder: RefCell::new(Recorder::new()),
            self_ref: None,
            pending_views: Vec::new(),
            peer: Client::new(peer.clone()),
            id_counter: Counter::default(),
            plugins: PluginCatalog::default(),
            running_plugins: Vec::new(),
        }
    }
    // The three id kinds share one counter, so ids are unique across kinds.
    fn next_view_id(&self) -> ViewId {
        ViewId(self.id_counter.next())
    }
    fn next_buffer_id(&self) -> BufferId {
        BufferId(self.id_counter.next())
    }
    fn next_plugin_id(&self) -> PluginId {
        PluginPid(self.id_counter.next())
    }
    /// Second-stage initialization, run once the weak self reference exists:
    /// loads file-based config, themes, and the plugin catalog, announces
    /// languages/themes to the client, and launches discovered plugins.
    pub(crate) fn finish_setup(&mut self, self_ref: WeakXiCore) {
        self.self_ref = Some(self_ref);
        if let Some(path) = self.config_manager.base_config_file_path() {
            self.load_file_based_config(&path);
        }
        // Load the custom theme files.
        self.style_map.borrow_mut().load_theme_dir();
        // instead of having to do this here, config should just own
        // the plugin catalog and reload automatically
        let plugin_paths = self.config_manager.get_plugin_paths();
        self.plugins.reload_from_paths(&plugin_paths);
        let languages = self.plugins.make_languages_map();
        let languages_ids = languages.iter().map(|l| l.name.clone()).collect::<Vec<_>>();
        self.peer.available_languages(languages_ids);
        self.config_manager.set_languages(languages);
        let theme_names = self.style_map.borrow().get_theme_names();
        self.peer.available_themes(theme_names);
        // FIXME: temporary: we just launch every plugin we find at startup
        for manifest in self.plugins.iter() {
            start_plugin_process(
                manifest.clone(),
                self.next_plugin_id(),
                self.self_ref.as_ref().unwrap().clone(),
            );
        }
    }
    /// Attempt to load a config file.
    fn load_file_based_config(&mut self, path: &Path) {
        let _t = trace_block("CoreState::load_config_file", &["core"]);
        if let Some(domain) = self.config_manager.domain_for_path(path) {
            match config::try_load_from_file(&path) {
                Ok(table) => self.set_config(domain, table),
                // Parse/read errors are surfaced to the user, not fatal.
                Err(e) => self.peer.alert(e.to_string()),
            }
        } else {
            self.peer.alert(format!("Unexpected config file {:?}", path));
        }
    }
    /// Sets (overwriting) the config for a given domain.
    fn set_config(&mut self, domain: ConfigDomain, table: Table) {
        match self.config_manager.set_user_config(domain, table) {
            Err(e) => self.peer.alert(format!("{}", &e)),
            Ok(changes) => self.handle_config_changes(changes),
        }
    }
    /// Notify editors/views/plugins of config changes.
    fn handle_config_changes(&self, changes: Vec<(BufferId, Table)>) {
        for (id, table) in changes {
            // NOTE(review): unwrap assumes every changed buffer still has at
            // least one live view — confirm this invariant holds.
            let view_id = self
                .views
                .values()
                .find(|v| v.borrow().get_buffer_id() == id)
                .map(|v| v.borrow().get_view_id())
                .unwrap();
            self.make_context(view_id).unwrap().config_changed(&table)
        }
    }
}
/// Handling client events
impl CoreState {
    /// Creates an `EventContext` for the provided `ViewId`. This context
    /// holds references to the `Editor` and `View` backing this `ViewId`,
    /// as well as to sibling views, plugins, and other state necessary
    /// for handling most events.
    ///
    /// Returns `None` when `view_id` does not name a live view.
    pub(crate) fn make_context(&self, view_id: ViewId) -> Option<EventContext> {
        self.views.get(&view_id).map(|view| {
            let buffer_id = view.borrow().get_buffer_id();
            let editor = &self.editors[&buffer_id];
            let info = self.file_manager.get_info(buffer_id);
            let plugins = self.running_plugins.iter().collect::<Vec<_>>();
            let config = self.config_manager.get_buffer_config(buffer_id);
            let language = self.config_manager.get_buffer_language(buffer_id);
            EventContext {
                view_id,
                buffer_id,
                view,
                editor,
                config: &config.items,
                recorder: &self.recorder,
                language,
                info,
                // NOTE(review): siblings is always empty here — confirm
                // whether multiple views per buffer are populated elsewhere.
                siblings: Vec::new(),
                plugins,
                client: &self.peer,
                style_map: &self.style_map,
                width_cache: &self.width_cache,
                kill_ring: &self.kill_ring,
                // `self_ref` is set in `finish_setup`, before any events.
                weak_core: self.self_ref.as_ref().unwrap(),
            }
        })
    }
    /// Produces an iterator over all event contexts, with each view appearing
    /// exactly once.
    ///
    /// The `seen` set lets the `Iter` adapter (defined elsewhere in this
    /// module) deduplicate — presumably views sharing a buffer; TODO confirm.
    fn iter_groups<'a>(&'a self) -> Iter<'a, Box<Iterator<Item = &ViewId> + 'a>> {
        Iter { views: Box::new(self.views.keys()), seen: HashSet::new(), inner: self }
    }
    /// Dispatches a client notification to the matching handler method.
    pub(crate) fn client_notification(&mut self, cmd: CoreNotification) {
        use self::CoreNotification::*;
        use self::CorePluginNotification as PN;
        match cmd {
            Edit(crate::rpc::EditCommand { view_id, cmd }) => self.do_edit(view_id, cmd),
            Save { view_id, file_path } => self.do_save(view_id, file_path),
            CloseView { view_id } => self.do_close_view(view_id),
            ModifyUserConfig { domain, changes } => self.do_modify_user_config(domain, changes),
            SetTheme { theme_name } => self.do_set_theme(&theme_name),
            SaveTrace { destination, frontend_samples } => {
                self.save_trace(&destination, frontend_samples)
            }
            // Plugin-related notifications are nested one level deeper.
            Plugin(cmd) => match cmd {
                PN::Start { view_id, plugin_name } => self.do_start_plugin(view_id, &plugin_name),
                PN::Stop { view_id, plugin_name } => self.do_stop_plugin(view_id, &plugin_name),
                PN::PluginRpc { view_id, receiver, rpc } => {
                    self.do_plugin_rpc(view_id, &receiver, &rpc.method, &rpc.params)
                }
            },
            TracingConfig { enabled } => self.toggle_tracing(enabled),
            // handled at the top level
            ClientStarted { .. } => (),
            SetLanguage { view_id, language_id } => self.do_set_language(view_id, language_id),
        }
    }
    /// Dispatches a client request, returning the JSON response payload or a
    /// remote error for the RPC layer to send back.
    pub(crate) fn client_request(&mut self, cmd: CoreRequest) -> Result<Value, RemoteError> {
        use self::CoreRequest::*;
        match cmd {
            //TODO: make file_path be an Option<PathBuf>
            //TODO: make this a notification
            NewView { file_path } => self.do_new_view(file_path.map(PathBuf::from)),
            Edit(crate::rpc::EditCommand { view_id, cmd }) => self.do_edit_sync(view_id, cmd),
            //TODO: why is this a request?? make a notification?
            GetConfig { view_id } => self.do_get_config(view_id).map(|c| json!(c)),
            DebugGetContents { view_id } => self.do_get_contents(view_id).map(|c| json!(c)),
        }
    }
fn do_edit(&mut self, view_id: ViewId, cmd: EditNotification) {
if let Some(mut edit_ctx) = self.make_context(view_id) {
edit_ctx.do_edit(cmd);
}
}
fn do_edit_sync(&mut self, view_id: ViewId, cmd: EditRequest) -> Result<Value, RemoteError> {
if let Some(mut edit_ctx) = self.make_context(view_id) {
edit_ctx.do_edit_sync(cmd)
} else {
// TODO: some custom error tpye that can Into<RemoteError>
Err(RemoteError::custom(404, format!("missing view {:?}", view_id), None))
}
}
    /// Creates a new buffer/view pair, loading the file at `path` when given
    /// (an empty buffer otherwise), and returns the new view's id.
    fn do_new_view(&mut self, path: Option<PathBuf>) -> Result<Value, RemoteError> {
        let view_id = self.next_view_id();
        let buffer_id = self.next_buffer_id();
        let rope = match path.as_ref() {
            Some(p) => self.file_manager.open(p, buffer_id)?,
            None => Rope::from(""),
        };
        let editor = RefCell::new(Editor::with_text(rope));
        let view = RefCell::new(View::new(view_id, buffer_id));
        self.editors.insert(buffer_id, editor);
        self.views.insert(view_id, view);
        let config = self.config_manager.add_buffer(buffer_id, path.as_ref().map(|p| p.as_path()));
        //NOTE: because this is a synchronous call, we have to return the
        //view_id before we can send any events to this view. We mark the
        // view as pending and schedule the idle handler so that we can finish
        // setting up this view on the next runloop pass, in finalize_new_views.
        self.pending_views.push((view_id, config));
        self.peer.schedule_idle(NEW_VIEW_IDLE_TOKEN);
        Ok(json!(view_id))
    }
    /// Saves the buffer behind `view_id` to `path`, then propagates any
    /// path-dependent config and language changes to the view.
    ///
    /// On a write error the client is alerted and nothing else changes.
    fn do_save<P>(&mut self, view_id: ViewId, path: P)
    where
        P: AsRef<Path>,
    {
        let _t = trace_block("CoreState::do_save", &["core"]);
        let path = path.as_ref();
        // Silently ignore saves for unknown views.
        let buffer_id = self.views.get(&view_id).map(|v| v.borrow().get_buffer_id());
        let buffer_id = match buffer_id {
            Some(id) => id,
            None => return,
        };
        let ed = &self.editors[&buffer_id];
        if let Err(e) = self.file_manager.save(path, ed.borrow().get_buffer(), buffer_id) {
            let error_message = e.to_string();
            error!("File error: {:?}", error_message);
            self.peer.alert(error_message);
            return;
        }
        // Saving under a (possibly new) path can change the buffer's
        // config and language.
        let changes = self.config_manager.update_buffer_path(buffer_id, path);
        let language = self.config_manager.get_buffer_language(buffer_id);
        self.make_context(view_id).unwrap().after_save(path);
        self.make_context(view_id).unwrap().language_changed(&language);
        // update the config _after_ sending save related events
        if let Some(changes) = changes {
            self.make_context(view_id).unwrap().config_changed(&changes);
        }
    }
    /// Closes `view_id`, also tearing down the buffer (editor, file
    /// watcher, config) when this was its last view.
    fn do_close_view(&mut self, view_id: ViewId) {
        // `close_view` reports whether the underlying buffer should also
        // go away; default to true when the context can't be built.
        let close_buffer = self.make_context(view_id).map(|ctx| ctx.close_view()).unwrap_or(true);
        let buffer_id = self.views.remove(&view_id).map(|v| v.borrow().get_buffer_id());
        if let Some(buffer_id) = buffer_id {
            if close_buffer {
                self.editors.remove(&buffer_id);
                self.file_manager.close(buffer_id);
                self.config_manager.remove_buffer(buffer_id);
            }
        }
    }
fn do_set_theme(&self, theme_name: &str) {
//Set only if requested theme is different from the
//current one.
if theme_name != self.style_map.borrow().get_theme_name() {
if let Err(e) = self.style_map.borrow_mut().set_theme(&theme_name) {
error!("error setting theme: {:?}, {:?}", theme_name, e);
return;
}
}
self.notify_client_and_update_views();
}
    /// Sends the current theme to the client and marks every view dirty so
    /// it re-renders with the new styles.
    fn notify_client_and_update_views(&self) {
        {
            // Scoped so this borrow of style_map is released before the
            // per-editor closure below borrows it again.
            let style_map = self.style_map.borrow();
            self.peer.theme_changed(style_map.get_theme_name(), style_map.get_theme_settings());
        }
        self.iter_groups().for_each(|mut edit_ctx| {
            edit_ctx.with_editor(|ed, view, _, _| {
                ed.theme_changed(&self.style_map.borrow());
                view.set_dirty(ed.get_buffer());
            });
            edit_ctx.render_if_needed();
        });
    }
    /// Updates the config for a given domain.
    ///
    /// `UserOverride` arrives keyed by `ViewId` and is translated to the
    /// `BufferId` the config system expects; unknown views are ignored.
    fn do_modify_user_config(&mut self, domain: ConfigDomainExternal, changes: Table) {
        // the client sends ViewId but we need BufferId so we do a dance
        let domain: ConfigDomain = match domain {
            ConfigDomainExternal::General => ConfigDomain::General,
            // Both `Syntax` and `Language` map to the Language domain.
            ConfigDomainExternal::Syntax(id) => ConfigDomain::Language(id),
            ConfigDomainExternal::Language(id) => ConfigDomain::Language(id),
            ConfigDomainExternal::UserOverride(view_id) => match self.views.get(&view_id) {
                Some(v) => ConfigDomain::UserOverride(v.borrow().get_buffer_id()),
                None => return,
            },
        };
        let new_config = self.config_manager.table_for_update(domain.clone(), changes);
        self.set_config(domain, new_config);
    }
fn do_get_config(&self, view_id: ViewId) -> Result<Table, RemoteError> {
let _t = trace_block("CoreState::get_config", &["core"]);
self.views
.get(&view_id)
.map(|v| v.borrow().get_buffer_id())
.map(|id| self.config_manager.get_buffer_config(id).to_table())
.ok_or(RemoteError::custom(404, format!("missing {}", view_id), None))
}
fn do_get_contents(&self, view_id: ViewId) -> Result<Rope, RemoteError> {
self.make_context(view_id)
.map(|ctx| ctx.editor.borrow().get_buffer().to_owned())
.ok_or_else(|| RemoteError::custom(404, format!("No view for id {}", view_id), None))
}
    /// Overrides the language for `view_id`'s buffer and pushes the
    /// resulting language/config changes to the view.
    fn do_set_language(&mut self, view_id: ViewId, language_id: LanguageId) {
        if let Some(view) = self.views.get(&view_id) {
            let buffer_id = view.borrow().get_buffer_id();
            let changes = self.config_manager.override_language(buffer_id, language_id.clone());
            let mut context = self.make_context(view_id).unwrap();
            context.language_changed(&language_id);
            if let Some(changes) = changes {
                context.config_changed(&changes);
            }
        }
    }
    /// Launches the named plugin unless an instance is already running or
    /// no manifest with that name exists.
    fn do_start_plugin(&mut self, _view_id: ViewId, plugin: &str) {
        if self.running_plugins.iter().any(|p| p.name == plugin) {
            info!("plugin {} already running", plugin);
            return;
        }
        if let Some(manifest) = self.plugins.get_named(plugin) {
            //TODO: lots of races possible here, we need to keep track of
            //pending launches.
            start_plugin_process(
                manifest.clone(),
                self.next_plugin_id(),
                self.self_ref.as_ref().unwrap().clone(),
            );
        } else {
            warn!("no plugin found with name '{}'", plugin);
        }
    }
    /// Shuts down the named plugin, if running, and notifies all views.
    fn do_stop_plugin(&mut self, _view_id: ViewId, plugin: &str) {
        if let Some(p) = self
            .running_plugins
            .iter()
            .position(|p| p.name == plugin)
            .map(|ix| self.running_plugins.remove(ix))
        {
            //TODO: verify shutdown; kill if necessary
            p.shutdown();
            self.after_stop_plugin(&p);
        }
    }

    /// Forwards a custom RPC to every running plugin named `receiver`.
    fn do_plugin_rpc(&self, view_id: ViewId, receiver: &str, method: &str, params: &Value) {
        self.running_plugins
            .iter()
            .filter(|p| p.name == receiver)
            .for_each(|p| p.dispatch_command(view_id, method, params))
    }

    /// Tells every edit context that `plugin` has stopped.
    fn after_stop_plugin(&mut self, plugin: &Plugin) {
        self.iter_groups().for_each(|mut cx| cx.plugin_stopped(plugin));
    }
}
/// Idle, tracing, and file event handling
impl CoreState {
    /// Dispatches a previously scheduled idle token to its handler.
    pub(crate) fn handle_idle(&mut self, token: usize) {
        match token {
            NEW_VIEW_IDLE_TOKEN => self.finalize_new_views(),
            WATCH_IDLE_TOKEN => self.handle_fs_events(),
            // Masked tokens carry a view id; XOR strips the mask bits back
            // off before the handler converts the remainder to a ViewId.
            other if (other & RENDER_VIEW_IDLE_MASK) != 0 => {
                self.handle_render_timer(other ^ RENDER_VIEW_IDLE_MASK)
            }
            other if (other & REWRAP_VIEW_IDLE_MASK) != 0 => {
                self.handle_rewrap_callback(other ^ REWRAP_VIEW_IDLE_MASK)
            }
            other => panic!("unexpected idle token {}", other),
        };
    }
fn finalize_new_views(&mut self) {
let to_start = mem::replace(&mut self.pending_views, Vec::new());
to_start.iter().for_each(|(id, config)| {
let modified = self.detect_whitespace(*id, config);
let config = modified.as_ref().unwrap_or(config);
let mut edit_ctx = self.make_context(*id).unwrap();
edit_ctx.finish_init(&config);
});
}
    /// Detects indentation and line-ending settings from the file contents
    /// and merges them into `config` as a system override, returning the
    /// merged table, or `None` when nothing was detected or changed.
    fn detect_whitespace(&mut self, id: ViewId, config: &Table) -> Option<Table> {
        let buffer_id = self.views.get(&id).map(|v| v.borrow().get_buffer_id())?;
        let editor = self
            .editors
            .get(&buffer_id)
            .expect("existing buffer_id must have corresponding editor");
        // Nothing to detect in an empty buffer.
        if editor.borrow().get_buffer().len() == 0 {
            return None;
        }
        // Respect the user's opt-out of autodetection.
        let autodetect_whitespace =
            self.config_manager.get_buffer_config(buffer_id).items.autodetect_whitespace;
        if !autodetect_whitespace {
            return None;
        }
        let mut changes = Table::new();
        let indentation = Indentation::parse(editor.borrow().get_buffer());
        match indentation {
            Ok(Some(Indentation::Tabs)) => {
                changes.insert("translate_tabs_to_spaces".into(), false.into());
            }
            Ok(Some(Indentation::Spaces(n))) => {
                changes.insert("translate_tabs_to_spaces".into(), true.into());
                changes.insert("tab_size".into(), n.into());
            }
            // Ambiguous or absent indentation leaves the config untouched.
            Err(_) => info!("detected mixed indentation"),
            Ok(None) => info!("file contains no indentation"),
        }
        let line_ending = LineEnding::parse(editor.borrow().get_buffer());
        match line_ending {
            Ok(Some(LineEnding::CrLf)) => {
                changes.insert("line_ending".into(), "\r\n".into());
            }
            Ok(Some(LineEnding::Lf)) => {
                changes.insert("line_ending".into(), "\n".into());
            }
            Err(_) => info!("detected mixed line endings"),
            Ok(None) => info!("file contains no supported line endings"),
        }
        let config_delta =
            self.config_manager.table_for_update(ConfigDomain::SysOverride(buffer_id), changes);
        match self
            .config_manager
            .set_user_config(ConfigDomain::SysOverride(buffer_id), config_delta)
        {
            Ok(ref mut items) if !items.is_empty() => {
                assert!(
                    items.len() == 1,
                    "whitespace overrides can only update a single buffer's config\n{:?}",
                    items
                );
                // Merge the returned override table over the caller's config.
                let table = items.remove(0).1;
                let mut config = config.clone();
                config.extend(table);
                Some(config)
            }
            Ok(_) => {
                warn!("set_user_config failed to update config, no tables were returned");
                None
            }
            Err(err) => {
                warn!("detect_whitespace failed to update config: {:?}", err);
                None
            }
        }
    }
    /// Completes a render deferred via a render-timer idle token (the
    /// token encodes the view id; see `handle_idle`).
    fn handle_render_timer(&mut self, token: usize) {
        let id: ViewId = token.into();
        if let Some(mut ctx) = self.make_context(id) {
            ctx._finish_delayed_render();
        }
    }

    /// Callback for doing word wrap on a view
    fn handle_rewrap_callback(&mut self, token: usize) {
        let id: ViewId = token.into();
        if let Some(mut ctx) = self.make_context(id) {
            ctx.do_rewrap_batch();
        }
    }
    /// Drains pending file-watcher events and routes each to the handler
    /// registered for its token.
    #[cfg(feature = "notify")]
    fn handle_fs_events(&mut self) {
        let _t = trace_block("CoreState::handle_fs_events", &["core"]);
        let mut events = self.file_manager.watcher().take_events();
        for (token, event) in events.drain(..) {
            match token {
                OPEN_FILE_EVENT_TOKEN => self.handle_open_file_fs_event(event),
                CONFIG_EVENT_TOKEN => self.handle_config_fs_event(event),
                THEME_FILE_EVENT_TOKEN => self.handle_themes_fs_event(event),
                _ => warn!("unexpected fs event token {:?}", token),
            }
        }
    }

    /// No-op when file watching is compiled out.
    #[cfg(not(feature = "notify"))]
    fn handle_fs_events(&mut self) {}
/// Handles a file system event related to a currently open file
#[cfg(feature = "notify")]
fn handle_open_file_fs_event(&mut self, event: DebouncedEvent) {
use notify::DebouncedEvent::*;
let path = match event {
NoticeWrite(ref path) | Create(ref path) | Write(ref path) | Chmod(ref path) => path,
other => {
debug!("Event in open file {:?}", other);
return;
}
};
let buffer_id = match self.file_manager.get_editor(path) {
Some(id) => id,
None => return,
};
let has_changes = self.file_manager.check_file(path, buffer_id);
let is_pristine = self.editors.get(&buffer_id).map(|ed| ed.borrow().is_pristine()).unwrap();
//TODO: currently we only use the file's modification time when
// determining if a file has been changed by another process.
// A more robust solution would also hash the file's contents.
if has_changes && is_pristine {
if let Ok(text) = self.file_manager.open(path, buffer_id) {
// this is ugly; we don't map buffer_id -> view_id anywhere
// but we know we must have a view.
let view_id = self
.views
.values()
.find(|v| v.borrow().get_buffer_id() == buffer_id)
.map(|v| v.borrow().get_view_id())
.unwrap();
self.make_context(view_id).unwrap().reload(text);
}
}
}
    /// Handles a config related file system event.
    #[cfg(feature = "notify")]
    fn handle_config_fs_event(&mut self, event: DebouncedEvent) {
        use self::DebouncedEvent::*;
        match event {
            Create(ref path) | Write(ref path) | Chmod(ref path) => {
                self.load_file_based_config(path)
            }
            // Only honour a Remove if the file is really gone; watcher
            // events can arrive after the path has been recreated.
            Remove(ref path) if !path.exists() => self.remove_config_at_path(path),
            Rename(ref old, ref new) => {
                self.remove_config_at_path(old);
                self.load_file_based_config(new);
            }
            _ => (),
        }
    }

    /// Clears the config domain previously loaded from `path`, if any.
    fn remove_config_at_path(&mut self, path: &Path) {
        if let Some(domain) = self.config_manager.domain_for_path(path) {
            self.set_config(domain, Table::default());
        }
    }
/// Handles changes in theme files.
#[cfg(feature = "notify")]
fn handle_themes_fs_event(&mut self, event: DebouncedEvent) {
use self::DebouncedEvent::*;
match event {
Create(ref path) | Write(ref path) => self.load_theme_file(path),
// the way FSEvents on macOS work, we want to verify that this path
// has actually be removed before we do anything.
NoticeRemove(ref path) | Remove(ref path) if !path.exists() => self.remove_theme(path),
Rename(ref old, ref new) => {
self.remove_theme(old);
self.load_theme_file(new);
}
Chmod(ref path) | Remove(ref path) => {
self.style_map.borrow_mut().sync_dir(path.parent())
}
_ => (),
}
let theme_names = self.style_map.borrow().get_theme_names();
self.peer.available_themes(theme_names);
}
    /// Load a single theme file. Updates if already present.
    fn load_theme_file(&mut self, path: &Path) {
        let _t = trace_block("CoreState::load_theme_file", &["core"]);
        let result = self.style_map.borrow_mut().load_theme_info_from_path(path);
        match result {
            Ok(theme_name) => {
                // If the active theme was edited, reload it and re-render.
                if theme_name == self.style_map.borrow().get_theme_name() {
                    if self.style_map.borrow_mut().set_theme(&theme_name).is_ok() {
                        self.notify_client_and_update_views();
                    }
                }
            }
            Err(e) => error!("Error loading theme file: {:?}, {:?}", path, e),
        }
    }

    /// Removes the theme loaded from `path` from the style map.
    fn remove_theme(&mut self, path: &Path) {
        let result = self.style_map.borrow_mut().remove_theme(path);
        // Set default theme if the removed theme was the
        // current one.
        if let Some(theme_name) = result {
            if theme_name == self.style_map.borrow().get_theme_name() {
                self.do_set_theme(DEFAULT_THEME);
            }
        }
    }
fn toggle_tracing(&self, enabled: bool) {
self.running_plugins.iter().for_each(|plugin| plugin.toggle_tracing(enabled))
}
fn save_trace<P>(&self, path: P, frontend_samples: Value)
where
P: AsRef<Path>,
{
use xi_trace_dump::*;
let mut all_traces = xi_trace::samples_cloned_unsorted();
if let Ok(mut traces) = chrome_trace::decode(frontend_samples) {
all_traces.append(&mut traces);
}
for plugin in &self.running_plugins {
match plugin.collect_trace() {
Ok(json) => {
let mut trace = chrome_trace::decode(json).unwrap();
all_traces.append(&mut trace);
}
Err(e) => error!("trace error {:?}", e),
}
}
all_traces.sort_unstable();
let mut trace_file = match File::create(path.as_ref()) {
Ok(f) => f,
Err(e) => {
error!("error saving trace {:?}", e);
return;
}
};
if let Err(e) = chrome_trace::serialize(&all_traces, &mut trace_file) {
error!("error saving trace {:?}", e);
}
}
}
/// plugin event handling
impl CoreState {
    /// Called from a plugin's thread after trying to start the plugin.
    pub(crate) fn plugin_connect(&mut self, plugin: Result<Plugin, io::Error>) {
        match plugin {
            Ok(plugin) => {
                // Initialize the plugin with info for every open view, then
                // announce it before tracking it as running.
                let init_info =
                    self.iter_groups().map(|mut ctx| ctx.plugin_info()).collect::<Vec<_>>();
                plugin.initialize(init_info);
                self.iter_groups().for_each(|mut cx| cx.plugin_started(&plugin));
                self.running_plugins.push(plugin);
            }
            Err(e) => error!("failed to start plugin {:?}", e),
        }
    }

    /// Called when a plugin's process exits; removes it from the running
    /// set and notifies all views.
    pub(crate) fn plugin_exit(&mut self, id: PluginId, error: Result<(), ReadError>) {
        warn!("plugin {:?} exited with result {:?}", id, error);
        let running_idx = self.running_plugins.iter().position(|p| p.id == id);
        if let Some(idx) = running_idx {
            let plugin = self.running_plugins.remove(idx);
            self.after_stop_plugin(&plugin);
        }
    }

    /// Handles the response to a sync update sent to a plugin.
    pub(crate) fn plugin_update(
        &mut self,
        _plugin_id: PluginId,
        view_id: ViewId,
        response: Result<Value, xi_rpc::Error>,
    ) {
        if let Some(mut edit_ctx) = self.make_context(view_id) {
            edit_ctx.do_plugin_update(response);
        }
    }

    /// Routes a plugin-originated notification to the target view.
    pub(crate) fn plugin_notification(
        &mut self,
        _ctx: &RpcCtx,
        view_id: ViewId,
        plugin_id: PluginId,
        cmd: PluginNotification,
    ) {
        if let Some(mut edit_ctx) = self.make_context(view_id) {
            edit_ctx.do_plugin_cmd(plugin_id, cmd)
        }
    }

    /// Routes a plugin-originated request to the target view, returning a
    /// 404 `RemoteError` if the view is unknown.
    pub(crate) fn plugin_request(
        &mut self,
        _ctx: &RpcCtx,
        view_id: ViewId,
        plugin_id: PluginId,
        cmd: PluginRequest,
    ) -> Result<Value, RemoteError> {
        if let Some(mut edit_ctx) = self.make_context(view_id) {
            Ok(edit_ctx.do_plugin_cmd_sync(plugin_id, cmd))
        } else {
            Err(RemoteError::custom(404, "missing view", None))
        }
    }
}
/// test helpers
impl CoreState {
    /// Returns the ids of all open buffers (test-only introspection).
    pub fn _test_open_editors(&self) -> Vec<BufferId> {
        self.editors.keys().cloned().collect()
    }

    /// Returns the ids of all open views (test-only introspection).
    pub fn _test_open_views(&self) -> Vec<ViewId> {
        self.views.keys().cloned().collect()
    }
}

/// Constructors for the id newtypes, for use by tests outside this module.
pub mod test_helpers {
    use super::{BufferId, ViewId};

    /// Builds a `ViewId` from a raw number.
    pub fn new_view_id(id: usize) -> ViewId {
        ViewId(id)
    }

    /// Builds a `BufferId` from a raw number.
    pub fn new_buffer_id(id: usize) -> BufferId {
        BufferId(id)
    }
}
/// A multi-view aware iterator over `EventContext`s. A view which appears
/// as a sibling will not appear again as a main view.
pub struct Iter<'a, I> {
    // Remaining view ids to visit.
    views: I,
    // Views already yielded as siblings; skipped as main views.
    seen: HashSet<ViewId>,
    inner: &'a CoreState,
}

impl<'a, I> Iterator for Iter<'a, I>
where
    I: Iterator<Item = &'a ViewId>,
{
    type Item = EventContext<'a>;

    fn next(&mut self) -> Option<Self::Item> {
        // Destructure so the borrow checker sees disjoint borrows of the
        // three fields inside the loop body.
        let &mut Iter { ref mut views, ref mut seen, ref inner } = self;
        loop {
            let next_view = match views.next() {
                None => return None,
                // Already seen as a sibling of an earlier context: skip.
                Some(v) if seen.contains(v) => continue,
                Some(v) => v,
            };
            let context = inner.make_context(*next_view).unwrap();
            // Record this context's siblings so they aren't yielded again
            // as main views.
            context.siblings.iter().for_each(|sibl| {
                let _ = seen.insert(sibl.borrow().get_view_id());
            });
            return Some(context);
        }
    }
}
/// Monotonically increasing id source; interior mutability lets callers
/// draw fresh ids through a shared reference.
#[derive(Debug, Default)]
pub(crate) struct Counter(Cell<usize>);

impl Counter {
    /// Returns the next id in the sequence (the first call yields 1).
    pub(crate) fn next(&self) -> usize {
        let next_value = self.0.get() + 1;
        self.0.set(next_value);
        next_value
    }
}
// these two only exist so that we can use ViewIds as idle tokens
impl From<usize> for ViewId {
    fn from(src: usize) -> ViewId {
        ViewId(src)
    }
}

impl From<ViewId> for usize {
    fn from(src: ViewId) -> usize {
        src.0
    }
}

/// Formats as "view-id-N"; kept in sync with `Deserialize` below, which
/// strips the "view-id-" prefix.
impl fmt::Display for ViewId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "view-id-{}", self.0)
    }
}
impl Serialize for ViewId {
    /// Serializes as the "view-id-N" display string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl<'de> Deserialize<'de> for ViewId {
    /// Deserializes from the "view-id-N" string form produced by the
    /// `Serialize` impl; any non-numeric remainder is a data error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        // trim_start_matches replaces the deprecated trim_left_matches;
        // behavior is identical.
        let ord = s.trim_start_matches("view-id-");
        // `parse` is the idiomatic spelling of from_str_radix(.., 10).
        match ord.parse::<usize>() {
            Ok(id) => Ok(ViewId(id)),
            Err(_) => Err(de::Error::invalid_value(Unexpected::Str(&s), &"view id")),
        }
    }
}
/// Formats as "buffer-id-N", mirroring the ViewId display form.
impl fmt::Display for BufferId {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "buffer-id-{}", self.0)
    }
}

impl BufferId {
    /// Creates a `BufferId` wrapping the raw value.
    pub fn new(val: usize) -> Self {
        BufferId(val)
    }
}
#[cfg(test)]
mod tests {
    use serde::Deserialize;
    use super::ViewId;

    /// Round-trips the "view-id-N" string form and checks that junk input
    /// is rejected with a data error.
    #[test]
    fn test_deserialize_view_id() {
        let de = json!("view-id-1");
        assert_eq!(ViewId::deserialize(&de).unwrap(), ViewId(1));
        let de = json!("not-a-view-id");
        assert!(ViewId::deserialize(&de).unwrap_err().is_data());
    }
}
| 35.801758 | 100 | 0.580781 |
1d750991a21ed5ed0b758b99507a4f58e58aec54
| 16,404 |
//! Unproven tree types
use super::dht_protocol::FirstDhTupleProverMessage;
use super::proof_tree::ConjectureType;
use super::proof_tree::ProofTree;
use super::proof_tree::ProofTreeConjecture;
use super::proof_tree::ProofTreeKind;
use super::{dlog_protocol::FirstDlogProverMessage, Challenge, FirstProverMessage};
use crate::sigma_protocol::proof_tree::ProofTreeLeaf;
use ergotree_ir::sigma_protocol::sigma_boolean::cand::Cand;
use ergotree_ir::sigma_protocol::sigma_boolean::cor::Cor;
use ergotree_ir::sigma_protocol::sigma_boolean::cthreshold::Cthreshold;
use ergotree_ir::sigma_protocol::sigma_boolean::ProveDhTuple;
use ergotree_ir::sigma_protocol::sigma_boolean::ProveDlog;
use ergotree_ir::sigma_protocol::sigma_boolean::SigmaBoolean;
use ergotree_ir::sigma_protocol::sigma_boolean::SigmaConjectureItems;
use ergotree_ir::sigma_protocol::sigma_boolean::SigmaProofOfKnowledgeTree;
use gf2_192::gf2_192poly::Gf2_192Poly;
use k256::Scalar;
extern crate derive_more;
use derive_more::From;
/// Unproven trees
#[derive(PartialEq, Debug, Clone, From)]
pub(crate) enum UnprovenTree {
    /// Leaf node (Schnorr or Diffie-Hellman tuple).
    UnprovenLeaf(UnprovenLeaf),
    /// Inner node (AND / OR / THRESHOLD conjecture).
    UnprovenConjecture(UnprovenConjecture),
}
impl UnprovenTree {
    /// Is real or simulated
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated()
    }

    /// True when this node is simulated (delegates to the variant).
    pub(crate) fn simulated(&self) -> bool {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.simulated(),
            UnprovenTree::UnprovenConjecture(uc) => uc.simulated(),
        }
    }

    /// Returns a copy with the node position replaced.
    pub(crate) fn with_position(self, updated: NodePosition) -> Self {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.with_position(updated).into(),
            UnprovenTree::UnprovenConjecture(uc) => uc.with_position(updated).into(),
        }
    }

    /// Returns a copy with the challenge set.
    pub(crate) fn with_challenge(self, challenge: Challenge) -> Self {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.with_challenge(challenge).into(),
            UnprovenTree::UnprovenConjecture(uc) => uc.with_challenge(challenge).into(),
        }
    }

    /// Returns a copy with the simulated flag set.
    pub(crate) fn with_simulated(self, simulated: bool) -> Self {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.with_simulated(simulated).into(),
            UnprovenTree::UnprovenConjecture(uc) => uc.with_simulated(simulated).into(),
        }
    }

    /// Views this node as a generic proof-tree node (leaf or conjecture).
    pub(crate) fn as_tree_kind(&self) -> ProofTreeKind {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ProofTreeKind::Leaf(ul),
            UnprovenTree::UnprovenConjecture(uc) => ProofTreeKind::Conjecture(uc),
        }
    }

    /// The challenge, if one has been assigned.
    pub(crate) fn challenge(&self) -> Option<Challenge> {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.challenge(),
            UnprovenTree::UnprovenConjecture(uc) => uc.challenge(),
        }
    }

    /// The node's position in the tree.
    pub(crate) fn position(&self) -> &NodePosition {
        match self {
            UnprovenTree::UnprovenLeaf(ul) => ul.position(),
            UnprovenTree::UnprovenConjecture(uc) => uc.position(),
        }
    }
}
// Conversions lifting each concrete node type into `UnprovenTree`
// (leaves via `UnprovenLeaf`, conjectures via `UnprovenConjecture`).
impl From<UnprovenSchnorr> for UnprovenTree {
    fn from(v: UnprovenSchnorr) -> Self {
        UnprovenTree::UnprovenLeaf(v.into())
    }
}

impl From<CandUnproven> for UnprovenTree {
    fn from(v: CandUnproven) -> Self {
        UnprovenTree::UnprovenConjecture(v.into())
    }
}

impl From<CorUnproven> for UnprovenTree {
    fn from(v: CorUnproven) -> Self {
        UnprovenTree::UnprovenConjecture(v.into())
    }
}

impl From<UnprovenDhTuple> for UnprovenTree {
    fn from(v: UnprovenDhTuple) -> Self {
        UnprovenTree::UnprovenLeaf(v.into())
    }
}

impl From<CthresholdUnproven> for UnprovenTree {
    fn from(v: CthresholdUnproven) -> Self {
        UnprovenTree::UnprovenConjecture(v.into())
    }
}
/// Unproven leaf types
#[derive(PartialEq, Debug, Clone, From)]
pub(crate) enum UnprovenLeaf {
    /// Unproven Schnorr
    UnprovenSchnorr(UnprovenSchnorr),
    /// Unproven Diffie-Hellman tuple
    UnprovenDhTuple(UnprovenDhTuple),
}

impl UnprovenLeaf {
    /// Returns a copy with the node position replaced.
    pub(crate) fn with_position(self, updated: NodePosition) -> Self {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.with_position(updated).into(),
            UnprovenLeaf::UnprovenDhTuple(ut) => ut.with_position(updated).into(),
        }
    }

    /// Returns a copy with the challenge set.
    pub(crate) fn with_challenge(self, challenge: Challenge) -> Self {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.with_challenge(challenge).into(),
            UnprovenLeaf::UnprovenDhTuple(ut) => ut.with_challenge(challenge).into(),
        }
    }

    /// Returns a copy with the simulated flag set.
    pub(crate) fn with_simulated(self, simulated: bool) -> Self {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.with_simulated(simulated).into(),
            UnprovenLeaf::UnprovenDhTuple(ut) => ut.with_simulated(simulated).into(),
        }
    }

    /// True when this leaf is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.is_real(),
            UnprovenLeaf::UnprovenDhTuple(ut) => ut.is_real(),
        }
    }

    /// The challenge, if one has been assigned.
    pub(crate) fn challenge(&self) -> Option<Challenge> {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.challenge_opt.clone(),
            UnprovenLeaf::UnprovenDhTuple(ut) => ut.challenge_opt.clone(),
        }
    }

    /// The leaf's position in the tree.
    pub(crate) fn position(&self) -> &NodePosition {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => &us.position,
            UnprovenLeaf::UnprovenDhTuple(ut) => &ut.position,
        }
    }

    /// True when this leaf is simulated.
    pub(crate) fn simulated(&self) -> bool {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.simulated,
            UnprovenLeaf::UnprovenDhTuple(udht) => udht.simulated,
        }
    }
}
impl ProofTreeLeaf for UnprovenLeaf {
    /// The sigma proposition this leaf proves, wrapped as a proof of
    /// knowledge.
    fn proposition(&self) -> SigmaBoolean {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => SigmaBoolean::ProofOfKnowledge(
                SigmaProofOfKnowledgeTree::ProveDlog(us.proposition.clone()),
            ),
            UnprovenLeaf::UnprovenDhTuple(udht) => SigmaBoolean::ProofOfKnowledge(
                SigmaProofOfKnowledgeTree::ProveDhTuple(udht.proposition.clone()),
            ),
        }
    }

    /// The prover's first message (commitment), if one has been computed.
    fn commitment_opt(&self) -> Option<FirstProverMessage> {
        match self {
            UnprovenLeaf::UnprovenSchnorr(us) => us.commitment_opt.clone().map(Into::into),
            UnprovenLeaf::UnprovenDhTuple(udht) => udht.commitment_opt.clone().map(Into::into),
        }
    }
}
/// Unproven inner (conjecture) node: AND, OR, or THRESHOLD.
#[derive(PartialEq, Debug, Clone, From)]
#[allow(clippy::enum_variant_names)]
pub(crate) enum UnprovenConjecture {
    CandUnproven(CandUnproven),
    CorUnproven(CorUnproven),
    CthresholdUnproven(CthresholdUnproven),
}

impl UnprovenConjecture {
    /// Child subtrees of this conjecture.
    pub(crate) fn children(&self) -> SigmaConjectureItems<ProofTree> {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.children.clone(),
            UnprovenConjecture::CorUnproven(cor) => cor.children.clone(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.children.clone(),
        }
    }

    /// Returns a copy with the children replaced.
    pub(crate) fn with_children(self, children: SigmaConjectureItems<ProofTree>) -> Self {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.with_children(children).into(),
            UnprovenConjecture::CorUnproven(cor) => cor.with_children(children).into(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.with_children(children).into(),
        }
    }

    /// The node's position in the tree.
    pub(crate) fn position(&self) -> &NodePosition {
        match self {
            UnprovenConjecture::CandUnproven(cand) => &cand.position,
            UnprovenConjecture::CorUnproven(cor) => &cor.position,
            UnprovenConjecture::CthresholdUnproven(ct) => &ct.position,
        }
    }

    /// The challenge, if one has been assigned.
    fn challenge(&self) -> Option<Challenge> {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.challenge_opt.clone(),
            UnprovenConjecture::CorUnproven(cor) => cor.challenge_opt.clone(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.challenge_opt.clone(),
        }
    }

    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.with_position(updated).into(),
            UnprovenConjecture::CorUnproven(cor) => cor.with_position(updated).into(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.with_position(updated).into(),
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.with_challenge(challenge).into(),
            UnprovenConjecture::CorUnproven(cor) => cor.with_challenge(challenge).into(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.with_challenge(challenge).into(),
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.with_simulated(simulated).into(),
            UnprovenConjecture::CorUnproven(cor) => cor.with_simulated(simulated).into(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.with_simulated(simulated).into(),
        }
    }

    /// True when this node is simulated.
    fn simulated(&self) -> bool {
        match self {
            UnprovenConjecture::CandUnproven(au) => au.simulated,
            UnprovenConjecture::CorUnproven(ou) => ou.simulated,
            UnprovenConjecture::CthresholdUnproven(ct) => ct.simulated,
        }
    }

    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated()
    }
}
impl ProofTreeConjecture for UnprovenConjecture {
    /// AND / OR / THRESHOLD discriminant for the generic proof-tree view.
    fn conjecture_type(&self) -> ConjectureType {
        match self {
            UnprovenConjecture::CandUnproven(_) => ConjectureType::And,
            UnprovenConjecture::CorUnproven(_) => ConjectureType::Or,
            UnprovenConjecture::CthresholdUnproven(_) => ConjectureType::Threshold,
        }
    }

    // Same body as the inherent `children`; duplicated to satisfy the
    // trait signature.
    fn children(&self) -> SigmaConjectureItems<ProofTree> {
        match self {
            UnprovenConjecture::CandUnproven(cand) => cand.children.clone(),
            UnprovenConjecture::CorUnproven(cor) => cor.children.clone(),
            UnprovenConjecture::CthresholdUnproven(ct) => ct.children.clone(),
        }
    }
}
/// Unproven Schnorr (discrete-log) leaf.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct UnprovenSchnorr {
    /// The ProveDlog proposition being proven.
    pub(crate) proposition: ProveDlog,
    /// Prover's first message, once computed.
    pub(crate) commitment_opt: Option<FirstDlogProverMessage>,
    /// Prover randomness, once generated.
    pub(crate) randomness_opt: Option<Scalar>,
    /// Challenge, once assigned.
    pub(crate) challenge_opt: Option<Challenge>,
    /// Whether this node is simulated rather than really proven.
    pub(crate) simulated: bool,
    /// Position of this node in the tree.
    pub(crate) position: NodePosition,
}

impl UnprovenSchnorr {
    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        UnprovenSchnorr {
            position: updated,
            ..self
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        UnprovenSchnorr {
            challenge_opt: Some(challenge),
            ..self
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        UnprovenSchnorr { simulated, ..self }
    }

    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated
    }
}
/// Unproven DhTuple
#[derive(PartialEq, Debug, Clone)]
pub struct UnprovenDhTuple {
    /// Proposition
    pub proposition: ProveDhTuple,
    /// Commitment
    pub commitment_opt: Option<FirstDhTupleProverMessage>,
    /// Randomness
    pub randomness_opt: Option<Scalar>,
    /// Challenge
    pub challenge_opt: Option<Challenge>,
    /// Simulated or not
    pub simulated: bool,
    /// Position in tree
    pub position: NodePosition,
}

impl UnprovenDhTuple {
    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        UnprovenDhTuple {
            position: updated,
            ..self
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        UnprovenDhTuple {
            challenge_opt: Some(challenge),
            ..self
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        UnprovenDhTuple { simulated, ..self }
    }

    /// Set Commitment
    pub fn with_commitment(self, commitment: FirstDhTupleProverMessage) -> Self {
        Self {
            commitment_opt: Some(commitment),
            ..self
        }
    }

    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated
    }
}
/// Data type which encodes position of a node in a tree.
///
/// Position is encoded like following (the example provided is for CTHRESHOLD(2, Seq(pk1, pk2, pk3 && pk4)) :
///
/// r#"
/// 0
/// / | \
/// / | \
/// 0-0 0-1 0-2
/// /|
/// / |
/// / |
/// / |
/// 0-2-0 0-2-1
/// "#;
///
/// So a hint associated with pk1 has a position "0-0", pk4 - "0-2-1" .
///
/// Please note that "0" prefix is for a crypto tree. There are several kinds of trees during evaluation.
/// Initial mixed tree (ergoTree) would have another prefix.
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct NodePosition {
    /// positions from root (inclusive) in top-down order
    pub positions: Vec<usize>,
}

impl NodePosition {
    /// Prefix of crypto tree root
    pub fn crypto_tree_prefix() -> Self {
        NodePosition { positions: vec![0] }
    }

    /// Position of the `child_idx`-th child of this node: the parent's
    /// path with the child's index appended.
    pub fn child(&self, child_idx: usize) -> NodePosition {
        let positions = self
            .positions
            .iter()
            .copied()
            .chain(std::iter::once(child_idx))
            .collect();
        NodePosition { positions }
    }
}
/// Unproven AND conjecture node.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct CandUnproven {
    /// The AND proposition being proven.
    pub(crate) proposition: Cand,
    /// Challenge, once assigned.
    pub(crate) challenge_opt: Option<Challenge>,
    /// Whether this node is simulated.
    pub(crate) simulated: bool,
    /// Child subtrees.
    pub(crate) children: SigmaConjectureItems<ProofTree>,
    /// Position of this node in the tree.
    pub(crate) position: NodePosition,
}

impl CandUnproven {
    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated
    }

    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        CandUnproven {
            position: updated,
            ..self
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        CandUnproven {
            challenge_opt: Some(challenge),
            ..self
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        Self { simulated, ..self }
    }

    /// Returns a copy with the children replaced.
    pub(crate) fn with_children(self, children: SigmaConjectureItems<ProofTree>) -> Self {
        CandUnproven { children, ..self }
    }
}
/// Unproven OR conjecture node.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct CorUnproven {
    /// The OR proposition being proven.
    pub(crate) proposition: Cor,
    /// Challenge, once assigned.
    pub(crate) challenge_opt: Option<Challenge>,
    /// Whether this node is simulated.
    pub(crate) simulated: bool,
    /// Child subtrees.
    pub(crate) children: SigmaConjectureItems<ProofTree>,
    /// Position of this node in the tree.
    pub(crate) position: NodePosition,
}

impl CorUnproven {
    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated
    }

    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        Self {
            position: updated,
            ..self
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        Self {
            challenge_opt: Some(challenge),
            ..self
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        Self { simulated, ..self }
    }

    /// Returns a copy with the children replaced.
    pub(crate) fn with_children(self, children: SigmaConjectureItems<ProofTree>) -> Self {
        Self { children, ..self }
    }
}
/// Unproven THRESHOLD (k-out-of-n) conjecture node.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct CthresholdUnproven {
    /// The threshold proposition being proven.
    pub(crate) proposition: Cthreshold,
    // NOTE(review): presumably the threshold (number of children that must
    // be really proven) and expected to mirror `proposition` — confirm.
    pub(crate) k: u8,
    /// Child subtrees.
    pub(crate) children: SigmaConjectureItems<ProofTree>,
    // GF(2^192) polynomial used for challenge sharing, once computed.
    // (Field name keeps the original "polinomial" spelling; renaming a
    // crate-visible field would be a breaking change within the crate.)
    pub(crate) polinomial_opt: Option<Gf2_192Poly>,
    /// Challenge, once assigned.
    pub(crate) challenge_opt: Option<Challenge>,
    /// Whether this node is simulated.
    pub(crate) simulated: bool,
    /// Position of this node in the tree.
    pub(crate) position: NodePosition,
}

impl CthresholdUnproven {
    /// Returns a copy with the children replaced.
    pub(crate) fn with_children(self, children: SigmaConjectureItems<ProofTree>) -> Self {
        Self { children, ..self }
    }

    /// Returns a copy with the challenge-sharing polynomial set.
    pub(crate) fn with_polynomial(self, q: Gf2_192Poly) -> Self {
        Self {
            polinomial_opt: Some(q),
            ..self
        }
    }

    /// Returns a copy with the node position replaced.
    fn with_position(self, updated: NodePosition) -> Self {
        Self {
            position: updated,
            ..self
        }
    }

    /// Returns a copy with the challenge set.
    fn with_challenge(self, challenge: Challenge) -> Self {
        Self {
            challenge_opt: Some(challenge),
            ..self
        }
    }

    /// Returns a copy with the simulated flag set.
    fn with_simulated(self, simulated: bool) -> Self {
        Self { simulated, ..self }
    }

    /// True when this node is real (not simulated).
    pub(crate) fn is_real(&self) -> bool {
        !self.simulated
    }
}
| 30.719101 | 110 | 0.624116 |
e92d73622e2875e0dff26c692c957f980562da59
| 594 |
// https://leetcode-cn.com/problems/range-module/
/// Tracks a set of half-open integer ranges `[left, right)`.
///
/// Ranges are stored as a sorted, pairwise-disjoint interval list;
/// interior mutability (`RefCell`) preserves the original `&self`
/// method signatures.
struct RangeModule {
    // Sorted, disjoint half-open intervals.
    ranges: std::cell::RefCell<Vec<(i32, i32)>>,
}

/**
 * Your RangeModule object will be instantiated and called as such:
 * let obj = RangeModule::new();
 * obj.add_range(left, right);
 * let ret_2: bool = obj.query_range(left, right);
 * obj.remove_range(left, right);
 */
impl RangeModule {
    /// Creates an empty range module.
    fn new() -> Self {
        RangeModule { ranges: std::cell::RefCell::new(Vec::new()) }
    }

    /// Starts tracking `[left, right)`, merging any overlapping or
    /// adjacent stored ranges into one.
    fn add_range(&self, left: i32, right: i32) {
        let mut ranges = self.ranges.borrow_mut();
        let mut merged = (left, right);
        let mut result = Vec::with_capacity(ranges.len() + 1);
        let mut placed = false;
        for &(s, e) in ranges.iter() {
            if e < merged.0 {
                // Strictly before the new range (e == merged.0 would be
                // adjacent in half-open terms and is merged below).
                result.push((s, e));
            } else if s > merged.1 {
                // Strictly after: emit the merged range first, exactly once.
                if !placed {
                    result.push(merged);
                    placed = true;
                }
                result.push((s, e));
            } else {
                // Overlapping or adjacent: absorb into the merged range.
                merged.0 = merged.0.min(s);
                merged.1 = merged.1.max(e);
            }
        }
        if !placed {
            result.push(merged);
        }
        *ranges = result;
    }

    /// Returns true iff every point of `[left, right)` is being tracked.
    fn query_range(&self, left: i32, right: i32) -> bool {
        // Stored ranges are disjoint, so the query interval is covered
        // only if a single stored range contains it entirely.
        self.ranges.borrow().iter().any(|&(s, e)| s <= left && right <= e)
    }

    /// Stops tracking `[left, right)`, splitting any stored range that
    /// straddles the removed span.
    fn remove_range(&self, left: i32, right: i32) {
        let mut ranges = self.ranges.borrow_mut();
        let mut result = Vec::with_capacity(ranges.len() + 1);
        for &(s, e) in ranges.iter() {
            if e <= left || s >= right {
                // No overlap with the removed span.
                result.push((s, e));
            } else {
                // Keep any remainder on either side of the removed span.
                if s < left {
                    result.push((s, left));
                }
                if e > right {
                    result.push((right, e));
                }
            }
        }
        *ranges = result;
    }
}
| 20.482759 | 67 | 0.584175 |
1de9a7798d11c5ef60422f105c39195d587eb7b5
| 961 |
// This file is part of security-keys-rust. It is subject to the license terms in the COPYRIGHT file found in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/security-keys-rust/master/COPYRIGHT. No part of security-keys-rust, including this file, may be copied, modified, propagated, or distributed except according to the terms contained in the COPYRIGHT file.
// Copyright © 2021 The developers of security-keys-rust. See the COPYRIGHT file in the top-level directory of this distribution and at https://raw.githubusercontent.com/lemonrock/security-keys-rust/master/COPYRIGHT.
/// Parser state carried while walking a report descriptor: the current set of
/// global items plus a stack of in-progress local item sets.
// NOTE(review): field semantics inferred from type names — confirm against
// the `GlobalItems` / `ParsingLocalItems` definitions.
#[derive(Debug)]
struct ItemStateTable
{
	globals: Rc<GlobalItems>,
	locals: Stack<ParsingLocalItems>,
}
impl TryClone for ItemStateTable
{
	/// Clones the table, propagating allocation failure from the locals stack.
	#[inline(always)]
	fn try_clone(&self) -> Result<Self, TryReserveError>
	{
		let globals = self.globals.clone();
		let locals = self.locals.try_clone()?;
		Ok(Self { globals, locals })
	}
}
| 33.137931 | 409 | 0.75026 |
6974e6dca67f500b646c2d7797a45b1a113d999d
| 324 |
use ansi_term::Colour;
/**********************************************
 ***** Helpers for printing colored text ******
 **********************************************/
/// Print `text` to stdout in bold red, followed by a newline.
pub fn red(text: String) {
    let styled = Colour::Red.bold().paint(text);
    println!("{}", styled);
}
/// Print `text` to stdout in bold green, followed by a newline.
pub fn green(text: String) {
    let styled = Colour::Green.bold().paint(text);
    println!("{}", styled);
}
| 27 | 52 | 0.438272 |
23a71e932d9cdc318c50f31ebca4053f30c7a2cd
| 14,700 |
use super::pbrt::{Float, is_power_of_2, round_up_pow2_i32, mod_t, log2_int_i32};
use super::stats_accumulator::StatsAccumulator;
use super::profiler::Profiler;
use super::geometry::{Point2i, Point2f, Vector2f};
use super::spectrum::{RGBSpectrum, SampledSpectrum};
use super::memory::BlockedArray;
use super::texture::lanczos;
use std::mem::size_of;
use std::ops::*;
use parking_lot::Once;
use rayon::prelude::*;
use num::Float as NumFloat;
/// Boundary behaviour used when a texel lookup falls outside an image level.
#[derive(Debug, Copy, Clone, PartialOrd, PartialEq)]
pub enum ImageWrap {
    /// Wrap coordinates around the image (tiling).
    Repeat,
    /// Out-of-bounds lookups return black (zero).
    Black,
    /// Clamp coordinates to the nearest edge texel.
    Clamp
}
/// Resampling weights for one destination texel: the index of the first of
/// four contiguous source texels plus their normalized filter weights.
#[derive(Debug, Default, Copy, Clone)]
pub struct ResampleWeight {
    // NOTE(review): pbrt uses a *signed* first-texel index; with u32 a
    // negative floor saturates to 0 in the `as u32` cast — confirm intended.
    pub first_texel: u32,
    pub weight: [Float; 4]
}
/// Number of entries in the precomputed EWA filter weight table.
const WEIGHT_LUT_SIZE: usize = 128;
/// Precomputed Gaussian falloff weights for the EWA filter; filled exactly
/// once (behind a `Once`) in `MIPMap::new`, read-only afterwards.
// NOTE(review): every access to a `static mut` requires `unsafe`; a
// `OnceLock`/`LazyLock` would avoid that entirely.
static mut WEIGHT_LUT: [Float; WEIGHT_LUT_SIZE] = [0.0; WEIGHT_LUT_SIZE];
/// An image pyramid supporting trilinear and EWA filtered texture lookups.
pub struct MIPMap<T> {
    /// If true, `lookup_vec` falls back to (cheaper) trilinear filtering.
    do_trilinear: bool,
    /// Maximum allowed ellipse eccentricity for EWA filtering.
    max_anisotropy: Float,
    /// Boundary behaviour for out-of-bounds texel lookups.
    wrap_mode: ImageWrap,
    /// Resolution of the finest (level 0) image.
    resolution: Point2i,
    /// Image pyramid; level 0 is finest, each level halves the previous one.
    pyramid: Vec<BlockedArray<T>>,
}
impl<T> MIPMap<T>
where
T: Default +
Clone +
Copy +
Default +
AddAssign +
Clamp +
Lerp +
Mul<Float, Output=T> +
Div<Float, Output=T> +
Add<Output=T> +
Send +
Sync +
num::Zero
{
    /// Builds the full image pyramid from `img` at `resolution`.
    ///
    /// Non-power-of-two images are first resampled up to the next power-of-two
    /// resolution (separably, in s then t) with Lanczos weights; each coarser
    /// level then averages 2x2 texel blocks of the previous level. Also fills
    /// the global EWA weight table on first use.
    pub fn new(
        mut resolution: Point2i,
        img: &[T],
        do_trilinear: bool,
        max_anisotropy: Float,
        wrap_mode: ImageWrap
    ) -> MIPMap<T> {
        let _profile = Profiler::instance().profile("MIP map generation");
        let mut mipmap = MIPMap::<T>{
            do_trilinear,
            max_anisotropy,
            wrap_mode,
            resolution,
            pyramid: Vec::new()
        };
        let mut resampled_image: Vec<T> = Vec::new();
        if !is_power_of_2(resolution.x) || !is_power_of_2(resolution.y) {
            // Resample image to power-of-two resolution
            let res_pow2 = Point2i::new(round_up_pow2_i32(resolution.x), round_up_pow2_i32(resolution.y));
            info!("Resampling MIPMap from {} to {}. Ration= {}", resolution, res_pow2,
                  ((res_pow2.x * res_pow2.y) as Float / (resolution.x * resolution.y) as Float));
            // Resample image in $s$ direction
            let s_weights = MIPMap::resample_weights(resolution.x, res_pow2.x);
            resampled_image.resize((res_pow2.x * res_pow2.y) as usize, Default::default());
            // Apply _sWeights_ to zoom in $s$ direction
            // NOTE(review): this rayon closure both reads and writes
            // `resampled_image` through a shared capture; as written this
            // should not borrow-check (`for_each` takes `Fn`) — confirm
            // against the crate's actual source.
            (0..resolution.y as usize).into_par_iter()//.chunks(16)
                .for_each(|t| {
                    for s in 0..res_pow2.x as usize {
                        // Compute texel $(s,t)$ in $s$-zoomed image
                        // resampled_image is already defaulted
                        for j in 0..4 {
                            let mut orig_s = s_weights[s].first_texel + j as u32;
                            if wrap_mode == ImageWrap::Repeat {
                                orig_s = mod_t(orig_s, resolution.x as u32);
                            } else if wrap_mode == ImageWrap::Clamp {
                                orig_s = num::clamp(orig_s, 0, resolution.x as u32 - 1);
                            }
                            // NOTE(review): `orig_s` is u32 so `>= 0` is
                            // always true; pbrt uses a signed index here.
                            if orig_s >= 0 && orig_s < resolution.x as u32 {
                                resampled_image[t * res_pow2.x as usize + s] +=
                                    img[t * resolution.x as usize + orig_s as usize] *
                                    s_weights[s].weight[j];
                            }
                        }
                    }
                });
            // Resample image in $t$ direction
            let t_weights = MIPMap::resample_weights(resolution.y, res_pow2.y);
            (0..res_pow2.x as usize).into_par_iter().for_each(|s| {
                let mut work_data: Vec<T> = vec![Default::default(); res_pow2.y as usize];
                for t in 0..res_pow2.y as usize {
                    for j in 0..4 {
                        // NOTE(review): `offset` is reassigned below but is
                        // declared without `mut` — this needs `let mut offset`
                        // to compile.
                        let offset = t_weights[t].first_texel as usize + j;
                        if wrap_mode == ImageWrap::Repeat {
                            offset = mod_t(offset, resolution.y as usize);
                        } else if wrap_mode == ImageWrap::Clamp {
                            offset = num::clamp(offset, 0, resolution.y as usize - 1);
                        }
                        // NOTE(review): `offset >= 0` is always true (usize).
                        if offset >= 0 && offset < resolution.y as usize {
                            work_data[t] += resampled_image[offset * res_pow2.x as usize + s] *
                                t_weights[t].weight[j];
                        }
                    }
                }
                // Write the zoomed column back, clamping negative Lanczos
                // ringing up to zero.
                for t in 0..res_pow2.y as usize {
                    resampled_image[t * res_pow2.x as usize + s] = Clamp::clamp(work_data[t], 0.0, Float::infinity());
                }
            });
            resolution = res_pow2;
        }
        // Initialize levels of MIPMap from image
        let n_levels = 1 + log2_int_i32(resolution.x.max(resolution.y)) as usize;
        mipmap.pyramid.reserve(n_levels);
        // Initialize most detailed level of MIPMap
        mipmap.pyramid.push(BlockedArray::new_from(
            resolution.x as usize,
            resolution.y as usize,
            if !resampled_image.is_empty() { &resampled_image } else { img }
        ));
        for i in 1..n_levels {
            // Initialize $i$th MIPMap level from $i-1$st level
            let s_res = 1.max(mipmap.pyramid[i - 1].u_size() / 2);
            let t_res = 1.max(mipmap.pyramid[i - 1].v_size() / 2);
            let mut ba = BlockedArray::<T>::new(s_res, t_res);
            // Filter four texels from finer level of pyramid
            // NOTE(review): `ba` is mutated from inside a rayon `for_each`
            // closure — same shared-capture concern as above.
            (0..t_res).into_par_iter().for_each(|t| {
                for s in 0..s_res {
                    ba[(s,t)] = (*mipmap.texel(i - 1, 2 * s, 2 * t) +
                        *mipmap.texel(i - 1, 2 * s + 1, 2 * t) +
                        *mipmap.texel(i - 1, 2 * s, 2 * t + 1) +
                        *mipmap.texel(i - 1, 2 * s + 1, 2 * t + 1)) * 0.25 as Float
                }
            });
            mipmap.pyramid.push(ba);
        }
        // Initialize EWA filter weights if needed
        // (filled exactly once; read-only afterwards — see `ewa`)
        static ONCE: Once = Once::new();
        unsafe{
            ONCE.call_once(|| {
                for i in 0..WEIGHT_LUT_SIZE {
                    let alpha: Float = 2.0;
                    let r2 = i as Float / (WEIGHT_LUT_SIZE - 1) as Float;
                    WEIGHT_LUT[i] = (-alpha * r2).exp() - (-alpha).exp();
                }
            });
        }
        StatsAccumulator::instance().report_memory_counter(String::from("Memory/Texture MIP maps"), ((4 * mipmap.resolution[0] * mipmap.resolution[1] * size_of::<T>() as i32) / 3) as i64);
        mipmap
    }
    /// Returns a reference to texel `(s, t)` of pyramid `level`, applying the
    /// configured wrap mode to out-of-bounds coordinates.
    ///
    /// # Panics
    ///
    /// Panics if `level >= self.levels()`.
    pub fn texel(&self, level: usize, mut s: usize, mut t: usize) -> &T {
        assert!(level < self.pyramid.len());
        let l = &self.pyramid[level];
        // Compute texel $(s,t)$ accounting for boundary conditions
        match self.wrap_mode {
            ImageWrap::Repeat => {
                s = mod_t(s, l.u_size());
                t = mod_t(t, l.v_size());
            }
            ImageWrap::Clamp => {
                s = num::clamp(s, 0, l.u_size() - 1);
                t = num::clamp(t, 0, l.v_size() - 1);
            }
            ImageWrap::Black => {
                // NOTE(review): `s < 0` / `t < 0` are always false (usize),
                // and `&num::Zero::zero()` borrows a temporary (E0515) — this
                // arm cannot compile as written; it needs a stored zero value
                // or a by-value return type.
                if s < 0 || s >= l.u_size() || t < 0 || t >= l.v_size() {
                    return &num::Zero::zero();
                }
            }
        }
        &l[(s,t)]
    }
fn resample_weights(old_res: i32, new_res: i32) -> Vec<ResampleWeight> {
assert!(new_res >= old_res);
let mut wt: Vec<ResampleWeight> = Vec::with_capacity(new_res as usize);
let filter_width: Float = 2.0;
for i in 0..new_res as usize {
// Compute image resampling weights for _i_th texel
let center = (i as Float + 0.5) * old_res as Float / new_res as Float;
wt[i].first_texel = (center - filter_width + 0.5).floor() as u32;
for j in 0..4 {
let pos = wt[i].first_texel as Float + j as Float + 0.5;
wt[i].weight[j] = lanczos((pos - center) / filter_width, 2.0);
}
// Normalize filter weights for texel resampling
let inv_sum_wts = 1.0 / (wt[i].weight[0] + wt[i].weight[1] +
wt[i].weight[2] + wt[i].weight[3]) as Float;
for j in 0..4 {
wt[i].weight[j] *= inv_sum_wts;
}
}
wt
}
    /// Width in texels of the finest (level 0) image.
    pub fn width(&self) ->i32 {
        self.resolution.x
    }
    /// Height in texels of the finest (level 0) image.
    pub fn height(&self) -> i32 {
        self.resolution.y
    }
    /// Number of levels in the image pyramid.
    pub fn levels(&self) -> usize {
        self.pyramid.len()
    }
/// default width is 0.0
pub fn lookup(&self, st: &Point2f, width: Float) -> T {
StatsAccumulator::instance().report_counter(String::from("Texture/Trilinear lookups"), 1);
let _p = Profiler::instance().profile("MIPMap::lookup() (trilinear)");
// Compute MIPMap level for trilinear filtering
let level = self.levels() as Float - 1.0 + width.max(1e-8).log2();
if level < 0.0 {
self.triangle(0, st)
} else if level >= self.levels() as Float - 1.0 {
*self.texel(self.levels() - 1, 0, 0)
} else {
let i_level = level.floor() as usize;
let delta = level - i_level as Float;
self.triangle(i_level, st).lerp(&self.triangle(i_level + 1, st), delta)
}
}
pub fn lookup_vec(&self, st: &Point2f, mut dstdx: Vector2f, mut dstdy: Vector2f) -> T {
if self.do_trilinear {
let width = dstdx.x.abs().max(dstdx.y.abs())
.max(dstdy.y.abs().max(dstdy.y.abs()));
return self.lookup(st, 2.0 * width);
}
StatsAccumulator::instance().report_counter(String::from("Texture/EWA lookups"), 1);
let _p = Profiler::instance().profile("MIPMap::lookup() (EWA)");
// Compute ellipse minor and major axes
if dstdx.length_squared() < dstdy.length_squared() {
std::mem::swap(&mut dstdx, &mut dstdy);
}
let major_length = dstdx.length();
let mut minor_length = dstdy.length();
// Clamp ellipse eccentricity if too large
if minor_length * self.max_anisotropy < major_length && minor_length > 0.0 {
let scale = major_length / (minor_length * self.max_anisotropy);
dstdy *= scale;
minor_length *= scale;
}
if minor_length == 0.0 {
return self.triangle(0, st);
}
// Choose level of detail for EWA lookup and perform EWA filtering
let lod = (self.levels() as Float - 1.0 + minor_length.log2()).max(0.0);
let ilod = lod.floor() as usize;
self.ewa(ilod, *st, dstdx, dstdy).lerp(&self.ewa(ilod + 1, *st, dstdx, dstdy), lod - ilod as Float)
}
fn triangle(&self, mut level: usize, st: &Point2f) -> T {
level = num::clamp(level, 0, self.levels() - 1);
let s = st.x * self.pyramid[level].u_size() as Float - 0.5;
let t = st.y * self.pyramid[level].v_size() as Float - 0.5;
let s0 = s.floor();
let t0 = t.floor();
let ds = s - s0;
let dt = t - t0;
*self.texel(level, s0 as usize, t0 as usize) * (1.0 - ds) * (1.0 - dt) +
*self.texel(level, s0 as usize, t0 as usize + 1) * (1.0 - ds) * dt +
*self.texel(level, s0 as usize + 1, t0 as usize) * ds * (1.0 - dt) +
*self.texel(level, s0 as usize + 1, t0 as usize + 1) * ds * dt
}
fn ewa(
&self,
level: usize,
mut st: Point2f,
mut dst0: Vector2f,
mut dst1: Vector2f
) -> T {
if level >= self.levels() {
return *self.texel(self.levels() - 1, 0, 0);
}
// Convert EWA coordinates to appropriate scale for level
st.x = st.x * self.pyramid[level].u_size() as Float - 0.5;
st.y = st.y * self.pyramid[level].v_size() as Float - 0.5;
dst0.x *= self.pyramid[level].u_size() as Float;
dst0.y *= self.pyramid[level].v_size() as Float;
dst1.x *= self.pyramid[level].u_size() as Float;
dst1.y *= self.pyramid[level].v_size() as Float;
// Compute ellipse coefficients to bound EWA filter region
let mut a = dst0.y * dst0.y + dst1.y * dst1.y + 1.0;
let mut b = -2.0 * (dst0.x * dst0.y + dst1.x * dst1.y);
let mut c = dst0.x * dst0.x + dst1.x * dst1.x + 1.0;
let inv_f = 1.0 / (a * c - b * b * 0.25);
a *= inv_f;
b *= inv_f;
c *= inv_f;
// Compute the ellipse's $(s,t)$ bounding box in texture space
let det = -b * b + 4.0 * a * c;
let inv_det = 1.0 / det;
let u_sqrt = (det * c).sqrt();
let v_sqrt = (a * det).sqrt();
let s0 = (st.x - 2.0 * inv_det * u_sqrt).ceil() as usize;
let s1 = (st.x + 2.0 * inv_det * u_sqrt).floor() as usize;
let t0 = (st.y - 2.0 * inv_det * v_sqrt).ceil() as usize;
let t1 = (st.y + 2.0 * inv_det * v_sqrt).floor() as usize;
// Scan over ellipse bound and compute quadratic equation
let mut sum: T = num::Zero::zero();
let mut sum_wts: Float = 0.0;
for it in t0..=t1 {
let tt = it as Float - st.y;
for is in s0..=s1 {
let ss = is as Float - st.x;
// Compute squared radius and filter texel if inside ellipse
let r2 = a * ss * ss + b * ss * tt + c * tt * tt;
if r2 < 1.0 {
let index = ((r2 * WEIGHT_LUT_SIZE as Float) as usize).min(WEIGHT_LUT_SIZE - 1);
let weight = WEIGHT_LUT[index];
sum += *self.texel(level, is, it);
sum_wts += weight;
}
}
}
sum / sum_wts
}
}
/// Clamping of a value to the inclusive range `[min, max]`.
///
/// Bounds are scalar `Float`s even for spectrum types: each channel is
/// clamped against the same bounds.
pub trait Clamp {
    fn clamp(self, min: Float, max: Float) -> Self;
}
impl Clamp for Float {
    fn clamp(self, min: Float, max: Float) -> Self {
        num::clamp(self, min, max)
    }
}
impl Clamp for SampledSpectrum {
    fn clamp(self, min: Float, max: Float) -> Self {
        // Resolves to SampledSpectrum's *inherent* `clamp` (inherent methods
        // take precedence over trait methods), so this does not recurse —
        // assumes such an inherent method exists; TODO confirm.
        self.clamp(min, max)
    }
}
impl Clamp for RGBSpectrum {
    fn clamp(self, min: Float, max: Float) -> Self {
        // Inherent method, not this trait method — no recursion (see above).
        self.clamp(min, max)
    }
}
/// Linear interpolation between `self` (t = 0) and `o` (t = 1).
pub trait Lerp {
    fn lerp(&self, o: &Self, t: Float) -> Self;
}
impl Lerp for Float {
    fn lerp(&self, o: &Self, t: Float) -> Self {
        super::pbrt::lerp(t, *self, *o)
    }
}
impl Lerp for SampledSpectrum {
    fn lerp(&self, o: &Self, t: Float) -> Self {
        // Resolves to the *inherent* `lerp` (inherent methods take precedence
        // over trait methods), so this does not recurse — assumes such an
        // inherent method exists; TODO confirm.
        self.lerp(o, t)
    }
}
impl Lerp for RGBSpectrum {
    fn lerp(&self, o: &Self, t: Float) -> Self {
        // Inherent method, not this trait method — no recursion (see above).
        self.lerp(o, t)
    }
}
| 37.692308 | 188 | 0.510544 |
22c5f8ff2f787b7f63013cff4bfbdfa16558738a
| 763 |
use crate::{BinOp, BinaryExpression, Expression, UnaryExpression, UnaryOp};
use codespan::{ByteIndex, ByteSpan};
/// Builds a `ByteSpan` covering byte offsets `start..end`.
pub(crate) fn s(start: usize, end: usize) -> ByteSpan {
    let lo = ByteIndex(start as u32);
    let hi = ByteIndex(end as u32);
    ByteSpan::new(lo, hi)
}
/// Builds a binary `Expression` node from two operand expressions.
pub(crate) fn bop<L, R>(l: L, r: R, op: BinOp, span: ByteSpan) -> Expression
where
    L: Into<Expression>,
    R: Into<Expression>,
{
    let left = Box::new(l.into());
    let right = Box::new(r.into());
    Expression::Binary(BinaryExpression { left, right, op, span })
}
/// Builds a unary `Expression` node from an operand expression.
pub(crate) fn unop<E>(expr: E, op: UnaryOp, span: ByteSpan) -> Expression
where
    E: Into<Expression>,
{
    let value = Box::new(expr.into());
    let unary = UnaryExpression { value, op, span };
    Expression::Unary(unary)
}
| 23.121212 | 76 | 0.605505 |
c1cf51252893483af831256ffa8090391348183a
| 23,532 |
//
//
// This file is a part of Aleph
//
// https://github.com/nathanvoglsam/aleph
//
// MIT License
//
// Copyright (c) 2020 Aleph Engine
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
use crate::world::{
ComponentIdMap, ComponentRegistry, ComponentSource, ComponentTypeDescription, ComponentTypeId,
EntityId, EntityLayout, EntityLayoutBuf,
};
use std::num::NonZeroU32;
use std::ptr::NonNull;
use virtual_buffer::VirtualVec;
///
/// This index wrapper represents an index into the list of archetypes within a world.
///
/// This is used to better document the purpose of various indexes that would've otherwise been
/// plain `u32` fields.
///
/// `NonZeroU32` gives `Option<ArchetypeIndex>` a free niche — presumably the
/// world reserves index 0; confirm against the `World` implementation.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ArchetypeIndex(pub NonZeroU32);
///
/// This index wrapper represents an index into an archetype's component storage.
///
/// This is used to better document the purpose of various indexes that would've otherwise been
/// plain `u32` fields.
///
/// Entity slots are 1-based: slot 0 of every storage is a zero-filled
/// sentinel (see `Archetype::new`), which is what makes `NonZeroU32` valid.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct ArchetypeEntityIndex(pub NonZeroU32);
// TODO: State system based on linked list described here: https://ajmmertens.medium.com/why-storing-state-machines-in-ecs-is-a-bad-idea-742de7a18e59
///
/// The data structure that stores the components for a given entity. An archetype provides SoA
/// storage for all entities of the same "shape" (have the same components).
///
/// # Implementation Details
///
/// The storage implementation is very straight forward. An `Archetype` consists of an array of
/// buffers and a map that maps the `ComponentId` to the buffer that stores data for the matching
/// component type. There is also another buffer that stores the `EntityId` that the entity in the
/// given slot was allocated to so you can map from entity slot back to the ID (needed for
/// implementing component/entity removal and providing the ID of the entity in the slot when
/// iterating).
///
/// All buffers are stored type-erased as raw `u8` buffers. Size and alignment is checked on
/// construction to prevent unaligned access.
///
/// The components are stored using virtual memory allocations. When constructing an archetype a
/// maximum capacity is provided as the maximum number of entities that can be stored. This capacity
/// is then used to reserve enough address space to store the maximum number of entities and their
/// components. Virtual memory is only committed when the space is needed to store entities meaning
/// memory is only consumed for real entities and not the entire reserved range.
///
/// Using virtual memory like this has two important benefits:
///
/// Unlike `Vec<u8>` we do not copy the data when "growing" the buffer. A `Vec<u8>` will have to
/// allocate a new region of memory and copy the old data across to grow the buffer. By using a
/// virtual memory allocation the data never moves when growing, we simply commit more of the
/// address space to grow the buffer. The data never needs to be copied because it never needs to
/// move.
///
/// This also has the benefit of making pointers into the component storage stable. The backing
/// buffers will never move. This allows for holding onto pointers into the component buffers, and
/// they will never be invalidated by the buffer growing. This is still wildly unsafe but can be
/// a useful assumption for optimization purposes.
///
pub struct Archetype {
    /// The entity layout of this archetype
    entity_layout: EntityLayoutBuf,
    /// A hash table that maps a component's id to the storage index. The storage index is used to
    /// index into the `component_descriptions` and `storages` fields.
    storage_indices: ComponentIdMap<usize>,
    /// A list of the description of each component type in the entity layout, indexed by the
    /// storage index
    component_descriptions: Vec<ComponentTypeDescription>,
    /// A list of all the storages of each component type in the entity layout, indexed by the
    /// storage index
    ///
    /// Each storage is a type-erased byte buffer; slot 0 is a zero-filled
    /// sentinel so live entities occupy slots `1..=len` (see `new`).
    storages: Vec<VirtualVec<u8>>,
    /// A list that maps an entity's index in the archetype storage back to the ID it was allocated
    /// with.
    ///
    /// Typically used by iterators that yield an `EntityID` alongside the components.
    entity_ids: VirtualVec<EntityId>,
    /// The maximum number of entities that can be stored in this archetype
    capacity: u32,
    /// The number of entities currently stored in this archetype
    /// (occupying slots `1..=len`)
    len: u32,
}
impl Archetype {
    /// Returns a reference to the entity layout this archetype stores entities for
    #[inline(always)]
    pub fn entity_layout(&self) -> &EntityLayout {
        &self.entity_layout
    }
    /// Returns the maximum number of entities that can be stored in this archetype
    /// (does not include the reserved sentinel slot 0)
    #[inline(always)]
    pub fn capacity(&self) -> u32 {
        self.capacity
    }
    /// Returns the current number of entities that can be stored in this archetype
    #[inline(always)]
    pub fn len(&self) -> u32 {
        self.len
    }
}
/// Internal implementations
impl Archetype {
    /// Creates an empty archetype for `layout` able to hold up to `capacity`
    /// entities, reserving (but not committing) virtual address space for all
    /// component storages.
    ///
    /// # Panics
    ///
    /// - If `capacity + 1` overflows `u32`.
    /// - If any component in `layout` is not registered in `registry`.
    /// - If reserving virtual address space fails.
    pub(crate) fn new(capacity: u32, layout: &EntityLayout, registry: &ComponentRegistry) -> Self {
        // Add 1 so there's space for the always empty 0th element
        let storage_capacity = capacity.checked_add(1).unwrap();
        // Produce the index map from the layout
        let storage_indices: ComponentIdMap<usize> =
            layout.iter().enumerate().map(|v| (v.1, v.0)).collect();
        // Lookup the list of descriptions in the registry
        let component_descriptions: Vec<ComponentTypeDescription> = layout
            .iter()
            .map(|v| {
                registry
                    .lookup(v)
                    .expect("Tried to create an archetype with an unregistered component type")
            })
            .cloned()
            .collect();
        // Create a virtual memory reservation for each component's storage
        let storages = component_descriptions
            .iter()
            .map(|v| {
                let mut buffer = VirtualVec::new(v.type_size * storage_capacity as usize)
                    .expect("Failed to reserve address space for components");
                // Pre-fill the first slot with zeroes, it will never be accessed
                buffer.resize(v.type_size, 0);
                buffer
            })
            .collect();
        // Create the buffer for mapping entity indices back to an entity id
        let mut entity_ids = VirtualVec::new(storage_capacity as usize)
            .expect("Failed to reserve address space for entity id list");
        entity_ids.push(EntityId::null());
        Self {
            entity_layout: layout.to_owned(),
            storage_indices,
            component_descriptions,
            storages,
            entity_ids,
            capacity,
            len: 0,
        }
    }
    /// This function allocates spaces for `count` entities. This does not handle writing the
    /// entity components into the storages, this must be done separately.
    ///
    /// The function returns the base index where the first newly allocated entity can be found.
    /// All new entities will be contiguous. The returned index is a 1-based
    /// slot index (slot 0 is the reserved sentinel).
    ///
    /// # Panics
    ///
    /// Panics on `u32` overflow or if the new size would exceed `capacity`.
    #[inline]
    pub(crate) fn allocate_entities(&mut self, count: u32) -> ArchetypeEntityIndex {
        // The base will be the index of the first slot after the end of the densely packed section
        //
        // We need to offset by 1 because the 1st slot in memory is skipped to allow using a 0 index
        // as a niche value.
        let base = NonZeroU32::new(self.len.checked_add(1).unwrap()).unwrap();
        let new_size = self.len.checked_add(count).unwrap();
        if new_size > self.capacity {
            panic!(
                "Adding {} entities would overflow capacity \"{}\"",
                count, self.capacity
            );
        }
        // Zero-extend every component storage by `count` slots.
        // NOTE(review): storages are grown relative to their *current* length
        // and are never shrunk on removal, so after removals they can grow
        // beyond `(len + 1) * type_size`; slots stay in bounds but memory is
        // wasted — confirm this is intended.
        for (desc, storage) in self
            .component_descriptions
            .iter()
            .zip(self.storages.iter_mut())
        {
            let bytes = count as usize * desc.type_size;
            storage.resize(storage.len() + bytes, 0);
        }
        self.entity_ids
            .resize(count as usize + self.entity_ids.len(), EntityId::null());
        self.len = new_size;
        ArchetypeEntityIndex(base)
    }
    /// This function handles writing data from a generic component source into the correct buffers
    /// for each component type. It starts the copy from the given `base` entity slot.
    ///
    /// # Warning
    ///
    /// This will not perform any checks to ensure existing data isn't overwritten, this is
    /// effectively a wrapper around `memcpy`. It will, however, ensure that data is not written or
    /// read out of bounds.
    #[inline]
    pub(crate) fn copy_from_source<T: ComponentSource>(
        &mut self,
        base: ArchetypeEntityIndex,
        source: T,
    ) {
        // Copy the component data into the archetype buffers
        // NOTE(review): `self.component_descriptions[i]` / `self.storages[i]`
        // are indexed by the *source* layout's position — this assumes the
        // source's layout order matches this archetype's layout; confirm
        // `ComponentSource` guarantees that.
        for (i, comp) in source.entity_layout().iter().enumerate() {
            let source = source.data_for(comp);
            // Get the size of the type we're copying from the buffers
            let type_size = self.component_descriptions[i].type_size;
            // Calculate the base index for where to start copying into the buffer
            let base = base.0.get() as usize;
            let base = base * type_size;
            // Calculate the end of the region to copy into
            let end = base + source.len();
            // Get the target slice to copy into
            let target = self.storages[i].as_slice_mut();
            let target = &mut target[base..end];
            // Perform the actual copy
            target.copy_from_slice(source);
        }
    }
    /// This function will write the provided `data` into the storage for the given `component_type`
    /// at the given `slot` within the storage.
    ///
    /// `data.len()` must equal the component's `type_size` or
    /// `copy_from_slice` will panic on the length mismatch.
    #[inline]
    pub(crate) fn copy_component_data_into_slot(
        &mut self,
        slot: ArchetypeEntityIndex,
        component_type: ComponentTypeId,
        data: &[u8],
    ) {
        // Get the index of the type inside the archetype and lookup the size of the type
        let type_index = self.storage_indices.get(&component_type).copied().unwrap();
        let type_size = self.component_descriptions[type_index].type_size;
        // Get the bounds of the component's data
        let dest_base = slot.0.get() as usize;
        let dest_base = dest_base * type_size;
        let dest_end = dest_base + type_size;
        // Create the slice to copy into, no dropping is needed as the data is uninitialized
        let dest_buffer = self.storages[type_index].as_slice_mut();
        let dest_buffer = &mut dest_buffer[dest_base..dest_end];
        // Perform the actual copy
        dest_buffer.copy_from_slice(data);
    }
    /// Runs the component's drop function (if it has one) on the value stored
    /// in `slot`.
    ///
    /// # Safety
    ///
    /// The caller must ensure the slot currently holds an initialized value of
    /// the component type and that it will not be dropped again afterwards.
    #[inline]
    pub(crate) unsafe fn drop_component_in_slot(
        &mut self,
        slot: ArchetypeEntityIndex,
        component_type: ComponentTypeId,
    ) {
        let type_index = self.storage_indices.get(&component_type).copied().unwrap();
        let type_size = self.component_descriptions[type_index].type_size;
        let drop_fn = self.component_descriptions[type_index].fn_drop;
        if let Some(drop_fn) = drop_fn {
            let base = slot.0.get() as usize;
            let base = base * type_size;
            let end = base + type_size;
            let slice = self.storages[type_index].as_slice_mut();
            let slice = &mut slice[base..end];
            drop_fn(slice.as_mut_ptr());
        }
    }
    /// Returns a pointer to the bytes of the `component_type` value stored in
    /// `slot`, or `None` if this archetype does not contain that component.
    #[inline]
    pub(crate) fn get_component_ptr(
        &self,
        slot: ArchetypeEntityIndex,
        component_type: ComponentTypeId,
    ) -> Option<NonNull<u8>> {
        // Lookup the storage index, load the size of the type and get the storage pointer
        let storage_index = self.storage_indices.get(&component_type).copied()?;
        let type_size = self.component_descriptions[storage_index].type_size;
        let storage = self.storages[storage_index].as_slice();
        // Get the bounds of the component's data
        let base = slot.0.get() as usize;
        let base = base * type_size;
        let end = base + type_size;
        // Get a pointer to the position in the buffer the component can be found
        let slice = &storage[base..end];
        let ptr = slice.as_ptr();
        NonNull::new(ptr as *mut u8)
    }
    /// Remove the entity at the given index.
    ///
    /// The const parameter chooses whether to call the drop function or not.
    ///
    /// Will return an optional `EntityId`. If an ID is yielded it means we had to move an entity
    /// within the archetype to perform the removal and keep the entities packed.
    ///
    /// If this function returns a value then the user (i.e. [`World`]) must update the
    /// `EntityLocation` field for that ID to prevent the ID from becoming a dangling reference
    /// (unsafe).
    #[inline]
    pub(crate) fn remove_entity<const DROP: bool>(
        &mut self,
        index: ArchetypeEntityIndex,
    ) -> Option<EntityId> {
        // swap-remove the ID from the dense ID array
        //
        // Checks if we're popping from the end of the array. If we have to remove from the interior
        // of the dense list then we will need to move the ID at the end into the empty space. The
        // entity storage in the World will need to be updated to respect the entity being moved
        // inside the archetype
        //
        // This returns the entity that needs to be updated and whether an update is needed
        self.entity_ids.swap_remove(index.0.get() as usize);
        // After the swap_remove, `entity_ids.len()` has shrunk by one; if the
        // removed slot was the last one nothing moved, otherwise the slot now
        // holds the ID that was moved in from the end.
        let out_index = if index.0.get() as usize == self.entity_ids.len() {
            None
        } else {
            Some(self.entity_ids[index.0.get() as usize])
        };
        // Mirror the same swap-remove in every component storage.
        for i in 0..self.storages.len() {
            self.swap_and_pop_for_storage::<DROP>(i, index);
        }
        self.len -= 1;
        out_index
    }
/// Swap and pop the component in `storage_index` at `index`
///
/// The const parameter chooses whether to call the drop function or not
///
/// # Info
///
/// DO NOT FORGET TO MANUALLY DECREMENT self.len
#[inline]
pub(crate) fn swap_and_pop_for_storage<const DROP: bool>(
&mut self,
storage_index: usize,
index: ArchetypeEntityIndex,
) {
let index = index.0.get() as usize;
let last_index = self.len as usize;
if index == last_index {
// Swap and pop at the end of the storage just decays to a regular pop operation.
self.pop_for_storage::<DROP>(storage_index);
} else {
let storage = &mut self.storages[storage_index];
let desc = &self.component_descriptions[storage_index];
let remove_offset = index * desc.type_size;
let last_offset = last_index * desc.type_size;
let (remove, last) = storage.split_at_mut(last_offset);
let remove = &mut remove[remove_offset..];
remove.swap_with_slice(last);
// Pop off the end, which destroys the element we wanted to remove
self.pop_for_storage::<DROP>(storage_index);
}
}
/// The const parameter chooses whether to call the drop function or not
///
/// # Info
///
/// DO NOT FORGET TO MANUALLY DECREMENT `self.len`
#[inline]
pub(crate) fn pop_for_storage<const DROP: bool>(&mut self, storage_index: usize) {
if self.len != 0 {
let storage = &mut self.storages[storage_index];
let desc = &self.component_descriptions[storage_index];
if DROP {
if let Some(fn_drop) = desc.fn_drop {
let last_index = (self.len - 1) as usize;
let last_ptr = &mut storage[last_index * desc.type_size] as *mut u8;
// SAFETY: This handles calling the drop function for a component through a raw
// pointer. The signature is type erased so the interface is unsafe.
//
// This is just a type-erased call to `drop::<T>()` where T is the type of
// the component. The `Archetype` data structure's safe interface ensures
// the drop function is only called with valid data.
//
// UB can be triggered if `fn_drop` is not implemented correctly, but this
// is impossible from safe code as the implementation for each component is
// auto generated from a generic implementation. The function can only be
// incorrect by providing an incorrect ComponentTypeDescription using an
// unsafe function.
unsafe {
fn_drop(last_ptr);
}
}
}
}
}
    /// Allocates a new slot in `self` and copies the entity at `target` in
    /// `source` into it (ID plus every component the two layouts share),
    /// returning the new slot's index. Components present in `self` but
    /// absent from `source` are left zero-filled by `allocate_entities`.
    #[inline]
    pub(crate) fn copy_from_archetype(
        &mut self,
        target: ArchetypeEntityIndex,
        source: &Archetype,
    ) -> ArchetypeEntityIndex {
        // Allocate a new slot in self to copy the component from the other archetype into
        let new_index = self.allocate_entities(1);
        // Copy the entity ID slot across
        self.entity_ids[new_index.0.get() as usize] = source.entity_ids[target.0.get() as usize];
        // NOTE: the outer `source_index` is the enumerate position over
        // *self*'s layout (and indexes self's parallel arrays); the identically
        // named binding inside the if-let below shadows it with the *source*
        // archetype's storage index — confusing naming, but the indexing is
        // consistent because the shadow's scope ends with the if-let.
        for (source_index, source_id) in self.entity_layout.iter().enumerate() {
            // Get the size of the component to copy
            let type_size = self.component_descriptions[source_index].type_size;
            // Get the bounds of the data to copy
            let source_base = target.0.get() as usize;
            let source_base = source_base * type_size;
            let source_end = source_base + type_size;
            // Create a slice of the data to copy, exiting the loop if the component is not present
            // in the source archetype
            let source_buffer =
                if let Some(source_index) = source.storage_indices.get(&source_id).copied() {
                    source.storages[source_index].as_slice()
                } else {
                    continue;
                };
            let source_buffer = &source_buffer[source_base..source_end];
            // Get the bounds of the memory to copy the data to
            let dest_base = new_index.0.get() as usize;
            let dest_base = dest_base * type_size;
            let dest_end = dest_base + type_size;
            // Create a slice of the destination to copy into
            let dest_buffer = self.storages[source_index].as_slice_mut();
            let dest_buffer = &mut dest_buffer[dest_base..dest_end];
            // Perform the actual copy
            dest_buffer.copy_from_slice(source_buffer);
        }
        new_index
    }
    /// Writes the entity ID into the ID list at the given slot.
    ///
    /// Used for initializing the ID when entities are inserted.
    #[inline(always)]
    pub(crate) fn update_entity_id(&mut self, slot: ArchetypeEntityIndex, id: EntityId) {
        self.entity_ids[slot.0.get() as usize] = id;
    }
    /// Returns the starting address for the entity id list so it can be used by query iterators
    ///
    /// The returned pointer is offset past the sentinel in slot 0, so it
    /// points at the first *live* entity's ID.
    #[inline(always)]
    pub(crate) fn entity_id_base_ptr(&self) -> NonNull<EntityId> {
        unsafe {
            let ptr = self.entity_ids.as_ptr() as *mut EntityId;
            // Skip the reserved slot 0.
            let ptr = ptr.add(1);
            let ptr = NonNull::new(ptr).unwrap();
            ptr
        }
    }
impl Drop for Archetype {
    /// Drops every live component value (slots `1..=len` of each storage;
    /// the zero-filled sentinel slot 0 is skipped).
    fn drop(&mut self) {
        // Iterate over every component storage and call the drop function on all components
        for (index, storage) in self.storages.iter_mut().enumerate() {
            // Lookup the size and drop fn so we can iterate over the components in the storage
            let type_size = self.component_descriptions[index].type_size;
            let drop_fn = self.component_descriptions[index].fn_drop;
            // Only need to iterate if the drop function is actually defined
            if let Some(drop_fn) = drop_fn {
                // SAFETY: This just iterates over each item in the storage while type erased, which
                //         is a sound operation. The drop function will never be invalid to call if
                //         there is no unsafe code interfacing with the world.
                unsafe {
                    // Start at offset `type_size`, i.e. slot 1, skipping the sentinel.
                    let mut current = storage.as_mut_ptr().add(type_size);
                    for _ in 0..self.len {
                        drop_fn(current);
                        current = current.add(type_size);
                    }
                }
            }
        }
    }
}
/// FFI shim over `Archetype::component_descriptions`.
///
/// Writes the number of descriptions into `out_len` and returns a pointer to
/// the first element of the description array.
///
/// # Safety
///
/// `archetype` must point to a live `Archetype`. The returned pointer is only
/// valid while that `Archetype` is alive and its description list unchanged.
pub unsafe extern "C" fn archetype_get_component_descriptions(
    archetype: NonNull<Archetype>,
    out_len: &mut usize,
) -> NonNull<ComponentTypeDescription> {
    let descriptions = &archetype.as_ref().component_descriptions;
    // Cast away constness only so the pointer fits `NonNull`; callers must
    // treat the data as read-only.
    let ptr = descriptions.as_ptr() as *mut ComponentTypeDescription;
    let ptr = NonNull::new_unchecked(ptr);
    *out_len = descriptions.len();
    ptr
}
/// FFI shim that looks up the storage index for a component type.
///
/// Returns 1 and writes the index into `out_index` when the component exists
/// in the archetype; returns 0 (leaving `out_index` untouched) otherwise.
///
/// # Safety
///
/// `archetype` must point to a live `Archetype`.
pub unsafe extern "C" fn archetype_get_component_index(
    archetype: NonNull<Archetype>,
    component: ComponentTypeId,
    out_index: &mut usize,
) -> u32 {
    if let Some(index) = archetype.as_ref().storage_indices.get(&component).copied() {
        *out_index = index;
        1
    } else {
        0
    }
}
/// FFI shim that returns the base pointer of the storage at `index`.
///
/// # Safety
///
/// `archetype` must point to a live, uniquely-accessible `Archetype` and
/// `index` must be in bounds (an out-of-range index panics via the slice
/// indexing below rather than returning an error).
pub unsafe extern "C" fn archetype_get_storage_by_index(
    mut archetype: NonNull<Archetype>,
    index: usize,
) -> NonNull<u8> {
    let storage = archetype.as_mut().storages[index].as_slice_mut();
    NonNull::new_unchecked(storage.as_mut_ptr())
}
/// FFI shim over the archetype's entity layout (ordered component type ids).
///
/// Writes the number of component ids into `out_len` and returns a pointer to
/// the first id.
///
/// # Safety
///
/// `archetype` must point to a live `Archetype`; the returned pointer is only
/// valid while that `Archetype` is alive.
pub unsafe extern "C" fn archetype_get_entity_layout(
    archetype: NonNull<Archetype>,
    out_len: &mut usize,
) -> NonNull<ComponentTypeId> {
    let layout = archetype.as_ref().entity_layout().as_inner();
    // Const-to-mut cast solely to satisfy `NonNull`; the data is read-only.
    let ptr = layout.as_ptr() as *mut ComponentTypeId;
    let ptr = NonNull::new_unchecked(ptr);
    *out_len = layout.len();
    ptr
}
/// FFI shim over `Archetype::len`.
///
/// # Safety
///
/// `archetype` must point to a live `Archetype`.
pub unsafe extern "C" fn archetype_get_len(archetype: NonNull<Archetype>) -> u32 {
    archetype.as_ref().len()
}
/// FFI shim over `Archetype::capacity` (the original comment incorrectly said
/// `Archetype::len`).
///
/// # Safety
///
/// `archetype` must point to a live `Archetype`.
pub unsafe extern "C" fn archetype_get_capacity(archetype: NonNull<Archetype>) -> u32 {
    archetype.as_ref().capacity()
}
| 39.22 | 149 | 0.634328 |
e96b1ecbb0e49b79cb763c413a137ac991afc316
| 1,371 |
use std::cmp::Ordering;
/// The relationship between two lists as determined by [`sublist`].
#[derive(Debug, PartialEq)]
pub enum Comparison {
    /// Both lists contain the same elements in the same order.
    Equal,
    /// The first list occurs contiguously within the second.
    Sublist,
    /// The second list occurs contiguously within the first.
    Superlist,
    /// Neither list contains the other.
    Unequal,
}
/// Classifies `first_list` against `second_list` as equal, a sublist,
/// a superlist, or unrelated.
pub fn sublist<T: PartialEq>(first_list: &[T], second_list: &[T]) -> Comparison {
    use Ordering::*;
    // The length relation decides which containment direction is possible.
    match first_list.len().cmp(&second_list.len()) {
        Equal => Comparison::equal(compare(first_list, second_list)),
        Less => Comparison::sublist(slice_compare(first_list, second_list)),
        Greater => Comparison::superlist(slice_compare(second_list, first_list)),
    }
}
/// Returns true when the two slices match element-for-element up to the
/// length of the shorter one.
fn compare<T: PartialEq>(first_list: &[T], second_list: &[T]) -> bool {
    !first_list
        .iter()
        .zip(second_list.iter())
        .any(|(lhs, rhs)| lhs != rhs)
}
/// Returns true if `small` occurs as a contiguous run inside `large`.
/// An empty `small` is considered contained in any slice.
fn slice_compare<T: PartialEq>(small: &[T], large: &[T]) -> bool {
    // `windows` panics on a window size of 0, so the empty case short-circuits
    // first. `is_empty()` replaces the non-idiomatic `len() == 0`.
    small.is_empty() || large.windows(small.len()).any(|win| compare(small, win))
}
impl Comparison {
    /// Maps a validation flag onto `true_value`, falling back to `Unequal`.
    fn compare(validation: bool, true_value: Comparison) -> Comparison {
        match validation {
            true => true_value,
            false => Comparison::Unequal,
        }
    }
    /// `Equal` when `validation` holds, `Unequal` otherwise.
    pub fn equal(validation: bool) -> Comparison {
        Comparison::compare(validation, Comparison::Equal)
    }
    /// `Sublist` when `validation` holds, `Unequal` otherwise.
    pub fn sublist(validation: bool) -> Comparison {
        Comparison::compare(validation, Comparison::Sublist)
    }
    /// `Superlist` when `validation` holds, `Unequal` otherwise.
    pub fn superlist(validation: bool) -> Comparison {
        Comparison::compare(validation, Comparison::Superlist)
    }
}
| 30.466667 | 86 | 0.632385 |
fe39a348804a16bd3ef186c878816a1075899db9
| 436 |
use serde::{Deserialize, Deserializer, Serializer};
/// Serializes a byte slice as a lowercase hex string.
///
/// Intended for `#[serde(serialize_with = "to_hex")]` on byte-vector fields.
/// Takes `&[u8]` instead of `&Vec<u8>` (clippy `ptr_arg`); serde call sites
/// passing `&Vec<u8>` still work via deref coercion.
pub fn to_hex<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_str(&hex::encode(bytes))
}
pub fn from_hex<'de, D>(deserialiser: D) -> Result<Vec<u8>, D::Error>
where
D: Deserializer<'de>,
{
let buf = String::deserialize(deserialiser)?;
hex::decode(buf).map_err(serde::de::Error::custom)
}
| 22.947368 | 73 | 0.65367 |
8af47a2ecd5138f6d93037fbdadb5d162d1c7d60
| 12,329 |
// Copyright 2020 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use {
crate::cache::BlobNetworkTimeouts,
fidl_fuchsia_pkg_ext::BlobId,
fuchsia_inspect::{
IntProperty, Node, NumericProperty as _, Property as _, StringProperty, UintProperty,
},
fuchsia_zircon as zx,
std::marker::PhantomData,
};
/// Current monotonic clock reading in nanoseconds, used to timestamp
/// fetch events in Inspect.
fn now_monotonic_nanos() -> i64 {
    zx::Time::get_monotonic().into_nanos()
}
/// Creates Inspect wrappers for individual blob fetches.
pub struct BlobFetcher {
    // Inspect node under which each fetch records a child node keyed by blob id.
    node: Node,
}
impl BlobFetcher {
    /// Create a `BlobFetcher` from an Inspect node, recording the configured
    /// network timeouts on the node once up front.
    pub fn from_node_and_timeouts(node: Node, timeouts: &BlobNetworkTimeouts) -> Self {
        node.record_uint("blob_header_timeout_seconds", timeouts.header().as_secs());
        node.record_uint("blob_body_timeout_seconds", timeouts.body().as_secs());
        Self { node }
    }
    /// Create an Inspect wrapper for an individual blob fetch: a child node
    /// keyed by the blob id, stamped with the fetch start time.
    pub fn fetch(&self, id: &BlobId) -> Fetch {
        let node = self.node.create_child(id.to_string());
        node.record_int("fetch_ts", now_monotonic_nanos());
        Fetch { node }
    }
}
/// A blob fetch that the pkg-resolver has begun processing.
pub struct Fetch {
    // Per-fetch Inspect node; ownership moves forward through the state types.
    node: Node,
}
impl Fetch {
    /// Mark that the blob contents will be obtained via http.
    pub fn http(self) -> FetchHttp {
        self.node.record_string("source", "http");
        FetchHttp { node: self.node }
    }
    /// Mark that the blob contents will be obtained via fuchsia.pkg/LocalMirror.
    pub fn local_mirror(self) -> FetchState<LocalMirror> {
        self.node.record_string("source", "local-mirror");
        // Mirrors the property set created in `FetchHttp::mirror`; keep the
        // two construction sites in sync.
        let state = self.node.create_string("state", "initial");
        let state_ts = self.node.create_int("state_ts", now_monotonic_nanos());
        let bytes_written = self.node.create_uint("bytes_written", 0);
        let attempts = self.node.create_uint("attempts", 0);
        FetchState::<LocalMirror> {
            state,
            state_ts,
            bytes_written,
            attempts,
            _node: self.node,
            _phantom: PhantomData,
        }
    }
}
/// A blob fetch being downloaded via http.
pub struct FetchHttp {
    // Per-fetch Inspect node, handed on to `FetchState` by `mirror`.
    node: Node,
}
impl FetchHttp {
    /// Annotate the fetch with the mirror url and transition to the stateful
    /// wrapper.
    pub fn mirror(self, mirror: &str) -> FetchState<Http> {
        self.node.record_string("mirror", mirror);
        // Mirrors the property set created in `Fetch::local_mirror`; keep the
        // two construction sites in sync.
        let state = self.node.create_string("state", "initial");
        let state_ts = self.node.create_int("state_ts", now_monotonic_nanos());
        let bytes_written = self.node.create_uint("bytes_written", 0);
        let attempts = self.node.create_uint("attempts", 0);
        FetchState::<Http> {
            state,
            state_ts,
            bytes_written,
            attempts,
            _node: self.node,
            _phantom: PhantomData,
        }
    }
}
/// Sub-states for an http fetch.
///
/// Each variant is exported to Inspect via its string form in
/// `impl State for Http`.
pub enum Http {
    CreateBlob,
    DownloadBlob,
    CloseBlob,
    HttpGet,
    TruncateBlob,
    ReadHttpBody,
    WriteBlob,
    WriteComplete,
}
/// Sub-states for a fuchsia.pkg/LocalMirror fetch.
///
/// Each variant is exported to Inspect via its string form in
/// `impl State for LocalMirror`.
pub enum LocalMirror {
    CreateBlob,
    GetBlob,
    TruncateBlob,
    ReadBlob,
    WriteBlob,
    CloseBlob,
}
/// A sub-state for a fetch. The stringification will be exported via Inspect.
pub trait State {
    /// The human-readable name recorded in the Inspect `state` property.
    fn as_str(&self) -> &'static str;
}
impl State for Http {
    // Names surface directly in Inspect output; treat them as a diagnostics
    // contract and avoid renaming casually.
    fn as_str(&self) -> &'static str {
        match self {
            Http::CreateBlob => "create blob",
            Http::DownloadBlob => "download blob",
            Http::CloseBlob => "close blob",
            Http::HttpGet => "http get",
            Http::TruncateBlob => "truncate blob",
            Http::ReadHttpBody => "read http body",
            Http::WriteBlob => "write blob",
            Http::WriteComplete => "write complete",
        }
    }
}
impl State for LocalMirror {
    // Names surface directly in Inspect output; treat them as a diagnostics
    // contract and avoid renaming casually.
    fn as_str(&self) -> &'static str {
        match self {
            LocalMirror::CreateBlob => "create blob",
            LocalMirror::GetBlob => "get blob",
            LocalMirror::TruncateBlob => "truncate blob",
            LocalMirror::ReadBlob => "read blob",
            LocalMirror::WriteBlob => "write blob",
            LocalMirror::CloseBlob => "close blob",
        }
    }
}
/// The terminal type of the fetch Inspect wrappers. This ends the use of move semantics to enforce
/// type transitions because at this point in cache.rs the type is being passed into and out of
/// functions and captured by FnMut.
pub struct FetchState<S: State> {
state: StringProperty,
state_ts: IntProperty,
bytes_written: UintProperty,
attempts: UintProperty,
_node: Node,
_phantom: std::marker::PhantomData<S>,
}
impl<S: State> FetchState<S> {
    /// Increase the attempt count.
    pub fn attempt(&self) {
        self.attempts.add(1);
    }
    /// Mark that `bytes` more bytes of the blob have been written to blobfs.
    /// Returns `&Self` so a state change can be chained after it.
    pub fn write_bytes(&self, bytes: usize) -> &Self {
        self.bytes_written.add(bytes as u64);
        self
    }
    /// Change the sub-state of this fetch and record when the change happened.
    pub fn state(&self, state: S) {
        self.state.set(state.as_str());
        self.state_ts.set(now_monotonic_nanos());
    }
}
/// The terminal type of an http fetch.
pub type FetchStateHttp = FetchState<Http>;
/// The terminal type of a fuchsia.pkg/LocalMirror fetch.
pub type FetchStateLocal = FetchState<LocalMirror>;
#[cfg(test)]
mod tests {
    use {
        super::*,
        fuchsia_inspect::{assert_inspect_tree, testing::AnyProperty, Inspector},
    };
    const ZEROES_HASH: &'static str =
        "0000000000000000000000000000000000000000000000000000000000000000";
    const ONES_HASH: &'static str =
        "1111111111111111111111111111111111111111111111111111111111111111";
    impl BlobFetcher {
        // Test-only constructor that skips recording the timeout properties.
        fn from_node(node: Node) -> Self {
            Self { node }
        }
    }
    // Walks a fetch through the http type transitions, checking the Inspect
    // tree after each step.
    #[test]
    fn http_state_progression() {
        let inspector = Inspector::new();
        let blob_fetcher = BlobFetcher::from_node(inspector.root().create_child("blob_fetcher"));
        let inspect = blob_fetcher.fetch(&BlobId::parse(ZEROES_HASH).unwrap());
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                    }
                }
            }
        );
        let inspect = inspect.http();
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                        source: "http",
                    }
                }
            }
        );
        let inspect = inspect.mirror("fake-mirror");
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                        source: "http",
                        mirror: "fake-mirror",
                        state: "initial",
                        state_ts: AnyProperty,
                        bytes_written: 0u64,
                        attempts: 0u64,
                    }
                }
            }
        );
        inspect.state(Http::CreateBlob);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                        source: "http",
                        mirror: "fake-mirror",
                        state: "create blob",
                        state_ts: AnyProperty,
                        bytes_written: 0u64,
                        attempts: 0u64,
                    }
                }
            }
        );
    }
    // Same as http_state_progression but for the LocalMirror source, which
    // has no mirror url step.
    #[test]
    fn local_mirror_state_progression() {
        let inspector = Inspector::new();
        let blob_fetcher = BlobFetcher::from_node(inspector.root().create_child("blob_fetcher"));
        let inspect = blob_fetcher.fetch(&BlobId::parse(ZEROES_HASH).unwrap());
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                    }
                }
            }
        );
        let inspect = inspect.local_mirror();
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                        source: "local-mirror",
                        state: "initial",
                        state_ts: AnyProperty,
                        bytes_written: 0u64,
                        attempts: 0u64,
                    }
                }
            }
        );
        inspect.state(LocalMirror::CreateBlob);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => {
                        fetch_ts: AnyProperty,
                        source: "local-mirror",
                        state: "create blob",
                        state_ts: AnyProperty,
                        bytes_written: 0u64,
                        attempts: 0u64,
                    }
                }
            }
        );
    }
    // A state transition must leave the bytes_written/attempts counters alone.
    #[test]
    fn state_does_not_change_bytes_written_or_attempts() {
        let inspector = Inspector::new();
        let blob_fetcher = BlobFetcher::from_node(inspector.root().create_child("blob_fetcher"));
        let inspect = blob_fetcher.fetch(&BlobId::parse(ZEROES_HASH).unwrap()).local_mirror();
        inspect.attempt();
        inspect.write_bytes(6);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => contains {
                        state: "initial",
                        bytes_written: 6u64,
                        attempts: 1u64,
                    }
                }
            }
        );
        inspect.state(LocalMirror::TruncateBlob);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => contains {
                        state: "truncate blob",
                        bytes_written: 6u64,
                        attempts: 1u64,
                    }
                }
            }
        );
    }
    // write_bytes accumulates rather than overwriting.
    #[test]
    fn write_bytes_is_cumulative() {
        let inspector = Inspector::new();
        let blob_fetcher = BlobFetcher::from_node(inspector.root().create_child("blob_fetcher"));
        let inspect = blob_fetcher.fetch(&BlobId::parse(ZEROES_HASH).unwrap()).local_mirror();
        inspect.write_bytes(7);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => contains {
                        bytes_written: 7u64,
                    }
                }
            }
        );
        inspect.write_bytes(8);
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: contains {
                    ZEROES_HASH.to_string() => contains {
                        bytes_written: 15u64,
                    }
                }
            }
        );
    }
    // Concurrent fetches each get their own child node keyed by blob id.
    #[test]
    fn multiple_fetches() {
        let inspector = Inspector::new();
        let blob_fetcher = BlobFetcher::from_node(inspector.root().create_child("blob_fetcher"));
        let _inspect0 = blob_fetcher.fetch(&BlobId::parse(ZEROES_HASH).unwrap());
        let _inspect1 = blob_fetcher.fetch(&BlobId::parse(ONES_HASH).unwrap());
        assert_inspect_tree!(
            inspector,
            root: {
                blob_fetcher: {
                    ZEROES_HASH.to_string() => contains {},
                    ONES_HASH.to_string() => contains {},
                }
            }
        );
    }
}
| 29.924757 | 99 | 0.523887 |
61814b3ced901316dc6f29890fe45e651f0c2bf8
| 3,799 |
use hir::Semantics;
use ide_db::{
base_db::{CrateId, FileID, FilePosition},
RootDatabase,
};
use itertools::Itertools;
use syntax::{
algo::find_node_at_offset,
ast::{self, AstNode},
};
use crate::NavigationTarget;
// Feature: Parent Module
//
// Navigates to the parent module of the current module.
//
// |===
// | Editor | Action Name
//
// | VS Code | **Rust Analyzer: Locate parent module**
// |===
//
// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
/// This returns `Vec` because a module may be included from several places.
pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
    let sema = Semantics::new(db);
    let source_file = sema.parse(position.file_id);
    // Innermost `mod` node covering the cursor, if any.
    let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
    // If cursor is literally on `mod foo`, go to the grandpa.
    if let Some(m) = &module {
        if !m
            .item_list()
            .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset))
        {
            cov_mark::hit!(test_resolve_parent_module_on_module_decl);
            // skip(1) steps past the module node itself before searching
            // the ancestors for the enclosing module.
            module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast);
        }
    }
    match module {
        // Cursor is inside an inline/declared module: navigate to where that
        // module is declared (possibly several places).
        Some(module) => sema
            .to_def(&module)
            .into_iter()
            .map(|module| NavigationTarget::from_module_to_decl(db, module))
            .collect(),
        // Otherwise resolve the whole file to its module definitions.
        None => sema
            .to_module_defs(position.file_id)
            .map(|module| NavigationTarget::from_module_to_decl(db, module))
            .collect(),
    }
}
/// Returns `Vec` for the same reason as `parent_module`
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileID) -> Vec<CrateId> {
    let sema = Semantics::new(db);
    // A file included from multiple modules can belong to several crates;
    // `unique` deduplicates the crate ids.
    sema.to_module_defs(file_id).map(|module| module.krate().into()).unique().collect()
}
#[cfg(test)]
mod tests {
    use ide_db::base_db::FileRange;
    use crate::fixture;
    // Asserts that the navigation targets computed for the `$0` cursor
    // position match the `//^^^` annotations in the fixture.
    fn check(ra_fixture: &str) {
        let (analysis, position, expected) = fixture::annotations(ra_fixture);
        let navs = analysis.parent_module(position).unwrap();
        let navs = navs
            .iter()
            .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
            .collect::<Vec<_>>();
        assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::<Vec<_>>(), navs);
    }
    #[test]
    fn test_resolve_parent_module() {
        check(
            r#"
//- /lib.rs
mod foo;
//^^^
//- /foo.rs
$0// empty
"#,
        );
    }
    // Cursor directly on the `mod` declaration resolves to the grandparent.
    #[test]
    fn test_resolve_parent_module_on_module_decl() {
        cov_mark::check!(test_resolve_parent_module_on_module_decl);
        check(
            r#"
//- /lib.rs
mod foo;
//^^^
//- /foo.rs
mod $0bar;
//- /foo/bar.rs
// empty
"#,
        );
    }
    #[test]
    fn test_resolve_parent_module_for_inline() {
        check(
            r#"
//- /lib.rs
mod foo {
    mod bar {
        mod baz { $0 }
    } //^^^
}
"#,
        );
    }
    // A file included via two `mod` declarations yields two targets.
    #[test]
    fn test_resolve_multi_parent_module() {
        check(
            r#"
//- /main.rs
mod foo;
//^^^
#[path = "foo.rs"]
mod bar;
//^^^
//- /foo.rs
$0
"#,
        );
    }
    #[test]
    fn test_resolve_crate_root() {
        let (analysis, file_id) = fixture::file(
            r#"
//- /foo.rs
$0
//- /main.rs
mod foo;
"#,
        );
        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1);
    }
    // A file included from two crates belongs to both.
    #[test]
    fn test_resolve_multi_parent_crate() {
        let (analysis, file_id) = fixture::file(
            r#"
//- /baz.rs
$0
//- /foo.rs crate:foo
mod baz;
//- /bar.rs crate:bar
mod baz;
"#,
        );
        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2);
    }
}
| 22.613095 | 113 | 0.572782 |
61456a7f6b4e0c9da08407f2ce9f5ee07efafef7
| 5,024 |
use super::super::{EResult, Emulator};
// Register mnemonics in opcode-encoding order; 'm' presumably denotes the
// memory operand addressed through HL — TODO confirm against the decoder.
const REGISTERS: [char; 8] = ['b', 'c', 'd', 'e', 'h', 'l', 'm', 'a'];
impl Emulator {
    /// Jumps to the immediate 16-bit address if `flag` is clear; otherwise
    /// skips the two operand bytes.
    pub fn jmp_not(&mut self, flag: &str) -> EResult<()> {
        if !self.reg.get_flag(flag) {
            self.pc = self.read_addr()?;
        } else {
            // Step over the unused address operand.
            self.pc += 2;
        }
        Ok(())
    }
    /// Jumps to the immediate 16-bit address if `flag` is set; otherwise
    /// skips the two operand bytes.
    pub fn jmp_if(&mut self, flag: &str) -> EResult<()> {
        if self.reg.get_flag(flag) {
            self.pc = self.read_addr()?;
        } else {
            self.pc += 2;
        }
        Ok(())
    }
    /// Calls the immediate address if `flag` is clear; otherwise skips the
    /// two operand bytes.
    pub fn call_not(&mut self, flag: &str) -> EResult<()> {
        if !self.reg.get_flag(flag) {
            self.call_imm()?;
        } else {
            self.pc += 2;
        }
        Ok(())
    }
    /// Calls the immediate address if `flag` is set; otherwise skips the
    /// two operand bytes.
    pub fn call_if(&mut self, flag: &str) -> EResult<()> {
        if self.reg.get_flag(flag) {
            self.call_imm()?;
        } else {
            self.pc += 2;
        }
        Ok(())
    }
    /// Calls the subroutine at the immediate 16-bit address.
    pub fn call_imm(&mut self) -> EResult<()> {
        // `read_addr` consumes the operand bytes first, so the return address
        // pushed by `call` already points at the next instruction. Delegating
        // to `call` keeps the push/jump sequence in one place.
        let adr = self.read_addr()?;
        self.call(adr)
    }
    /// Pushes the current program counter and transfers control to `adr`.
    pub fn call(&mut self, adr: u16) -> EResult<()> {
        self.push(self.pc)?;
        self.pc = adr;
        Ok(())
    }
    /// Returns from a subroutine if `flag` is set; no operand bytes to skip.
    pub fn ret_if(&mut self, flag: &str) -> EResult<()> {
        if self.reg.get_flag(flag) {
            self.ret()?;
        }
        Ok(())
    }
    /// Returns from a subroutine if `flag` is clear; no operand bytes to skip.
    pub fn ret_not(&mut self, flag: &str) -> EResult<()> {
        if !self.reg.get_flag(flag) {
            self.ret()?;
        }
        Ok(())
    }
    /// Pops the saved return address off the stack into the program counter.
    pub fn ret(&mut self) -> EResult<()> {
        self.pc = self.pop()?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The original tests used `expect("Fuck")` / `expect("")`; those panic
    // messages were replaced with descriptive ones so failures are readable.
    /// `call` pushes the return address and jumps; `ret` (0xc9) restores it.
    #[test]
    fn call_ret() {
        let mut e = Emulator::new();
        e.sp = 0x3fff;
        e.ram[0x1234] = 0xc9;
        e.call(0x1234).expect("call should succeed");
        assert_eq!(e.sp, 0x3fff - 2);
        assert_eq!(e.pc, 0x1234);
        e.execute_next().expect("ret should execute");
        assert_eq!(e.pc, 0x0);
    }
    #[test]
    fn jmp_if() {
        let mut e = Emulator::new();
        e.ram.load_vec(vec![0x04, 0x00, 0x00, 0x00], 0);
        // Performs
        // a) one failing jmp (-> pc = 2)
        // b) one succeeding jmp (pc = ram[pc] = ram[2] -> 0)
        // c) Back in starting position
        // -> Repeat for each flag
        for flag in vec!["zero", "carry", "sign", "parity", "aux"] {
            e.jmp_if(flag).expect("jmp_if should succeed");
            assert_eq!(e.pc, 2);
            e.reg.set_flag(flag, true);
            e.jmp_if(flag).expect("jmp_if should succeed");
            assert_eq!(e.pc, 0);
        }
    }
    #[test]
    fn jmp_not() {
        let mut e = Emulator::new();
        e.ram.load_vec(vec![0x04, 0x00, 0x00, 0x00], 0);
        e.reg.set_flags(0xff);
        // same as tests::jmp_if
        for flag in vec!["zero", "carry", "sign", "parity", "aux"] {
            e.jmp_not(flag).expect("jmp_not should succeed");
            assert_eq!(e.pc, 2);
            e.reg.flip_flag(flag);
            e.jmp_not(flag).expect("jmp_not should succeed");
            assert_eq!(e.pc, 0);
        }
    }
    #[test]
    fn call_if() {
        let mut e = Emulator::new();
        e.sp = 0x3fff;
        e.ram.load_vec(vec![0x00, 0x00, 0x11, 0x11], 0);
        for flag in vec!["zero", "carry", "sign", "parity", "aux"] {
            e.call_if(flag).expect("call_if should succeed");
            assert_eq!(e.pc, 2);
            e.ret_if(flag).expect("ret_if should succeed");
            assert_eq!(e.pc, 2);
            e.reg.set_flag(flag, true);
            e.call_if(flag).expect("call_if should succeed");
            assert_eq!(e.pc, 0x1111);
            e.ret_if(flag).expect("ret_if should succeed");
            assert_eq!(e.pc, 4);
            e.pc = 0;
        }
    }
    #[test]
    fn call_not() {
        let mut e = Emulator::new();
        e.sp = 0x3fff;
        e.ram.load_vec(vec![0x00, 0x00, 0x11, 0x11], 0);
        e.reg.set_flags(0xff);
        for flag in vec!["zero", "carry", "sign", "parity", "aux"] {
            e.call_not(flag).expect("call_not should succeed");
            assert_eq!(e.pc, 2);
            e.ret_not(flag).expect("ret_not should succeed");
            assert_eq!(e.pc, 2);
            e.reg.flip_flag(flag);
            e.call_not(flag).expect("call_not should succeed");
            assert_eq!(e.pc, 0x1111);
            e.ret_not(flag).expect("ret_not should succeed");
            assert_eq!(e.pc, 4);
            e.pc = 0;
        }
    }
    #[test]
    fn call() {
        let mut e = Emulator::new();
        e.sp = 0x3fff;
        assert_eq!(e.pc, 0x0);
        e.call_if("carry").expect("call_if should succeed");
        assert_eq!(e.pc, 0x2);
        e.reg.set_flag("carry", true);
        e.ram.load_vec(vec![0x34, 0x12], 2);
        e.call_if("carry").expect("call_if should succeed");
        assert_eq!(e.pc, 0x1234);
    }
    /// Each RST opcode (0xc7..=0xff step 8) vectors to `(opcode - 0xc7) * 8`.
    #[test]
    fn rst() {
        let mut e = Emulator::new();
        e.pc = 0x1111;
        e.sp = 0x3fff;
        e.ram
            .load_vec(vec![0xc7, 0xcf, 0xd7, 0xdf, 0xe7, 0xef, 0xf7, 0xff], e.pc);
        for i in 0x1111..0x1119 {
            e.pc = i as u16;
            e.execute_next().expect("rst should execute");
            assert_eq!(e.pc, (i - 0x1111) * 8);
        }
    }
}
| 24.270531 | 82 | 0.440287 |
d9eebc3693bdaad197781e0ca80e76d744ad07a8
| 61,563 |
#![doc = "generated by AutoRust"]
#![allow(unused_mut)]
#![allow(unused_variables)]
#![allow(unused_imports)]
#![allow(clippy::redundant_clone)]
use super::models;
/// Service client; construct via [`ClientBuilder`] or [`Client::new`].
#[derive(Clone)]
pub struct Client {
    // Base service URL, e.g. the ARM endpoint.
    endpoint: String,
    // Credential used to obtain bearer tokens for each request.
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // OAuth scopes requested when fetching tokens.
    scopes: Vec<String>,
    // Pipeline through which all requests are sent.
    pipeline: azure_core::Pipeline,
}
/// Builder for [`Client`]; endpoint and scopes are optional and defaulted.
#[derive(Clone)]
pub struct ClientBuilder {
    credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
    // Defaults to `DEFAULT_ENDPOINT` when not set.
    endpoint: Option<String>,
    // Defaults to `["{endpoint}/"]` when not set.
    scopes: Option<Vec<String>>,
}
/// Default endpoint (Azure public cloud ARM) used when none is configured.
pub const DEFAULT_ENDPOINT: &str = azure_core::resource_manager_endpoint::AZURE_PUBLIC_CLOUD;
impl ClientBuilder {
    /// Creates a builder with the given credential and default
    /// endpoint/scopes.
    pub fn new(credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>) -> Self {
        Self {
            credential,
            endpoint: None,
            scopes: None,
        }
    }
    /// Overrides the service endpoint.
    pub fn endpoint(mut self, endpoint: impl Into<String>) -> Self {
        self.endpoint = Some(endpoint.into());
        self
    }
    /// Overrides the OAuth scopes requested for tokens.
    pub fn scopes(mut self, scopes: &[&str]) -> Self {
        self.scopes = Some(scopes.iter().map(|scope| (*scope).to_owned()).collect());
        self
    }
    /// Builds the client, defaulting the endpoint to the public cloud and
    /// scopes to `"{endpoint}/"`.
    pub fn build(self) -> Client {
        let endpoint = self.endpoint.unwrap_or_else(|| DEFAULT_ENDPOINT.to_owned());
        let scopes = self.scopes.unwrap_or_else(|| vec![format!("{}/", endpoint)]);
        Client::new(endpoint, self.credential, scopes)
    }
}
impl Client {
    // Accessors used by the operation builders when constructing requests.
    pub(crate) fn endpoint(&self) -> &str {
        self.endpoint.as_str()
    }
    pub(crate) fn token_credential(&self) -> &dyn azure_core::auth::TokenCredential {
        self.credential.as_ref()
    }
    pub(crate) fn scopes(&self) -> Vec<&str> {
        self.scopes.iter().map(String::as_str).collect()
    }
    /// Sends a request through the client's pipeline.
    pub(crate) async fn send(&self, request: impl Into<azure_core::Request>) -> azure_core::error::Result<azure_core::Response> {
        let mut context = azure_core::Context::default();
        let mut request = request.into();
        self.pipeline.send(&mut context, &mut request).await
    }
    /// Creates a client with explicit endpoint, credential and scopes.
    /// Prefer [`ClientBuilder`] for the common case.
    pub fn new(
        endpoint: impl Into<String>,
        credential: std::sync::Arc<dyn azure_core::auth::TokenCredential>,
        scopes: Vec<String>,
    ) -> Self {
        let endpoint = endpoint.into();
        // Pipeline built with default options and no extra per-call/per-retry
        // policies (the two empty Vecs).
        let pipeline = azure_core::Pipeline::new(
            option_env!("CARGO_PKG_NAME"),
            option_env!("CARGO_PKG_VERSION"),
            azure_core::ClientOptions::default(),
            Vec::new(),
            Vec::new(),
        );
        Self {
            endpoint,
            credential,
            scopes,
            pipeline,
        }
    }
    /// Access to the `management_associations` operation group.
    pub fn management_associations(&self) -> management_associations::Client {
        management_associations::Client(self.clone())
    }
    /// Access to the `management_configurations` operation group.
    pub fn management_configurations(&self) -> management_configurations::Client {
        management_configurations::Client(self.clone())
    }
    /// Access to the `operations` operation group.
    pub fn operations(&self) -> operations::Client {
        operations::Client(self.clone())
    }
    /// Access to the `solutions` operation group.
    pub fn solutions(&self) -> solutions::Client {
        solutions::Client(self.clone())
    }
}
pub mod solutions {
use super::models;
    /// Client for the `solutions` operation group, wrapping the service client.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        // Each method only captures parameters into a lazy builder; nothing is
        // sent until the builder's `into_future` is awaited.
        #[doc = "Retrieve solution."]
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            solution_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                solution_name: solution_name.into(),
            }
        }
        #[doc = "Create/Update Solution."]
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            solution_name: impl Into<String>,
            parameters: impl Into<models::Solution>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                solution_name: solution_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Patch a Solution."]
        pub fn update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            solution_name: impl Into<String>,
            parameters: impl Into<models::SolutionPatch>,
        ) -> update::Builder {
            update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                solution_name: solution_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Deletes the solution"]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            solution_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                solution_name: solution_name.into(),
            }
        }
        #[doc = "Retrieves the solution list for the subscription"]
        pub fn list_by_resource_group(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
        ) -> list_by_resource_group::Builder {
            list_by_resource_group::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
            }
        }
        #[doc = "Retrieves the solution list for the subscription"]
        pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
            list_by_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
            }
        }
    }
    pub mod get {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::Solution;
        /// Lazy request builder; the GET is issued by `into_future`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) solution_name: String,
        }
        impl Builder {
            /// Sends the GET request and deserializes the `Solution` response.
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/solutions/{}",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.resource_group_name,
                            &this.solution_name
                        );
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::GET);
                        // Fetch a bearer token for the configured scopes.
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
                        let req_body = azure_core::EMPTY_BODY;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::Solution = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            // Any non-200 status is surfaced as an HTTP error.
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
    pub mod create_or_update {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::Solution;
        /// Lazy request builder; the PUT is issued by `into_future`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) solution_name: String,
            pub(crate) parameters: models::Solution,
        }
        impl Builder {
            #[doc = "only the first response will be fetched as long running operations are not supported yet"]
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/solutions/{}",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.resource_group_name,
                            &this.solution_name
                        );
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::PUT);
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
                        // PUT carries the Solution document as JSON.
                        req_builder = req_builder.header("content-type", "application/json");
                        let req_body = azure_core::to_json(&this.parameters)?;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            // Only 201 is accepted (long-running follow-up is unsupported).
                            http::StatusCode::CREATED => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::Solution = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
    pub mod update {
        use super::models;
        use azure_core::error::ResultExt;
        type Response = models::Solution;
        /// Lazy request builder; the PATCH is issued by `into_future`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) solution_name: String,
            pub(crate) parameters: models::SolutionPatch,
        }
        impl Builder {
            #[doc = "only the first response will be fetched as long running operations are not supported yet"]
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/solutions/{}",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.resource_group_name,
                            &this.solution_name
                        );
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::PATCH);
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
                        // PATCH carries the SolutionPatch document as JSON.
                        req_builder = req_builder.header("content-type", "application/json");
                        let req_body = azure_core::to_json(&this.parameters)?;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => {
                                let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
                                let rsp_value: models::Solution = serde_json::from_slice(&rsp_body)?;
                                Ok(rsp_value)
                            }
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
    pub mod delete {
        use super::models;
        use azure_core::error::ResultExt;
        // Delete returns no body on success.
        type Response = ();
        /// Lazy request builder; the DELETE is issued by `into_future`.
        #[derive(Clone)]
        pub struct Builder {
            pub(crate) client: super::super::Client,
            pub(crate) subscription_id: String,
            pub(crate) resource_group_name: String,
            pub(crate) solution_name: String,
        }
        impl Builder {
            #[doc = "only the first response will be fetched as long running operations are not supported yet"]
            pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
                Box::pin({
                    let this = self.clone();
                    async move {
                        let url_str = &format!(
                            "{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/solutions/{}",
                            this.client.endpoint(),
                            &this.subscription_id,
                            &this.resource_group_name,
                            &this.solution_name
                        );
                        let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
                        let mut req_builder = http::request::Builder::new();
                        req_builder = req_builder.method(http::Method::DELETE);
                        let credential = this.client.token_credential();
                        let token_response = credential
                            .get_token(&this.client.scopes().join(" "))
                            .await
                            .context(azure_core::error::ErrorKind::Other, "get bearer token")?;
                        req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
                        url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
                        let req_body = azure_core::EMPTY_BODY;
                        req_builder = req_builder.uri(url.as_str());
                        let req = req_builder
                            .body(req_body)
                            .context(azure_core::error::ErrorKind::Other, "build request")?;
                        let rsp = this
                            .client
                            .send(req)
                            .await
                            .context(azure_core::error::ErrorKind::Io, "execute request")?;
                        let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
                        match rsp_status {
                            http::StatusCode::OK => Ok(()),
                            status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
                                status: status_code.as_u16(),
                                error_code: None,
                            })),
                        }
                    }
                })
            }
        }
    }
pub mod list_by_resource_group {
use super::models;
use azure_core::error::ResultExt;
type Response = models::SolutionPropertiesList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/solutions",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::SolutionPropertiesList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod list_by_subscription {
use super::models;
use azure_core::error::ResultExt;
type Response = models::SolutionPropertiesList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.OperationsManagement/solutions",
this.client.endpoint(),
&this.subscription_id
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::SolutionPropertiesList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod management_associations {
use super::models;
    // Entry point for the ManagementAssociations operation group; wraps the
    // service-wide client and hands out per-operation request builders.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Retrieves the ManagementAssociations list for the subscription"]
        pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
            list_by_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Retrieve ManagementAssociation."]
        // `provider_name`/`resource_type`/`resource_name` identify the parent
        // resource the association is attached to (see the URL template in the
        // `get` module below).
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            provider_name: impl Into<String>,
            resource_type: impl Into<String>,
            resource_name: impl Into<String>,
            management_association_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                provider_name: provider_name.into(),
                resource_type: resource_type.into(),
                resource_name: resource_name.into(),
                management_association_name: management_association_name.into(),
            }
        }
        #[doc = "Create/Update ManagementAssociation."]
        // `parameters` carries the association payload sent as the PUT body.
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            provider_name: impl Into<String>,
            resource_type: impl Into<String>,
            resource_name: impl Into<String>,
            management_association_name: impl Into<String>,
            parameters: impl Into<models::ManagementAssociation>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                provider_name: provider_name.into(),
                resource_type: resource_type.into(),
                resource_name: resource_name.into(),
                management_association_name: management_association_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Deletes the ManagementAssociation"]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            provider_name: impl Into<String>,
            resource_type: impl Into<String>,
            resource_name: impl Into<String>,
            management_association_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                provider_name: provider_name.into(),
                resource_type: resource_type.into(),
                resource_name: resource_name.into(),
                management_association_name: management_association_name.into(),
            }
        }
    }
pub mod list_by_subscription {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementAssociationPropertiesList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.OperationsManagement/ManagementAssociations",
this.client.endpoint(),
&this.subscription_id
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementAssociationPropertiesList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementAssociation;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) provider_name: String,
pub(crate) resource_type: String,
pub(crate) resource_name: String,
pub(crate) management_association_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/providers/Microsoft.OperationsManagement/ManagementAssociations/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . provider_name , & this . resource_type , & this . resource_name , & this . management_association_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementAssociation = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementAssociation;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) provider_name: String,
pub(crate) resource_type: String,
pub(crate) resource_name: String,
pub(crate) management_association_name: String,
pub(crate) parameters: models::ManagementAssociation,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/providers/Microsoft.OperationsManagement/ManagementAssociations/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . provider_name , & this . resource_type , & this . resource_name , & this . management_association_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementAssociation = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
type Response = ();
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) provider_name: String,
pub(crate) resource_type: String,
pub(crate) resource_name: String,
pub(crate) management_association_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = & format ! ("{}/subscriptions/{}/resourcegroups/{}/providers/{}/{}/{}/providers/Microsoft.OperationsManagement/ManagementAssociations/{}" , this . client . endpoint () , & this . subscription_id , & this . resource_group_name , & this . provider_name , & this . resource_type , & this . resource_name , & this . management_association_name) ;
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(()),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod management_configurations {
use super::models;
    // Entry point for the ManagementConfigurations operation group; wraps the
    // service-wide client and hands out per-operation request builders.
    pub struct Client(pub(crate) super::Client);
    impl Client {
        #[doc = "Retrieves the ManagementConfigurations list for the subscription"]
        pub fn list_by_subscription(&self, subscription_id: impl Into<String>) -> list_by_subscription::Builder {
            list_by_subscription::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
            }
        }
        #[doc = "Retrieve ManagementConfiguration."]
        pub fn get(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            management_configuration_name: impl Into<String>,
        ) -> get::Builder {
            get::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                management_configuration_name: management_configuration_name.into(),
            }
        }
        #[doc = "Create/Update ManagementConfiguration."]
        // `parameters` carries the configuration payload sent as the PUT body.
        pub fn create_or_update(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            management_configuration_name: impl Into<String>,
            parameters: impl Into<models::ManagementConfiguration>,
        ) -> create_or_update::Builder {
            create_or_update::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                management_configuration_name: management_configuration_name.into(),
                parameters: parameters.into(),
            }
        }
        #[doc = "Deletes the ManagementConfiguration"]
        pub fn delete(
            &self,
            subscription_id: impl Into<String>,
            resource_group_name: impl Into<String>,
            management_configuration_name: impl Into<String>,
        ) -> delete::Builder {
            delete::Builder {
                client: self.0.clone(),
                subscription_id: subscription_id.into(),
                resource_group_name: resource_group_name.into(),
                management_configuration_name: management_configuration_name.into(),
            }
        }
    }
pub mod list_by_subscription {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementConfigurationPropertiesList;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/providers/Microsoft.OperationsManagement/ManagementConfigurations",
this.client.endpoint(),
&this.subscription_id
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementConfigurationPropertiesList = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod get {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementConfiguration;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) management_configuration_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/ManagementConfigurations/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.management_configuration_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementConfiguration = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod create_or_update {
use super::models;
use azure_core::error::ResultExt;
type Response = models::ManagementConfiguration;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) management_configuration_name: String,
pub(crate) parameters: models::ManagementConfiguration,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/ManagementConfigurations/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.management_configuration_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::PUT);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
req_builder = req_builder.header("content-type", "application/json");
let req_body = azure_core::to_json(&this.parameters)?;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::ManagementConfiguration = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
pub mod delete {
use super::models;
use azure_core::error::ResultExt;
type Response = ();
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
pub(crate) subscription_id: String,
pub(crate) resource_group_name: String,
pub(crate) management_configuration_name: String,
}
impl Builder {
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!(
"{}/subscriptions/{}/resourcegroups/{}/providers/Microsoft.OperationsManagement/ManagementConfigurations/{}",
this.client.endpoint(),
&this.subscription_id,
&this.resource_group_name,
&this.management_configuration_name
);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::DELETE);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => Ok(()),
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
pub mod operations {
use super::models;
pub struct Client(pub(crate) super::Client);
impl Client {
pub fn list(&self) -> list::Builder {
list::Builder { client: self.0.clone() }
}
}
pub mod list {
use super::models;
use azure_core::error::ResultExt;
type Response = models::OperationListResult;
#[derive(Clone)]
pub struct Builder {
pub(crate) client: super::super::Client,
}
impl Builder {
#[doc = "only the first response will be fetched as the continuation token is not part of the response schema"]
pub fn into_future(self) -> futures::future::BoxFuture<'static, azure_core::error::Result<Response>> {
Box::pin({
let this = self.clone();
async move {
let url_str = &format!("{}/providers/Microsoft.OperationsManagement/operations", this.client.endpoint(),);
let mut url = url::Url::parse(url_str).context(azure_core::error::ErrorKind::DataConversion, "parse url")?;
let mut req_builder = http::request::Builder::new();
req_builder = req_builder.method(http::Method::GET);
let credential = this.client.token_credential();
let token_response = credential
.get_token(&this.client.scopes().join(" "))
.await
.context(azure_core::error::ErrorKind::Other, "get bearer token")?;
req_builder = req_builder.header(http::header::AUTHORIZATION, format!("Bearer {}", token_response.token.secret()));
url.query_pairs_mut().append_pair("api-version", "2015-11-01-preview");
let req_body = azure_core::EMPTY_BODY;
req_builder = req_builder.uri(url.as_str());
let req = req_builder
.body(req_body)
.context(azure_core::error::ErrorKind::Other, "build request")?;
let rsp = this
.client
.send(req)
.await
.context(azure_core::error::ErrorKind::Io, "execute request")?;
let (rsp_status, rsp_headers, rsp_stream) = rsp.deconstruct();
match rsp_status {
http::StatusCode::OK => {
let rsp_body = azure_core::collect_pinned_stream(rsp_stream).await?;
let rsp_value: models::OperationListResult = serde_json::from_slice(&rsp_body)?;
Ok(rsp_value)
}
status_code => Err(azure_core::error::Error::from(azure_core::error::ErrorKind::HttpResponse {
status: status_code.as_u16(),
error_code: None,
})),
}
}
})
}
}
}
}
| 51.951899 | 380 | 0.493511 |
fb1ad12d914fc9ec18d03f7ff9746e0f11f1e183
| 14,976 |
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Serialized configuration of a build.
//!
//! This module implements parsing `config.mk` and `config.toml` configuration
//! files to tweak how the build runs.
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::prelude::*;
use std::path::PathBuf;
use std::process;
use num_cpus;
use rustc_serialize::Decodable;
use toml::{Parser, Decoder, Value};
/// Global configuration for the entire build and/or bootstrap.
///
/// This structure is derived from a combination of both `config.toml` and
/// `config.mk`. As of the time of this writing it's unlikely that `config.toml`
/// is used all that much, so this is primarily filled out by `config.mk` which
/// is generated from `./configure`.
///
/// Note that this structure is not decoded directly into, but rather it is
/// filled out from the decoded forms of the structs below. For documentation
/// each field, see the corresponding fields in
/// `src/bootstrap/config.toml.example`.
#[derive(Default)]
pub struct Config {
pub ccache: bool,
pub ninja: bool,
pub verbose: bool,
pub submodules: bool,
pub compiler_docs: bool,
pub docs: bool,
pub target_config: HashMap<String, Target>,
// llvm codegen options
pub llvm_assertions: bool,
pub llvm_optimize: bool,
pub llvm_version_check: bool,
pub llvm_static_stdcpp: bool,
// rust codegen options
pub rust_optimize: bool,
pub rust_codegen_units: u32,
pub rust_debug_assertions: bool,
pub rust_debuginfo: bool,
pub rust_rpath: bool,
pub rustc_default_linker: Option<String>,
pub rustc_default_ar: Option<String>,
pub rust_optimize_tests: bool,
pub rust_debuginfo_tests: bool,
pub build: String,
pub host: Vec<String>,
pub target: Vec<String>,
pub rustc: Option<String>,
pub cargo: Option<String>,
pub local_rebuild: bool,
// libstd features
pub debug_jemalloc: bool,
pub use_jemalloc: bool,
// misc
pub channel: String,
pub musl_root: Option<PathBuf>,
pub prefix: Option<String>,
}
/// Per-target configuration stored in the global configuration structure.
#[derive(Default)]
pub struct Target {
    // All paths come from the corresponding `[target.<triple>]` TOML keys
    // (see `TomlTarget` below); each is optional and falls back to defaults.
    pub llvm_config: Option<PathBuf>,
    pub jemalloc: Option<PathBuf>,
    pub cc: Option<PathBuf>,
    pub cxx: Option<PathBuf>,
    // NOTE(review): populated from `android_ndk` in `TomlTarget`; presumably
    // the NDK installation root for Android targets — confirm.
    pub ndk: Option<PathBuf>,
}
/// Structure of the `config.toml` file that configuration is read from.
///
/// This structure uses `Decodable` to automatically decode a TOML configuration
/// file into this format, and then this is traversed and written into the above
/// `Config` structure.
#[derive(RustcDecodable, Default)]
struct TomlConfig {
    // Each field mirrors a top-level TOML table; all are optional so a
    // missing table simply leaves the compiled-in defaults in place.
    build: Option<Build>,
    llvm: Option<Llvm>,
    rust: Option<Rust>,
    // `[target.<triple>]` tables, keyed by triple.
    target: Option<HashMap<String, TomlTarget>>,
}
/// TOML representation of various global build decisions.
#[derive(RustcDecodable, Default, Clone)]
struct Build {
    // Build triple; overrides the one passed to `Config::parse` when present.
    build: Option<String>,
    // Extra host/target triples; merged (deduplicated) into `Config::host`
    // and `Config::target` by `Config::parse`.
    host: Vec<String>,
    target: Vec<String>,
    // Optional external cargo/rustc binaries for bootstrapping.
    cargo: Option<String>,
    rustc: Option<String>,
    compiler_docs: Option<bool>,
    docs: Option<bool>,
}
/// TOML representation of how the LLVM build is configured.
#[derive(RustcDecodable, Default)]
struct Llvm {
    // Each key, when present, overwrites the matching `Config` field
    // (ccache, ninja, llvm_assertions, llvm_optimize, llvm_version_check,
    // llvm_static_stdcpp) via `set` in `Config::parse`.
    ccache: Option<bool>,
    ninja: Option<bool>,
    assertions: Option<bool>,
    optimize: Option<bool>,
    version_check: Option<bool>,
    static_libstdcpp: Option<bool>,
}
/// TOML representation of how the Rust build is configured.
#[derive(RustcDecodable, Default)]
struct Rust {
    // Optional overrides for the `rust_*`, jemalloc, linker/ar, channel,
    // musl and rpath settings in `Config`; `None` keeps the default.
    optimize: Option<bool>,
    codegen_units: Option<u32>,
    debug_assertions: Option<bool>,
    debuginfo: Option<bool>,
    debug_jemalloc: Option<bool>,
    use_jemalloc: Option<bool>,
    default_linker: Option<String>,
    default_ar: Option<String>,
    // Release channel name (the compiled-in default is "dev").
    channel: Option<String>,
    musl_root: Option<String>,
    rpath: Option<bool>,
    optimize_tests: Option<bool>,
    debuginfo_tests: Option<bool>,
}
/// TOML representation of how each build target is configured.
#[derive(RustcDecodable, Default)]
struct TomlTarget {
    llvm_config: Option<String>, // resolved relative to the current dir
    jemalloc: Option<String>,    // resolved relative to the current dir
    cc: Option<String>,
    cxx: Option<String>,
    android_ndk: Option<String>, // becomes `Target::ndk`
}
impl Config {
    /// Builds a `Config` from hard-coded defaults overlaid with the optional
    /// `config.toml` at `file`. `build` is the triple this build runs on and
    /// always becomes the first host (and therefore target) entry.
    /// Exits the process (code 2) on a TOML parse or decode failure.
    pub fn parse(build: &str, file: Option<PathBuf>) -> Config {
        let mut config = Config::default();
        // Defaults applied before any TOML values are read; the TOML may
        // override each of these below.
        config.llvm_optimize = true;
        config.use_jemalloc = true;
        config.rust_optimize = true;
        config.rust_optimize_tests = true;
        config.submodules = true;
        config.docs = true;
        config.rust_rpath = true;
        config.rust_codegen_units = 1;
        config.build = build.to_string();
        config.channel = "dev".to_string();
        // Read and decode config.toml, printing diagnostics and exiting on
        // any error; absence of the file yields an all-default TomlConfig.
        let toml = file.map(|file| {
            let mut f = t!(File::open(&file));
            let mut toml = String::new();
            t!(f.read_to_string(&mut toml));
            let mut p = Parser::new(&toml);
            let table = match p.parse() {
                Some(table) => table,
                None => {
                    println!("failed to parse TOML configuration:");
                    for err in p.errors.iter() {
                        let (loline, locol) = p.to_linecol(err.lo);
                        let (hiline, hicol) = p.to_linecol(err.hi);
                        println!("{}:{}-{}:{}: {}", loline, locol, hiline,
                                 hicol, err.desc);
                    }
                    process::exit(2);
                }
            };
            let mut d = Decoder::new(Value::Table(table));
            match Decodable::decode(&mut d) {
                Ok(cfg) => cfg,
                Err(e) => {
                    println!("failed to decode TOML: {}", e);
                    process::exit(2);
                }
            }
        }).unwrap_or_else(|| TomlConfig::default());
        let build = toml.build.clone().unwrap_or(Build::default());
        set(&mut config.build, build.build.clone());
        // The build triple always leads the host list; extra hosts from the
        // TOML are appended without duplicates, and every host is a target.
        config.host.push(config.build.clone());
        for host in build.host.iter() {
            if !config.host.contains(host) {
                config.host.push(host.clone());
            }
        }
        for target in config.host.iter().chain(&build.target) {
            if !config.target.contains(target) {
                config.target.push(target.clone());
            }
        }
        config.rustc = build.rustc;
        config.cargo = build.cargo;
        set(&mut config.compiler_docs, build.compiler_docs);
        set(&mut config.docs, build.docs);
        // `[llvm]` overrides.
        if let Some(ref llvm) = toml.llvm {
            set(&mut config.ccache, llvm.ccache);
            set(&mut config.ninja, llvm.ninja);
            set(&mut config.llvm_assertions, llvm.assertions);
            set(&mut config.llvm_optimize, llvm.optimize);
            set(&mut config.llvm_version_check, llvm.version_check);
            set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
        }
        // `[rust]` overrides.
        if let Some(ref rust) = toml.rust {
            set(&mut config.rust_debug_assertions, rust.debug_assertions);
            set(&mut config.rust_debuginfo, rust.debuginfo);
            set(&mut config.rust_optimize, rust.optimize);
            set(&mut config.rust_optimize_tests, rust.optimize_tests);
            set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests);
            set(&mut config.rust_rpath, rust.rpath);
            set(&mut config.debug_jemalloc, rust.debug_jemalloc);
            set(&mut config.use_jemalloc, rust.use_jemalloc);
            set(&mut config.channel, rust.channel.clone());
            config.rustc_default_linker = rust.default_linker.clone();
            config.rustc_default_ar = rust.default_ar.clone();
            config.musl_root = rust.musl_root.clone().map(PathBuf::from);
            // 0 codegen units is shorthand for "one per CPU".
            match rust.codegen_units {
                Some(0) => config.rust_codegen_units = num_cpus::get() as u32,
                Some(n) => config.rust_codegen_units = n,
                None => {}
            }
        }
        // `[target.<triple>]` overrides; relative paths are anchored at cwd.
        if let Some(ref t) = toml.target {
            for (triple, cfg) in t {
                let mut target = Target::default();
                if let Some(ref s) = cfg.llvm_config {
                    target.llvm_config = Some(env::current_dir().unwrap().join(s));
                }
                if let Some(ref s) = cfg.jemalloc {
                    target.jemalloc = Some(env::current_dir().unwrap().join(s));
                }
                if let Some(ref s) = cfg.android_ndk {
                    target.ndk = Some(env::current_dir().unwrap().join(s));
                }
                target.cxx = cfg.cxx.clone().map(PathBuf::from);
                target.cc = cfg.cc.clone().map(PathBuf::from);
                config.target_config.insert(triple.clone(), target);
            }
        }
        return config
    }
/// "Temporary" routine to parse `config.mk` into this configuration.
///
/// While we still have `./configure` this implements the ability to decode
/// that configuration into this. This isn't exactly a full-blown makefile
/// parser, but hey it gets the job done!
pub fn update_with_config_mk(&mut self) {
let mut config = String::new();
File::open("config.mk").unwrap().read_to_string(&mut config).unwrap();
for line in config.lines() {
let mut parts = line.splitn(2, ":=").map(|s| s.trim());
let key = parts.next().unwrap();
let value = match parts.next() {
Some(n) if n.starts_with('\"') => &n[1..n.len() - 1],
Some(n) => n,
None => continue
};
macro_rules! check {
($(($name:expr, $val:expr),)*) => {
if value == "1" {
$(
if key == concat!("CFG_ENABLE_", $name) {
$val = true;
continue
}
if key == concat!("CFG_DISABLE_", $name) {
$val = false;
continue
}
)*
}
}
}
check! {
("CCACHE", self.ccache),
("MANAGE_SUBMODULES", self.submodules),
("COMPILER_DOCS", self.compiler_docs),
("DOCS", self.docs),
("LLVM_ASSERTIONS", self.llvm_assertions),
("OPTIMIZE_LLVM", self.llvm_optimize),
("LLVM_VERSION_CHECK", self.llvm_version_check),
("LLVM_STATIC_STDCPP", self.llvm_static_stdcpp),
("OPTIMIZE", self.rust_optimize),
("DEBUG_ASSERTIONS", self.rust_debug_assertions),
("DEBUGINFO", self.rust_debuginfo),
("JEMALLOC", self.use_jemalloc),
("DEBUG_JEMALLOC", self.debug_jemalloc),
("RPATH", self.rust_rpath),
("OPTIMIZE_TESTS", self.rust_optimize_tests),
("DEBUGINFO_TESTS", self.rust_debuginfo_tests),
("LOCAL_REBUILD", self.local_rebuild),
}
match key {
"CFG_BUILD" => self.build = value.to_string(),
"CFG_HOST" => {
self.host = value.split(" ").map(|s| s.to_string())
.collect();
}
"CFG_TARGET" => {
self.target = value.split(" ").map(|s| s.to_string())
.collect();
}
"CFG_MUSL_ROOT" if value.len() > 0 => {
self.musl_root = Some(PathBuf::from(value));
}
"CFG_DEFAULT_AR" if value.len() > 0 => {
self.rustc_default_ar = Some(value.to_string());
}
"CFG_DEFAULT_LINKER" if value.len() > 0 => {
self.rustc_default_linker = Some(value.to_string());
}
"CFG_RELEASE_CHANNEL" => {
self.channel = value.to_string();
}
"CFG_PREFIX" => {
self.prefix = Some(value.to_string());
}
"CFG_LLVM_ROOT" if value.len() > 0 => {
let target = self.target_config.entry(self.build.clone())
.or_insert(Target::default());
let root = PathBuf::from(value);
target.llvm_config = Some(root.join("bin/llvm-config"));
}
"CFG_JEMALLOC_ROOT" if value.len() > 0 => {
let target = self.target_config.entry(self.build.clone())
.or_insert(Target::default());
target.jemalloc = Some(PathBuf::from(value));
}
"CFG_ARM_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
let target = "arm-linux-androideabi".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));
}
"CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => {
let target = "armv7-linux-androideabi".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));
}
"CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => {
let target = "i686-linux-androideabi".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));
}
"CFG_AARCH64_LINUX_ANDROID_NDK" if value.len() > 0 => {
let target = "aarch64-linux-androideabi".to_string();
let target = self.target_config.entry(target)
.or_insert(Target::default());
target.ndk = Some(PathBuf::from(value));
}
_ => {}
}
}
}
}
/// Overwrites `*field` with the inner value when `val` is `Some`;
/// leaves `*field` untouched when `val` is `None`.
fn set<T>(field: &mut T, val: Option<T>) {
    match val {
        Some(v) => *field = v,
        None => {}
    }
}
| 38.10687 | 83 | 0.54387 |
89d65dfacf3dc9c062bf50678602508b141fa224
| 1,840 |
use std::collections::HashMap;
use std::sync::Mutex;
#[macro_use]
extern crate lazy_static;
use once_cell::sync::Lazy;
use once_cell::sync::OnceCell;
use std::net::TcpStream;
/// Demo entry point: exercises each global-state approach defined below.
fn main() {
    println!("Hello, world!");
    test_once_cell();
    test_lazy_static();
    init_db();
}
// Global map created lazily on first access via once_cell's `Lazy`.
static GLOBAL_DATA: Lazy<Mutex<HashMap<i32, String>>> = Lazy::new(|| {
    let mut m = HashMap::new();
    m.insert(13, "Spica".to_string());
    m.insert(14, "Spica13".to_string());
    m.insert(15, "Spica15".to_string());
    Mutex::new(m)
});
// set(T) => safely sets the global value
// get() -> T => returns the value once it has been set
// get_or_init(Fn) => returns the value, initializing it first if unset
// Lazy::new(Fn) => creates a global value lazily, on first access
/// Returns a process-wide map, initialized on the first call
/// (the `OnceCell::get_or_init` pattern for runtime-initialized globals).
fn global_data() -> &'static Mutex<HashMap<i32, String>> {
    static INSTANCE: OnceCell<Mutex<HashMap<i32, String>>> = OnceCell::new();
    INSTANCE.get_or_init(|| {
        let mut m = HashMap::new();
        m.insert(13, "Spica".to_string());
        m.insert(74, "Hoyten".to_string());
        Mutex::new(m)
    })
}
// fn global_client() -> &'static Mutex<TcpStream> {
// static Client: OnceCell<Mutex<TcpStream>> = OnceCell::new();
// Client.get_or_init(|| {
// let conn = TcpStream::connect("0.0.0.0:8081").unwrap();
// Mutex::new(conn)
// })
// }
fn test_once_cell() { // A run-exactly-once container: for globals, safe init, or lazy init
    let p = GLOBAL_DATA.lock().unwrap();
    println!("{:?}", p.get(&13));
}
// Same lazy-global idea via the older `lazy_static!` macro.
lazy_static! {
    static ref HASHMAP: Mutex<HashMap<u32, String>> = {
        let mut m = HashMap::new();
        m.insert(0,"foo".to_string());
        m.insert(1,"bar".to_string());
        Mutex::new(m)
    };
}
/// Prints the lazily-initialized `HASHMAP` (locking triggers initialization).
fn test_lazy_static() {
    println!("{:?}",HASHMAP.lock());
}
// NOTE(review): `static mut` requires `unsafe` at every access and gives no
// thread-safety guarantees; prefer the OnceCell pattern above. Confirm before
// accessing DBB from more than one thread.
static mut DBB: Option<Mutex<HashMap<u32,String>>> = None;
/// Initializes the global `DBB` with an empty map.
fn init_db() {
    unsafe {
        // SAFETY: assumed to be called once, from `main`, before any other
        // access to DBB — TODO confirm no concurrent readers/writers exist.
        let mp:HashMap<u32,String> = HashMap::new();
        DBB = Some(Mutex::new(mp));
    }
}
| 24.210526 | 77 | 0.588043 |
90c45807d908ec2d0a9ca4941766ce6ca64f8445
| 7,793 |
extern crate cgmath;
use super::shader::*;
use super::bvh::*;
use super::transformable::*;
use utilities::math::*;
#[cfg(all(target_feature = "avx"))]
use utilities::simd::{
SimdFloat4,
SimdRay,
intrin,
__m128, __m256
};
use std::rc::Rc;
use std::f32;
use self::cgmath::Transform;
//TODO: split this file into
//  intersectable.rs
//  transformable.rs
//  triangle.rs
/// Whether an intersection query must find the nearest hit or may stop at any hit.
pub enum IntersectionOrderKind {
    /// Find the closest intersection along the ray.
    FirstIntersection,
    /// Any intersection suffices (when only a hit/no-hit answer is needed).
    AnyIntersection
}
/// Bundled arguments for an intersection query.
pub struct IntersectionArgs<'a> {
    // The ray representation switches to a SIMD layout when AVX is available.
    #[cfg(target_feature = "avx")]
    pub ray: &'a SimdRay,
    #[cfg(not(target_feature = "avx"))]
    pub ray: &'a RayUnit,
    /// Updated in place when a closer intersection is found.
    pub record: &'a mut IntersectionRecord,
    pub intersection_order: IntersectionOrderKind
}
/// Anything a ray can be tested against.
pub trait Intersectable {
    /// check for intersection between ray and surface.
    /// if there is an intersection, fills record with intersection information
    /// only if the new intersection's t is less than the old intersection's t and return true
    /// if there is no intersection, leave record alone and return false
    fn intersect(&self, args: IntersectionArgs) -> bool;
}
/// A triangle with per-vertex normals and a shared shader.
#[derive(Debug)]
pub struct Triangle {
    pub positions: [Vec3; 3],
    /// Per-vertex normals, interpolated barycentrically at hit points.
    pub normals: [Vec3; 3],
    pub shader: Rc<Shader>
}
impl HasSurfaceArea for Triangle {
    /// Triangle area: half the magnitude of the cross product of two edges.
    fn surface_area(&self) -> f32 {
        let edge_a = self.positions[1] - self.positions[0];
        let edge_b = self.positions[2] - self.positions[0];
        edge_a.cross(edge_b).magnitude() * 0.5
    }
}
impl Clone for Triangle {
fn clone(&self) -> Triangle {
Triangle {
positions: [self.positions[0].clone(),
self.positions[1].clone(),
self.positions[2].clone()],
normals: [self.normals[0].clone(),
self.normals[1].clone(),
self.normals[2].clone()],
shader: self.shader.clone()
}
}
}
impl MakesAABoundingBox for Triangle {
    /// Smallest axis-aligned box containing all three vertices.
    fn make_aa_bounding_box(&self) -> AABoundingBox {
        let (p0, p1, p2) = (&self.positions[0], &self.positions[1], &self.positions[2]);
        AABoundingBox {
            lower: p0.min_elem_wise(p1).min_elem_wise(p2),
            upper: p0.max_elem_wise(p1).max_elem_wise(p2),
        }
    }
}
impl Transformable for Triangle {
    /// Attempts to transform if the transform is invertible.
    /// If the transform is not invertible, normals will be invalid.
    fn transform_in_place(&mut self, transform: &Matrix4) {
        // Positions transform as points (homogeneous w = 1).
        for position in self.positions.iter_mut() {
            *position = (transform * position.extend(1.0)).truncate();
        }
        // Normals transform by the inverse-transpose of the matrix with its
        // translation column zeroed; falls back to the identity when the
        // matrix is singular (per the doc note above).
        let normal_transform: Matrix4 = {
            let mut nt = transform.clone();
            nt.w = Vec4::new(0.0, 0.0, 0.0, 1.0);
            nt.invert().unwrap_or(<Matrix4 as One>::one()).transpose()
        };
        for normal in self.normals.iter_mut() {
            *normal = normal_transform.transform_vector(*normal);
        }
    }
}
/// A triangle paired with its bounding box and surface area, cached so
/// repeated queries don't recompute them.
pub struct TriangleWithAABoundingBox {
    pub triangle: Triangle,
    aa_bounding_box: AABoundingBox,
    surface_area: f32
}
impl TriangleWithAABoundingBox {
    /// Clones `triangle` and precomputes its bounding box and surface area.
    pub fn new_from_triangle(triangle: &Triangle) -> TriangleWithAABoundingBox {
        TriangleWithAABoundingBox {
            triangle: triangle.clone(),
            aa_bounding_box: triangle.make_aa_bounding_box(),
            surface_area: triangle.surface_area()
        }
    }
}
impl HasAABoundingBox for TriangleWithAABoundingBox {
    /// Returns the cached bounding box.
    fn aa_bounding_box_ref(&self) -> &AABoundingBox {
        &self.aa_bounding_box
    }
}
impl HasSurfaceArea for TriangleWithAABoundingBox {
    /// Returns the cached surface area.
    fn surface_area(&self) -> f32 {
        self.surface_area
    }
}
/// Triangle in precomputed form for fast ray tests: vertex 0 plus the two
/// edge vectors consumed by the Möller–Trumbore intersection below.
#[derive(Debug)]
#[cfg(not(target_feature = "avx"))]
pub struct IntersectableTriangle {
    triangle: Rc<Triangle>,
    position_0: Vec3,
    edge1: Vec3,
    edge2: Vec3
}
/// AVX variant of the above with SIMD-packed vectors.
#[derive(Debug)]
#[cfg(target_feature = "avx")]
pub struct IntersectableTriangle {
    triangle: Rc<Triangle>,
    position_0: SimdFloat4,
    edge1: SimdFloat4,
    edge2: SimdFloat4,
}
impl IntersectableTriangle {
    /// Precomputes vertex 0 and the two edge vectors used by `intersect`.
    #[cfg(not(target_feature = "avx"))]
    pub fn new_from_triangle(triangle: &Triangle) -> IntersectableTriangle {
        let triangle_ptr = Rc::new(triangle.clone());
        let edge1 = triangle.positions[1].accurate_subtraction(&triangle.positions[0]);
        let edge2 = triangle.positions[2].accurate_subtraction(&triangle.positions[0]);
        IntersectableTriangle {
            triangle: triangle_ptr,
            position_0: triangle.positions[0],
            edge1: edge1,
            edge2: edge2,
        }
    }
    /// AVX variant: same precomputation, converted into SIMD vectors.
    #[cfg(target_feature = "avx")]
    pub fn new_from_triangle(triangle: &Triangle) -> IntersectableTriangle {
        let triangle_ptr = Rc::new(triangle.clone());
        let edge1 = triangle.positions[1].accurate_subtraction(&triangle.positions[0]);
        let edge2 = triangle.positions[2].accurate_subtraction(&triangle.positions[0]);
        IntersectableTriangle {
            triangle: triangle_ptr,
            position_0: triangle.positions[0].into(),
            edge1: edge1.into(),
            edge2: edge2.into(),
        }
    }
}
impl Intersectable for IntersectableTriangle {
    // TODO consider creating a simd version of this SoA style and benchmark
    /// Möller–Trumbore ray/triangle test. Writes `record` and returns true
    /// only for hits whose `t` is inside the ray's range and closer than the
    /// current `record.t`.
    fn intersect(&self, args: IntersectionArgs) -> bool {
        let ray = args.ray;
        let record = args.record;
        // Select scalar or SIMD vector ops depending on the AVX build.
        let ray_normalized_direction = if_avx!(
            avx = ray.direction,
            noavx = *ray.direction.value()
        );
        let cross = if_avx!(
            avx = |a: SimdFloat4, b| a.vec3_cross(b),
            noavx = |a: Vec3, b| a.cross(b)
        );
        let dot = if_avx! (
            avx = |a: SimdFloat4, b| a.vec3_dot(b),
            noavx = |a: Vec3, b| a.dot(b)
        );
        let edge1 = self.edge1;
        let edge2 = self.edge2;
        let h = cross(ray_normalized_direction, edge2);
        // `a` is the determinant; ~0 means the ray is parallel to the plane.
        let a = dot(edge1, h);
        if apprx_eq(a, 0.0, f32::EPSILON) {
            return false;
        }
        let f = 1.0 / a;
        let s = ray.position - self.position_0;
        // u, v are barycentric coordinates; reject points outside the triangle.
        let u = f * dot(s, h);
        if u < 0.0 || u > 1.0 {
            return false;
        }
        let q = cross(s, edge1);
        let v = f * dot(ray_normalized_direction, q);
        if v < 0.0 || u + v > 1.0 {
            return false;
        }
        //let t = f * dot(edge2, q);
        // NOTE(review): t is derived from the plane normal instead of the
        // standard `f * dot(edge2, q)` (left commented out above) —
        // presumably for numerical robustness; confirm before changing.
        let n = cross(edge1, edge2);
        let t = dot(-s, n) / dot(ray_normalized_direction, n);
        // Accept only hits inside the ray's t-range that beat the record.
        if t < ray.t_range.start || ray.t_range.end <= t || t >= record.t {
            return false;
        }
        let beta = u;
        let gamma = v;
        let alpha = 1.0 - beta - gamma;
        let t_multiplier = if_avx!(
            avx = SimdFloat4::new(t,t,t,t),
            noavx = t
        );
        let position = (ray.position + t_multiplier * ray_normalized_direction).into();
        // Normal is the barycentric blend of the three vertex normals.
        *record = IntersectionRecord {
            position,
            normal: self.triangle.normals[0] * alpha +
                self.triangle.normals[1] * beta +
                self.triangle.normals[2] * gamma,
            t: t,
            shader: Some(self.triangle.shader.clone())
        };
        return true;
    }
}
/// Result of a ray/surface intersection query.
#[derive(Clone, Debug)]
pub struct IntersectionRecord {
    /// Shader of the hit surface; `None` for the "no intersection" sentinel.
    pub shader: Option<Rc<Shader>>,
    pub position: Vec3,
    pub normal: Vec3, // TODO change this to UnitVec3
    /// Ray parameter of the hit; `f32::INFINITY` means no hit yet.
    pub t: f32
}
impl IntersectionRecord {
    /// Sentinel "miss" record: `t` is infinite so any real hit beats it.
    pub fn no_intersection() -> IntersectionRecord {
        IntersectionRecord {
            shader: None,
            position: Vec3{x: 0., y: 0., z: 0.},
            normal: Vec3{x: 0., y: 0., z: 0.},
            t: f32::INFINITY
        }
    }
    /// True iff this record holds a real hit (finite `t`).
    pub fn intersected(&self) -> bool {
        self.t.is_finite()
    }
}
| 27.248252 | 94 | 0.586167 |
010d11f6804c279d77de83c6a070471558c46d62
| 1,969 |
extern crate iref;
use iref::Iri;
// Basic IRI: scheme, registered-name authority, absolute path.
#[test]
fn test1() {
    let buffer = "https://www.rust-lang.org/foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert_eq!(iri.authority().unwrap(), "www.rust-lang.org");
    assert_eq!(iri.path(), "/foo/bar");
}
// IPv6 host: the unspecified address `::`.
#[test]
fn test2() {
    let buffer = "https://[::]/foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert_eq!(iri.authority().unwrap(), "[::]");
    assert_eq!(iri.path(), "/foo/bar");
}
// IPv4-mapped address embedded in an IPv6 literal.
#[test]
fn test3() {
    let buffer = "https://[::192.128.0.1]/foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert_eq!(iri.authority().unwrap(), "[::192.128.0.1]");
    assert_eq!(iri.path(), "/foo/bar");
}
// Octet 256 exceeds the IPv4 range, so parsing must fail.
#[test]
#[should_panic]
fn test4() {
    let buffer = "https://[::256.128.0.1]/foo/bar#frag"; // 256.128.0.1 is not a valid IPv4
    Iri::new(buffer).expect("parsing failed");
}
// `https:///…` has an empty (but present) authority component.
#[test]
fn test5() {
    let buffer = "https:///foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert!(iri.authority().unwrap().is_empty());
    assert_eq!(iri.path(), "/foo/bar");
}
// A single slash means no authority at all, with an absolute path.
#[test]
fn test6() {
    let buffer = "https:/foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert!(iri.authority().is_none());
    assert_eq!(iri.path(), "/foo/bar");
}
// No slash at all: no authority and a rootless (relative) path.
#[test]
fn test7() {
    let buffer = "https:foo/bar#frag";
    let iri = Iri::new(buffer).expect("parsing failed");
    assert_eq!(iri.scheme(), "https");
    assert!(iri.authority().is_none());
    assert_eq!(iri.path(), "foo/bar");
}
// An unencoded space is not permitted anywhere in an IRI.
#[test]
#[should_panic]
fn test8() {
    let buffer = "https:foo/bar space";
    let iri = Iri::new(buffer).unwrap();
    println!("{}", iri.path());
}
// Percent-encoded octets compare equal to their decoded form ("bar").
#[test]
fn test9() {
    let iri1 = Iri::new("https:foo/bar").unwrap();
    let iri2 = Iri::new("https:foo/%62%61%72").unwrap();
    assert_eq!(iri1, iri2)
}
| 22.375 | 88 | 0.619604 |
efc9fc7442c17a0b7306b7047518ed9d57ab49b0
| 3,462 |
use serde::Deserialize;
use smallvec::SmallVec;
use thiserror::Error;
use crate::types::AccountData;
use super::{Filter, Memcmp, RESERVED_RANGE};
/// Filter container that guarantees filters do not conflict with each other
/// and determines the application order for each filter combination.
#[derive(Deserialize, Debug, Hash, Eq, PartialEq, Clone, Ord, PartialOrd)]
pub struct Filters {
pub(super) data_size: Option<u64>,
pub(super) memcmp: SmallVec<[Memcmp; 2]>,
}
/// Errors produced by [`Filters::new_normalized`].
#[derive(Debug, Error, PartialEq)]
pub enum NormalizeError {
    /// Two `DataSize` filters with different sizes can never both match.
    #[error("duplicate data size")]
    DuplicateDataSize,
    /// Two memcmps over the same range with different bytes can never both match.
    #[error("non equal memcmp into one range")]
    ConflictingMemcmp,
    /// No filters were supplied at all.
    #[error("empty filter vec")]
    Empty,
}
impl Filters {
    /// Builds a canonical `Filters` from raw filters: drops no-op memcmps,
    /// deduplicates, sorts, and rejects combinations that can never match.
    pub fn new_normalized<T>(filters: T) -> Result<Self, NormalizeError>
    where
        T: IntoIterator<Item = Filter>,
    {
        use NormalizeError::*;
        let mut amount = 0;
        let mut data_size = None;
        let mut memcmp_vec = SmallVec::<[Memcmp; 2]>::new();
        for filter in filters {
            amount += 1;
            match filter {
                Filter::DataSize(size) => {
                    // There is no point filtering for two different sizes
                    if data_size.replace(size).map_or(false, |old| old != size) {
                        return Err(DuplicateDataSize);
                    }
                }
                // TODO: check that overlapping ranges match
                Filter::Memcmp(new) => {
                    // Empty byte patterns and the (empty) reserved range
                    // match everything, so they carry no information.
                    if new.bytes.is_empty() || new.range() == RESERVED_RANGE {
                        const _: () = {
                            // Just to statically assert that reserved range is empty
                            let _: [u8; RESERVED_RANGE.1 - RESERVED_RANGE.0] = [];
                        };
                        // This is always true
                        continue;
                    }
                    // Identical ranges must carry identical bytes; otherwise
                    // the two filters are mutually exclusive.
                    let same_range = memcmp_vec
                        .iter()
                        .find(|filter| filter.range() == new.range());
                    match same_range {
                        Some(old) if old.bytes != new.bytes => return Err(ConflictingMemcmp),
                        Some(_) /* if old.bytes == new.bytes */ => (),
                        None => memcmp_vec.push(new),
                    }
                }
            }
        }
        if amount == 0 {
            return Err(NormalizeError::Empty);
        }
        // Canonical order so equivalent filter sets compare/hash equal.
        memcmp_vec.sort_unstable();
        Ok(Self {
            data_size,
            memcmp: memcmp_vec,
        })
    }
    /// True when `data` satisfies the size constraint (if any) and every memcmp.
    pub fn matches(&self, data: &AccountData) -> bool {
        if self
            .data_size
            .map_or(false, |size| data.len() as u64 != size)
        {
            return false;
        }
        self.memcmp.iter().all(|memcmp| memcmp.matches(data))
    }
}
// Test helper: builds a normalized `Filters` from a terse spec, e.g.
// `filters!(@size 42, @cmp 0: [1, 2])`.
#[cfg(test)]
macro_rules! filters {
    (@parse @cmp $offset:literal: [$($byte:literal),*]) => {
        $crate::filter::Filter::Memcmp(Memcmp {
            offset: $offset,
            bytes: smallvec::smallvec![$($byte),*],
        })
    };
    (@parse @size $datasize:literal) => { $crate::filter::Filter::DataSize($datasize) };
    ($(@$tag:ident $arg:literal $(: [$($add_args:literal),*])? ),*) => {{
        let filters = vec![$(
            filters!(@parse @$tag $arg $(: [$($add_args),*])?)
        ),*];
        $crate::filter::Filters::new_normalized(filters)
    }};
}
| 31.472727 | 94 | 0.501444 |
1c2435be40a6f69d8a9e81a75fc789367047d62c
| 1,789 |
use super::error_messages::*;
use crate::{
anyhow,
bail,
ensure,
};
use core::fmt::Debug;
#[macro_export]
macro_rules! try_or {
($cond:expr, $err:expr) => {{
if $crate::LOCATION_LOG && !$cond {
$crate::println!("\n!!! Error occurred @ {}, {}", file!(), line!())
}
try_or($cond, $err)
}};
}
#[macro_export]
macro_rules! err {
($err:expr) => {{
if $crate::LOCATION_LOG {
$crate::println!("\n!!! Error occurred @ {}, {}", file!(), line!());
}
err($err)
}};
}
#[macro_export]
macro_rules! panic_if_not {
($cond:expr) => {{
if $crate::LOCATION_LOG && !$cond {
$crate::println!("\n!!! Error occurred @ {}, {}", file!(), line!())
}
panic_if_not($cond)
}};
}
#[macro_export]
macro_rules! wrapped_err {
($err:expr, $wrapped:expr) => {{
if $crate::LOCATION_LOG {
$crate::println!("\n!!! Error occurred @ {}, {}", file!(), line!());
}
wrapped_err($err, $wrapped)
}};
}
#[macro_export]
macro_rules! unwrap_or_break {
($result:expr) => {
match $result {
Ok(r) => r,
Err(e) => {
if $crate::LOCATION_LOG {
$crate::println!("\n!!! Error occurred @ {}, {}", file!(), line!());
}
break;
}
}
};
}
/// Returns `Err(err)` unless `cond` holds.
pub fn try_or(cond: bool, err: Errors) -> Result<(), anyhow::Error> {
    ensure!(cond, err);
    Ok(())
}
/// Unconditionally returns `Err(err)` (typed wrapper around `bail!`).
pub fn err<T>(err: Errors) -> Result<T, anyhow::Error> {
    bail!(err)
}
/// Panics when `cond` is false.
pub fn panic_if_not(cond: bool) {
    assert!(cond)
}
/// Combines a streams-level error with the underlying cause `src`.
pub fn wrapped_err<T: Debug>(err: Errors, src: WrappedError<T>) -> anyhow::Error {
    anyhow!("\n\tStreams Error: {}\n\t\tCause: {:?}", err, src.0)
}
| 22.3625 | 88 | 0.48351 |
4bc9fa78afcf43fa1ba8878a7b89bedd6da5b083
| 26,034 |
#[doc = r" Value read from the register"]
pub struct R {
    // Raw 32-bit snapshot of the register contents.
    bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
    // Raw 32-bit value that will be written back to the register.
    bits: u32,
}
impl super::Rtsr {
    #[doc = r" Modifies the contents of the register"]
    // NOTE: read-modify-write as three separate steps; not atomic.
    #[inline(always)]
    pub fn modify<F>(&self, f: F)
    where
        for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
    {
        let bits = self.register.get();
        let r = R { bits: bits };
        let mut w = W { bits: bits };
        f(&r, &mut w);
        self.register.set(w.bits);
    }
    #[doc = r" Reads the contents of the register"]
    #[inline(always)]
    pub fn read(&self) -> R {
        R {
            bits: self.register.get(),
        }
    }
    #[doc = r" Writes to the register"]
    // Starts from the reset value, not the current contents.
    #[inline(always)]
    pub fn write<F>(&self, f: F)
    where
        F: FnOnce(&mut W) -> &mut W,
    {
        let mut w = W::reset_value();
        f(&mut w);
        self.register.set(w.bits);
    }
}
#[doc = r" Value of the field"]
// Generated (svd2rust-style) reader proxy for field TR0; the structs/impls
// for TR1..TR22 below follow the identical pattern.
pub struct Tr0R {
    bits: u8,
}
impl Tr0R {
    #[doc = r" Value of the field as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u8 {
        self.bits
    }
}
#[doc = r" Value of the field"]
pub struct Tr1R {
bits: u8,
}
impl Tr1R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr2R {
bits: u8,
}
impl Tr2R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr3R {
bits: u8,
}
impl Tr3R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr4R {
bits: u8,
}
impl Tr4R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr5R {
bits: u8,
}
impl Tr5R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr6R {
bits: u8,
}
impl Tr6R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr7R {
bits: u8,
}
impl Tr7R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr8R {
bits: u8,
}
impl Tr8R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr9R {
bits: u8,
}
impl Tr9R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr10R {
bits: u8,
}
impl Tr10R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr11R {
bits: u8,
}
impl Tr11R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr12R {
bits: u8,
}
impl Tr12R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr13R {
bits: u8,
}
impl Tr13R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr14R {
bits: u8,
}
impl Tr14R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr15R {
bits: u8,
}
impl Tr15R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr16R {
bits: u8,
}
impl Tr16R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr17R {
bits: u8,
}
impl Tr17R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr18R {
bits: u8,
}
impl Tr18R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr19R {
bits: u8,
}
impl Tr19R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr20R {
bits: u8,
}
impl Tr20R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr21R {
bits: u8,
}
impl Tr21R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct Tr22R {
bits: u8,
}
impl Tr22R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
// Generated writer proxy for field TR0 (bit 0); _Tr1W.._Tr22W below are the
// same pattern at successive bit offsets.
pub struct _Tr0W<'a> {
    w: &'a mut W,
}
impl<'a> _Tr0W<'a> {
    #[doc = r" Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, bits: u8) -> &'a mut W {
        const MASK: u8 = 1;
        const OFFSET: u8 = 0;
        // Clear the field, then OR in the masked new value.
        self.w.bits &= !((MASK as u32) << OFFSET);
        self.w.bits |= ((bits & MASK) as u32) << OFFSET;
        self.w
    }
}
#[doc = r" Proxy"]
pub struct _Tr1W<'a> {
w: &'a mut W,
}
impl<'a> _Tr1W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 1;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr2W<'a> {
w: &'a mut W,
}
impl<'a> _Tr2W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 2;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr3W<'a> {
w: &'a mut W,
}
impl<'a> _Tr3W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 3;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr4W<'a> {
w: &'a mut W,
}
impl<'a> _Tr4W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr5W<'a> {
w: &'a mut W,
}
impl<'a> _Tr5W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 5;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr6W<'a> {
w: &'a mut W,
}
impl<'a> _Tr6W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 6;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr7W<'a> {
w: &'a mut W,
}
impl<'a> _Tr7W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 7;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr8W<'a> {
w: &'a mut W,
}
impl<'a> _Tr8W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 8;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr9W<'a> {
w: &'a mut W,
}
impl<'a> _Tr9W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 9;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr10W<'a> {
w: &'a mut W,
}
impl<'a> _Tr10W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 10;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr11W<'a> {
w: &'a mut W,
}
impl<'a> _Tr11W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 11;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr12W<'a> {
w: &'a mut W,
}
impl<'a> _Tr12W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr13W<'a> {
w: &'a mut W,
}
impl<'a> _Tr13W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 13;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr14W<'a> {
w: &'a mut W,
}
impl<'a> _Tr14W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 14;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr15W<'a> {
w: &'a mut W,
}
impl<'a> _Tr15W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 15;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr16W<'a> {
w: &'a mut W,
}
impl<'a> _Tr16W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr17W<'a> {
w: &'a mut W,
}
impl<'a> _Tr17W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 17;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr18W<'a> {
w: &'a mut W,
}
impl<'a> _Tr18W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 18;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr19W<'a> {
w: &'a mut W,
}
impl<'a> _Tr19W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 19;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr20W<'a> {
w: &'a mut W,
}
impl<'a> _Tr20W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr21W<'a> {
w: &'a mut W,
}
impl<'a> _Tr21W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _Tr22W<'a> {
w: &'a mut W,
}
impl<'a> _Tr22W<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, bits: u8) -> &'a mut W {
const MASK: u8 = 1;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((bits & MASK) as u32) << OFFSET;
self.w
}
}
/// Generates a read accessor for a single-bit `TRx` field of this register.
macro_rules! tr_bit_reader {
    ($name:ident, $Reader:ident, $offset:expr, $doc:literal) => {
        #[doc = $doc]
        #[inline(always)]
        pub fn $name(&self) -> $Reader {
            $Reader { bits: ((self.bits >> $offset) & 1) as u8 }
        }
    };
}
impl R {
    #[doc = r" Value of the register as raw bits"]
    #[inline(always)]
    pub fn bits(&self) -> u32 {
        self.bits
    }
    tr_bit_reader!(tr0, Tr0R, 0, "Bit 0 - Rising trigger event configuration of line 0");
    tr_bit_reader!(tr1, Tr1R, 1, "Bit 1 - Rising trigger event configuration of line 1");
    tr_bit_reader!(tr2, Tr2R, 2, "Bit 2 - Rising trigger event configuration of line 2");
    tr_bit_reader!(tr3, Tr3R, 3, "Bit 3 - Rising trigger event configuration of line 3");
    tr_bit_reader!(tr4, Tr4R, 4, "Bit 4 - Rising trigger event configuration of line 4");
    tr_bit_reader!(tr5, Tr5R, 5, "Bit 5 - Rising trigger event configuration of line 5");
    tr_bit_reader!(tr6, Tr6R, 6, "Bit 6 - Rising trigger event configuration of line 6");
    tr_bit_reader!(tr7, Tr7R, 7, "Bit 7 - Rising trigger event configuration of line 7");
    tr_bit_reader!(tr8, Tr8R, 8, "Bit 8 - Rising trigger event configuration of line 8");
    tr_bit_reader!(tr9, Tr9R, 9, "Bit 9 - Rising trigger event configuration of line 9");
    tr_bit_reader!(tr10, Tr10R, 10, "Bit 10 - Rising trigger event configuration of line 10");
    tr_bit_reader!(tr11, Tr11R, 11, "Bit 11 - Rising trigger event configuration of line 11");
    tr_bit_reader!(tr12, Tr12R, 12, "Bit 12 - Rising trigger event configuration of line 12");
    tr_bit_reader!(tr13, Tr13R, 13, "Bit 13 - Rising trigger event configuration of line 13");
    tr_bit_reader!(tr14, Tr14R, 14, "Bit 14 - Rising trigger event configuration of line 14");
    tr_bit_reader!(tr15, Tr15R, 15, "Bit 15 - Rising trigger event configuration of line 15");
    tr_bit_reader!(tr16, Tr16R, 16, "Bit 16 - Rising trigger event configuration of line 16");
    tr_bit_reader!(tr17, Tr17R, 17, "Bit 17 - Rising trigger event configuration of line 17");
    tr_bit_reader!(tr18, Tr18R, 18, "Bit 18 - Rising trigger event configuration of line 18");
    tr_bit_reader!(tr19, Tr19R, 19, "Bit 19 - Rising trigger event configuration of line 19");
    tr_bit_reader!(tr20, Tr20R, 20, "Bit 20 - Rising trigger event configuration of line 20");
    tr_bit_reader!(tr21, Tr21R, 21, "Bit 21 - Rising trigger event configuration of line 21");
    tr_bit_reader!(tr22, Tr22R, 22, "Bit 22 - Rising trigger event configuration of line 22");
}
/// Generates a method returning the write proxy for a single-bit `TRx` field.
macro_rules! tr_bit_writer {
    ($name:ident, $Proxy:ident, $doc:literal) => {
        #[doc = $doc]
        #[inline(always)]
        pub fn $name(&mut self) -> $Proxy {
            $Proxy { w: self }
        }
    };
}
impl W {
    #[doc = r" Reset value of the register"]
    #[inline(always)]
    pub fn reset_value() -> W {
        W { bits: 0 }
    }
    #[doc = r" Writes raw bits to the register"]
    #[inline(always)]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        self.bits = bits;
        self
    }
    tr_bit_writer!(tr0, _Tr0W, "Bit 0 - Rising trigger event configuration of line 0");
    tr_bit_writer!(tr1, _Tr1W, "Bit 1 - Rising trigger event configuration of line 1");
    tr_bit_writer!(tr2, _Tr2W, "Bit 2 - Rising trigger event configuration of line 2");
    tr_bit_writer!(tr3, _Tr3W, "Bit 3 - Rising trigger event configuration of line 3");
    tr_bit_writer!(tr4, _Tr4W, "Bit 4 - Rising trigger event configuration of line 4");
    tr_bit_writer!(tr5, _Tr5W, "Bit 5 - Rising trigger event configuration of line 5");
    tr_bit_writer!(tr6, _Tr6W, "Bit 6 - Rising trigger event configuration of line 6");
    tr_bit_writer!(tr7, _Tr7W, "Bit 7 - Rising trigger event configuration of line 7");
    tr_bit_writer!(tr8, _Tr8W, "Bit 8 - Rising trigger event configuration of line 8");
    tr_bit_writer!(tr9, _Tr9W, "Bit 9 - Rising trigger event configuration of line 9");
    tr_bit_writer!(tr10, _Tr10W, "Bit 10 - Rising trigger event configuration of line 10");
    tr_bit_writer!(tr11, _Tr11W, "Bit 11 - Rising trigger event configuration of line 11");
    tr_bit_writer!(tr12, _Tr12W, "Bit 12 - Rising trigger event configuration of line 12");
    tr_bit_writer!(tr13, _Tr13W, "Bit 13 - Rising trigger event configuration of line 13");
    tr_bit_writer!(tr14, _Tr14W, "Bit 14 - Rising trigger event configuration of line 14");
    tr_bit_writer!(tr15, _Tr15W, "Bit 15 - Rising trigger event configuration of line 15");
    tr_bit_writer!(tr16, _Tr16W, "Bit 16 - Rising trigger event configuration of line 16");
    tr_bit_writer!(tr17, _Tr17W, "Bit 17 - Rising trigger event configuration of line 17");
    tr_bit_writer!(tr18, _Tr18W, "Bit 18 - Rising trigger event configuration of line 18");
    tr_bit_writer!(tr19, _Tr19W, "Bit 19 - Rising trigger event configuration of line 19");
    tr_bit_writer!(tr20, _Tr20W, "Bit 20 - Rising trigger event configuration of line 20");
    tr_bit_writer!(tr21, _Tr21W, "Bit 21 - Rising trigger event configuration of line 21");
    tr_bit_writer!(tr22, _Tr22W, "Bit 22 - Rising trigger event configuration of line 22");
}
| 25.956132 | 69 | 0.503188 |
9c385385e762138f56076e451b3892e50529e427
| 1,481 |
use crate::connector::SlackClientHyperConnector;
use std::future::Future;
use futures::future::{BoxFuture, FutureExt};
use hyper::{Body, Request, Response};
pub use command_events::*;
pub use interaction_events::*;
pub use oauth::*;
pub use push_events::*;
use slack_morphism::listener::SlackClientEventsListenerEnvironment;
pub use slack_morphism::signature_verifier::*;
use std::sync::Arc;
mod command_events;
mod interaction_events;
mod oauth;
mod push_events;
/// Hyper-based listener for incoming Slack client events.
pub struct SlackClientEventsHyperListener {
    /// Shared listener environment carrying the hyper-backed Slack connector.
    pub environment: Arc<SlackClientEventsListenerEnvironment<SlackClientHyperConnector>>,
}
impl SlackClientEventsHyperListener {
pub fn new(
environment: Arc<SlackClientEventsListenerEnvironment<SlackClientHyperConnector>>,
) -> Self {
Self { environment }
}
}
/// Chains a route handler with a fallback: the returned service invokes
/// `route`, handing it a clone of `default` so the route can delegate any
/// request it does not handle. The result is boxed into a `BoxFuture`.
pub fn chain_service_routes_fn<'a, R, D, FR, FD>(
    route: R,
    default: D,
) -> impl Fn(
    Request<Body>,
) -> BoxFuture<'a, Result<Response<Body>, Box<dyn std::error::Error + Send + Sync + 'a>>>
       + 'a
       + Send
       + Clone
where
    R: Fn(Request<Body>, D) -> FR + 'a + Clone + Send,
    D: Fn(Request<Body>) -> FD + 'a + Clone + Send,
    FR: Future<Output = Result<Response<Body>, Box<dyn std::error::Error + Send + Sync + 'a>>>
        + 'a
        + Send,
    FD: Future<Output = Result<Response<Body>, Box<dyn std::error::Error + Send + Sync + 'a>>>
        + 'a
        + Send,
{
    move |request: Request<Body>| {
        // Each call gets its own clone of the fallback handler.
        let fallback = default.clone();
        route(request, fallback).boxed()
    }
}
| 27.425926 | 94 | 0.66239 |
5befa2224b45f8f94ee6171cf781d64a7ea7ebbc
| 4,610 |
use std::collections::HashMap;
use super::{super::GqlContext, Orchestrator};
use crate::libraries::helpers::keys;
use juniper::{
graphql_object, FieldResult, GraphQLEnum, GraphQLInputObject, GraphQLObject, GraphQLScalarValue,
};
use redis::AsyncCommands;
// GraphQL enum describing a session's lifecycle state.
// NOTE: `//` comments on purpose — `///` doc comments would be picked up by
// juniper as schema descriptions and change the exposed GraphQL schema.
#[derive(GraphQLEnum)]
pub enum SessionState {
    Active,
    Terminated,
}
// Custom GraphQL scalar carrying a raw string value — presumably a
// timestamp (values come from Redis fields like "queuedAt"); confirm
// the exact format against the writer of those fields.
#[derive(GraphQLScalarValue)]
pub struct Date(String);
// Output type for a single key/value pair of session metadata.
#[derive(GraphQLObject)]
pub struct DictionaryEntry {
    pub key: String,
    pub value: String,
}
// Input counterpart of `DictionaryEntry` for GraphQL mutations/arguments.
#[derive(GraphQLInputObject)]
pub struct InputDictionaryEntry {
    pub key: String,
    pub value: String,
}
// Timestamps (as raw strings) for each lifecycle transition of a session;
// a `None` field means that transition has not been recorded in Redis.
#[derive(GraphQLObject)]
pub struct SessionStatusTransitions {
    queued_at: Option<Date>,
    pending_at: Option<Date>,
    alive_at: Option<Date>,
    terminated_at: Option<Date>,
}
impl SessionStatusTransitions {
pub async fn new(session_id: &str, context: &GqlContext) -> FieldResult<Self> {
let metadata: HashMap<String, String> = context
.redis
.lock()
.await
.hgetall(keys::session::status(session_id))
.await?;
Ok(Self {
queued_at: metadata.get("queuedAt").map(|s| Date(s.to_owned())),
pending_at: metadata.get("pendingAt").map(|s| Date(s.to_owned())),
alive_at: metadata.get("aliveAt").map(|s| Date(s.to_owned())),
terminated_at: metadata.get("terminatedAt").map(|s| Date(s.to_owned())),
})
}
}
// Requested vs. actual capabilities of a session, as stored in Redis;
// `None` means the corresponding hash field is absent.
#[derive(GraphQLObject)]
pub struct SessionCapabilities {
    requested: Option<String>,
    actual: Option<String>,
}
impl SessionCapabilities {
    /// Loads the requested/actual capabilities for `session_id` from Redis.
    pub async fn new(session_id: &str, context: &GqlContext) -> FieldResult<Self> {
        let key = keys::session::capabilities(session_id);
        let metadata: HashMap<String, String> =
            context.redis.lock().await.hgetall(key).await?;
        Ok(Self {
            requested: metadata.get("requested").cloned(),
            actual: metadata.get("actual").cloned(),
        })
    }
}
// GraphQL resolver object for one session, identified only by its id;
// all other data is fetched lazily from Redis per field.
pub struct Session {
    id: String,
}
impl Session {
pub fn new(session_id: String) -> Self {
Self { id: session_id }
}
async fn storage_id(&self, context: &GqlContext) -> FieldResult<Option<String>> {
Ok(context
.redis
.lock()
.await
.get(keys::session::storage(&self.id))
.await?)
}
pub async fn metadata(&self, context: &GqlContext) -> FieldResult<Vec<DictionaryEntry>> {
let dictionary: Vec<(String, String)> = context
.redis
.lock()
.await
.hgetall(keys::session::metadata(&self.id))
.await?;
Ok(dictionary
.into_iter()
.map(|(key, value)| DictionaryEntry { key, value })
.collect())
}
}
// GraphQL field resolvers for `Session`. `//` comments on purpose — `///`
// doc comments inside a `graphql_object` impl would become schema
// descriptions and change the exposed GraphQL schema.
#[graphql_object(context = GqlContext)]
impl Session {
    fn id(&self) -> &str {
        self.id.as_str()
    }
    async fn status(&self, context: &GqlContext) -> FieldResult<SessionStatusTransitions> {
        SessionStatusTransitions::new(&self.id, context).await
    }
    async fn capabilities(&self, context: &GqlContext) -> FieldResult<SessionCapabilities> {
        SessionCapabilities::new(&self.id, context).await
    }
    // Delegates to the inherent `Session::metadata` helper above.
    async fn metadata(&self, context: &GqlContext) -> FieldResult<Vec<DictionaryEntry>> {
        self.metadata(context).await
    }
    // A session counts as alive while its node heartbeat key exists in Redis.
    async fn alive(&self, context: &GqlContext) -> FieldResult<bool> {
        Ok(context
            .redis
            .lock()
            .await
            .exists(keys::session::heartbeat::node(&self.id))
            .await?)
    }
    async fn slot(&self, context: &GqlContext) -> FieldResult<Option<String>> {
        Ok(context
            .redis
            .lock()
            .await
            .get(keys::session::slot(&self.id))
            .await?)
    }
    // RPOPLPUSH with source == destination rotates the list by one element
    // and returns the moved element — a non-destructive "peek and rotate".
    async fn orchestrator(&self, context: &GqlContext) -> FieldResult<Option<Orchestrator>> {
        let key = keys::session::orchestrator(&self.id);
        let orchestrator_id: Option<String> =
            context.redis.lock().await.rpoplpush(&key, &key).await?;
        Ok(orchestrator_id.map(Orchestrator::new))
    }
    async fn storage(&self, context: &GqlContext) -> FieldResult<Option<String>> {
        self.storage_id(context).await
    }
    // Non-snake-case name is kept: it is presumably exposed verbatim as the
    // GraphQL field name — confirm against the published schema before renaming.
    async fn videoURL(&self, context: &GqlContext) -> FieldResult<Option<String>> {
        Ok(self
            .storage_id(context)
            .await?
            .map(|storage_id| format!("/storage/{}/{}.m3u8", storage_id, &self.id)))
    }
}
| 27.60479 | 100 | 0.592191 |
8a08c52ca49f73485e9cb462f1be699bff7ca1d1
| 863 |
// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
pub mod psk;
pub mod sae;
use crate::rsna::{Dot11VerifiedKeyFrame, UpdateSink};
use failure;
use zerocopy::ByteSlice;
/// An authentication method. Only PSK is supported so far (see
/// `on_eapol_key_frame` below).
#[derive(Debug, PartialEq)]
pub enum Method {
    Psk(psk::Psk),
}
impl Method {
    /// Builds the concrete authentication method from its configuration.
    pub fn from_config(cfg: Config) -> Result<Method, failure::Error> {
        // `Config` currently has a single variant, so this pattern is irrefutable.
        let Config::ComputedPsk(psk) = cfg;
        Ok(Method::Psk(psk))
    }
    // Unused as only PSK is supported so far; accepts and ignores the frame.
    pub fn on_eapol_key_frame<B: ByteSlice>(
        &self,
        _update_sink: &mut UpdateSink,
        _frame: Dot11VerifiedKeyFrame<B>,
    ) -> Result<(), failure::Error> {
        Ok(())
    }
}
/// Configuration from which a `Method` is constructed.
#[derive(Debug, PartialEq)]
pub enum Config {
    /// A PSK that has already been computed.
    ComputedPsk(psk::Psk),
}
| 22.710526 | 73 | 0.636153 |
903fa2a2ef07cc78bcdbdf0041f99056f11e89bc
| 5,148 |
//! Function object impl
use crate::avm2::activation::Activation;
use crate::avm2::function::Executable;
use crate::avm2::method::Method;
use crate::avm2::object::script_object::{ScriptObject, ScriptObjectData};
use crate::avm2::object::{ClassObject, Object, ObjectPtr, TObject};
use crate::avm2::scope::ScopeChain;
use crate::avm2::value::Value;
use crate::avm2::Error;
use gc_arena::{Collect, GcCell, MutationContext};
use std::cell::{Ref, RefMut};
/// An Object which can be called to execute its function code.
///
/// This is a cheap, copyable handle; the actual state lives in the
/// GC-managed `FunctionObjectData` cell.
#[derive(Collect, Debug, Clone, Copy)]
#[collect(no_drop)]
pub struct FunctionObject<'gc>(GcCell<'gc, FunctionObjectData<'gc>>);
/// Backing storage for a `FunctionObject`.
#[derive(Collect, Debug, Clone)]
#[collect(no_drop)]
pub struct FunctionObjectData<'gc> {
    /// Base script object
    base: ScriptObjectData<'gc>,
    /// Executable code
    exec: Executable<'gc>,
    /// Attached prototype (note: not the same thing as base object's proto)
    prototype: Option<Object<'gc>>,
}
impl<'gc> FunctionObject<'gc> {
    /// Construct a function from an ABC method and the current closure scope.
    ///
    /// This associated constructor will also create and initialize an empty
    /// `Object` prototype for the function.
    pub fn from_function(
        activation: &mut Activation<'_, 'gc, '_>,
        method: Method<'gc>,
        scope: ScopeChain<'gc>,
    ) -> Result<FunctionObject<'gc>, Error> {
        let this = Self::from_method(activation, method, scope, None, None);
        // ES3-style `prototype` object, distinct from the object's own proto chain.
        let es3_proto = ScriptObject::object(
            activation.context.gc_context,
            activation.avm2().prototypes().object,
        );
        this.0.write(activation.context.gc_context).prototype = Some(es3_proto);
        Ok(this)
    }
    /// Construct a method from an ABC method and the current closure scope.
    ///
    /// The given `receiver`, if supplied, will override any user-specified
    /// `this` parameter.
    pub fn from_method(
        activation: &mut Activation<'_, 'gc, '_>,
        method: Method<'gc>,
        scope: ScopeChain<'gc>,
        receiver: Option<Object<'gc>>,
        subclass_object: Option<ClassObject<'gc>>,
    ) -> FunctionObject<'gc> {
        let fn_proto = activation.avm2().prototypes().function;
        let fn_class = activation.avm2().classes().function;
        let exec = Executable::from_method(method, scope, receiver, subclass_object);
        // Note: `prototype` starts out unset here; `from_function` fills it in.
        FunctionObject(GcCell::allocate(
            activation.context.gc_context,
            FunctionObjectData {
                base: ScriptObjectData::base_new(Some(fn_proto), Some(fn_class)),
                exec,
                prototype: None,
            },
        ))
    }
    /// Returns the function's attached ES3-style prototype, if any.
    pub fn prototype(&self) -> Option<Object<'gc>> {
        self.0.read().prototype
    }
    /// Replaces the function's attached ES3-style prototype.
    pub fn set_prototype(&self, proto: Object<'gc>, mc: MutationContext<'gc, '_>) {
        self.0.write(mc).prototype = Some(proto);
    }
}
impl<'gc> TObject<'gc> for FunctionObject<'gc> {
    fn base(&self) -> Ref<ScriptObjectData<'gc>> {
        Ref::map(self.0.read(), |read| &read.base)
    }
    fn base_mut(&self, mc: MutationContext<'gc, '_>) -> RefMut<ScriptObjectData<'gc>> {
        RefMut::map(self.0.write(mc), |write| &mut write.base)
    }
    fn as_ptr(&self) -> *const ObjectPtr {
        self.0.as_ptr() as *const ObjectPtr
    }
    // Functions always stringify to a fixed placeholder source text.
    fn to_string(&self, _mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> {
        Ok("function Function() {}".into())
    }
    fn to_locale_string(&self, mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> {
        self.to_string(mc)
    }
    fn value_of(&self, _mc: MutationContext<'gc, '_>) -> Result<Value<'gc>, Error> {
        Ok(Value::Object(Object::from(*self)))
    }
    fn as_executable(&self) -> Option<Ref<Executable<'gc>>> {
        Some(Ref::map(self.0.read(), |r| &r.exec))
    }
    fn as_function_object(&self) -> Option<FunctionObject<'gc>> {
        Some(*self)
    }
    // Invokes the stored executable with the given receiver and arguments.
    fn call(
        self,
        receiver: Option<Object<'gc>>,
        arguments: &[Value<'gc>],
        activation: &mut Activation<'_, 'gc, '_>,
    ) -> Result<Value<'gc>, Error> {
        self.0
            .read()
            .exec
            .exec(receiver, arguments, activation, self.into())
    }
    // ES3-style construction: derive an instance from `prototype`, then call
    // the function with the new instance as receiver.
    // NOTE: panics (`unwrap`) if no prototype was ever attached.
    fn construct(
        self,
        activation: &mut Activation<'_, 'gc, '_>,
        arguments: &[Value<'gc>],
    ) -> Result<Object<'gc>, Error> {
        let prototype = self.prototype().unwrap();
        let instance = prototype.derive(activation)?;
        self.call(Some(instance), arguments, activation)?;
        Ok(instance)
    }
    // Creates a new object whose proto is this function, sharing the same
    // executable but with no attached prototype.
    fn derive(&self, activation: &mut Activation<'_, 'gc, '_>) -> Result<Object<'gc>, Error> {
        let this: Object<'gc> = Object::FunctionObject(*self);
        let base = ScriptObjectData::base_new(Some(this), None);
        let exec = self.0.read().exec.clone();
        Ok(FunctionObject(GcCell::allocate(
            activation.context.gc_context,
            // todo: should this be None?
            FunctionObjectData {
                base,
                exec,
                prototype: None,
            },
        ))
        .into())
    }
}
| 31.582822 | 94 | 0.596348 |
bf80845d1705729eb8a23ea1af9828d401e5d85c
| 3,326 |
// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files
// DO NOT EDIT
use crate::Device;
use crate::Object;
use glib::object::ObjectType as ObjectType_;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
// `glib::wrapper!` generates the `DeviceBridge` wrapper type around the C
// `NMDeviceBridge` GObject; `@extends` declares the class hierarchy
// (Device, then Object) used for upcasting.
glib::wrapper! {
    #[doc(alias = "NMDeviceBridge")]
    pub struct DeviceBridge(Object<ffi::NMDeviceBridge, ffi::NMDeviceBridgeClass>) @extends Device, Object;

    match fn {
        type_ => || ffi::nm_device_bridge_get_type(),
    }
}
impl DeviceBridge {
    /// Whether the device has carrier.
    ///
    /// # Returns
    ///
    /// [`true`] if the device has carrier
    #[doc(alias = "nm_device_bridge_get_carrier")]
    #[doc(alias = "get_carrier")]
    pub fn is_carrier(&self) -> bool {
        unsafe { from_glib(ffi::nm_device_bridge_get_carrier(self.to_glib_none().0)) }
    }
    /// Gets the devices currently enslaved to `self`.
    ///
    /// # Returns
    ///
    /// the [`glib::PtrArray`][crate::glib::PtrArray] containing
    /// `NMDevices` that are slaves of `self`. This is the internal
    /// copy used by the device, and must not be modified.
    #[doc(alias = "nm_device_bridge_get_slaves")]
    #[doc(alias = "get_slaves")]
    pub fn slaves(&self) -> Vec<Device> {
        unsafe {
            FromGlibPtrContainer::from_glib_none(ffi::nm_device_bridge_get_slaves(
                self.to_glib_none().0,
            ))
        }
    }
    /// Connects `f` to the `notify::carrier` property-change signal.
    #[doc(alias = "carrier")]
    pub fn connect_carrier_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        // FFI trampoline: recovers the boxed Rust closure from the raw
        // `gpointer` and calls it with a borrowed wrapper of the emitter.
        unsafe extern "C" fn notify_carrier_trampoline<F: Fn(&DeviceBridge) + 'static>(
            this: *mut ffi::NMDeviceBridge,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(&from_glib_borrow(this))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::carrier\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(
                    notify_carrier_trampoline::<F> as *const (),
                )),
                // Ownership of the boxed closure is handed to the signal system.
                Box_::into_raw(f),
            )
        }
    }
    /// Connects `f` to the `notify::slaves` property-change signal.
    #[doc(alias = "slaves")]
    pub fn connect_slaves_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
        // Same trampoline pattern as `connect_carrier_notify` above.
        unsafe extern "C" fn notify_slaves_trampoline<F: Fn(&DeviceBridge) + 'static>(
            this: *mut ffi::NMDeviceBridge,
            _param_spec: glib::ffi::gpointer,
            f: glib::ffi::gpointer,
        ) {
            let f: &F = &*(f as *const F);
            f(&from_glib_borrow(this))
        }
        unsafe {
            let f: Box_<F> = Box_::new(f);
            connect_raw(
                self.as_ptr() as *mut _,
                b"notify::slaves\0".as_ptr() as *const _,
                Some(transmute::<_, unsafe extern "C" fn()>(
                    notify_slaves_trampoline::<F> as *const (),
                )),
                Box_::into_raw(f),
            )
        }
    }
}
impl fmt::Display for DeviceBridge {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("DeviceBridge")
}
}
| 31.67619 | 107 | 0.546603 |
ed76a610b3c9040c090807dd5a687998deb4bb6f
| 19,301 |
extern crate core;
pub mod iter;
use std::iter::Zip;
use std::slice;
use std::vec;
use std::ops::{Range, Index, IndexMut};
use core::ptr;
/// An ordered multi-map: keys are kept sorted, duplicate keys are allowed,
/// and values sharing a key keep their insertion order.
#[derive(Debug)]
pub struct Ommap<K, V> {
    /// Keys in sorted order; `keys[i]` pairs with `values[i]`.
    keys: Vec<K>,
    /// Values parallel to `keys`.
    values: Vec<V>,
}
impl<K: Ord, V> Ommap<K, V> {
    /// Constructs a new, empty `Ommap<K, V>`.
    pub fn new() -> Self {
        Ommap {
            keys: Vec::new(),
            values: Vec::new(),
        }
    }
    /// Get the index of a given key.
    ///
    /// Returns `None` if there is no entry for the key.
    /// Note: for duplicate keys, `binary_search` may land anywhere inside the
    /// run of equal keys — callers use `first_index`/`last_index_exclusive`
    /// to find the run's edges.
    #[inline]
    fn index(&self, key: &K) -> Option<usize> {
        if self.in_inner_bounds(key) {
            return self.keys.binary_search(key).ok();
        }
        None
    }
    /// Get the first index associated with the given key next to the given index (inclusive).
    #[inline]
    fn first_index(&self, key: &K, index: usize) -> usize {
        // Walk left over the run of equal keys preceding `index`.
        self.keys[..index].iter()
            .rev()
            .take_while(|&k| k == key)
            .fold(index, |acc, _| acc - 1 )
    }
    /// Get the last index associated with the given key next to the given index (exclusive).
    #[inline]
    fn last_index_exclusive(&self, key: &K, index: usize) -> usize {
        // Walk right over the run of equal keys starting at `index`.
        self.keys[index..].iter()
            .take_while(|&k| k == key)
            .fold(index, |acc, _| acc + 1 )
    }
    /// Get the range associated with the given key.
    ///
    /// Returns `None` if there is no entry for the key.
    #[inline]
    fn range(&self, key: &K) -> Option<Range<usize>> {
        if let Some(index) = self.index(key) {
            return Some(Range {
                start: self.first_index(key, index),
                end: self.last_index_exclusive(key, index),
            });
        }
        None
    }
    /// Cheap pre-check: `key` can only be present if it lies between the
    /// smallest and largest stored keys. Always `false` for an empty map.
    #[inline]
    fn in_inner_bounds(&self, key: &K) -> bool {
        if let Some(first) = self.keys.first() {
            return first <= key && key <= self.keys.last().unwrap();
        }
        false
    }
    /// Returns the number of elements in the map.
    #[inline]
    pub fn len(&self) -> usize {
        self.keys.len()
    }
    /// Returns `true` if the map contains no elements.
    pub fn is_empty(&self) -> bool {
        self.keys.is_empty()
    }
    /// Shortens the vector, keeping the first `len` elements and dropping
    /// the rest.
    ///
    /// If `len` is greater than the vector's current length, this has no
    /// effect.
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        self.keys.truncate(len);
        self.values.truncate(len);
    }
    /// Inserts an element into the map at the key's position maintaining sorted order.
    ///
    /// If there is already an entry for that key (or multiple) it will be inserted right after
    /// maintaining insertion order.
    pub fn push(&mut self, key: K, value: V) {
        // Fast path: the key belongs at (or after) the current end.
        if self.keys.is_empty() || *self.keys.last().unwrap() <= key {
            self.keys.push(key);
            self.values.push(value);
        } else {
            let index = match self.keys.binary_search(&key) {
                Ok(index) => self.last_index_exclusive(&key, index),
                Err(index) => index,
            };
            self.keys.insert(index, key);
            self.values.insert(index, value);
        }
    }
    /// Removes and returns the last value associated with `key`, or `None`
    /// if the key is not present.
    pub fn pop(&mut self, key: &K) -> Option<V> {
        if !self.in_inner_bounds(key) {
            return None;
        }
        // Fast path: the key's run ends at the very end of the map.
        if self.keys.last().unwrap() == key {
            self.keys.pop();
            return self.values.pop();
        }
        // Otherwise remove the last element of the key's run in the middle.
        let index = match self.keys.binary_search(key) {
            Ok(index) => self.last_index_exclusive(key, index) - 1,
            Err(_) => return None,
        };
        self.keys.remove(index);
        Some(self.values.remove(index))
    }
    /// Returns the first value associated with the key, or None if it doesn't exist.
    pub fn first(&self, key: &K) -> Option<&V> {
        if let Some(index) = self.index(key) {
            return Some(&self.values[self.first_index(key, index)]);
        }
        None
    }
    /// Returns the last value associated with the key, or None if it doesn't exist.
    pub fn last(&self, key: &K) -> Option<&V> {
        if let Some(index) = self.index(key) {
            return Some(&self.values[self.last_index_exclusive(key, index) - 1]);
        }
        None
    }
    /// Removes all elements associated with the given key preserving sorted order.
    ///
    /// Returns all removed elements if there where some otherwise `None`.
    pub fn remove(&mut self, key: &K) -> Option<Vec<V>> {
        if let Some(range) = self.range(key) {
            self.keys.drain(range.clone());
            return Some(self.values.drain(range).collect());
        }
        None
    }
    /// Removes all elements associated with the given keys preserving sorted order.
    ///
    /// Assumes the given keys are in sorted order.
    pub fn remove_multi(&mut self, keys: &[K]) {
        if keys.is_empty() || !self.in_inner_bounds(keys.first().unwrap())
            || !self.in_inner_bounds(keys.last().unwrap())
        {
            return;
        }
        // Find the first stored index hit by any of the removal keys; nothing
        // before it needs to move.
        if let Some(start) = keys.iter()
            .map(|key| (key, self.keys.binary_search(key)))
            .find(|&(_,search_result)| search_result.is_ok())
            .map(|(key,search_result)| self.first_index(key, search_result.ok().unwrap()))
        {
            let len = self.len();
            // Number of entries marked for deletion so far; survivors are
            // shifted left by this amount via swaps.
            let mut del = 0;
            {
                let mut iter = keys.iter().peekable();
                for i in start..len {
                    // Skip removal keys smaller than the current entry.
                    while let Some(&k) = iter.peek() {
                        if *k < self.keys[i] {
                            iter.next();
                        } else {
                            break;
                        }
                    }
                    if iter.peek().is_some() && **iter.peek().unwrap() == self.keys[i] {
                        // Current entry matches a removal key: leave it in
                        // place for now, it will be swapped past the end.
                        del += 1;
                    } else if del > 0 {
                        // Survivor: compact it left over the deleted slots.
                        let j = i - del;
                        self.keys.swap(j, i);
                        self.values.swap(j, i);
                    }
                }
            }
            // Deleted entries have all been swapped into the tail; cut it off.
            if del > 0 {
                self.truncate(len - del);
            }
        }
    }
    /// Collects each key(s) as a tuple of the keys last index and quantity in a `Vec`.
    /// Assumes `elem` is sorted by key (checked by callers via `is_sorted`).
    fn index_count(&self, elem: &[(K, V)]) -> Vec<(usize, usize)> {
        let mut vec = Vec::new();
        let mut iter = elem.iter().peekable();
        let mut cnt = 1;
        while let Some(key) = iter.next().map(|&(ref key,_)| key) {
            // Keep counting while the next element shares the same key.
            if let Some(peek) = iter.peek().map(|&&(ref key,_)| key) {
                if key == peek {
                    cnt += 1;
                    continue;
                }
            }
            // Insertion point: right after any existing run of this key.
            let index = match self.keys.binary_search(key) {
                Ok(index) => self.last_index_exclusive(key, index),
                Err(index) => index,
            };
            vec.push((index, cnt.clone()));
            cnt = 1;
        }
        vec
    }
    /// Inserts all elements into the map at theirs key position maintaining sorted order.
    ///
    /// If there is already an entry (or multiple) for any key the corresponding element
    /// will be inserted right after maintaining insertion order.
    pub fn insert_multi(&mut self, elem: Vec<(K, V)>) {
        debug_assert!(is_sorted(&elem));
        let len = self.len();
        let elem_count = elem.len();
        let new_len = len + elem_count;
        self.keys.reserve_exact(elem_count);
        self.values.reserve_exact(elem_count);
        let mut index_count_iter = self.index_count(&elem).into_iter().rev();
        let mut elem_iter = elem.into_iter().rev();
        let mut remaining = elem_count as isize;
        let mut end_index = len;
        // SAFETY: capacity for `elem_count` extra slots was reserved above.
        // Working back-to-front, each pass shifts the not-yet-moved tail of
        // the old data right by `remaining` slots and writes the new entries
        // into the freed gap, so every slot in `0..new_len` is initialized
        // exactly once before `set_len` publishes the new length.
        unsafe {
            while let Some((index, index_count)) = index_count_iter.next() {
                let key_ptr = self.keys.as_mut_ptr().offset(index as isize);
                let value_ptr = self.values.as_mut_ptr().offset(index as isize);
                if index < end_index {
                    // Shift old entries `[index, end_index)` right to make room.
                    let count = end_index - index;
                    ptr::copy(key_ptr, key_ptr.offset(remaining), count);
                    ptr::copy(value_ptr, value_ptr.offset(remaining), count);
                    end_index -= count;
                }
                // Write this key's batch of new entries, back to front.
                for _ in 0..index_count {
                    remaining -= 1;
                    let (key, value) = elem_iter.next().unwrap();
                    ptr::write(key_ptr.offset(remaining), key);
                    ptr::write(value_ptr.offset(remaining), value);
                }
            }
            self.keys.set_len(new_len);
            self.values.set_len(new_len);
        }
    }
    /// Gets all elements associated with the given key as `slice`.
    ///
    /// If there isn't an entry for the given key the returned slice will be empty.
    pub fn get<'a>(&'a self, key: &K) -> &'a [V] {
        if self.values.is_empty() {
            &self.values
        } else if let Some(range) = self.range(key) {
            &self.values[range]
        } else {
            &self.values[..0]
        }
    }
    /// Gets all elements associated with the given key as mutable `slice`.
    ///
    /// If there isn't an entry for the given key the returned slice will be empty.
    pub fn get_mut<'a>(&'a mut self, key: &K) -> &'a mut [V] {
        if self.values.is_empty() {
            &mut self.values
        } else if let Some(range) = self.range(key) {
            &mut self.values[range]
        } else {
            &mut self.values[..0]
        }
    }
}
impl<K: Ord, V> From<Vec<(K, V)>> for Ommap<K, V> {
    /// Builds an `Ommap` from an already-sorted `Vec` of `(key, value)` pairs.
    fn from(other: Vec<(K, V)>) -> Self {
        debug_assert!(is_sorted(&other));
        let (keys, values): (Vec<K>, Vec<V>) = other.into_iter().unzip();
        Ommap { keys, values }
    }
}
impl<K: Ord, V> Index<K> for Ommap<K, V> {
    type Output = [V];

    /// `map[key]` yields the (possibly empty) slice of values stored under `key`.
    fn index(&self, key: K) -> &Self::Output {
        self.get(&key)
    }
}
impl<K: Ord, V> IndexMut<K> for Ommap<K, V> {
    /// Mutable counterpart of `Index`: the slice of values stored under `key`.
    fn index_mut(&mut self, key: K) -> &mut Self::Output {
        self.get_mut(&key)
    }
}
/// Returns true when the pairs in `xs` are in non-decreasing key order
/// (duplicate keys are allowed). Empty and single-element slices are sorted.
fn is_sorted<K: Ord, V>(xs: &[(K, V)]) -> bool {
    for pair in xs.windows(2) {
        if pair[0].0 > pair[1].0 {
            return false;
        }
    }
    true
}
/////////////////////////////////////
// Iterators
/////////////////////////////////////
impl<K, V> IntoIterator for Ommap<K, V> {
    type Item = (K, V);
    type IntoIter = Zip<vec::IntoIter<K>, vec::IntoIter<V>>;

    /// Consumes the map, yielding owned `(key, value)` pairs in key order.
    fn into_iter(self) -> Self::IntoIter {
        // `zip` accepts any `IntoIterator`, so the values vector can be
        // passed directly.
        self.keys.into_iter().zip(self.values)
    }
}
impl<'a, K, V> IntoIterator for &'a Ommap<K, V> {
    type Item = (&'a K, &'a V);
    type IntoIter = Zip<slice::Iter<'a, K>, slice::Iter<'a, V>>;

    /// Iterates over `(&key, &value)` pairs in key order.
    fn into_iter(self) -> Self::IntoIter {
        self.keys.iter().zip(&self.values)
    }
}
impl<'a, K, V> IntoIterator for &'a mut Ommap<K, V> {
    type Item = (&'a K, &'a mut V);
    type IntoIter = Zip<slice::Iter<'a, K>, slice::IterMut<'a, V>>;

    /// Iterates over `(&key, &mut value)` pairs in key order.
    fn into_iter(self) -> Self::IntoIter {
        // Disjoint field borrows: keys immutably, values mutably.
        self.keys.iter().zip(&mut self.values)
    }
}
impl<K, V> Ommap<K, V> {
    /// Iterates over `(&key, &value)` pairs in key order.
    pub fn iter<'a>(&'a self) -> Zip<slice::Iter<'a, K>, slice::Iter<'a, V>> {
        self.keys.iter().zip(self.values.iter())
    }
    /// Iterates over `(&key, &mut value)` pairs in key order.
    pub fn iter_mut<'a>(&'a mut self) -> Zip<slice::Iter<'a, K>, slice::IterMut<'a, V>> {
        self.keys.iter().zip(self.values.iter_mut())
    }
    /// Iterates over all keys in order (duplicates included).
    pub fn keys<'a>(&'a self) -> slice::Iter<'a, K> {
        self.keys.iter()
    }
    /// Iterates over all values in key order.
    pub fn values<'a>(&'a self) -> slice::Iter<'a, V> {
        self.values.iter()
    }
    /// Mutably iterates over all values in key order.
    pub fn values_mut<'a>(&'a mut self) -> slice::IterMut<'a, V> {
        self.values.iter_mut()
    }
}
/////////////////////////////////////
// Tests
/////////////////////////////////////
// Unit tests for `Ommap`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn first_last() {
        let map = Ommap::from(vec!((1, 0), (3, 1), (3, 2), (3, 3), (5, 0)));
        assert_eq!(map.first(&1), Some(&0));
        assert_eq!(map.first(&3), Some(&1));
        assert_eq!(map.first(&5), Some(&0));
        assert_eq!(map.last(&1), Some(&0));
        assert_eq!(map.last(&3), Some(&3));
        assert_eq!(map.last(&5), Some(&0));
    }

    #[test]
    fn insert_multi() {
        let mut map = Ommap::new();
        map.insert_multi(Vec::new());
        assert!(map.is_empty());
        map.insert_multi(vec!((1, 1)));
        {
            let mut iter = map.iter();
            assert_eq!(iter.next(), Some((&1, &1)));
            assert_eq!(iter.next(), None);
        }
        map.insert_multi(vec!((1, 12), (2, 2), (3, 3)));
        {
            let mut iter = map.iter();
            assert_eq!(iter.next(), Some((&1, &1)));
            assert_eq!(iter.next(), Some((&1, &12)));
            assert_eq!(iter.next(), Some((&2, &2)));
            assert_eq!(iter.next(), Some((&3, &3)));
            assert_eq!(iter.next(), None);
        }
        map.insert_multi(vec!((0, 0), (2, 22), (4, 4), (4, 42)));
        let mut iter = map.iter();
        assert_eq!(iter.next(), Some((&0, &0)));
        assert_eq!(iter.next(), Some((&1, &1)));
        assert_eq!(iter.next(), Some((&1, &12)));
        assert_eq!(iter.next(), Some((&2, &2)));
        assert_eq!(iter.next(), Some((&2, &22)));
        assert_eq!(iter.next(), Some((&3, &3)));
        assert_eq!(iter.next(), Some((&4, &4)));
        assert_eq!(iter.next(), Some((&4, &42)));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn is_sorted() {
        let mut xs = [(1,"zwei"),(3,"drei"),(0,"eins")];
        assert!(!super::is_sorted(&xs));
        xs.sort();
        assert!(super::is_sorted(&xs));
    }

    #[test]
    fn pop() {
        let mut map = Ommap::new();
        map.push(3, 3);
        map.push(2, 2_1);
        map.push(1, 1);
        map.push(2, 2_2);
        assert_eq!(map.pop(&1), Some(1));
        assert_eq!(map.pop(&1), None);
        assert_eq!(map.pop(&3), Some(3));
        assert_eq!(map.pop(&3), None);
        assert_eq!(map.pop(&2), Some(2_2));
        assert_eq!(map.pop(&2), Some(2_1));
        assert_eq!(map.pop(&2), None);
    }

    #[test]
    fn get() {
        let mut map = Ommap::new();
        assert_eq!(map.get(&42), &[]);
        map.push(3, 3);
        map.push(2, 2_1);
        map.push(1, 1);
        map.push(2, 2_2);
        map.push(4, 4);
        map.push(2, 2_3);
        let mut iter = map.get(&2).iter();
        assert_eq!(iter.next(), Some(&2_1));
        assert_eq!(iter.next(), Some(&2_2));
        assert_eq!(iter.next(), Some(&2_3));
        assert_eq!(iter.next(), None);
        assert_eq!(map.get(&42), &[]);
    }

    #[test]
    fn get_mut() {
        let mut map = Ommap::new();
        assert_eq!(map.get(&42), &mut []);
        map.push(3, 3);
        map.push(2, 2_1);
        map.push(1, 1);
        map.push(2, 2_2);
        map.push(4, 4);
        map.push(2, 2_3);
        {
            let mut iter = map.get_mut(&2).iter_mut();
            assert_eq!(iter.next(), Some(&mut 2_1));
            assert_eq!(iter.next(), Some(&mut 2_2));
            assert_eq!(iter.next(), Some(&mut 2_3));
            assert_eq!(iter.next(), None);
        }
        assert_eq!(map.get(&42), &mut []);
    }

    #[test]
    fn index() {
        let map = Ommap::from(vec!((1u8, 'a'), (1u8, 'b')));
        assert_eq!(&map[1u8], &['a', 'b']);
        assert_eq!(&map[2u8], &[]);
    }

    #[test]
    fn index_mut() {
        let mut map = Ommap::from(vec!((1u8, 'a'), (1u8, 'b')));
        assert_eq!(&mut map[1u8], &mut ['a', 'b']);
        assert_eq!(&mut map[2u8], &mut []);
    }

    #[test]
    fn remove() {
        let mut map = Ommap::new();
        map.push(3, 3);
        map.push(2, 2_1);
        map.push(1, 1);
        map.push(2, 2_2);
        map.push(2, 2_3);
        let v = map.remove(&2).unwrap();
        let mut iter = v.iter();
        assert_eq!(iter.next(), Some(&21));
        assert_eq!(iter.next(), Some(&22));
        assert_eq!(iter.next(), Some(&23));
        assert_eq!(iter.next(), None);
        let mut iter = map.iter();
        assert_eq!(iter.next(), Some((&1, &1)));
        assert_eq!(iter.next(), Some((&3, &3)));
        assert_eq!(iter.next(), None);
    }

    #[test]
    fn remove_multi() {
        let mut map = Ommap::new();
        map.push(3, 3);
        map.push(4, 4_1);
        map.push(2, 2_1);
        map.push(5, 5);
        map.push(4, 4_2);
        map.push(1, 1);
        map.push(2, 2_2);
        map.push(2, 2_3);
        map.remove_multi(&[2,4]);
        let mut iter = map.iter();
        assert_eq!(iter.next(), Some((&1, &1)));
        assert_eq!(iter.next(), Some((&3, &3)));
        assert_eq!(iter.next(), Some((&5, &5)));
        assert_eq!(iter.next(), None);
    }

    // Stress test over a million entries. Previously disabled by commenting
    // out `#[test]`; `#[ignore]` is the idiomatic way to keep it compiling
    // while excluding it from normal runs (`cargo test -- --ignored` runs it).
    #[test]
    #[ignore]
    fn remove_insert_on_heavy_load() {
        let count = 1_000_000;
        let mut map = Ommap::new();
        let mut is = Vec::with_capacity(count);
        let mut rs = Vec::with_capacity(count);
        for i in 0..count {
            is.push((i, i));
            rs.push(i);
        }
        map.insert_multi(is);
        assert_eq!(map.len(), count);
        map.remove_multi(&rs);
        assert_eq!(map.len(), 0);
    }

    #[test]
    fn into_iter() {
        let mut map = Ommap::new();
        map.push(3, 'c');
        map.push(1, 'a');
        map.push(2, 'b');
        let mut iter = map.into_iter();
        assert_eq!(iter.next(), Some((1, 'a')));
        assert_eq!(iter.next(), Some((2, 'b')));
        assert_eq!(iter.next(), Some((3, 'c')));
    }

    #[test]
    fn iter() {
        let mut map = Ommap::new();
        map.push(3, 'c');
        map.push(2, 'b');
        map.push(1, 'a');
        let mut iter = map.iter();
        assert_eq!(iter.next(), Some((&1, &'a')));
        assert_eq!(iter.next(), Some((&2, &'b')));
        assert_eq!(iter.next(), Some((&3, &'c')));
    }

    #[test]
    fn iter_mut() {
        let mut map = Ommap::new();
        map.push(1, 'a');
        map.push(3, 'c');
        map.push(2, 'b');
        let mut iter = map.iter_mut();
        assert_eq!(iter.next(), Some((&1, &mut 'a')));
        assert_eq!(iter.next(), Some((&2, &mut 'b')));
        assert_eq!(iter.next(), Some((&3, &mut 'c')));
    }

    #[test]
    fn values() {
        let mut map = Ommap::new();
        map.push(3, 'c');
        map.push(2, 'b');
        map.push(1, 'a');
        let mut iter = map.values();
        assert_eq!(iter.next(), Some(&'a'));
        assert_eq!(iter.next(), Some(&'b'));
        assert_eq!(iter.next(), Some(&'c'));
    }

    #[test]
    fn values_mut() {
        let mut map = Ommap::new();
        map.push(3, 'c');
        map.push(2, 'b');
        map.push(1, 'a');
        let mut iter = map.values_mut();
        assert_eq!(iter.next(), Some(&mut 'a'));
        assert_eq!(iter.next(), Some(&mut 'b'));
        assert_eq!(iter.next(), Some(&mut 'c'));
    }

    #[test]
    fn keys() {
        let mut map = Ommap::new();
        map.push(3, 'c');
        map.push(2, 'b');
        map.push(1, 'a');
        let mut iter = map.keys();
        assert_eq!(iter.next(), Some(&1));
        assert_eq!(iter.next(), Some(&2));
        assert_eq!(iter.next(), Some(&3));
    }
}
| 29.199697 | 95 | 0.482877 |
7a3d90ab7d1a84be62aa71f3a53fccbd1e49111a
| 11,074 |
use std::fmt;
use std::future::Future;
use std::pin::Pin;
use std::sync::Arc;
use futures::channel::oneshot::Sender as SyncSender;
use crate::actor::{Actor, AsyncContext};
use crate::address::Addr;
use crate::context::Context;
use crate::fut::ActorFuture;
use crate::WrapFuture;
/// Describes how to handle messages of a specific type.
///
/// Implementing `Handler` is a general way to handle incoming
/// messages, streams, and futures.
///
/// The type `M` is a message which can be handled by the actor.
#[allow(unused_variables)]
pub trait Handler<M>
where
    Self: Actor,
    M: Message,
{
    /// The type of value that this handler will return.
    ///
    /// It must implement [`MessageResponse`] so the framework knows how to
    /// deliver the value back through the response channel.
    type Result: MessageResponse<Self, M>;

    /// This method is called for every message received by this actor.
    ///
    /// `ctx` is the actor's execution context, through which asynchronous
    /// work can be spawned while handling the message.
    fn handle(&mut self, msg: M, ctx: &mut Self::Context) -> Self::Result;
}
/// Represents a message that can be handled by an actor.
pub trait Message {
    /// The type of value that this message will be resolved with if it is
    /// handled successfully.
    type Result: 'static;
}
/// Allow users to use `Arc<M>` as a message without having to re-impl `Message`.
/// The inner message's `Result` type is forwarded unchanged.
impl<M, R: 'static> Message for Arc<M>
where
    M: Message<Result = R>,
{
    type Result = R;
}

/// Allow users to use `Box<M>` as a message without having to re-impl `Message`.
/// The inner message's `Result` type is forwarded unchanged.
impl<M, R: 'static> Message for Box<M>
where
    M: Message<Result = R>,
{
    type Result = R;
}
/// A helper type that implements the `MessageResponse` trait.
///
/// Wrapping a plain `M::Result` in `MessageResult` lets any result type be
/// returned from a handler without its own `MessageResponse` impl.
pub struct MessageResult<M: Message>(pub M::Result);

/// A specialized actor future for asynchronous message handling.
pub type ResponseActFuture<A, I> = Box<dyn ActorFuture<Output = I, Actor = A>>;

/// A specialized future for asynchronous message handling.
pub type ResponseFuture<I> = Pin<Box<dyn Future<Output = I>>>;
/// A trait that defines a message response channel.
pub trait ResponseChannel<M: Message>: 'static {
    /// Returns true when the receiving side is gone and any response
    /// would be discarded.
    fn is_canceled(&self) -> bool;
    /// Sends `response` back to the waiting caller, consuming the channel.
    fn send(self, response: M::Result);
}

/// A trait which defines message responses.
pub trait MessageResponse<A: Actor, M: Message> {
    /// Delivers `self` as the message response through `tx`, if a channel
    /// is present (callers that do not await a reply pass `None`).
    fn handle<R: ResponseChannel<M>>(self, ctx: &mut A::Context, tx: Option<R>);
}
/// A oneshot sender is the standard response channel for `send()`-style
/// message delivery.
impl<M: Message + 'static> ResponseChannel<M> for SyncSender<M::Result>
where
    M::Result: Send,
{
    fn is_canceled(&self) -> bool {
        SyncSender::is_canceled(self)
    }

    fn send(self, response: M::Result) {
        // `Self::send` resolves to the inherent `oneshot::Sender::send`
        // (inherent methods take precedence over trait methods), so this is
        // not a recursive call. A send error (receiver dropped) is ignored.
        let _ = Self::send(self, response);
    }
}
/// The unit channel: used when no caller is waiting for a response.
impl<M: Message + 'static> ResponseChannel<M> for () {
    // Always reports canceled — there is never a receiver.
    fn is_canceled(&self) -> bool {
        true
    }
    // Responses are silently dropped.
    fn send(self, _: M::Result) {}
}
impl<A, M> MessageResponse<A, M> for MessageResult<M>
where
    A: Actor,
    M: Message,
{
    /// Forwards the wrapped result through the response channel, if any.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match tx {
            Some(tx) => tx.send(self.0),
            None => (),
        }
    }
}

impl<A, M, I: 'static, E: 'static> MessageResponse<A, M> for Result<I, E>
where
    A: Actor,
    M: Message<Result = Self>,
{
    /// Sends the `Result` itself as the response, if a channel is present.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match tx {
            Some(tx) => tx.send(self),
            None => (),
        }
    }
}

impl<A, M, I: 'static> MessageResponse<A, M> for Arc<I>
where
    A: Actor,
    M: Message<Result = Arc<I>>,
{
    /// Sends the `Arc` as the response, if a channel is present.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match tx {
            Some(tx) => tx.send(self),
            None => (),
        }
    }
}

impl<A, M, I: 'static> MessageResponse<A, M> for Option<I>
where
    A: Actor,
    M: Message<Result = Self>,
{
    /// Sends the `Option` as the response, if a channel is present.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match tx {
            Some(tx) => tx.send(self),
            None => (),
        }
    }
}

impl<A, M, B> MessageResponse<A, M> for Addr<B>
where
    A: Actor,
    M: Message<Result = Self>,
    B: Actor<Context = Context<B>>,
{
    /// Sends an actor address as the response, if a channel is present.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match tx {
            Some(tx) => tx.send(self),
            None => (),
        }
    }
}
impl<A, M, I: 'static, E: 'static> MessageResponse<A, M>
    for ResponseActFuture<A, Result<I, E>>
where
    A: Actor,
    M: Message<Result = Result<I, E>>,
    A::Context: AsyncContext<A>,
{
    /// Spawns the boxed actor future on the actor's own context; when it
    /// resolves, the result is forwarded through the response channel
    /// (if the caller is still waiting for one).
    fn handle<R: ResponseChannel<M>>(self, ctx: &mut A::Context, tx: Option<R>) {
        ctx.spawn(self.then(move |res, this, _| {
            if let Some(tx) = tx {
                tx.send(res);
            }
            // `then` must yield another ActorFuture; wrap a no-op future.
            async {}.into_actor(this)
        }));
    }
}
/// MessageResponse trait impl to enable the use of any I: 'static with Actor Handlers
/// Usage with Result<I,E>:
/// ```
/// # pub struct MyActorAsync {}
/// # impl Actor for MyActorAsync { type Context = actix::Context<Self>; }
/// # use actix::prelude::*;
/// # use core::pin::Pin;
///
/// pub struct MyQuestion{}
/// impl Message for MyQuestion {
///     type Result = Result<u8,u8>;
/// }
/// impl Handler<MyQuestion> for MyActorAsync {
///     type Result = Pin<Box<dyn std::future::Future<Output = Result<u8,u8> >>>;
///     fn handle(&mut self, question: MyQuestion, _ctx: &mut Context<Self>) -> Self::Result {
///         Box::pin(async {Ok(5)})
///     }
/// }
/// ```
/// Usage with Option<I>:
/// ```
/// # pub struct MyActorAsync {}
/// # impl Actor for MyActorAsync { type Context = actix::Context<Self>; }
/// # use actix::prelude::*;
/// # use core::pin::Pin;
/// pub struct MyQuestion{}
/// impl Message for MyQuestion {
///     type Result = Option<u8>;
/// }
/// impl Handler<MyQuestion> for MyActorAsync {
///     type Result = Pin<Box<dyn std::future::Future<Output = Option<u8>>>>;
///     fn handle(&mut self, question: MyQuestion, _ctx: &mut Context<Self>) -> Self::Result {
///         Box::pin(async {Some(5)})
///     }
/// }
/// ```
/// Usage with any I: 'static
/// ```
/// # pub struct MyActorAsync {}
/// # impl Actor for MyActorAsync { type Context = actix::Context<Self>; }
/// # use actix::prelude::*;
/// # use core::pin::Pin;
/// pub struct MyQuestion{}
/// impl Message for MyQuestion {
///     type Result = u8;
/// }
/// impl Handler<MyQuestion> for MyActorAsync {
///     type Result = Pin<Box<dyn std::future::Future<Output = u8>>>;
///     fn handle(&mut self, question: MyQuestion, _ctx: &mut Context<Self>) -> Self::Result {
///         Box::pin(async {5})
///     }
/// }
/// ```
impl<A, M, I: 'static> MessageResponse<A, M> for ResponseFuture<I>
where
    A: Actor,
    M::Result: Send,
    M: Message<Result = I>,
    A::Context: AsyncContext<A>,
{
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        // Drive the boxed future on the runtime and forward its output once
        // it resolves; the actor context is not needed here.
        actix_rt::spawn(async move {
            if let Some(tx) = tx {
                tx.send(self.await)
            }
        });
    }
}
// Internal representation of a `Response`: either an immediate value or a
// boxed future that will yield the value later.
enum ResponseTypeItem<I, E> {
    Result(Result<I, E>),
    Fut(Box<dyn Future<Output = Result<I, E>> + Unpin>),
}

/// Helper type for representing different type of message responses
pub struct Response<I, E> {
    // Hidden behind a private field so callers go through `reply`/`fut`.
    item: ResponseTypeItem<I, E>,
}
impl<I, E> fmt::Debug for Response<I, E> {
    /// Formats as `Response { item: "Result(_)" }` or `Response { item: "Fut(_)" }`.
    ///
    /// `&str` literals are passed directly; `Debug` renders them identically
    /// to the previously allocated `String`s, so output is unchanged while
    /// avoiding a heap allocation per call.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut fmt = fmt.debug_struct("Response");
        match self.item {
            ResponseTypeItem::Result(_) => fmt.field("item", &"Result(_)"),
            ResponseTypeItem::Fut(_) => fmt.field("item", &"Fut(_)"),
        }
        .finish()
    }
}
impl<I, E> Response<I, E> {
    /// Creates a response backed by a future; the result is produced when
    /// the future completes.
    pub fn fut<T>(fut: T) -> Self
    where
        T: Future<Output = Result<I, E>> + Unpin + 'static,
    {
        let item = ResponseTypeItem::Fut(Box::new(fut));
        Self { item }
    }

    /// Creates an immediate response from an already-computed result.
    pub fn reply(val: Result<I, E>) -> Self {
        let item = ResponseTypeItem::Result(val);
        Self { item }
    }
}
impl<A, M, I: 'static, E: 'static> MessageResponse<A, M> for Response<I, E>
where
    A: Actor,
    M: Message<Result = Result<I, E>>,
    A::Context: AsyncContext<A>,
{
    /// Completes the response: an immediate `Result` is sent straight to the
    /// channel, while a `Fut` variant is spawned on the runtime and its
    /// output sent once it resolves.
    fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
        match self.item {
            ResponseTypeItem::Fut(fut) => {
                // Spawned via actix_rt rather than the actor context — the
                // future does not need access to actor state.
                actix_rt::spawn(async move {
                    if let Some(tx) = tx {
                        tx.send(fut.await);
                    }
                });
            }
            ResponseTypeItem::Result(res) => {
                if let Some(tx) = tx {
                    tx.send(res);
                }
            }
        }
    }
}
// Internal representation of an `ActorResponse`: either an immediate value
// or a boxed actor future that will yield the value later.
enum ActorResponseTypeItem<A, I, E> {
    Result(Result<I, E>),
    Fut(Box<dyn ActorFuture<Output = Result<I, E>, Actor = A>>),
}

/// A helper type for representing different types of message responses.
pub struct ActorResponse<A, I, E> {
    // Hidden behind a private field so callers go through `reply`/`r#async`.
    item: ActorResponseTypeItem<A, I, E>,
}
impl<A, I, E> fmt::Debug for ActorResponse<A, I, E> {
    /// Formats as `ActorResponse { item: "Result(_)" }` or `… "Fut(_)" }`.
    ///
    /// `&str` literals are passed directly; `Debug` renders them identically
    /// to the previously allocated `String`s, so output is unchanged while
    /// avoiding a heap allocation per call.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        let mut fmt = fmt.debug_struct("ActorResponse");
        match self.item {
            ActorResponseTypeItem::Result(_) => fmt.field("item", &"Result(_)"),
            ActorResponseTypeItem::Fut(_) => fmt.field("item", &"Fut(_)"),
        }
        .finish()
    }
}
impl<A: Actor, I, E> ActorResponse<A, I, E> {
    /// Creates an immediate response from an already-computed result.
    pub fn reply(val: Result<I, E>) -> Self {
        let item = ActorResponseTypeItem::Result(val);
        Self { item }
    }

    /// Creates a response backed by an actor future; the result is produced
    /// when the future completes on the actor's context.
    pub fn r#async<T>(fut: T) -> Self
    where
        T: ActorFuture<Output = Result<I, E>, Actor = A> + 'static,
    {
        let item = ActorResponseTypeItem::Fut(Box::new(fut));
        Self { item }
    }
}
impl<A, M, I: 'static, E: 'static> MessageResponse<A, M> for ActorResponse<A, I, E>
where
    A: Actor,
    M: Message<Result = Result<I, E>>,
    A::Context: AsyncContext<A>,
{
    /// Completes the response: an immediate `Result` is sent straight to the
    /// channel, while a `Fut` variant is spawned on the actor's own context
    /// (so it can access actor state) and its output sent when it resolves.
    fn handle<R: ResponseChannel<M>>(self, ctx: &mut A::Context, tx: Option<R>) {
        match self.item {
            ActorResponseTypeItem::Fut(fut) => {
                let fut = fut.then(move |res, this, _| {
                    if let Some(tx) = tx {
                        tx.send(res)
                    }
                    // `then` must yield another ActorFuture; wrap a no-op.
                    async {}.into_actor(this)
                });
                ctx.spawn(fut);
            }
            ActorResponseTypeItem::Result(res) => {
                if let Some(tx) = tx {
                    tx.send(res);
                }
            }
        }
    }
}
macro_rules! SIMPLE_RESULT {
($type:ty) => {
impl<A, M> MessageResponse<A, M> for $type
where
A: Actor,
M: Message<Result = $type>,
{
fn handle<R: ResponseChannel<M>>(self, _: &mut A::Context, tx: Option<R>) {
if let Some(tx) = tx {
tx.send(self);
}
}
}
};
}
SIMPLE_RESULT!(());
SIMPLE_RESULT!(u8);
SIMPLE_RESULT!(u16);
SIMPLE_RESULT!(u32);
SIMPLE_RESULT!(u64);
SIMPLE_RESULT!(usize);
SIMPLE_RESULT!(i8);
SIMPLE_RESULT!(i16);
SIMPLE_RESULT!(i32);
SIMPLE_RESULT!(i64);
SIMPLE_RESULT!(isize);
SIMPLE_RESULT!(f32);
SIMPLE_RESULT!(f64);
SIMPLE_RESULT!(String);
SIMPLE_RESULT!(bool);
| 27.142157 | 94 | 0.563663 |
8aa0ccd6066cf14dfe0549d2448457c25358a40c
| 2,417 |
//! A collection of node-specific RPC methods.
//! Substrate provides the `sc-rpc` crate, which defines the core RPC layer
//! used by Substrate nodes. This file extends those RPC definitions with
//! capabilities that are specific to this project's runtime configuration.
#![warn(missing_docs)]
use std::sync::Arc;
use projekt_blakchain::{opaque::Block, AccountId, Balance, Index, BlockNumber};
/// Smart Contract RPC
use pallet_contracts_rpc::{Contracts, ContractsApi};
use sp_api::ProvideRuntimeApi;
use sp_blockchain::{Error as BlockChainError, HeaderMetadata, HeaderBackend};
use sp_block_builder::BlockBuilder;
pub use sc_rpc_api::DenyUnsafe;
use sp_transaction_pool::TransactionPool;
/// Full client dependencies.
pub struct FullDeps<C, P> {
    /// The client instance to use.
    pub client: Arc<C>,
    /// Transaction pool instance.
    pub pool: Arc<P>,
    /// Whether to deny unsafe RPC calls on this endpoint.
    pub deny_unsafe: DenyUnsafe,
}
/// Instantiate all full RPC extensions.
///
/// Wires the System, TransactionPayment and Contracts RPC APIs onto a single
/// `IoHandler` built from the given [`FullDeps`].
pub fn create_full<C, P>(
    deps: FullDeps<C, P>,
) -> jsonrpc_core::IoHandler<sc_rpc::Metadata> where
    C: ProvideRuntimeApi<Block>,
    C: HeaderBackend<Block> + HeaderMetadata<Block, Error=BlockChainError> + 'static,
    C: Send + Sync + 'static,
    C::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,
    C::Api: pallet_contracts_rpc::ContractsRuntimeApi<Block, AccountId, Balance, BlockNumber>,
    C::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>,
    C::Api: BlockBuilder<Block>,
    P: TransactionPool + 'static,
{
    use substrate_frame_rpc_system::{FullSystem, SystemApi};
    use pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi};

    let mut io = jsonrpc_core::IoHandler::default();
    let FullDeps {
        client,
        pool,
        deny_unsafe,
    } = deps;

    io.extend_with(
        SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe))
    );
    io.extend_with(
        TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone()))
    );
    // Extend this RPC with a custom API by using the following syntax.
    // `YourRpcStruct` should have a reference to a client, which is needed
    // to call into the runtime.
    // `io.extend_with(YourRpcTrait::to_delegate(YourRpcStruct::new(ReferenceToClient, ...)));`

    // Contracts RPC API extension. This is the last use of `client`, so it is
    // moved instead of cloned (the duplicated comment and redundant final
    // `client.clone()` were removed).
    io.extend_with(
        ContractsApi::to_delegate(Contracts::new(client))
    );
    io
}
| 31.802632 | 92 | 0.750103 |
16cb251f2a02bbb3a3d303ad4676c4dab7419abf
| 848 |
extern crate clap;
extern crate xor_utils;
use clap::{App, Arg};
use std::io;
use std::io::{Write};
/// Entry point: parses the assumed key length from the command line and
/// prints one candidate ASCII key per line.
fn main() {
    let matches = App::new("xor-genkeys")
        .version("0.2.0")
        .author("Gavyn Riebau")
        .about("Generates sets of ascii values that can be used as guessed keys when decrypting xor encrypted content")
        .arg(Arg::with_name("LENGTH")
            .help("The assumed key length")
            .default_value("1")
            .takes_value(true))
        .get_matches();

    // LENGTH always has a value (a default is configured); non-numeric input
    // is a hard error.
    let key_length: u32 = matches
        .value_of("LENGTH")
        .unwrap()
        .parse()
        .expect("Failed to parse LENGTH into u32. LENGTH must be numeric");

    for key in xor_utils::gen_ascii_keys(key_length) {
        println!("{}", key);
    }

    io::stdout().flush().expect("Failed to flush stdout.");
}
| 28.266667 | 119 | 0.608491 |
db8daa7c34b848a88beba564963b5d4810108b2e
| 2,736 |
use std::result::Result;
use backend::Backend;
use backend::command::Command;
use provider::error::Error;
use provider::Output;
use provider::package::shell::ShellProvider;
/// Package provider backed by Debian's `apt-get` / `dpkg-query` tools.
#[derive(Clone, Debug)]
pub struct Apt;
impl ShellProvider for Apt {
fn is_installed(&self,
name: &str,
version: Option<&str>,
b: &Backend)
-> Result<Output, Error> {
let c = match version {
Some(v) => {
let mut c =
Command::new(&format!("dpkg-query -f '${{Status}} ${{Version}}' -W {}", name));
c.pipe(&format!("grep -E '^(install|hold) ok installed {}$'", v));
c
}
None => {
let mut c = Command::new(&format!("dpkg-query -f '${{Status}}' -W {}", name));
c.pipe("grep -E '^(install|hold) ok installed$'");
c
}
};
let success = match b.run_command(c) {
Ok(r) => r.success,
Err(_) => false,
};
Ok(Output::Bool(success))
}
fn version(&self, name: &str, version: Option<&str>, b: &Backend) -> Result<Output, Error> {
let v = match version {
Some(v) => v.to_owned(),
None => {
let mut c =
Command::new(&format!("dpkg-query -f '${{Status}} ${{Version}}' -W {}", name));
c.pipe("sed -n 's/^install ok installed //p'");
let res = try!(b.run_command(c));
res.stdout
}
};
Ok(Output::Text(v))
}
fn install(&self, name: &str, version: Option<&str>, b: &Backend) -> Result<Output, Error> {
let package = match version {
Some(v) => [name, v].join("="),
None => name.to_owned(),
};
let c = Command::new(&format!("DEBIAN_FRONTEND='noninteractive' apt-get -y -o \
Dpkg::Options::='--force-confdef' -o \
Dpkg::Options::='--force-confold' install {}",
package));
let res = try!(b.run_command(c));
Ok(Output::Bool(res.success))
}
fn remove(&self, name: &str, _version: Option<&str>, b: &Backend) -> Result<Output, Error> {
let c = Command::new(&format!("DEBIAN_FRONTEND='noninteractive' apt-get -y remove {}",
name));
let success = match b.run_command(c) {
Ok(r) => r.success,
Err(_) => false,
};
Ok(Output::Bool(success))
}
fn box_clone(&self) -> Box<ShellProvider> {
Box::new((*self).clone())
}
}
| 33.777778 | 99 | 0.454678 |
ab0d22940e968a190a84576eb30bea03a89ce3af
| 3,865 |
//! A trait used to interact with the internal state of nodes within the [`Bracket`]
//!
//! [`Bracket`]: crate::bracket::Bracket
use crate::error::Error;
use anyhow::Context;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
/// An enum used to control the state of a [`GeneticNode`]
///
/// [`GeneticNode`]: crate::bracket::genetic_node
#[derive(Debug, Serialize, Deserialize, PartialEq, Clone, Copy)]
pub enum GeneticState {
    /// The node and its data have not finished initializing
    Initialize,
    /// The node is currently simulating a round against target data to determine the fitness of the population
    Simulate,
    /// The node is currently mutating members of its population and breeding new members
    Mutate,
    /// The node has finished processing for a given number of iterations (terminal state)
    Finish,
}
/// A trait used to interact with the internal state of nodes within the [`Bracket`]
///
/// [`Bracket`]: crate::bracket::Bracket
pub trait GeneticNode {
    /// Initializes a new instance of a [`GeneticState`].
    ///
    /// # Examples
    /// TODO
    fn initialize() -> Result<Box<Self>, Error>;

    /// Runs one simulation round to evaluate the node's population
    /// (see [`GeneticState::Simulate`]).
    fn simulate(&mut self) -> Result<(), Error>;

    /// Mutates members in a population and/or crossbreeds them to produce new offspring.
    ///
    /// # Examples
    /// TODO
    fn mutate(&mut self) -> Result<(), Error>;

    /// Combines `left` and `right` into a new node — presumably merging
    /// their populations; confirm against concrete implementations.
    fn merge(left: &Self, right: &Self) -> Result<Box<Self>, Error>;
}
/// Used externally to wrap a node implementing the [`GeneticNode`] trait. Processes state transitions for the given node as
/// well as signal recovery. Transition states are given by [`GeneticState`]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GeneticNodeWrapper<T>
where
    T: Clone,
{
    /// The wrapped node; `None` until the `Initialize` step has run.
    pub node: Option<T>,
    /// Current position in the node's state machine.
    state: GeneticState,
    /// Number of simulate/mutate generations completed so far.
    generation: u64,
    /// Generation count at which the node transitions to `Finish`.
    pub total_generations: u64,
    /// Unique identifier for this node.
    id: uuid::Uuid,
}
impl<T> GeneticNodeWrapper<T>
where
    T: GeneticNode + Debug + Clone,
{
    /// Returns this node's unique id.
    pub fn get_id(&self) -> uuid::Uuid {
        self.id
    }

    /// Creates an empty wrapper in the `Initialize` state; the inner node is
    /// built on the first call to `process_node`.
    pub fn new(total_generations: u64) -> Self {
        GeneticNodeWrapper {
            node: None,
            state: GeneticState::Initialize,
            generation: 0,
            total_generations,
            id: uuid::Uuid::new_v4(),
        }
    }

    /// Wraps an already-initialized node, starting directly in `Simulate`.
    pub fn from(data: T, total_generations: u64, id: uuid::Uuid) -> Self {
        GeneticNodeWrapper {
            node: Some(data),
            state: GeneticState::Simulate,
            generation: 0,
            total_generations,
            id,
        }
    }

    /// Returns the wrapper's current state.
    pub fn state(&self) -> &GeneticState {
        &self.state
    }

    /// Advances the state machine one step:
    /// `Initialize -> Simulate -> (Mutate -> Simulate)* -> Finish`,
    /// returning the state reached after this step.
    ///
    /// # Panics
    /// Panics if called in a non-`Initialize` state while the inner node is
    /// still `None`.
    pub fn process_node(&mut self) -> Result<GeneticState, Error> {
        match (&self.state, &self.node) {
            (GeneticState::Initialize, _) => {
                self.node = Some(*T::initialize()?);
                self.state = GeneticState::Simulate;
            }
            (GeneticState::Simulate, Some(_)) => {
                self.node
                    .as_mut()
                    .unwrap()
                    .simulate()
                    .with_context(|| format!("Error simulating node: {:?}", self))?;
                // Stop once the configured number of generations has run.
                self.state = if self.generation >= self.total_generations {
                    GeneticState::Finish
                } else {
                    GeneticState::Mutate
                };
            }
            (GeneticState::Mutate, Some(_)) => {
                self.node
                    .as_mut()
                    .unwrap()
                    .mutate()
                    .with_context(|| format!("Error mutating node: {:?}", self))?;
                self.generation += 1;
                self.state = GeneticState::Simulate;
            }
            // A finished node is a no-op.
            (GeneticState::Finish, Some(_)) => (),
            _ => panic!("Error processing node {:?}", self.node),
        }
        Ok(self.state)
    }
}
| 29.96124 | 124 | 0.560414 |
ff11c878673b09f6f226e1eeb4e2381ec687da41
| 3,865 |
// This file is part of Sulis, a turn based RPG written in Rust.
// Copyright 2019 Jared Stephen
//
// Sulis is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Sulis is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Sulis. If not, see <http://www.gnu.org/licenses/>
use crate::{EntityState, PropState};
use sulis_core::util::Point;
use sulis_module::area::Transition;
/// Anything with an axis-aligned position and size in area coordinates.
pub trait Locatable {
    /// Returns the (width, height) of this object.
    fn size(&self) -> (f32, f32);
    /// Returns the (x, y) of this object's origin (minimum) corner.
    fn pos(&self) -> (f32, f32);
}
// Entities report their creature size and map location.
impl Locatable for EntityState {
    fn size(&self) -> (f32, f32) {
        (self.size.width as f32, self.size.height as f32)
    }
    fn pos(&self) -> (f32, f32) {
        (self.location.x as f32, self.location.y as f32)
    }
}

// Props use their prototype's size and their placed location.
impl Locatable for PropState {
    fn size(&self) -> (f32, f32) {
        (self.prop.size.width as f32, self.prop.size.height as f32)
    }
    fn pos(&self) -> (f32, f32) {
        (self.location.x as f32, self.location.y as f32)
    }
}

// Transitions cover their configured size, anchored at the `from` point.
impl Locatable for Transition {
    fn size(&self) -> (f32, f32) {
        (self.size.width as f32, self.size.height as f32)
    }
    fn pos(&self) -> (f32, f32) {
        (self.from.x as f32, self.from.y as f32)
    }
}

// A bare point is treated as a 1x1 tile.
impl Locatable for Point {
    fn size(&self) -> (f32, f32) {
        (1.0, 1.0)
    }
    fn pos(&self) -> (f32, f32) {
        (self.x as f32, self.y as f32)
    }
}

// A raw (x, y) pair is likewise treated as a 1x1 tile.
impl Locatable for (f32, f32) {
    fn size(&self) -> (f32, f32) {
        (1.0, 1.0)
    }
    fn pos(&self) -> (f32, f32) {
        (self.0, self.1)
    }
}
/// Center of `target`, truncated to integer coordinates.
pub fn center_i32<T: Locatable>(target: &T) -> (i32, i32) {
    let (cx, cy) = center(target);
    (cx as i32, cy as i32)
}

/// Center point of `target`: its origin plus half its size on each axis.
pub fn center<T: Locatable>(target: &T) -> (f32, f32) {
    let (px, py) = target.pos();
    let (width, height) = target.size();
    (px + width / 2.0, py + height / 2.0)
}
/// Distance from the center of `parent` to the closest point of the
/// axis-aligned rectangle centered on `target` (0.0 when the center lies
/// inside the rectangle on both axes).
pub fn dist(parent: &impl Locatable, target: &impl Locatable) -> f32 {
    let (cx, cy) = center(parent);
    let (tx, ty) = center(target);
    let (w, h) = target.size();
    // Per-axis gap between the point and the rectangle edge; a negative gap
    // means the point already overlaps that axis, so it counts as zero.
    let gap = |delta: f32, half_extent: f32| {
        let g = delta.abs() - half_extent;
        if g < 0.0 { 0.0 } else { g }
    };
    let dx = gap(cx - tx, w / 2.0);
    let dy = gap(cy - ty, h / 2.0);
    dx.hypot(dy)
}
/// True when `target` lies no farther than `max_dist` from `parent`.
pub fn is_within(parent: &impl Locatable, target: &impl Locatable, max_dist: f32) -> bool {
    let d = dist(parent, target);
    d <= max_dist
}

/// True when `target` is inside `parent`'s weapon attack range.
pub fn is_within_attack_dist<T: Locatable>(parent: &EntityState, target: &T) -> bool {
    let reach = parent.actor.stats.attack_distance();
    is_within(parent, target, reach)
}

/// True when `target` is inside `parent`'s touch range.
pub fn is_within_touch_dist<T: Locatable>(parent: &EntityState, target: &T) -> bool {
    let reach = parent.actor.stats.touch_distance();
    is_within(parent, target, reach)
}
/// Returns true if `attacker` currently threatens `defender`: a living,
/// melee-capable, non-disabled attacker that is hostile to the defender and
/// within attack distance. Checks short-circuit in the same order as before.
pub fn is_threat(attacker: &EntityState, defender: &EntityState) -> bool {
    attacker.actor.stats.attack_is_melee()
        && !attacker.actor.stats.attack_disabled
        && !attacker.actor.is_dead()
        && attacker.is_hostile(defender)
        && is_within_attack_dist(attacker, defender)
}
/// Returns true if `attacker` is currently able to attack `defender`:
/// enough AP, attacks not disabled, alive, hostile, target not hidden,
/// and within attack distance. Checks short-circuit in the same order as
/// before.
pub fn can_attack(attacker: &EntityState, defender: &EntityState) -> bool {
    attacker.actor.has_ap_to_attack()
        && !attacker.actor.stats.attack_disabled
        && !attacker.actor.is_dead()
        && attacker.is_hostile(defender)
        && !defender.actor.stats.hidden
        && is_within_attack_dist(attacker, defender)
}
| 25.427632 | 95 | 0.602587 |
c1935f17fd1239926d1f461720e71dd258ee89ca
| 21,673 |
//
//! Copyright 2020 Alibaba Group Holding Limited.
//!
//! Licensed under the Apache License, Version 2.0 (the "License");
//! you may not use this file except in compliance with the License.
//! You may obtain a copy of the License at
//!
//! http://www.apache.org/licenses/LICENSE-2.0
//!
//! Unless required by applicable law or agreed to in writing, software
//! distributed under the License is distributed on an "AS IS" BASIS,
//! WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//! See the License for the specific language governing permissions and
//! limitations under the License.
use maxgraph_store::db::api::{Vertex, Edge, GraphStorage, GraphResult, EdgeResultIter, PropIter, ValueRef, ValueType, EdgeDirection, VertexWrapper, VertexResultIter, GraphConfigBuilder, GraphConfig, GraphDef};
use std::sync::Arc;
use maxgraph_store::api::{GlobalGraphQuery, SnapshotId, PartitionVertexIds, LabelId, Condition, PropId, VertexId, PartitionId, PartitionLabeledVertexIds};
use maxgraph_store::db::graph::vertex::VertexImpl;
use maxgraph_store::db::graph::edge::EdgeImpl;
use store::v2::edge_iterator::EdgeIterator;
use store::{LocalStoreVertex, LocalStoreEdge};
use maxgraph_store::api::graph_schema::Schema;
use std::collections::{HashMap, HashSet};
use std::vec::IntoIter;
use maxgraph_store::api::prelude::Property;
use std::iter::FromIterator;
use itertools::Itertools;
use maxgraph_store::api::graph_partition::GraphPartitionManager;
use maxgraph_store::config::StoreConfig;
use maxgraph_store::db::graph::store::GraphStore;
use store::v2::global_graph_schema::GlobalGraphSchema;
/// A graph view that stitches together per-partition `GraphStore` instances
/// plus routing metadata for partitions served by other servers.
pub struct GlobalGraph {
    /// Storage for each locally hosted partition, keyed by partition id.
    graph_partitions: HashMap<PartitionId, Arc<GraphStore>>,
    /// Total number of partitions in the graph.
    total_partition: u32,
    /// Routing table: partition id -> id of the server currently serving it.
    partition_to_server: HashMap<PartitionId, u32>,
}
// SAFETY(review): asserts that every field of `GlobalGraph` may be shared and
// moved across threads. Partition stores are behind `Arc<GraphStore>`, which
// presumably is itself thread-safe — TODO confirm against the `GraphStore`
// implementation before relying on these impls.
unsafe impl Send for GlobalGraph {}
unsafe impl Sync for GlobalGraph {}
impl GlobalGraph {
pub fn empty(total_partition: u32) -> Self {
GlobalGraph {
graph_partitions: HashMap::new(),
total_partition,
partition_to_server: HashMap::new(),
}
}
pub fn add_partition(&mut self, partition_id: PartitionId, graph_store: Arc<GraphStore>) {
self.graph_partitions.insert(partition_id, graph_store);
}
pub fn update_partition_routing(&mut self, partition_id: PartitionId, server_id: u32) {
self.partition_to_server.insert(partition_id, server_id);
}
fn convert_label_id(label_id: Option<LabelId>) -> Option<i32> {
match label_id {
None => {None},
Some(u_label_id) => { Some(u_label_id as i32) },
}
}
    /// Converts an optional store-level condition into the DB-layer condition
    /// type. Only the `None` case is currently supported; passing an actual
    /// condition panics via `unimplemented!`.
    fn convert_condition(condition: Option<&Condition>) -> Option<Arc<maxgraph_store::db::api::Condition>> {
        match condition {
            None => {None},
            Some(_) => {unimplemented!()},
        }
    }
    /// Copies a store [`Vertex`] into a `LocalStoreVertex`, keeping only the
    /// properties listed in `output_prop_ids` (all properties when `None`).
    fn parse_vertex<V: Vertex>(vertex_wrapper: V, output_prop_ids: Option<&Vec<PropId>>)
                               -> LocalStoreVertex {
        let mut vertex = LocalStoreVertex::new(vertex_wrapper.get_id(), vertex_wrapper.get_label() as u32);
        let mut property_iter = vertex_wrapper.get_properties_iter();
        // `prop_set` is declared in the outer scope so that the HashSet
        // borrowed by `prop_filter` stays alive for the whole loop below.
        let prop_set;
        let prop_filter = if let Some(prop_ids) = output_prop_ids {
            prop_set = HashSet::<&u32>::from_iter(prop_ids);
            Some(&prop_set)
        } else {
            None
        };
        while let Some((property_id, value_ref)) = property_iter.next() {
            if let Some(filter) = prop_filter {
                // Skip properties the caller did not request.
                if !filter.contains(&(property_id as u32)) {
                    continue;
                }
            }
            vertex.add_property(property_id as u32, Self::parse_val_ref(value_ref).unwrap());
        }
        vertex
    }
/// Materialize a store edge into a `LocalStoreEdge` (endpoints, edge label,
/// inner edge id), copying either all properties or only those listed in
/// `output_prop_ids`.
fn parse_edge<EE: Edge>(item: EE, output_prop_ids: Option<&Vec<PropId>>) -> LocalStoreEdge {
    let src_vertex = LocalStoreVertex::new(item.get_src_id(), item.get_kind().src_vertex_label_id as u32);
    let dst_vertex = LocalStoreVertex::new(item.get_dst_id(), item.get_kind().dst_vertex_label_id as u32);
    let mut edge = LocalStoreEdge::new(src_vertex, dst_vertex, item.get_kind().edge_label_id as u32, item.get_id().inner_id);
    // Same up-front filter construction as `parse_vertex`, replacing the
    // two-variable `prop_set`/`prop_filter` pattern.
    let prop_filter: Option<HashSet<&u32>> =
        output_prop_ids.map(|prop_ids| HashSet::from_iter(prop_ids));
    let mut property_iter = item.get_properties_iter();
    while let Some((property_id, value_ref)) = property_iter.next() {
        if let Some(filter) = &prop_filter {
            if !filter.contains(&(property_id as u32)) {
                continue;
            }
        }
        // NOTE(review): `unwrap` preserves the original panic-on-bad-value
        // behavior.
        edge.add_property(property_id as u32, Self::parse_val_ref(value_ref).unwrap());
    }
    // `return` keyword dropped; tail expression is idiomatic.
    edge
}
/// Decode a typed store `ValueRef` into the engine's `Property` value,
/// covering every scalar and list variant of `ValueType`.
///
/// # Errors
/// Propagates any decode error from the underlying typed accessor (e.g. a
/// type mismatch inside the stored bytes).
fn parse_val_ref(val_ref: ValueRef) -> GraphResult<Property> {
let p = match val_ref.get_type() {
ValueType::Bool => {
Property::Bool(val_ref.get_bool()?)
},
ValueType::Char => {
Property::Char(val_ref.get_char()?)
},
ValueType::Short => {
Property::Short(val_ref.get_short()?)
},
ValueType::Int => {
Property::Int(val_ref.get_int()?)
},
ValueType::Long => {
Property::Long(val_ref.get_long()?)
},
ValueType::Float => {
Property::Float(val_ref.get_float()?)
},
ValueType::Double => {
Property::Double(val_ref.get_double()?)
},
ValueType::String => {
// Borrowed &str from the store is copied into an owned String.
Property::String(String::from(val_ref.get_str()?))
},
ValueType::Bytes => {
Property::Bytes(Vec::from(val_ref.get_bytes()?))
},
ValueType::IntList => {
Property::ListInt(val_ref.get_int_list()?.iter().collect())
},
ValueType::LongList => {
Property::ListLong(val_ref.get_long_list()?.iter().collect())
},
ValueType::FloatList => {
Property::ListFloat(val_ref.get_float_list()?.iter().collect())
},
ValueType::DoubleList => {
Property::ListDouble(val_ref.get_double_list()?.iter().collect())
},
ValueType::StringList => {
Property::ListString(val_ref.get_str_list()?.iter().map(String::from).collect())
},
};
Ok(p)
}
/// Open an iterator over `src_id`'s edges in `direction` on one partition.
/// Returns `Ok(None)` when the partition is not hosted by this process.
/// `EdgeDirection::Both` is not supported and panics.
fn get_edges_iter<'a>(&'a self, si: SnapshotId, partition_id: PartitionId, src_id: VertexId, label: Option<LabelId>,
                      condition: Option<&Condition>, direction: EdgeDirection) -> GraphResult<Option<Box<dyn EdgeResultIter<E=EdgeImpl> + 'a>>> {
    // Guard clause instead of the nested `match` on the partition lookup.
    let partition = match self.graph_partitions.get(&partition_id) {
        Some(p) => p,
        None => return Ok(None),
    };
    let iter = match direction {
        EdgeDirection::In => {
            partition.get_in_edges(si, src_id, Self::convert_label_id(label), Self::convert_condition(condition))?
        }
        EdgeDirection::Out => {
            partition.get_out_edges(si, src_id, Self::convert_label_id(label), Self::convert_condition(condition))?
        }
        EdgeDirection::Both => unimplemented!(),
    };
    Ok(Some(iter))
}
/// Look up one vertex on one partition; `Ok(None)` when the partition is not
/// hosted locally or when the store has no such vertex.
fn get_vertex(&self, si: SnapshotId, partition_id: PartitionId, id: VertexId, label_id: Option<LabelId>)
              -> GraphResult<Option<VertexWrapper<VertexImpl>>> {
    if let Some(partition) = self.graph_partitions.get(&partition_id) {
        Ok(partition.get_vertex(si, id, Self::convert_label_id(label_id))?)
    } else {
        Ok(None)
    }
}
/// For every (partition, vertex) pair, open the edge iterators matching
/// `edge_labels` (all labels when the list is empty) and pair them with the
/// source vertex id. Partitions not hosted locally contribute no iterators.
fn get_edge_iter_vec<'a>(&'a self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>,
                         condition: Option<&Condition>, direction: EdgeDirection)
                         -> GraphResult<Vec<(i64, Vec<Box<dyn EdgeResultIter<E=EdgeImpl> + 'a>>)>> {
    let mut res = vec![];
    for (partition_id, vertex_ids) in src_ids {
        for vertex_id in vertex_ids {
            // An empty label list means "no label filter"; represent it as a
            // single `None` entry so both cases share one loop.
            let wanted: Vec<Option<LabelId>> = if edge_labels.is_empty() {
                vec![None]
            } else {
                edge_labels.iter().map(|label| Some(*label)).collect()
            };
            let mut edge_iters = vec![];
            for label in wanted {
                if let Some(iter_res) = self.get_edges_iter(si, partition_id, vertex_id, label, condition, direction)? {
                    edge_iters.push(iter_res);
                }
            }
            res.push((vertex_id, edge_iters));
        }
    }
    Ok(res)
}
/// Open a full-scan vertex iterator on one partition; `Ok(None)` when the
/// partition is not hosted locally.
fn scan_vertex_iter<'a>(&'a self, si: SnapshotId, partition_id: PartitionId, label: Option<LabelId>, condition: Option<&Condition>)
                        -> GraphResult<Option<Box<dyn VertexResultIter<V=VertexImpl> + 'a>>> {
    if let Some(partition) = self.graph_partitions.get(&partition_id) {
        Ok(Some(partition.query_vertices(si, Self::convert_label_id(label), Self::convert_condition(condition))?))
    } else {
        Ok(None)
    }
}
/// Open vertex scan iterators for every requested partition/label combination.
/// An empty `partitions` list means "all locally hosted partitions"; an empty
/// `labels` list means "no label filter".
fn scan_vertex_iter_vec<'a>(&'a self, si: SnapshotId, labels: &Vec<LabelId>, partitions: &Vec<PartitionId>, condition: Option<&Condition>)
                            -> GraphResult<Vec<Box<dyn VertexResultIter<V=VertexImpl> + 'a>>> {
    let mut res = vec![];
    let partition_ids: Vec<PartitionId> = if partitions.is_empty() {
        // `.copied()` + std `collect` replace `.map(|x| *x)` + itertools'
        // `collect_vec()` — no behavior change, fewer moving parts.
        self.graph_partitions.keys().copied().collect()
    } else {
        partitions.clone()
    };
    for partition_id in partition_ids {
        if labels.is_empty() {
            if let Some(iter) = self.scan_vertex_iter(si, partition_id, None, condition)? {
                res.push(iter);
            }
        } else {
            for label_id in labels {
                if let Some(iter) = self.scan_vertex_iter(si, partition_id, Some(*label_id), condition)? {
                    res.push(iter);
                }
            }
        }
    }
    Ok(res)
}
/// Open a full-scan edge iterator on one partition; `Ok(None)` when the
/// partition is not hosted locally.
fn scan_edge_iter<'a>(&'a self, si: SnapshotId, partition_id: PartitionId, label: Option<LabelId>, condition: Option<&Condition>)
                      -> GraphResult<Option<Box<dyn EdgeResultIter<E=EdgeImpl> + 'a>>> {
    if let Some(partition) = self.graph_partitions.get(&partition_id) {
        Ok(Some(partition.query_edges(si, Self::convert_label_id(label), Self::convert_condition(condition))?))
    } else {
        Ok(None)
    }
}
/// Open edge scan iterators for every requested partition/label combination.
/// An empty `partitions` list means "all locally hosted partitions"; an empty
/// `labels` list means "no label filter".
fn scan_edge_iter_vec<'a>(&'a self, si: SnapshotId, labels: &Vec<LabelId>, partitions: &Vec<PartitionId>, condition: Option<&Condition>)
                          -> GraphResult<Vec<Box<dyn EdgeResultIter<E=EdgeImpl> + 'a>>> {
    let mut res = vec![];
    let partition_ids: Vec<PartitionId> = if partitions.is_empty() {
        // `.copied()` + std `collect` replace `.map(|x| *x)` + itertools'
        // `collect_vec()` — no behavior change, fewer moving parts.
        self.graph_partitions.keys().copied().collect()
    } else {
        partitions.clone()
    };
    for partition_id in partition_ids {
        if labels.is_empty() {
            if let Some(iter) = self.scan_edge_iter(si, partition_id, None, condition)? {
                res.push(iter);
            }
        } else {
            for label_id in labels {
                if let Some(iter) = self.scan_edge_iter(si, partition_id, Some(*label_id), condition)? {
                    res.push(iter);
                }
            }
        }
    }
    Ok(res)
}
/// Interpret a raw limit argument: `0` means "no limit" and maps to
/// `usize::MAX`; any positive value is used as-is.
fn get_limit(raw_limit: usize) -> usize {
    if raw_limit > 0 {
        raw_limit
    } else {
        // `usize::MAX` replaces the soft-deprecated `usize::max_value()`.
        usize::MAX
    }
}
}
impl GlobalGraphQuery for GlobalGraph {
// Concrete vertex/edge representations handed back to the query layer, and
// the owned iterators used to stream them. Results are fully materialized
// into `Vec`s before being returned (hence `vec::IntoIter`).
type V = LocalStoreVertex;
type E = LocalStoreEdge;
type VI = IntoIter<LocalStoreVertex>;
type EI = IntoIter<LocalStoreEdge>;
// For each source vertex, stream the (id, label) of its out-neighbours,
// capped at `limit` per source (0 = unlimited). `dedup_prop_ids` is accepted
// but not used by this implementation.
// NOTE(review): the `unwrap` panics if any partition read fails — confirm
// that is the intended failure mode for query execution.
fn get_out_vertex_ids(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>,
                      dedup_prop_ids: Option<&Vec<PropId>>, limit: usize) -> Box<dyn Iterator<Item=(VertexId, Self::VI)>> {
let res = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::Out).unwrap();
Box::new(
res.into_iter().map(|(vertex_id, edge_iter_vec)|
(
vertex_id,
// Each out-edge is projected onto its destination vertex.
EdgeIterator::new(&edge_iter_vec)
.map(|item| LocalStoreVertex::new(item.get_dst_id(), item.get_kind().dst_vertex_label_id as u32))
.take(Self::get_limit(limit))
.collect::<Vec<LocalStoreVertex>>().into_iter()
)
).collect::<Vec<(i64, IntoIter<LocalStoreVertex>)>>().into_iter()
)
}
// For each source vertex, materialize its outgoing edges (properties filtered
// by `output_prop_ids`), capped at `limit` per source (0 = unlimited).
// `dedup_prop_ids` is accepted but unused here.
fn get_out_edges(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>,
                 dedup_prop_ids: Option<&Vec<PropId>>, output_prop_ids: Option<&Vec<PropId>>, limit: usize) -> Box<dyn Iterator<Item=(VertexId, Self::EI)>> {
    let iters = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::Out).unwrap();
    let collected: Vec<(i64, IntoIter<LocalStoreEdge>)> = iters
        .into_iter()
        .map(|(src_id, edge_iter_vec)| {
            let edges: Vec<LocalStoreEdge> = EdgeIterator::new(&edge_iter_vec)
                .map(|e| Self::parse_edge(e, output_prop_ids))
                .take(Self::get_limit(limit))
                .collect();
            (src_id, edges.into_iter())
        })
        .collect();
    Box::new(collected.into_iter())
}
// Mirror of `get_out_vertex_ids` for incoming edges: for each source vertex,
// stream the (id, label) of its in-neighbours, capped at `limit` per source
// (0 = unlimited). `dedup_prop_ids` is accepted but unused.
fn get_in_vertex_ids(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>,
                     dedup_prop_ids: Option<&Vec<PropId>>, limit: usize) -> Box<dyn Iterator<Item=(i64, Self::VI)>> {
let res = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::In).unwrap();
Box::new(
res.into_iter().map(|(vertex_id, edge_iter_vec)|
(
vertex_id,
// Each in-edge is projected onto its source vertex.
EdgeIterator::new(&edge_iter_vec)
.map(|item| LocalStoreVertex::new(item.get_src_id(), item.get_kind().src_vertex_label_id as u32))
.take(Self::get_limit(limit))
.collect::<Vec<LocalStoreVertex>>().into_iter()
)
).collect::<Vec<(i64, IntoIter<LocalStoreVertex>)>>().into_iter()
)
}
// Mirror of `get_out_edges` for incoming edges: for each source vertex,
// materialize its incoming edges (properties filtered by `output_prop_ids`),
// capped at `limit` per source (0 = unlimited). `dedup_prop_ids` is unused.
fn get_in_edges(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>,
                dedup_prop_ids: Option<&Vec<PropId>>, output_prop_ids: Option<&Vec<PropId>>, limit: usize) -> Box<dyn Iterator<Item=(VertexId, Self::EI)>> {
let res = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::In).unwrap();
Box::new(res.into_iter().map(|(vertex_id, edge_iter_vec)| {
(
vertex_id,
EdgeIterator::new(&edge_iter_vec)
.map(|e| Self::parse_edge(e, output_prop_ids))
.take(Self::get_limit(limit))
.collect::<Vec<LocalStoreEdge>>()
.into_iter()
)
}).collect::<Vec<(i64, IntoIter<LocalStoreEdge>)>>().into_iter())
}
// For each source vertex, count its outgoing edges matching the given labels
// and condition without materializing them.
fn count_out_edges(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>) -> Box<dyn Iterator<Item=(i64, usize)>> {
    let iters = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::Out).unwrap();
    let counts: Vec<(i64, usize)> = iters
        .into_iter()
        .map(|(src_id, edge_iter_vec)| (src_id, EdgeIterator::new(&edge_iter_vec).count()))
        .collect();
    Box::new(counts.into_iter())
}
// Mirror of `count_out_edges` for incoming edges: per-source edge counts
// without materializing the edges themselves.
fn count_in_edges(&self, si: SnapshotId, src_ids: Vec<PartitionVertexIds>, edge_labels: &Vec<LabelId>, condition: Option<&Condition>) -> Box<dyn Iterator<Item=(i64, usize)>> {
let res = self.get_edge_iter_vec(si, src_ids, edge_labels, condition, EdgeDirection::In).unwrap();
Box::new(res.into_iter().map(|(vertex_id, edge_iter_vec)| {
(
vertex_id,
EdgeIterator::new(&edge_iter_vec).count()
)
}).collect::<Vec<(i64, usize)>>().into_iter())
}
// Fetch vertices by (partition, label, id) and materialize the properties
// selected by `output_prop_ids`. Vertices that are missing (or whose
// partition is not hosted locally) are silently skipped.
// NOTE(review): `unwrap` panics on a store read error — confirm intended.
fn get_vertex_properties(&self, si: SnapshotId, ids: Vec<PartitionLabeledVertexIds>, output_prop_ids: Option<&Vec<PropId>>) -> Self::VI {
    // Flatten the nested (partition -> label -> ids) structure into
    // (partition, label, id) triples.
    let id_iter = ids.into_iter().flat_map(move |(partition, label_id_vec)| {
        label_id_vec.into_iter().flat_map(move |(label_id, ids)| {
            ids.into_iter().map(move |id| {
                (partition, label_id, id)
            })
        })
    });
    let mut res = vec![];
    // `for` over the iterator replaces the original `while let` +
    // explicitly-mutable iterator binding (clippy: while_let_on_iterator).
    for (partition, label_id, id) in id_iter {
        if let Some(v) = self.get_vertex(si, partition, id, label_id).unwrap() {
            res.push(Self::parse_vertex(v, output_prop_ids));
        }
    }
    res.into_iter()
}
// Fetch edges by id and materialize selected properties. Not supported by
// this store-backed implementation — always panics.
fn get_edge_properties(&self, si: SnapshotId, ids: Vec<PartitionLabeledVertexIds>, output_prop_ids: Option<&Vec<PropId>>)
-> Self::EI {
unimplemented!()
}
// Scan vertices across the requested partitions and labels, materializing at
// most `limit` results in total (0 = unlimited). `dedup_prop_ids` is unused.
fn get_all_vertices(&self, si: SnapshotId, labels: &Vec<LabelId>, condition: Option<&Condition>, dedup_prop_ids: Option<&Vec<PropId>>,
                    output_prop_ids: Option<&Vec<PropId>>, limit: usize, partition_ids: &Vec<PartitionId>) -> Self::VI {
    let iter_vec = self.scan_vertex_iter_vec(si, labels, partition_ids, condition).unwrap();
    let real_limit = Self::get_limit(limit);
    let mut collected = vec![];
    // Labeled break replaces the original early `return` from inside the
    // nested loop; behavior is identical.
    'scan: for mut iter in iter_vec {
        while let Some(v) = iter.next() {
            collected.push(Self::parse_vertex(v, output_prop_ids));
            if collected.len() >= real_limit {
                break 'scan;
            }
        }
    }
    collected.into_iter()
}
// Scan edges across the requested partitions and labels, materializing at
// most `limit` results in total (0 = unlimited). `dedup_prop_ids` is unused.
fn get_all_edges(&self, si: SnapshotId, labels: &Vec<LabelId>, condition: Option<&Condition>, dedup_prop_ids: Option<&Vec<PropId>>,
                 output_prop_ids: Option<&Vec<PropId>>, limit: usize, partition_ids: &Vec<PartitionId>) -> Self::EI {
let iter_vec = self.scan_edge_iter_vec(si, labels, partition_ids, condition).unwrap();
let real_limit = Self::get_limit(limit);
let mut res = vec![];
for mut iter in iter_vec {
while let Some(e) = iter.next() {
res.push(Self::parse_edge(e, output_prop_ids));
// Stop as soon as the global cap is reached.
if res.len() >= real_limit {
return res.into_iter();
}
}
}
return res.into_iter();
}
// Count vertices matching the scan without materializing them.
fn count_all_vertices(&self, si: i64, labels: &Vec<u32>, condition: Option<&Condition>, partition_ids: &Vec<u32>) -> u64 {
    let iter_vec = self.scan_vertex_iter_vec(si, labels, partition_ids, condition).unwrap();
    let mut count = 0;
    for mut iter in iter_vec {
        // The element itself is unused — bind `_` (the original bound `v`,
        // triggering an unused-variable warning).
        while let Some(_) = iter.next() {
            count += 1;
        }
    }
    count
}
// Count edges matching the scan without materializing them.
fn count_all_edges(&self, si: i64, labels: &Vec<u32>, condition: Option<&Condition>, partition_ids: &Vec<u32>) -> u64 {
    let iter_vec = self.scan_edge_iter_vec(si, labels, partition_ids, condition).unwrap();
    let mut count = 0;
    for mut iter in iter_vec {
        // The element itself is unused — bind `_` (the original bound `v`,
        // triggering an unused-variable warning).
        while let Some(_) = iter.next() {
            count += 1;
        }
    }
    count
}
// Identity mapping: global vertex ids are used directly as store-level ids.
fn translate_vertex_id(&self, vertex_id: VertexId) -> VertexId {
vertex_id
}
// Snapshot the graph schema from any locally hosted partition. Returns `None`
// when no partition is hosted or the store cannot produce a graph definition.
// NOTE(review): `si` is ignored — the current schema is returned regardless
// of snapshot; confirm callers expect that.
fn get_schema(&self, si: i64) -> Option<Arc<dyn Schema>> {
    // `.next()` replaces the clippy-flagged `.nth(0)` (iter_nth_zero).
    let partition = self.graph_partitions.values().next()?;
    let graph_def = partition.get_graph_def().ok()?;
    Some(Arc::new(GlobalGraphSchema::new(graph_def)))
}
}
impl GraphPartitionManager for GlobalGraph {
// Map a vertex id to its owning partition via floored modulo, so negative
// vertex ids still land in [0, total_partition).
fn get_partition_id(&self, vid: i64) -> i32 {
let partition_count = self.total_partition;
floor_mod(vid, partition_count as i64) as i32
}
// Server currently serving `partition_id`, or `None` if routing is unknown.
fn get_server_id(&self, partition_id: u32) -> Option<u32> {
    // `.copied()` replaces the manual `.map(|x| *x)`.
    self.partition_to_server.get(&partition_id).copied()
}
// Ids of all partitions hosted by this process, in arbitrary map order.
fn get_process_partition_list(&self) -> Vec<u32> {
    // `keys()` is already an iterator — the original `.into_iter()` was
    // redundant — and `.copied()` replaces `.map(|x| *x)`.
    self.graph_partitions.keys().copied().collect()
}
// Resolve (partition id, vertex id) from a label and primary key. Not yet
// implemented: always returns `None`. TODO(review): confirm callers tolerate
// the unconditional miss.
fn get_vertex_id_by_primary_key(&self, label_id: u32, key: &String) -> Option<(u32, i64)> {
// TODO check
None
}
}
/// Integer division rounding toward negative infinity (like Java's
/// `Math.floorDiv`), as opposed to Rust's `/` which truncates toward zero.
fn floor_div(x: i64, y: i64) -> i64 {
    let quotient = x / y;
    // Truncated and floored division differ only when the operands have
    // opposite signs and the division is inexact.
    if (x ^ y) < 0 && quotient * y != x {
        quotient - 1
    } else {
        quotient
    }
}
/// Modulo whose result takes the sign of the divisor `y` (like Java's
/// `Math.floorMod`), so for positive `y` the result is always in `[0, y)`
/// even when `x` is negative.
fn floor_mod(x: i64, y: i64) -> i64 {
x - floor_div(x, y) * y
}
| 42.16537 | 209 | 0.566696 |
d5718ff8aef27537dee8f21af1ee80841993a7a8
| 7,338 |
use crate::{cmd::Cmd, utils};
use cast::trace::CallTraceDecoder;
use clap::Parser;
use ethers::{
abi::Address,
prelude::{Middleware, Provider},
types::H256,
};
use forge::{
debug::DebugArena,
executor::{builder::Backend, opts::EvmOpts, DeployResult, ExecutorBuilder, RawCallResult},
trace::{identifier::EtherscanIdentifier, CallTraceArena, CallTraceDecoderBuilder, TraceKind},
};
use foundry_config::Config;
use foundry_utils::RuntimeOrHandle;
use std::{
collections::{BTreeMap, HashMap},
str::FromStr,
time::Duration,
};
use ui::{TUIExitReason, Tui, Ui};
use yansi::Paint;
// CLI arguments for re-executing an on-chain transaction locally in a forked
// EVM. Help text is supplied via explicit `help = …` clap attributes; plain
// `//` comments are used here so the clap derive does not pick them up as
// additional help text.
#[derive(Debug, Clone, Parser)]
pub struct RunArgs {
#[clap(help = "The transaction hash.")]
tx: String,
// RPC endpoint to fork from; falls back to the ETH_RPC_URL env var.
#[clap(short, long, env = "ETH_RPC_URL")]
rpc_url: String,
#[clap(long, short = 'd', help = "Debugs the transaction.")]
debug: bool,
#[clap(
long,
short = 'q',
help = "Executes the transaction only with the state from the previous block. May result in different results than the live execution!"
)]
quick: bool,
// Repeatable `address:label` pairs used to name addresses in trace output.
#[clap(
long,
help = "Labels address in the trace. 0xd8dA6BF26964aF9D7eEd9e03E53415D37aA96045:vitalik.eth"
)]
label: Vec<String>,
}
impl Cmd for RunArgs {
type Output = ();
// Bridge the synchronous `Cmd` interface to the async implementation by
// driving `run_tx` to completion on a runtime (or the ambient handle).
fn run(self) -> eyre::Result<Self::Output> {
RuntimeOrHandle::new().block_on(self.run_tx())
}
}
impl RunArgs {
// Re-execute transaction `self.tx` locally:
// 1. fork the chain at the block *before* the tx,
// 2. unless `--quick`, replay the earlier transactions of that block so the
//    state matches the point at which the tx originally ran,
// 3. execute the target tx with tracing (and optionally the debugger),
// 4. decode the traces and either print them or open the TUI debugger.
async fn run_tx(self) -> eyre::Result<()> {
let figment = Config::figment();
let mut evm_opts = figment.extract::<EvmOpts>()?;
let config = Config::from_provider(figment).sanitized();
let provider =
Provider::try_from(self.rpc_url.as_str()).expect("could not instantiate provider");
// Silently does nothing if the node does not know the transaction.
if let Some(tx) =
provider.get_transaction(H256::from_str(&self.tx).expect("invalid tx hash")).await?
{
let tx_block_number = tx.block_number.expect("no block number").as_u64();
let tx_hash = tx.hash();
// Fork at the parent block so replayed state starts where the
// original execution started.
evm_opts.fork_url = Some(self.rpc_url);
evm_opts.fork_block_number = Some(tx_block_number - 1);
// Set up the execution environment
let env = evm_opts.evm_env().await;
let db =
Backend::new(utils::get_fork(&evm_opts, &config.rpc_storage_caching), &env).await;
let builder = ExecutorBuilder::new()
.with_config(env)
.with_spec(crate::utils::evm_spec(&config.evm_version));
let mut executor = builder.build(db);
// Set the state to the moment right before the transaction
if !self.quick {
println!("Executing previous transactions from the block.");
let block_txes = provider.get_block_with_txs(tx_block_number).await?;
for past_tx in block_txes.unwrap().transactions.into_iter() {
// Stop once the target tx is reached; it must not be replayed here.
if past_tx.hash().eq(&tx_hash) {
break
}
executor.set_gas_limit(past_tx.gas);
// Calls are committed to state; contract creations are deployed.
if let Some(to) = past_tx.to {
executor
.call_raw_committing(past_tx.from, to, past_tx.input.0, past_tx.value)
.unwrap();
} else {
executor.deploy(past_tx.from, past_tx.input.0, past_tx.value).unwrap();
}
}
}
// Execute our transaction
let mut result = {
executor.set_tracing(true).set_gas_limit(tx.gas);
if self.debug {
executor.set_debugger(true);
}
if let Some(to) = tx.to {
let RawCallResult { reverted, gas, traces, debug: run_debug, .. } =
executor.call_raw_committing(tx.from, to, tx.input.0, tx.value)?;
RunResult {
success: !reverted,
traces: vec![(TraceKind::Execution, traces.unwrap_or_default())],
debug: run_debug.unwrap_or_default(),
gas,
}
} else {
// Contract creation path: a completed deploy is treated as success.
let DeployResult { gas, traces, debug: run_debug, .. }: DeployResult =
executor.deploy(tx.from, tx.input.0, tx.value).unwrap();
RunResult {
success: true,
traces: vec![(TraceKind::Execution, traces.unwrap_or_default())],
debug: run_debug.unwrap_or_default(),
gas,
}
}
};
// Resolve contract names/sources via Etherscan (cached for 24h).
let etherscan_identifier = EtherscanIdentifier::new(
evm_opts.get_remote_chain_id(),
config.etherscan_api_key,
Config::foundry_etherscan_cache_dir(evm_opts.get_chain_id()),
Duration::from_secs(24 * 60 * 60),
);
// Parse user-supplied "address:label" pairs; malformed entries are skipped.
let labeled_addresses: BTreeMap<Address, String> = self
.label
.iter()
.filter_map(|label_str| {
let mut iter = label_str.split(':');
if let Some(addr) = iter.next() {
if let (Ok(address), Some(label)) = (Address::from_str(addr), iter.next()) {
return Some((address, label.to_string()))
}
}
None
})
.collect();
let mut decoder = CallTraceDecoderBuilder::new().with_labels(labeled_addresses).build();
for (_, trace) in &mut result.traces {
decoder.identify(trace, &etherscan_identifier);
}
if self.debug {
run_debugger(result, decoder)?;
} else {
print_traces(&mut result, decoder)?;
}
}
Ok(())
}
}
// Open the interactive TUI debugger on the collected debug trace. Source code
// display is empty for now (see TODO); only decoded contract names are shown.
fn run_debugger(result: RunResult, decoder: CallTraceDecoder) -> eyre::Result<()> {
// TODO Get source from etherscan
let source_code: BTreeMap<u32, String> = BTreeMap::new();
let calls: Vec<DebugArena> = vec![result.debug];
let flattened = calls.last().expect("we should have collected debug info").flatten(0);
let tui = Tui::new(flattened, 0, decoder.contracts, HashMap::new(), source_code)?;
// The TUI blocks until the user exits; the only exit reason today is a
// character-key quit.
match tui.start().expect("Failed to start tui") {
TUIExitReason::CharExit => Ok(()),
}
}
fn print_traces(result: &mut RunResult, decoder: CallTraceDecoder) -> eyre::Result<()> {
if result.traces.is_empty() {
eyre::bail!("Unexpected error: No traces. Please report this as a bug: https://github.com/foundry-rs/foundry/issues/new?assignees=&labels=T-bug&template=BUG-FORM.yml");
}
println!("Traces:");
for (_, trace) in &mut result.traces {
decoder.decode(trace);
println!("{trace}");
}
println!();
if result.success {
println!("{}", Paint::green("Script ran successfully."));
} else {
println!("{}", Paint::red("Script failed."));
}
println!("Gas used: {}", result.gas);
Ok(())
}
// Outcome of locally re-executing a transaction: success flag, collected
// execution traces, the debug arena consumed by the TUI debugger, and the
// gas used by the run.
struct RunResult {
pub success: bool,
pub traces: Vec<(TraceKind, CallTraceArena)>,
pub debug: DebugArena,
pub gas: u64,
}
| 34.777251 | 176 | 0.541701 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.